diff --git a/.dockerignore b/.dockerignore index 478c25bf6..25a704494 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,3 +1,14 @@ vendor sdk .git +.devbox +.direnv +.github +.moon +sdks +config +docs +nix +openapi +tests +worktrees diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..c381fc0f5 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,9 @@ +/api/ @formancehq/dev-backend +/cmd/ @formancehq/dev-backend +/config/ @formancehq/dev-backend +/core/ @formancehq/dev-backend +/ledger/ @formancehq/dev-backend +*.go @formancehq/dev-backend + +/.devcontainer @flemzord +/.github @flemzord diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..f90bcb15a --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +github: FormanceHQ diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..f7757cdf9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,27 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Logs** +If applicable, add logs to help explain your problem. + +**Environment (please complete the following information):** + - OS: [e.g. ubuntu 20.04] + - Numary Version [e.g. 1.0.0-beta.4] + +**Additional context** +Add any other context about the problem here. 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..b36319bac --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,22 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: enhancement, rfc +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Summary** + +**Solution proposal** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_sdk_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_sdk_template.md new file mode 100644 index 000000000..3f9f8c2a9 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_sdk_template.md @@ -0,0 +1,8 @@ +# SDK : OPEN API GENERATOR NAME + +__Please add a description to this PR.__ + +## How to deploy + +__Please provide steps to deploy the generated sdk to official channels.__ +__Please be as exhaustive as possible__ diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md new file mode 100644 index 000000000..3d9bbad90 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -0,0 +1,22 @@ +# Title + +__Please add a description to this PR.__ + +## Type of change + +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Refactoring / Technical debt + +## What parts of the code are impacted ? 
+__Please describe the impacted parts of the code.__ + +# Checklist: + +- [ ] My code follows the style guidelines of this project +- [ ] I have performed a self-review of my code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..cd51d2c60 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every weekday + interval: "daily" diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 000000000..66ae675cd --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,23 @@ +'@domain/core': + - core/* + - core/**/* + +'@domain/api': + - api/* + - api/**/* + +'@domain/ledger': + - ledger/* + - ledger/**/* + +'@domain/storage': + - storage/* + - storage/**/* + +'@domain/cmd': + - cmd/* + - cmd/**/* + +'@domain/ci': + - .github/* + - .github/**/* diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 000000000..f5fcdc122 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,33 @@ +on: + push: + branches: + - main + +name: Main +jobs: + Tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Install Task + uses: arduino/setup-task@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - uses: actions/setup-go@v4 + with: + go-version-file: go.mod + cache-dependency-path: go.sum + cache: true + - run: task tests + - name: Upload coverage 
report to Codecov + uses: codecov/codecov-action@v3 + with: + name: 'Ledger' + fail_ci_if_error: false # optional (default = false) + verbose: true # optional (default = false) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..de5ee38c1 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,71 @@ +name: Ledger - Release +on: + push: + tags: + - 'v*.*.*' + +jobs: + Tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Install Task + uses: arduino/setup-task@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - uses: actions/setup-go@v4 + with: + go-version-file: go.mod + cache-dependency-path: go.sum + cache: true + - run: task tests + - name: Upload coverage report to Codecov + uses: codecov/codecov-action@v3 + with: + name: 'Ledger' + fail_ci_if_error: false # optional (default = false) + verbose: true # optional (default = false) + + GoReleaser: + name: GoReleaser + runs-on: ubuntu-latest + env: + DOCKER_CLI_EXPERIMENTAL: "enabled" + needs: + - Tests + steps: + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: actions/setup-go@v4 + with: + go-version-file: 'go.mod' + cache: true + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: "NumaryBot" + password: ${{ secrets.NUMARY_GITHUB_TOKEN }} + - name: Run GoReleaser + uses: goreleaser/goreleaser-action@v5 + with: + distribution: goreleaser-pro + version: latest + install-only: true + - run: goreleaser release --clean -f .goreleaser.ledger.yml + env: + GITHUB_TOKEN: ${{ secrets.NUMARY_GITHUB_TOKEN }} + FURY_TOKEN: ${{ secrets.FURY_TOKEN }} + GORELEASER_KEY: ${{ 
secrets.GORELEASER_KEY }} + SEGMENT_WRITE_KEY: ${{ secrets.SEGMENT_WRITE_KEY_OSS }} diff --git a/.gitignore b/.gitignore index 8a2bc512f..ae122c5da 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ -numary coverage* /dist/ cmd/control/* @@ -13,3 +12,7 @@ sdk/sdks .env sqlstorage.test ledger.test +antlr-*-complete.jar +go.work +go.work.sum +benchs diff --git a/.golangci.yml b/.golangci.yml deleted file mode 100644 index 3f634b1f6..000000000 --- a/.golangci.yml +++ /dev/null @@ -1,28 +0,0 @@ -linters-settings: - goimports: - local-prefixes: github.com/golangci/golangci-lint - govet: - check-shadowing: false - - nolintlint: - allow-leading-space: true # don't require machine-readable nolint directives (i.e. with no leading space) - allow-unused: false # report any unused nolint directives - require-explanation: false # don't require an explanation for nolint directives - require-specific: false # don't require nolint directives to be specific about which linter is being skipped - -linters: - disable-all: true - enable: - - errcheck #Default linter - - gosimple #Default linter - - govet #Default linter - - ineffassign #Default linter - - staticcheck #Default linter - - typecheck #Default linter - - unused #Default linter - - gofmt - - gci - - goimports - -run: - timeout: 5m diff --git a/.goreleaser-darwin.yml b/.goreleaser-darwin.yml deleted file mode 100644 index f733dc0e2..000000000 --- a/.goreleaser-darwin.yml +++ /dev/null @@ -1,64 +0,0 @@ -project_name: numary -env: - - GO111MODULE=on - - GOPROXY=https://proxy.golang.org -before: - hooks: - - go mod download - -builds: - - binary: numary - id: numary_darwin - ldflags: - - -X github.com/numary/ledger/cmd.BuildDate={{ .Date }} - - -X github.com/numary/ledger/cmd.Version={{ .Version }} - - -X github.com/numary/ledger/cmd.Commit={{ .ShortCommit }} - - -X github.com/numary/ledger/cmd.DefaultSegmentWriteKey={{ .Env.SEGMENT_WRITE_KEY }} - - -s - env: - - CGO_ENABLED=1 - goos: - - darwin - goarch: - - amd64 - - 
arm64 - flags: - - -tags=json1 - -archives: - - id: "numary" - builds: - - numary_darwin - format: tar.gz - format_overrides: - - goos: windows - format: zip - name_template: "{{.ProjectName}}_{{.Version}}_{{.Os}}-{{.Arch}}" - replacements: - amd64: 64bit - 386: 32bit - arm: ARM - arm64: ARM64 - darwin: macOS - linux: Linux - windows: Windows - - -checksum: - name_template: 'checksums-darwin.txt' - -snapshot: - name_template: "{{ .Tag }}" - -brews: - - tap: - owner: formancehq - name: homebrew-tap - name: numary - folder: Formula - homepage: https://formance.com - skip_upload: auto - test: | - system "#{bin}/numary version" - install: | - bin.install "numary" diff --git a/.goreleaser.ledger.yml b/.goreleaser.ledger.yml new file mode 100644 index 000000000..1a095a7d4 --- /dev/null +++ b/.goreleaser.ledger.yml @@ -0,0 +1,147 @@ +project_name: ledger +builds: + - binary: ledger + id: ledger + ldflags: + - -X github.com/formancehq/ledger/cmd.BuildDate={{ .Date }} + - -X github.com/formancehq/ledger/cmd.Version={{ .Version }} + - -X github.com/formancehq/ledger/cmd.Commit={{ .ShortCommit }} + - -X github.com/formancehq/ledger/cmd.DefaultSegmentWriteKey={{ .Env.SEGMENT_WRITE_KEY }} + - -extldflags "-static" + env: + - CGO_ENABLED=0 + goos: + - linux + - windows + - darwin + goarch: + - amd64 + - arm64 + +archives: + - id: "ledger" + builds: + - ledger + format: tar.gz + format_overrides: + - goos: windows + format: zip + name_template: >- + {{ .ProjectName }}_ + {{- title .Os }}_ + {{- if eq .Arch "amd64" }}x86_64 + {{- else if eq .Arch "386" }}i386 + {{- else }}{{ .Arch }}{{ end }} + {{- if .Arm }}v{{ .Arm }}{{ end }} + +nfpms: + - id: packages + package_name: ledger + file_name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + builds: + - ledger + homepage: https://formance.com + maintainer: Maxence Maireaux + formats: + - deb + - rpm + +publishers: + - name: fury.io + ids: + - packages + dir: "{{ dir .ArtifactPath }}" + cmd: curl -F package=@{{ 
.ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/ledger/ + +brews: + - repository: + owner: formancehq + name: homebrew-tap + name: ledger + folder: Formula + homepage: https://formance.com + skip_upload: auto + test: | + system "#{bin}/ledger version" + install: | + bin.install "ledger" + +nightly: + name_template: '{{ .FullCommit }}' + publish_release: false + +checksum: + name_template: '{{.ProjectName}}_checksums.txt' + +snapshot: + name_template: "{{ .Version }}" + +changelog: + sort: asc + use: github + filters: + exclude: + - '^docs:' + - '^test:' + - '^spec:' + - Merge pull request + - Merge remote-tracking branch + - Merge branch + - go mod tidy + groups: + - title: 'New Features' + regexp: "^.*feat[(\\w)]*:+.*$" + order: 0 + - title: 'Bug fixes' + regexp: "^.*fix[(\\w)]*:+.*$" + order: 10 + - title: Other work + order: 999 + +release: + prerelease: auto + footer: | + ## What to do next? + - Read the [documentation](https://docs.formance.com/) + - Join our [Slack server](https://formance.com/slack) + + +dockers: + - image_templates: ["ghcr.io/formancehq/{{ .ProjectName }}:{{ if not .IsNightly }}v{{ end }}{{ .Version }}-amd64"] + goarch: amd64 + dockerfile: build.Dockerfile + use: buildx + build_flag_templates: + - --platform=linux/amd64 + - --label=org.opencontainers.image.title={{ .ProjectName }} + - --label=org.opencontainers.image.description={{ .ProjectName }} + - --label=org.opencontainers.image.url=https://github.com/formancehq/stack + - --label=org.opencontainers.image.source=https://github.com/formancehq/stack + - --label=org.opencontainers.image.version={{ .Version }} + - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} + - --label=org.opencontainers.image.revision={{ .FullCommit }} + - --label=org.opencontainers.image.licenses=MIT + - image_templates: [ "ghcr.io/formancehq/{{ .ProjectName }}:{{ if not .IsNightly }}v{{ end }}{{ .Version }}-arm64" ] + goarch: arm64 + dockerfile: build.Dockerfile + use: 
buildx + build_flag_templates: + - --platform=linux/arm64/v8 + - --label=org.opencontainers.image.title={{ .ProjectName }} + - --label=org.opencontainers.image.description={{ .ProjectName }} + - --label=org.opencontainers.image.url=https://github.com/formancehq/stack + - --label=org.opencontainers.image.source=https://github.com/formancehq/stack + - --label=org.opencontainers.image.version={{ .Version }} + - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} + - --label=org.opencontainers.image.revision={{ .FullCommit }} + - --label=org.opencontainers.image.licenses=MIT + +docker_manifests: + - name_template: 'ghcr.io/formancehq/{{ .ProjectName }}:{{ if not .IsNightly }}v{{ end }}{{ .Version }}' + image_templates: + - 'ghcr.io/formancehq/{{ .ProjectName }}:{{ if not .IsNightly }}v{{ end }}{{ .Version }}-amd64' + - 'ghcr.io/formancehq/{{ .ProjectName }}:{{ if not .IsNightly }}v{{ end }}{{ .Version }}-arm64' + - name_template: '{{ if not .IsNightly }}ghcr.io/formancehq/{{ .ProjectName }}:latest{{ end }}' + image_templates: + - 'ghcr.io/formancehq/{{ .ProjectName }}:{{ if not .IsNightly }}v{{ end }}{{ .Version }}-amd64' + - 'ghcr.io/formancehq/{{ .ProjectName }}:{{ if not .IsNightly }}v{{ end }}{{ .Version }}-arm64' diff --git a/.goreleaser.yml b/.goreleaser.yml index c5a887853..b42858705 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -1,122 +1,43 @@ -project_name: numary +project_name: ledger monorepo: tag_prefix: components/ledger/ - dir: ./components/ledger/ + dir: ./ + +includes: + - from_file: + path: ./../../.goreleaser.default.yaml builds: - - binary: numary - id: numary_windows - ldflags: - - -X github.com/numary/ledger/cmd.BuildDate={{ .Date }} - - -X github.com/numary/ledger/cmd.Version={{ .Version }} - - -X github.com/numary/ledger/cmd.Commit={{ .ShortCommit }} - - -X github.com/numary/ledger/cmd.DefaultSegmentWriteKey={{ .Env.SEGMENT_WRITE_KEY }} - env: - - CGO_ENABLED=1 - - CC=x86_64-w64-mingw32-gcc - goos: - - windows 
- goarch: - - amd64 - flags: - - -tags=json1 - - binary: numary - id: numary_linux_amd64 + - binary: ledger + id: ledger ldflags: - - -X github.com/numary/ledger/cmd.BuildDate={{ .Date }} - - -X github.com/numary/ledger/cmd.Version={{ .Version }} - - -X github.com/numary/ledger/cmd.Commit={{ .ShortCommit }} - - -X github.com/numary/ledger/cmd.DefaultSegmentWriteKey={{ .Env.SEGMENT_WRITE_KEY }} + - -X github.com/formancehq/ledger/cmd.BuildDate={{ .Date }} + - -X github.com/formancehq/ledger/cmd.Version={{ .Version }} + - -X github.com/formancehq/ledger/cmd.Commit={{ .ShortCommit }} + - -X github.com/formancehq/ledger/cmd.DefaultSegmentWriteKey={{ .Env.SEGMENT_WRITE_KEY }} - -extldflags "-static" env: - - CGO_ENABLED=1 + - CGO_ENABLED=0 goos: - linux goarch: - amd64 - flags: - - -tags=json1,netgo - - binary: numary - id: numary_linux_arm64 - ldflags: - - -X github.com/numary/ledger/cmd.BuildDate={{ .Date }} - - -X github.com/numary/ledger/cmd.Version={{ .Version }} - - -X github.com/numary/ledger/cmd.Commit={{ .ShortCommit }} - - -X github.com/numary/ledger/cmd.DefaultSegmentWriteKey={{ .Env.SEGMENT_WRITE_KEY }} - - -extldflags "-static" - env: - - CGO_ENABLED=1 - - CC=aarch64-linux-gnu-gcc - goos: - - linux - goarch: - arm64 - flags: - - -tags=json1,netgo + archives: - - id: "numary" + - id: "{{.ProjectName}}" builds: - - numary_linux_amd64 - - numary_linux_arm64 - - numary_windows + - gateway format: tar.gz - format_overrides: - - goos: windows - format: zip - name_template: "{{.ProjectName}}_{{.Version}}_{{.Os}}-{{.Arch}}" - replacements: - amd64: 64bit - 386: 32bit - arm: ARM - arm64: ARM64 - darwin: macOS - linux: Linux - windows: Windows - - -checksum: - name_template: '{{.ProjectName}}_checksums.txt' - -snapshot: - name_template: "{{ .Tag }}" - -changelog: - sort: asc - use: github - filters: - exclude: - - '^docs:' - - '^test:' - - '^spec:' - - Merge pull request - - Merge remote-tracking branch - - Merge branch - - go mod tidy - groups: - - title: 'New 
Features' - regexp: "^.*feat[(\\w)]*:+.*$" - order: 0 - - title: 'Bug fixes' - regexp: "^.*fix[(\\w)]*:+.*$" - order: 10 - - title: Other work - order: 999 - -release: - prerelease: auto - footer: | - ## What to do next? - - Read the [documentation](https://docs.formance.com/) - - Join our [Slack server](https://formance.com/slack) + name_template: "{{.ProjectName}}_{{.Os}}-{{.Arch}}" nfpms: - id: packages - package_name: numary + package_name: ledger file_name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" builds: - - numary_linux_amd64 - - numary_linux_arm64 + - ledger homepage: https://formance.com maintainer: Maxence Maireaux formats: @@ -128,43 +49,4 @@ publishers: ids: - packages dir: "{{ dir .ArtifactPath }}" - cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/numary/ - - -dockers: - - image_templates: ["ghcr.io/formancehq/{{ .ProjectName }}:{{ .Version }}-amd64"] - dockerfile: Dockerfile - use: buildx - build_flag_templates: - - --platform=linux/amd64 - - --label=org.opencontainers.image.title={{ .ProjectName }} - - --label=org.opencontainers.image.description={{ .ProjectName }} - - --label=org.opencontainers.image.url=https://github.com/formancehq/stack - - --label=org.opencontainers.image.source=https://github.com/formancehq/stack - - --label=org.opencontainers.image.version={{ .Version }} - - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} - - --label=org.opencontainers.image.revision={{ .FullCommit }} - - --label=org.opencontainers.image.licenses=MIT - - image_templates: [ "ghcr.io/formancehq/{{ .ProjectName }}:{{ .Version }}-arm64" ] - goarch: arm64 - dockerfile: Dockerfile - use: buildx - build_flag_templates: - - --platform=linux/arm64/v8 - - --label=org.opencontainers.image.title={{ .ProjectName }} - - --label=org.opencontainers.image.description={{ .ProjectName }} - - --label=org.opencontainers.image.url=https://github.com/formancehq/stack - - 
--label=org.opencontainers.image.source=https://github.com/formancehq/stack - - --label=org.opencontainers.image.version={{ .Version }} - - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} - - --label=org.opencontainers.image.revision={{ .FullCommit }} - - --label=org.opencontainers.image.licenses=MIT -docker_manifests: - - name_template: ghcr.io/formancehq/{{ .ProjectName }}:{{ .Version }} - image_templates: - - ghcr.io/formancehq/{{ .ProjectName }}:{{ .Version }}-amd64 - - ghcr.io/formancehq/{{ .ProjectName }}:{{ .Version }}-arm64 - - name_template: ghcr.io/formancehq/{{ .ProjectName }}:latest - image_templates: - - ghcr.io/formancehq/{{ .ProjectName }}:{{ .Version }}-amd64 - - ghcr.io/formancehq/{{ .ProjectName }}:{{ .Version }}-arm64 + cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/ledger/ diff --git a/Dockerfile b/Dockerfile index a003e7f7d..36facf7fb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,32 +1,26 @@ -FROM --platform=$BUILDPLATFORM golang:1.18 AS builder -RUN apt-get update && \ - apt-get install -y gcc-aarch64-linux-gnu gcc-x86-64-linux-gnu && \ - ln -s /usr/bin/aarch64-linux-gnu-gcc /usr/bin/arm64-linux-gnu-gcc && \ - ln -s /usr/bin/x86_64-linux-gnu-gcc /usr/bin/amd64-linux-gnu-gcc -# 1. Precompile the entire go standard library into the first Docker cache layer: useful for other projects too! -RUN CGO_ENABLED=1 GOOS=linux go install -v -installsuffix cgo -a std -ARG TARGETARCH -ARG APP_SHA -ARG VERSION +FROM golang:1.20-alpine3.16 AS builder ARG SEGMENT_WRITE_KEY +ARG VERSION +ARG APP_SHA + WORKDIR /src -COPY . . 
-WORKDIR /src/components/ledger -RUN go mod download +COPY libs libs +COPY components/ledger components/ledger +WORKDIR components/ledger +RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go mod download RUN --mount=type=cache,id=gomod,target=/go/pkg/mod \ --mount=type=cache,id=gobuild,target=/root/.cache/go-build \ - CGO_ENABLED=1 GOOS=linux GOARCH=$TARGETARCH \ - CC=$TARGETARCH-linux-gnu-gcc \ - go build -o numary -tags json1,netgo \ - -ldflags="-X github.com/numary/ledger/cmd.Version=${VERSION} \ - -X github.com/numary/ledger/cmd.BuildDate=$(date +%s) \ - -X github.com/numary/ledger/cmd.Commit=${APP_SHA} \ - -X github.com/numary/ledger/cmd.DefaultSegmentWriteKey=${SEGMENT_WRITE_KEY}" ./ + go build -o ledger \ + -ldflags="-X github.com/formancehq/ledger/cmd.Version=${VERSION} \ + -X github.com/formancehq/ledger/cmd.BuildDate=$(date +%s) \ + -X github.com/formancehq/ledger/cmd.Commit=${APP_SHA} \ + -X github.com/formancehq/ledger/cmd.DefaultSegmentWriteKey=${SEGMENT_WRITE_KEY}" ./ -FROM ubuntu:jammy as app -RUN apt update && apt install -y ca-certificates wget && rm -rf /var/lib/apt/lists/* -COPY --from=builder /src/components/ledger/numary /usr/local/bin/numary +FROM alpine:3.16 as app +RUN apk update && apk add ca-certificates +COPY --from=builder /src/components/ledger/ledger /usr/local/bin/ledger +RUN chmod +x /usr/local/bin/ledger EXPOSE 3068 -ENTRYPOINT ["numary"] +ENTRYPOINT ["ledger"] ENV OTEL_SERVICE_NAME ledger -CMD ["server", "start"] +CMD ["serve"] diff --git a/LICENSE b/LICENSE index 8ae291923..90bf910c2 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2021 Numaire, Inc +Copyright (c) 2021 Formance, Inc Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Procfile b/Procfile index 437523a0e..b2f76b779 100644 --- a/Procfile +++ b/Procfile @@ -1 +1 @@ -web: bin/ledger server start --server.http.bind_address 
0.0.0.0:${PORT} --storage.driver postgres --storage.postgres.conn_string ${DATABASE_URL} --lock-strategy redis --lock-strategy-redis-url ${REDIS_URL} +web: bin/ledger server start --server.http.bind_address 0.0.0.0:${PORT} --storage.driver postgres --storage-postgres-conn-string ${DATABASE_URL} --lock-strategy redis --lock-strategy-redis-url ${REDIS_URL} diff --git a/README.md b/README.md index 40a33bdc3..7e6c5dea5 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Formance Ledger works as a standalone binary, the latest of which can be downloa ```SHELL -numary server start +ledger server start # Submit a first transaction echo " @@ -48,7 +48,7 @@ send [USD/2 599] ( ) " > example.num -numary exec quickstart example.num +ledger exec quickstart example.num # Get the balances of drivers:042 curl -X GET http://localhost:3068/quickstart/accounts/drivers:042 @@ -61,18 +61,6 @@ curl -X GET http://localhost:3068/quickstart/transactions You can find the complete Numary documentation at [docs.formance.com](https://docs.formance.com) -# Dashboard - -A simple [dashboard](https://github.com/formancehq/control) is built in the ledger binary, to make it easier to visualize transactions. It can be started with: - -```SHELL -numary ui -``` - -control-screenshot - -Alternatively, you can use the dashboard by heading to [control.formance.com](https://control.formance.com) which provides a hosted version that can connect to any ledger instance. - # Community If you need help, want to show us what you built or just hang out and chat about ledgers you are more than welcome on our [Slack](https://bit.ly/formance-slack) - looking forward to see you there! diff --git a/Taskfile.yaml b/Taskfile.yaml index b4971446d..6e67ee235 100644 --- a/Taskfile.yaml +++ b/Taskfile.yaml @@ -2,65 +2,24 @@ version: "3" -vars: - PKG: "./..." 
- FAILFAST: "-failfast" - TIMEOUT: "10m" - RUN: ".*" - TAGS: "-tags json1,netgo" - BENCH_TIME: "30s" - BENCH_RESULTS_DIR: "/tmp/benchmarks" - BENCH_RESULTS_FILE: "/tmp/benchmarks/ledger.txt" - BENCH_CPU_PROFILE: "/tmp/benchmarks/ledger.cpu.prof" - BENCH_MEM_PROFILE: "/tmp/benchmarks/ledger.mem.prof" +env: VERBOSE: "false" + BRANCH: + sh: "git rev-parse --abbrev-ref HEAD| cut -d / -f2" + BENCH_RESULTS_DIR: "./benchs" tasks: - default: - cmds: - - task: lint - - task: tests:local - lint: cmds: - - golangci-lint run --fix {{if eq .VERBOSE "true"}}-v{{end}} + - golangci-lint run --fix --allow-parallel-runners --config ./../../.golangci.yml tests: cmds: - > - go test {{.TAGS}} {{if eq .VERBOSE "true"}}-v{{end}} -coverpkg {{.PKG}} - -coverprofile coverage.out -covermode atomic {{.PKG}} - - tests:local: - cmds: - - task: tests:local:sqlite - - task: tests:local:postgres - - tests:local:sqlite: - cmds: - - > - go test {{.TAGS}} {{if eq .VERBOSE "true"}}-v{{end}} {{.FAILFAST}} -coverpkg {{.PKG}} - -coverprofile coverage.out -covermode atomic - -run={{.RUN}} -timeout {{.TIMEOUT}} {{.PKG}} | - sed ''/PASS/s//$(printf "\033[32mPASS\033[0m")/'' | - sed ''/FAIL/s//$(printf "\033[31mFAIL\033[0m")/'' | - sed ''/RUN/s//$(printf "\033[34mRUN\033[0m")/'' - - task: print:coverage - - tests:local:postgres: - deps: [postgres] - cmds: - - > - go test {{.TAGS}} {{if eq .VERBOSE "true"}}-v{{end}} {{.FAILFAST}} -coverpkg {{.PKG}} - -coverprofile coverage.out -covermode atomic - -run={{.RUN}} -timeout {{.TIMEOUT}} {{.PKG}} | + go test -failfast -coverpkg ./... -coverprofile coverage.out -covermode atomic ./... 
| sed ''/PASS/s//$(printf "\033[32mPASS\033[0m")/'' | sed ''/FAIL/s//$(printf "\033[31mFAIL\033[0m")/'' | sed ''/RUN/s//$(printf "\033[34mRUN\033[0m")/'' - - task: print:coverage - env: - NUMARY_STORAGE_DRIVER: "postgres" - NUMARY_STORAGE_POSTGRES_CONN_STRING: "postgresql://ledger:ledger@127.0.0.1/ledger" print:coverage: cmds: @@ -71,52 +30,114 @@ tasks: - echo "xdg-open coverage.html on linux" silent: true + libs/sync: + cmds: + - rm -rf ./libs/* + - cp -R ./../../libs/go-libs/* ./libs/ + bench: - deps: [postgres] + internal: true cmds: - mkdir -p {{.BENCH_RESULTS_DIR}} - > - go test {{.TAGS}} {{if eq .VERBOSE "true"}}-v{{end}} {{.PKG}} - -run=XXX -bench={{.RUN}} -benchmem -benchtime={{.BENCH_TIME}} -timeout {{.TIMEOUT}} - -cpuprofile {{.BENCH_CPU_PROFILE}} -memprofile {{.BENCH_MEM_PROFILE}} - | tee {{.BENCH_RESULTS_FILE}} - - benchstat {{.BENCH_RESULTS_FILE}} + go test -run BenchmarkParallelWrites -bench=. {{if eq .VERBOSE "true"}}-v{{end}} + -test.benchmem + -timeout 1h + -memprofile {{.BENCH_RESULTS_DIR}}/{{.BRANCH}}-memprofile-{{if eq .ASYNC "true"}}async{{else}}sync{{end}}.out + -cpuprofile {{.BENCH_RESULTS_DIR}}/{{.BRANCH}}-profile-{{if eq .ASYNC "true"}}async{{else}}sync{{end}}.out + -benchtime={{if .DURATION}}{{.DURATION}}{{else}}15s{{end}} + {{if eq .RACE "true"}}-race{{end}} + -count={{if .COUNT}}{{.COUNT}}{{else}}10{{end}} ./benchmarks | tee {{.BENCH_RESULTS_DIR}}/{{.BRANCH}}-{{if eq .ASYNC "true"}}async{{else}}sync{{end}}.stats env: - NUMARY_STORAGE_DRIVER: "postgres" - NUMARY_STORAGE_POSTGRES_CONN_STRING: "postgresql://ledger:ledger@127.0.0.1/ledger" + ASYNC: "{{.ASYNC}}" + GOMEMLIMIT: 1GiB + GOMAXPROCS: 2 + VERBOSE: false +# GOGC: "1000" # https://dave.cheney.net/tag/gogc + CGO_ENABLED: 0 +# GODEBUG: gctrace=1 #,gcpacertrace=1 - bench:cpu: + bench:async: cmds: - - go tool pprof -http=":" {{.BENCH_CPU_PROFILE}} + - task: bench + vars: + ASYNC: true - bench:mem: + bench:sync: cmds: - - go tool pprof -http=":" {{.BENCH_MEM_PROFILE}} + - task: bench - 
install: - deps: - - install:golangci-lint - - install:perf + bench:both: + cmds: + - task: bench:sync + - task: bench:async - install:golangci-lint: + bench:diff: + internal: true cmds: - - > - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | - sh -s -- -b $(go env GOPATH)/bin latest - - golangci-lint --version + - > + benchstat {{.BENCH_RESULTS_DIR}}/{{if .SOURCE}}{{.SOURCE}}{{else}}main{{end}}-{{if eq .ASYNC "true"}}async{{else}}sync{{end}}.stats + {{.BENCH_RESULTS_DIR}}/{{.BRANCH}}-{{if eq .ASYNC "true"}}async{{else}}sync{{end}}.stats - install:perf: - - go install golang.org/x/perf/cmd/...@latest + bench:diff:sync: + cmds: + - task: bench:diff + + bench:diff:async: + cmds: + - task: bench:diff + vars: + ASYNC: "true" - postgres: + bench:diff:both: cmds: - - docker compose up -d postgres + - task: bench:diff:sync + - task: bench:diff:async - clean: + bench:pprof: + internal: true cmds: - - go clean - - rm -f {{.SERVICE}} coverage.out coverage.html - - docker compose down -v + - go tool pprof -http=":" {{.BENCH_RESULTS_DIR}}/{{.BRANCH}}-{{.KIND}}-{{if eq .ASYNC "true"}}async{{else}}sync{{end}}.out + + bench:cpu:pprof: + internal: true + cmds: + - task: bench:pprof + vars: + KIND: profile + ASYNC: "{{.ASYNC}}" + + bench:mem:pprof: + internal: true + cmds: + - task: bench:pprof + vars: + KIND: memprofile + ASYNC: "{{.ASYNC}}" + + bench:cpu:pprof:async: + cmds: + - task: bench:cpu:pprof + vars: + ASYNC: "true" + + bench:cpu:pprof:sync: + cmds: + - task: bench:cpu:pprof + vars: + ASYNC: "false" + + bench:mem:pprof:async: + cmds: + - task: bench:mem:pprof + vars: + ASYNC: "true" + + bench:mem:pprof:sync: + cmds: + - task: bench:mem:pprof + vars: + ASYNC: "false" sdk:template: desc: Extract templates @@ -126,7 +147,7 @@ tasks: msg: Please specify generator as first cli arg (ie "task template -- go") cmds: - > - docker run --rm -w /local -v ${PWD}:/local openapitools/openapi-generator-cli:latest author + docker run --rm -w /local 
-v ${PWD}:/local openapitools/openapi-generator-cli:v6.6.0 author template -g {{.CLI_ARGS}} -o templates/{{.CLI_ARGS}} sdk:generate: @@ -139,31 +160,15 @@ tasks: - wget https://raw.githubusercontent.com/formancehq/ledger/{{.VERSION}}/pkg/api/controllers/swagger.yaml -O swagger.yaml - sed -i -e "s/LEDGER_VERSION/{{.VERSION}}/g" swagger.yaml - > - docker run --rm -w /local -v ${PWD}:/local openapitools/openapi-generator-cli:latest generate + docker run --rm -w /local -v ${PWD}:/local openapitools/openapi-generator-cli:v6.6.0 generate -i ./swagger.yaml -g {{ (split "-" .CLI_ARGS)._0 }} -c ./configs/{{.CLI_ARGS}}.yaml -o ./sdks/{{.CLI_ARGS}} --git-user-id=formancehq - --git-repo-id=numary-sdk-{{.CLI_ARGS}} + --git-repo-id=formance-sdk-{{.CLI_ARGS}} -p packageVersion={{.VERSION}} - sdk:test: - desc: Test client code - dir: ./sdk - preconditions: - - sh: '[ "{{.CLI_ARGS}}" != "" ]' - msg: Please specify generator as first cli arg (ie "task test -- go") - - sh: "[[ -e sdks/{{.CLI_ARGS}}/Taskfile.yml ]]" - msg: "Not Taskfile found. 
You have to create a taskfile in ./sdks/{{.CLI_ARGS}}/ with a 'test' task" - vars: - RANDOM: - sh: "echo $$" - cmds: - - | - cd ./sdks/{{.CLI_ARGS}} - task test - goreleaser:test:pkg: desc: Test a package cmds: @@ -178,12 +183,12 @@ tasks: vars: Platform: "amd64" Image: fedora - Cmd: "{{.rpm}} numary_*_linux_amd64.rpm" + Cmd: "{{.rpm}} *_linux_amd64.rpm" - task: goreleaser:test:pkg vars: Platform: "arm64" Image: fedora - Cmd: "{{.rpm}} numary_*_linux_arm64.rpm" + Cmd: "{{.rpm}} *_linux_arm64.rpm" goreleaser:test:deb: desc: Tests deb packages @@ -194,28 +199,9 @@ tasks: vars: Platform: "amd64" Image: ubuntu - Cmd: "{{.dpkg}} numary_*_linux_amd64.deb" + Cmd: "{{.dpkg}} *_linux_amd64.deb" - task: goreleaser:test:pkg vars: Platform: "arm64" Image: ubuntu - Cmd: "{{.dpkg}} numary_*_linux_arm64.deb" - - run: - cmds: - - docker compose up -d --remove-orphans - - stop: - cmds: - - docker compose stop - - ps: - cmds: - - docker compose ps - - rm: - deps: [stop] - cmds: - - docker compose rm -f - - docker volume prune -f - - rm -f {{.BENCH_CPU_PROFILE}} {{.BENCH_MEM_PROFILE}} + Cmd: "{{.dpkg}} *_linux_arm64.deb" diff --git a/app.json b/app.json deleted file mode 100644 index 58ea7fee2..000000000 --- a/app.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "Formance Ledger", - "description": "A programmable financial ledger to build money-moving apps", - "repository": "https://github.com/numary/ledger", - "logo": "https://avatars.githubusercontent.com/u/84325077?s=200", - "keywords": ["ledger", "go", "golang", "programming", "money", "finance"], - "addons": [ - { - "plan": "heroku-postgresql:hobby-dev" - }, - { - "plan": "heroku-redis:hobby-dev" - } - ], - "env": { - "NUMARY_AUTH_BASIC_ENABLED": { - "description": "Enable basic auth. 
(true or false)", - "required": true, - "value": "true" - }, - "NUMARY_AUTH_BASIC_CREDENTIALS": { - "description": "HTTP basic auth credentials (:)", - "required": false - } - }, - "stack": "heroku-22", - "success_url": "/_info" -} diff --git a/benchmarks/ledger_test.go b/benchmarks/ledger_test.go new file mode 100644 index 000000000..5a7b00e44 --- /dev/null +++ b/benchmarks/ledger_test.go @@ -0,0 +1,151 @@ +package benchmarks + +import ( + "bytes" + "encoding/json" + "fmt" + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "os" + "runtime" + "sync" + "testing" + "time" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/api/backend" + v2 "github.com/formancehq/ledger/internal/api/v2" + "github.com/formancehq/ledger/internal/engine" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/storagetesting" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.uber.org/atomic" +) + +func BenchmarkParallelWrites(b *testing.B) { + + ctx := logging.TestingContext() + + driver := storagetesting.StorageDriver(b) + resolver := engine.NewResolver(driver, engine.WithLogger(logging.FromContext(ctx))) + b.Cleanup(func() { + require.NoError(b, resolver.CloseLedgers(ctx)) + }) + + ledgerName := uuid.NewString() + + backend := backend.NewDefaultBackend(driver, "latest", resolver) + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := logging.ContextWithLogger(r.Context(), logging.FromContext(ctx)) + router.ServeHTTP(w, r.WithContext(ctx)) + }) + + totalDuration := atomic.Int64{} + b.SetParallelism(1000) + runtime.GC() + b.ResetTimer() + startOfBench := time.Now() + counter := atomic.NewInt64(0) + longestTxLock := sync.Mutex{} + longestTransactionID := 
big.NewInt(0) + longestTransactionDuration := time.Duration(0) + b.RunParallel(func(pb *testing.PB) { + buf := bytes.NewBufferString("") + for pb.Next() { + buf.Reset() + id := counter.Add(1) + + //script := controllers.Script{ + // Script: core.Script{ + // Plain: fmt.Sprintf(` + // vars { + // account $account + // } + // + // send [USD/2 100] ( + // source = @world:%d allowing unbounded overdraft + // destination = $account + // )`, counter.Load()%100), + // }, + // Vars: map[string]any{ + // "account": fmt.Sprintf("accounts:%d", counter.Add(1)), + // }, + //} + + script := v2.Script{ + Script: ledger.Script{ + Plain: `vars { + account $account +} + +send [USD/2 100] ( + source = @world + destination = $account +)`, + }, + Vars: map[string]any{ + "account": fmt.Sprintf("accounts:%d", id), + }, + } + + // script := controllers.Script{ + // Script: core.Script{ + // Plain: `vars { + // account $account + // account $src + //} + // + //send [USD/2 100] ( + // source = $src allowing unbounded overdraft + // destination = $account + //)`, + // }, + // Vars: map[string]any{ + // "src": fmt.Sprintf("world:%d", id), + // "account": fmt.Sprintf("accounts:%d", id), + // }, + // } + + err := json.NewEncoder(buf).Encode(v2.PostTransactionRequest{ + Script: script, + }) + require.NoError(b, err) + + //ctx, _ := context.WithDeadline(ctx, time.Now().Add(10*time.Second)) + + req := httptest.NewRequest("POST", "/"+ledgerName+"/transactions", buf) + req = req.WithContext(ctx) + req.URL.RawQuery = url.Values{ + "async": []string{os.Getenv("ASYNC")}, + }.Encode() + rsp := httptest.NewRecorder() + + now := time.Now() + handler.ServeHTTP(rsp, req) + latency := time.Since(now).Milliseconds() + totalDuration.Add(latency) + + require.Equal(b, http.StatusOK, rsp.Code) + tx, _ := api.DecodeSingleResponse[ledger.Transaction](b, rsp.Body) + + longestTxLock.Lock() + if time.Millisecond*time.Duration(latency) > longestTransactionDuration { + longestTransactionID = tx.ID + 
longestTransactionDuration = time.Duration(latency) * time.Millisecond + } + longestTxLock.Unlock() + } + }) + + b.StopTimer() + b.Logf("Longest transaction: %d (%s)", longestTransactionID, longestTransactionDuration.String()) + b.ReportMetric((float64(time.Duration(b.N))/float64(time.Since(startOfBench)))*float64(time.Second), "t/s") + b.ReportMetric(float64(totalDuration.Load()/int64(b.N)), "ms/transaction") + runtime.GC() +} diff --git a/benchmarks/main_test.go b/benchmarks/main_test.go new file mode 100644 index 000000000..da45c38b1 --- /dev/null +++ b/benchmarks/main_test.go @@ -0,0 +1,26 @@ +package benchmarks + +import ( + "os" + "testing" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/pgtesting" + "github.com/ory/dockertest/v3/docker" +) + +func TestMain(m *testing.M) { + + if err := pgtesting.CreatePostgresServer(pgtesting.WithDockerHostConfigOption(func(hostConfig *docker.HostConfig) { + hostConfig.CPUCount = 2 + })); err != nil { + logging.Error(err) + os.Exit(1) + } + + code := m.Run() + if err := pgtesting.DestroyPostgresServer(); err != nil { + logging.Error(err) + } + os.Exit(code) +} diff --git a/build.Dockerfile b/build.Dockerfile new file mode 100644 index 000000000..66a786a6f --- /dev/null +++ b/build.Dockerfile @@ -0,0 +1,5 @@ +FROM ghcr.io/formancehq/base:22.04 +COPY ledger /usr/bin/ledger +ENV OTEL_SERVICE_NAME ledger +ENTRYPOINT ["/usr/bin/ledger"] +CMD ["serve"] diff --git a/cmd/container.go b/cmd/container.go index a8ce5c448..e95cdc49e 100644 --- a/cmd/container.go +++ b/cmd/container.go @@ -1,253 +1,43 @@ package cmd import ( - "crypto/tls" - "fmt" - "log" - "net/http" - "os" - "strings" - - "github.com/Shopify/sarama" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/logging/logginglogrus" - "github.com/formancehq/go-libs/oauth2/oauth2introspect" - "github.com/formancehq/go-libs/otlp/otlptraces" - 
"github.com/formancehq/go-libs/publish" - "github.com/formancehq/go-libs/publish/publishhttp" - "github.com/formancehq/go-libs/publish/publishkafka" - "github.com/gin-contrib/cors" - "github.com/gin-gonic/gin" - "github.com/numary/ledger/cmd/internal" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/middlewares" - "github.com/numary/ledger/pkg/api/routes" - "github.com/numary/ledger/pkg/bus" - "github.com/numary/ledger/pkg/contextlogger" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/redis" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/sirupsen/logrus" + "io" + + "github.com/formancehq/ledger/cmd/internal" + "github.com/formancehq/ledger/internal/engine" + driver "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/stack/libs/go-libs/otlp/otlpmetrics" + "github.com/formancehq/stack/libs/go-libs/otlp/otlptraces" + "github.com/formancehq/stack/libs/go-libs/publish" + "github.com/formancehq/stack/libs/go-libs/service" "github.com/spf13/viper" - "github.com/uptrace/opentelemetry-go-extra/otellogrus" - "github.com/xdg-go/scram" - "go.opentelemetry.io/otel/trace" "go.uber.org/fx" ) const ServiceName = "ledger" -func NewContainer(v *viper.Viper, userOptions ...fx.Option) *fx.App { - +func resolveOptions(output io.Writer, userOptions ...fx.Option) []fx.Option { options := make([]fx.Option, 0) - if !v.GetBool(debugFlag) { - options = append(options, fx.NopLogger) - } - - debug := viper.GetBool(debugFlag) + options = append(options, fx.NopLogger) - l := logrus.New() - if debug { - l.Level = logrus.DebugLevel - } - if viper.GetBool(otlptraces.OtelTracesFlag) { - l.AddHook(otellogrus.NewHook(otellogrus.WithLevels( - logrus.PanicLevel, - logrus.FatalLevel, - logrus.ErrorLevel, - logrus.WarnLevel, - ))) - } - logging.SetFactory(contextlogger.NewFactory( - logging.StaticLoggerFactory(logginglogrus.New(l)), - )) + v := viper.GetViper() + debug := v.GetBool(service.DebugFlag) if debug { 
- sqlstorage.InstrumentalizeSQLDrivers() + driver.InstrumentalizeSQLDriver() } - topics := v.GetStringSlice(publisherTopicMappingFlag) - mapping := make(map[string]string) - for _, topic := range topics { - parts := strings.SplitN(topic, ":", 2) - if len(parts) != 2 { - panic("invalid topic flag") - } - mapping[parts[0]] = parts[1] - } - - options = append(options, publish.Module(), bus.LedgerMonitorModule()) - options = append(options, publish.TopicMapperPublisherModule(mapping)) - - switch { - case v.GetBool(publisherHttpEnabledFlag): - options = append(options, publishhttp.Module()) - case v.GetBool(publisherKafkaEnabledFlag): - sarama.Logger = log.New(os.Stdout, "[Sarama] ", log.LstdFlags) - options = append(options, - publishkafka.Module(ServiceName, v.GetStringSlice(publisherKafkaBrokerFlag)...), - publishkafka.ProvideSaramaOption( - publishkafka.WithConsumerReturnErrors(), - publishkafka.WithProducerReturnSuccess(), - ), - ) - if v.GetBool(publisherKafkaTLSEnabled) { - options = append(options, publishkafka.ProvideSaramaOption(publishkafka.WithTLS())) - } - if v.GetBool(publisherKafkaSASLEnabled) { - options = append(options, publishkafka.ProvideSaramaOption( - publishkafka.WithSASLEnabled(), - publishkafka.WithSASLCredentials( - v.GetString(publisherKafkaSASLUsername), - v.GetString(publisherKafkaSASLPassword), - ), - publishkafka.WithSASLMechanism(sarama.SASLMechanism(v.GetString(publisherKafkaSASLMechanism))), - publishkafka.WithSASLScramClient(func() sarama.SCRAMClient { - var fn scram.HashGeneratorFcn - switch v.GetInt(publisherKafkaSASLScramSHASize) { - case 512: - fn = publishkafka.SHA512 - case 256: - fn = publishkafka.SHA256 - default: - panic("sha size not handled") - } - return &publishkafka.XDGSCRAMClient{ - HashGeneratorFcn: fn, - } - }), - )) - } - } - - // Handle OpenTelemetry - options = append(options, otlptraces.CLITracesModule(v)) - - switch v.GetString(lockStrategyFlag) { - case "redis": - var tlsConfig *tls.Config - if 
v.GetBool(lockStrategyRedisTLSEnabledFlag) { - tlsConfig = &tls.Config{} - if v.GetBool(lockStrategyRedisTLSInsecureFlag) { - tlsConfig.InsecureSkipVerify = true - } - } - options = append(options, redis.Module(redis.Config{ - Url: v.GetString(lockStrategyRedisUrlFlag), - LockDuration: v.GetDuration(lockStrategyRedisDurationFlag), - LockRetry: v.GetDuration(lockStrategyRedisRetryFlag), - TLSConfig: tlsConfig, - })) - } - - // Handle api part - options = append(options, api.Module(api.Config{ - StorageDriver: v.GetString(storageDriverFlag), - Version: Version, - UseScopes: viper.GetBool(authBearerUseScopesFlag), - })) - - // Handle storage driver - options = append(options, sqlstorage.DriverModule(sqlstorage.ModuleConfig{ - StorageDriver: v.GetString(storageDriverFlag), - SQLiteConfig: func() *sqlstorage.SQLiteConfig { - if v.GetString(storageDriverFlag) != sqlstorage.SQLite.String() { - return nil - } - return &sqlstorage.SQLiteConfig{ - Dir: v.GetString(storageDirFlag), - DBName: v.GetString(storageSQLiteDBNameFlag), - } - }(), - PostgresConfig: func() *sqlstorage.PostgresConfig { - if v.GetString(storageDriverFlag) != sqlstorage.PostgreSQL.String() { - return nil - } - return &sqlstorage.PostgresConfig{ - ConnString: v.GetString(storagePostgresConnectionStringFlag), - } - }(), - })) - - options = append(options, internal.NewAnalyticsModule(v, Version)) - - options = append(options, fx.Provide( - fx.Annotate(func() []ledger.LedgerOption { - ledgerOptions := []ledger.LedgerOption{} - - if v.GetString(commitPolicyFlag) == "allow-past-timestamps" { - ledgerOptions = append(ledgerOptions, ledger.WithPastTimestamps) - } - - return ledgerOptions - }, fx.ResultTags(ledger.ResolverLedgerOptionsKey)), - )) - - // Handle resolver - options = append(options, ledger.ResolveModule( - v.GetInt64(cacheCapacityBytes), v.GetInt64(cacheMaxNumKeys))) - - // Api middlewares - options = append(options, routes.ProvidePerLedgerMiddleware(func(tp trace.TracerProvider) []gin.HandlerFunc { 
- res := make([]gin.HandlerFunc, 0) - - methods := make([]auth.Method, 0) - if httpBasicMethod := internal.HTTPBasicAuthMethod(v); httpBasicMethod != nil { - methods = append(methods, httpBasicMethod) - } - if v.GetBool(authBearerEnabledFlag) { - methods = append(methods, auth.NewHttpBearerMethod( - auth.NewIntrospectionValidator( - oauth2introspect.NewIntrospecter(v.GetString(authBearerIntrospectUrlFlag)), - v.GetBool(authBearerAudiencesWildcardFlag), - auth.AudienceIn(v.GetStringSlice(authBearerAudienceFlag)...), - ), - )) - } - if len(methods) > 0 { - res = append(res, func(c *gin.Context) { - handled := false - auth.Middleware(methods...)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - handled = true - // The middleware replace the context of the request to include the agent - // We have to forward it to gin - c.Request = r - c.Next() - })).ServeHTTP(c.Writer, c.Request) - if !handled { - c.Abort() - } - }) - } - return res - }, fx.ParamTags(`optional:"true"`))) - - options = append(options, routes.ProvideMiddlewares(func(tp trace.TracerProvider) []gin.HandlerFunc { - res := make([]gin.HandlerFunc, 0) - - cc := cors.DefaultConfig() - cc.AllowAllOrigins = true - cc.AllowCredentials = true - cc.AddAllowHeaders("authorization") - - res = append(res, cors.New(cc)) - res = append(res, func(context *gin.Context) { - context.Next() - for _, err := range context.Errors { - logging.GetLogger(context.Request.Context()).Error(err) - } - }) - res = append(res, middlewares.Log()) - res = append(res, gin.CustomRecoveryWithWriter(os.Stderr, func(c *gin.Context, err interface{}) { - switch eerr := err.(type) { - case error: - _ = c.AbortWithError(http.StatusInternalServerError, eerr) - default: - _ = c.AbortWithError(http.StatusInternalServerError, fmt.Errorf("%s", err)) - } - })) - return res - }, fx.ParamTags(`optional:"true"`))) - - return fx.New(append(options, userOptions...)...) 
+ options = append(options, + publish.CLIPublisherModule(v, ServiceName), + otlptraces.CLITracesModule(v), + otlpmetrics.CLIMetricsModule(v), + driver.CLIModule(v, output, debug), + internal.NewAnalyticsModule(v, Version), + engine.Module(engine.Configuration{ + NumscriptCache: engine.NumscriptCacheConfiguration{ + MaxCount: v.GetInt(numscriptCacheMaxCountFlag), + }, + }), + ) + + return append(options, userOptions...) } diff --git a/cmd/container_test.go b/cmd/container_test.go deleted file mode 100644 index c62a75a40..000000000 --- a/cmd/container_test.go +++ /dev/null @@ -1,272 +0,0 @@ -package cmd - -import ( - "context" - "errors" - "os" - "reflect" - "runtime" - "strings" - "testing" - "time" - - "github.com/ThreeDotsLabs/watermill/pubsub/gochannel" - "github.com/formancehq/go-libs/otlp/otlptraces" - "github.com/numary/ledger/internal/pgtesting" - "github.com/numary/ledger/pkg/api/middlewares" - "github.com/numary/ledger/pkg/bus" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/redis" - "github.com/numary/ledger/pkg/storage" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pborman/uuid" - "github.com/spf13/viper" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.opentelemetry.io/otel/sdk/trace" - "go.uber.org/fx" -) - -func TestContainers(t *testing.T) { - pgServer, err := pgtesting.PostgresServer() - if !assert.NoError(t, err) { - return - } - defer func(pgServer *pgtesting.PGServer) { - err := pgServer.Close() - if err != nil { - panic(err) - } - }(pgServer) - - type testCase struct { - name string - options []fx.Option - init func(*viper.Viper) - } - - for _, tc := range []testCase{ - { - name: "default", - init: func(v *viper.Viper) { - v.Set(storageDriverFlag, sqlstorage.SQLite.String()) - }, - }, - { - name: "default-with-opentelemetry-traces-on-stdout", - init: func(v *viper.Viper) { - v.Set(storageDriverFlag, sqlstorage.SQLite.String()) - 
v.Set(otlptraces.OtelTracesFlag, true) - v.Set(otlptraces.OtelTracesExporterFlag, "stdout") - }, - options: []fx.Option{ - fx.Invoke(fx.Annotate(func(lc fx.Lifecycle, t *testing.T, exp trace.SpanExporter, options ...trace.TracerProviderOption) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - assert.Len(t, options, 2) - if os.Getenv("CI") == "true" { // runtime.FuncForPC does not return same results locally or in the CI pipeline (probably related to inlining) - return nil - } - var ( - foundWithResource bool - foundWithSyncer bool - ) - for _, opt := range options { - if strings.Contains(runtime.FuncForPC(reflect.ValueOf(opt).Pointer()).Name(), "trace.WithSyncer") { - foundWithSyncer = true - } - if strings.Contains(runtime.FuncForPC(reflect.ValueOf(opt).Pointer()).Name(), "trace.WithResource") { - foundWithResource = true - } - } - assert.True(t, foundWithResource) - assert.True(t, foundWithSyncer) - return nil - }, - }) - }, fx.ParamTags(``, ``, ``, otlptraces.TracerProviderOptionKey))), - }, - }, - { - name: "default-with-opentelemetry-traces-on-stdout-and-batch", - init: func(v *viper.Viper) { - v.Set(storageDriverFlag, sqlstorage.SQLite.String()) - v.Set(otlptraces.OtelTracesFlag, true) - v.Set(otlptraces.OtelTracesExporterFlag, "stdout") - v.Set(otlptraces.OtelTracesBatchFlag, true) - }, - options: []fx.Option{ - fx.Invoke(fx.Annotate(func(lc fx.Lifecycle, t *testing.T, exp trace.SpanExporter, options ...trace.TracerProviderOption) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - if !assert.Len(t, options, 2) { - return nil - } - if os.Getenv("CI") == "true" { // runtime.FuncForPC does not returns same results locally or in the CI pipeline (probably related to inlining) - return nil - } - var ( - foundWithResource bool - foundWithBatcher bool - ) - for _, opt := range options { - if strings.Contains(runtime.FuncForPC(reflect.ValueOf(opt).Pointer()).Name(), "trace.WithBatch") { - foundWithBatcher = true - } - if 
strings.Contains(runtime.FuncForPC(reflect.ValueOf(opt).Pointer()).Name(), "trace.WithResource") { - foundWithResource = true - } - } - assert.True(t, foundWithResource) - assert.True(t, foundWithBatcher) - return nil - }, - }) - }, fx.ParamTags(``, ``, ``, otlptraces.TracerProviderOptionKey))), - }, - }, - { - name: "default-with-opentelemetry-traces-on-otlp", - init: func(v *viper.Viper) { - v.Set(storageDriverFlag, sqlstorage.SQLite.String()) - v.Set(otlptraces.OtelTracesFlag, true) - v.Set(otlptraces.OtelTracesExporterFlag, "otlp") - }, - }, - { - name: "default-with-opentelemetry-traces-on-jaeger", - init: func(v *viper.Viper) { - v.Set(storageDriverFlag, sqlstorage.SQLite.String()) - v.Set(otlptraces.OtelTracesFlag, true) - v.Set(otlptraces.OtelTracesExporterFlag, "jaeger") - }, - }, - { - name: "pg", - init: func(v *viper.Viper) { - v.Set(storageDriverFlag, sqlstorage.PostgreSQL.String()) - v.Set(storagePostgresConnectionStringFlag, pgServer.ConnString()) - }, - options: []fx.Option{ - fx.Invoke(func(lc fx.Lifecycle, t *testing.T, driver storage.Driver[storage.LedgerStore], storageFactory storage.Driver[storage.LedgerStore]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - store, _, err := storageFactory.GetLedgerStore(ctx, "testing", true) - if err != nil { - return err - } - err = store.Close(ctx) - if err != nil { - return err - } - return nil - }, - }) - }), - }, - }, - { - name: "default-with-lock-strategy-memory", - init: func(v *viper.Viper) { - v.Set(lockStrategyFlag, "redis") - }, - }, - { - name: "default-with-lock-strategy-none", - init: func(v *viper.Viper) { - v.Set(lockStrategyFlag, "none") - }, - }, - { - name: "default-with-lock-strategy-redis", - init: func(v *viper.Viper) { - v.Set(lockStrategyFlag, "redis") - v.Set(lockStrategyRedisUrlFlag, "redis://redis:6789") - }, - options: []fx.Option{ - fx.Invoke(func(lc fx.Lifecycle, resolver *ledger.Resolver, locker middlewares.Locker) { - lc.Append(fx.Hook{ - OnStart: func(ctx 
context.Context) error { - require.IsType(t, locker, &redis.Lock{}) - return nil - }, - }) - }), - }, - }, - { - name: "event-bus", - init: func(v *viper.Viper) {}, - options: []fx.Option{ - fx.Invoke(func(lc fx.Lifecycle, ch *gochannel.GoChannel, resolver *ledger.Resolver) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - messages, err := ch.Subscribe(ctx, bus.EventTypeSavedMetadata) - if err != nil { - return err - } - - name := uuid.New() - l, err := resolver.GetLedger(ctx, name) - if err != nil { - return err - } - errCh := make(chan error, 1) - go func() { - err := l.SaveMeta(ctx, core.MetaTargetTypeAccount, "world", core.Metadata{"foo": []byte(`"bar"`)}) - if err != nil { - errCh <- err - } - }() - - select { - case <-ctx.Done(): - return ctx.Err() - case err := <-errCh: - return err - case <-messages: - case <-time.After(time.Second): - return errors.New("timeout") - } - return nil - }, - }) - }), - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - run := make(chan struct{}, 1) - options := append(tc.options, - fx.Invoke(func() { - run <- struct{}{} - }), - fx.Provide(func() *testing.T { - return t - }), - ) - v := viper.New() - // Default options - v.Set(storageDriverFlag, sqlstorage.SQLite.String()) - v.Set(storageDirFlag, "/tmp") - v.Set(cacheCapacityBytes, 100000000) - v.Set(cacheMaxNumKeys, 100) - //v.Set(storageSQLiteDBNameFlag, uuid.New()) - tc.init(v) - app := NewContainer(v, options...) 
- - require.NoError(t, app.Start(context.Background())) - defer func(app *fx.App, ctx context.Context) { - require.NoError(t, app.Stop(ctx)) - }(app, context.Background()) - - select { - case <-run: - default: - t.Fatal("application not started correctly") - } - }) - } - -} diff --git a/cmd/control/gitkeep b/cmd/control/gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/cmd/doc.go b/cmd/doc.go index a10b00eb3..434ffd593 100644 --- a/cmd/doc.go +++ b/cmd/doc.go @@ -7,7 +7,7 @@ import ( "strings" "text/tabwriter" - "github.com/numary/ledger/cmd/internal" + "github.com/formancehq/ledger/cmd/internal" "github.com/spf13/cobra" "github.com/spf13/viper" ) diff --git a/cmd/internal/analytics.go b/cmd/internal/analytics.go index c7e2318da..8b54fd519 100644 --- a/cmd/internal/analytics.go +++ b/cmd/internal/analytics.go @@ -1,27 +1,17 @@ package internal import ( - "context" "time" "github.com/Masterminds/semver/v3" - "github.com/formancehq/go-libs/logging" - "github.com/numary/ledger/pkg/analytics" + "github.com/formancehq/ledger/internal/analytics" + "github.com/formancehq/stack/libs/go-libs/logging" "github.com/spf13/cobra" "github.com/spf13/viper" "go.uber.org/fx" ) const ( - // deprecated - segmentEnabledFlag = "segment-enabled" - // deprecated - segmentWriteKeyFlag = "segment-write-key" - // deprecated - segmentApplicationIdFlag = "segment-application-id" - // deprecated - segmentHeartbeatIntervalFlag = "segment-heartbeat-interval" - telemetryEnabledFlag = "telemetry-enabled" telemetryWriteKeyFlag = "telemetry-write-key" telemetryApplicationIdFlag = "telemetry-application-id" @@ -29,10 +19,6 @@ const ( ) func InitAnalyticsFlags(cmd *cobra.Command, defaultWriteKey string) { - cmd.PersistentFlags().Bool(segmentEnabledFlag, false, "Is segment enabled") - cmd.PersistentFlags().String(segmentApplicationIdFlag, "", "Segment application id") - cmd.PersistentFlags().String(segmentWriteKeyFlag, defaultWriteKey, "Segment write key") - 
cmd.PersistentFlags().Duration(segmentHeartbeatIntervalFlag, 4*time.Hour, "Segment heartbeat interval") cmd.PersistentFlags().Bool(telemetryEnabledFlag, true, "Is telemetry enabled") cmd.PersistentFlags().String(telemetryApplicationIdFlag, "", "telemetry application id") cmd.PersistentFlags().String(telemetryWriteKeyFlag, defaultWriteKey, "telemetry write key") @@ -40,42 +26,26 @@ func InitAnalyticsFlags(cmd *cobra.Command, defaultWriteKey string) { } func NewAnalyticsModule(v *viper.Viper, version string) fx.Option { - if v.GetBool(telemetryEnabledFlag) || v.GetBool(segmentEnabledFlag) { - applicationId := viper.GetString(telemetryApplicationIdFlag) - if applicationId == "" { - applicationId = viper.GetString(segmentApplicationIdFlag) - } - var appIdProviderModule fx.Option - if applicationId == "" { - appIdProviderModule = fx.Provide(analytics.FromStorageAppIdProvider) - } else { - appIdProviderModule = fx.Provide(func() analytics.AppIdProvider { - return analytics.AppIdProviderFn(func(ctx context.Context) (string, error) { - return applicationId, nil - }) - }) - } + if v.GetBool(telemetryEnabledFlag) { + applicationID := viper.GetString(telemetryApplicationIdFlag) writeKey := viper.GetString(telemetryWriteKeyFlag) - if writeKey == "" { - writeKey = viper.GetString(segmentWriteKeyFlag) - } interval := viper.GetDuration(telemetryHeartbeatIntervalFlag) - if interval == 0 { - interval = viper.GetDuration(segmentHeartbeatIntervalFlag) - } if writeKey == "" { - logging.GetLogger(context.Background()).Infof("telemetry enabled but no write key provided") + return fx.Invoke(func(l logging.Logger) { + l.Infof("telemetry enabled but no write key provided") + }) } else if interval == 0 { - logging.GetLogger(context.Background()).Error("telemetry heartbeat interval is 0") + return fx.Invoke(func(l logging.Logger) { + l.Error("telemetry heartbeat interval is 0") + }) } else { _, err := semver.NewVersion(version) if err != nil { - 
logging.GetLogger(context.Background()).Infof("telemetry enabled but version '%s' is not semver, skip", version) + return fx.Invoke(func(l logging.Logger) { + l.Infof("telemetry enabled but version '%s' is not semver, skip", version) + }) } else { - return fx.Options( - appIdProviderModule, - analytics.NewHeartbeatModule(version, writeKey, interval), - ) + return analytics.NewHeartbeatModule(version, writeKey, applicationID, interval) } } } diff --git a/cmd/internal/analytics_test.go b/cmd/internal/analytics_test.go index acc2789cd..5aa65e391 100644 --- a/cmd/internal/analytics_test.go +++ b/cmd/internal/analytics_test.go @@ -2,21 +2,14 @@ package internal import ( "context" - "net/http" - "os" "reflect" "testing" "time" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pborman/uuid" "github.com/spf13/cobra" "github.com/spf13/viper" "github.com/stretchr/testify/require" "go.uber.org/fx" - "gopkg.in/segmentio/analytics-go.v3" ) func TestAnalyticsFlags(t *testing.T) { @@ -29,34 +22,6 @@ func TestAnalyticsFlags(t *testing.T) { } for _, testCase := range []testCase{ - { - name: "using deprecated segment enabled flag", - key: segmentEnabledFlag, - envValue: "true", - viperMethod: (*viper.Viper).GetBool, - expectedValue: true, - }, - { - name: "using deprecated segment write key flag", - key: segmentWriteKeyFlag, - envValue: "foo:bar", - viperMethod: (*viper.Viper).GetString, - expectedValue: "foo:bar", - }, - { - name: "using deprecated segment heartbeat interval flag", - key: segmentHeartbeatIntervalFlag, - envValue: "10s", - viperMethod: (*viper.Viper).GetDuration, - expectedValue: 10 * time.Second, - }, - { - name: "using deprecated segment application id flag", - key: segmentApplicationIdFlag, - envValue: "foo:bar", - viperMethod: (*viper.Viper).GetString, - expectedValue: "foo:bar", - }, { name: "using telemetry enabled flag", key: telemetryEnabledFlag, @@ -100,7 +65,6 
@@ func TestAnalyticsFlags(t *testing.T) { require.Equal(t, testCase.expectedValue, rValue) }, } - InitHTTPBasicFlags(cmd) BindEnv(v) restoreEnvVar := setEnvVar(testCase.key, testCase.envValue) @@ -112,53 +76,6 @@ func TestAnalyticsFlags(t *testing.T) { } } -func TestAnalyticsModule(t *testing.T) { - v := viper.GetViper() - v.Set(telemetryEnabledFlag, true) - v.Set(telemetryWriteKeyFlag, "XXX") - v.Set(telemetryApplicationIdFlag, "appId") - v.Set(telemetryHeartbeatIntervalFlag, 10*time.Second) - - handled := make(chan struct{}) - - module := NewAnalyticsModule(v, "1.0.0") - app := fx.New( - module, - fx.Provide(func(lc fx.Lifecycle) (storage.Driver[ledger.Store], error) { - id := uuid.New() - driver := sqlstorage.NewDriver("sqlite", sqlstorage.NewSQLiteDB(os.TempDir(), id)) - lc.Append(fx.Hook{ - OnStart: driver.Initialize, - }) - return sqlstorage.NewLedgerStorageDriverFromRawDriver(driver), nil - }), - fx.Replace(analytics.Config{ - BatchSize: 1, - Transport: roundTripperFn(func(req *http.Request) (*http.Response, error) { - select { - case <-handled: - // Nothing to do, the chan has already been closed - default: - close(handled) - } - return &http.Response{ - StatusCode: http.StatusOK, - }, nil - }), - })) - require.NoError(t, app.Start(context.Background())) - defer func() { - require.NoError(t, app.Stop(context.Background())) - }() - - select { - case <-time.After(time.Second): - require.Fail(t, "Timeout waiting first stats from analytics module") - case <-handled: - } - -} - func TestAnalyticsModuleDisabled(t *testing.T) { v := viper.GetViper() v.Set(telemetryEnabledFlag, false) diff --git a/cmd/internal/env.go b/cmd/internal/env.go index 92520d6a6..3df704068 100644 --- a/cmd/internal/env.go +++ b/cmd/internal/env.go @@ -6,14 +6,9 @@ import ( "github.com/spf13/viper" ) -const ( - envPrefix = "numary" -) - var EnvVarReplacer = strings.NewReplacer(".", "_", "-", "_") func BindEnv(v *viper.Viper) { - v.SetEnvPrefix(envPrefix) v.SetEnvKeyReplacer(EnvVarReplacer) 
v.AutomaticEnv() } diff --git a/cmd/internal/http_basic.go b/cmd/internal/http_basic.go deleted file mode 100644 index ba1b0a5fb..000000000 --- a/cmd/internal/http_basic.go +++ /dev/null @@ -1,42 +0,0 @@ -package internal - -import ( - "strings" - - "github.com/formancehq/go-libs/auth" - "github.com/numary/ledger/pkg/api/routes" - "github.com/spf13/cobra" - "github.com/spf13/viper" -) - -const ( - serverHttpBasicAuthFlag = "server.http.basic_auth" - authBasicEnabledFlag = "auth-basic-enabled" - authBasicCredentialsFlag = "auth-basic-credentials" -) - -func HTTPBasicAuthMethod(v *viper.Viper) auth.Method { - basicAuth := v.GetStringSlice(serverHttpBasicAuthFlag) - if len(basicAuth) == 0 { - basicAuth = v.GetStringSlice(authBasicCredentialsFlag) - } - if len(basicAuth) > 0 && - (!v.IsSet(authBasicEnabledFlag) || v.GetBool(authBasicEnabledFlag)) { // Keep compatibility, we disable the feature only if the flag is explicitely set to false - credentials := auth.Credentials{} - for _, kv := range basicAuth { - parts := strings.SplitN(kv, ":", 2) - credentials[parts[0]] = auth.Credential{ - Password: parts[1], - Scopes: routes.AllScopes, - } - } - return auth.NewHTTPBasicMethod(credentials) - } - return nil -} - -func InitHTTPBasicFlags(cmd *cobra.Command) { - cmd.PersistentFlags().Bool(authBasicEnabledFlag, false, "Enable basic auth") - cmd.PersistentFlags().StringSlice(authBasicCredentialsFlag, []string{}, "HTTP basic auth credentials (:)") - cmd.PersistentFlags().String(serverHttpBasicAuthFlag, "", "Http basic auth") -} diff --git a/cmd/internal/http_basic_test.go b/cmd/internal/http_basic_test.go deleted file mode 100644 index 713f6a4a1..000000000 --- a/cmd/internal/http_basic_test.go +++ /dev/null @@ -1,140 +0,0 @@ -package internal - -import ( - "fmt" - "reflect" - "testing" - - "github.com/formancehq/go-libs/auth" - "github.com/spf13/cobra" - "github.com/spf13/viper" - "github.com/stretchr/testify/require" -) - -func TestViperEnvBinding(t *testing.T) { - - type 
testCase struct { - name string - key string - envValue string - viperMethod interface{} - expectedValue interface{} - } - - for _, testCase := range []testCase{ - { - name: "using deprecated credentials flag", - key: serverHttpBasicAuthFlag, - envValue: "foo:bar", - viperMethod: (*viper.Viper).GetString, - expectedValue: "foo:bar", - }, - { - name: "using credentials flag", - key: authBasicCredentialsFlag, - envValue: "foo:bar", - viperMethod: (*viper.Viper).GetStringSlice, - expectedValue: []string{"foo:bar"}, - }, - { - name: "using http basic enabled flags", - key: authBasicEnabledFlag, - envValue: "true", - viperMethod: (*viper.Viper).GetBool, - expectedValue: true, - }, - } { - t.Run(testCase.name, func(t *testing.T) { - v := viper.GetViper() - cmd := &cobra.Command{ - Run: func(cmd *cobra.Command, args []string) { - ret := reflect.ValueOf(testCase.viperMethod).Call([]reflect.Value{ - reflect.ValueOf(v), - reflect.ValueOf(testCase.key), - }) - require.Len(t, ret, 1) - - rValue := ret[0].Interface() - require.Equal(t, testCase.expectedValue, rValue) - }, - } - InitHTTPBasicFlags(cmd) - BindEnv(v) - - restoreEnvVar := setEnvVar(testCase.key, testCase.envValue) - defer restoreEnvVar() - - require.NoError(t, v.BindPFlags(cmd.PersistentFlags())) - - require.NoError(t, cmd.Execute()) - }) - } -} - -func TestHTTPBasicAuthMethod(t *testing.T) { - - type testCase struct { - name string - args []string - expectedBasicAuthMethod bool - } - - for _, testCase := range []testCase{ - { - name: "no flag defined", - args: []string{}, - expectedBasicAuthMethod: false, - }, - { - name: "with latest credentials flag", - args: []string{ - fmt.Sprintf("--%s=%s", authBasicCredentialsFlag, "foo:bar"), - }, - expectedBasicAuthMethod: true, - }, - { - name: "with deprecated credentials flag", - args: []string{ - fmt.Sprintf("--%s=%s", serverHttpBasicAuthFlag, "foo:bar"), - }, - expectedBasicAuthMethod: true, - }, - { - name: "with enabled flag set to false", - args: []string{ - 
fmt.Sprintf("--%s=%s", serverHttpBasicAuthFlag, "foo:bar"), - fmt.Sprintf("--%s=false", authBasicEnabledFlag), - }, - expectedBasicAuthMethod: false, - }, - { - name: "with enabled flag set to true", - args: []string{ - fmt.Sprintf("--%s=%s", serverHttpBasicAuthFlag, "foo:bar"), - fmt.Sprintf("--%s=true", authBasicEnabledFlag), - }, - expectedBasicAuthMethod: true, - }, - } { - t.Run(testCase.name, func(t *testing.T) { - var method auth.Method - cmd := &cobra.Command{ - RunE: func(cmd *cobra.Command, args []string) error { - method = HTTPBasicAuthMethod(viper.GetViper()) - return nil - }, - } - InitHTTPBasicFlags(cmd) - require.NoError(t, viper.BindPFlags(cmd.PersistentFlags())) - - cmd.SetArgs(testCase.args) - - require.NoError(t, cmd.Execute()) - if testCase.expectedBasicAuthMethod { - require.NotNil(t, method) - } else { - require.Nil(t, method) - } - }) - } -} diff --git a/cmd/internal/utils.go b/cmd/internal/utils.go index 52940bae5..c886ed8b4 100644 --- a/cmd/internal/utils.go +++ b/cmd/internal/utils.go @@ -1,27 +1,15 @@ package internal import ( - "fmt" - "net/http" "os" "strings" ) -func withPrefix(flag string) string { - return strings.ToUpper(fmt.Sprintf("%s_%s", envPrefix, EnvVarReplacer.Replace(flag))) -} - func setEnvVar(key, value string) func() { - prefixedFlag := withPrefix(key) - oldEnv := os.Getenv(prefixedFlag) - os.Setenv(prefixedFlag, value) + flag := strings.ToUpper(EnvVarReplacer.Replace(key)) + oldEnv := os.Getenv(flag) + os.Setenv(flag, value) return func() { - os.Setenv(prefixedFlag, oldEnv) + os.Setenv(flag, oldEnv) } } - -type roundTripperFn func(req *http.Request) (*http.Response, error) - -func (fn roundTripperFn) RoundTrip(req *http.Request) (*http.Response, error) { - return fn(req) -} diff --git a/cmd/root.go b/cmd/root.go index 35a699d11..a3fc943d7 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -3,55 +3,19 @@ package cmd import ( "fmt" "os" - "path" - "github.com/formancehq/go-libs/otlp/otlptraces" - 
"github.com/numary/ledger/cmd/internal" - "github.com/numary/ledger/pkg/redis" - _ "github.com/numary/ledger/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes" - "github.com/pkg/errors" + "github.com/formancehq/ledger/cmd/internal" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/stack/libs/go-libs/otlp/otlpmetrics" + "github.com/formancehq/stack/libs/go-libs/otlp/otlptraces" + "github.com/formancehq/stack/libs/go-libs/publish" + "github.com/formancehq/stack/libs/go-libs/service" "github.com/spf13/cobra" "github.com/spf13/viper" ) const ( - debugFlag = "debug" - storageDirFlag = "storage.dir" - storageDriverFlag = "storage.driver" - storageSQLiteDBNameFlag = "storage.sqlite.db_name" - storagePostgresConnectionStringFlag = "storage.postgres.conn_string" - // Deprecated - storageCacheFlag = "storage.cache" - serverHttpBindAddressFlag = "server.http.bind_address" - uiHttpBindAddressFlag = "ui.http.bind_address" - lockStrategyFlag = "lock-strategy" - lockStrategyRedisUrlFlag = "lock-strategy-redis-url" - lockStrategyRedisDurationFlag = "lock-strategy-redis-duration" - lockStrategyRedisRetryFlag = "lock-strategy-redis-retry" - lockStrategyRedisTLSEnabledFlag = "lock-strategy-redis-tls-enabled" - lockStrategyRedisTLSInsecureFlag = "lock-strategy-redis-tls-insecure" - - publisherKafkaEnabledFlag = "publisher-kafka-enabled" - publisherKafkaBrokerFlag = "publisher-kafka-broker" - publisherKafkaSASLEnabled = "publisher-kafka-sasl-enabled" - publisherKafkaSASLUsername = "publisher-kafka-sasl-username" - publisherKafkaSASLPassword = "publisher-kafka-sasl-password" - publisherKafkaSASLMechanism = "publisher-kafka-sasl-mechanism" - publisherKafkaSASLScramSHASize = "publisher-kafka-sasl-scram-sha-size" - publisherKafkaTLSEnabled = "publisher-kafka-tls-enabled" - publisherTopicMappingFlag = "publisher-topic-mapping" - publisherHttpEnabledFlag = "publisher-http-enabled" - - authBearerEnabledFlag = "auth-bearer-enabled" - 
authBearerIntrospectUrlFlag = "auth-bearer-introspect-url" - authBearerAudienceFlag = "auth-bearer-audience" - authBearerAudiencesWildcardFlag = "auth-bearer-audiences-wildcard" - authBearerUseScopesFlag = "auth-bearer-use-scopes" - - commitPolicyFlag = "commit-policy" - - cacheCapacityBytes = "cache-capacity-bytes" - cacheMaxNumKeys = "cache-max-num-keys" + bindFlag = "bind" ) var ( @@ -65,22 +29,13 @@ func NewRootCommand() *cobra.Command { viper.SetDefault("version", Version) root := &cobra.Command{ - Use: "numary", - Short: "Numary", + Use: "ledger", + Short: "ledger", DisableAutoGenTag: true, - PersistentPreRunE: func(cmd *cobra.Command, args []string) error { - err := os.MkdirAll(viper.GetString(storageDirFlag), 0700) - if err != nil { - return errors.Wrap(err, "creating storage directory") - } - return nil - }, } - server := NewServer() + serve := NewServe() version := NewVersion() - start := NewServerStart() - server.AddCommand(start) conf := NewConfig() conf.AddCommand(NewConfigInit()) @@ -88,79 +43,30 @@ func NewRootCommand() *cobra.Command { store.AddCommand(NewStorageInit()) store.AddCommand(NewStorageList()) store.AddCommand(NewStorageUpgrade()) - store.AddCommand(NewStorageScan()) + store.AddCommand(NewStorageUpgradeAll()) store.AddCommand(NewStorageDelete()) - scriptExec := NewScriptExec() - scriptCheck := NewScriptCheck() - - root.AddCommand(server) + root.AddCommand(serve) root.AddCommand(conf) - root.AddCommand(UICmd) root.AddCommand(store) - root.AddCommand(scriptExec) - root.AddCommand(scriptCheck) root.AddCommand(version) - root.AddCommand(stickersCmd) root.AddCommand(NewDocCommand()) - home, err := os.UserHomeDir() - if err != nil { - home = "/root" - } - - root.PersistentFlags().Bool(debugFlag, false, "Debug mode") - root.PersistentFlags().String(storageDriverFlag, "sqlite", "Storage driver") - root.PersistentFlags().String(storageDirFlag, path.Join(home, ".numary/data"), "Storage directory (for sqlite)") - 
root.PersistentFlags().String(storageSQLiteDBNameFlag, "numary", "SQLite database name") - root.PersistentFlags().String(storagePostgresConnectionStringFlag, "postgresql://localhost/postgres", "Postgre connection string") - root.PersistentFlags().Bool(storageCacheFlag, true, "Storage cache") - root.PersistentFlags().String(serverHttpBindAddressFlag, "localhost:3068", "API bind address") - root.PersistentFlags().String(uiHttpBindAddressFlag, "localhost:3068", "UI bind address") - root.PersistentFlags().String(lockStrategyFlag, "memory", "Lock strategy (memory, none, redis)") - root.PersistentFlags().String(lockStrategyRedisUrlFlag, "", "Redis url when using redis locking strategy") - root.PersistentFlags().Duration(lockStrategyRedisDurationFlag, redis.DefaultLockDuration, "Lock duration") - root.PersistentFlags().Duration(lockStrategyRedisRetryFlag, redis.DefaultRetryInterval, "Retry lock period") - root.PersistentFlags().Bool(lockStrategyRedisTLSEnabledFlag, false, "Use tls on redis") - root.PersistentFlags().Bool(lockStrategyRedisTLSInsecureFlag, false, "Whether redis is trusted or not") - root.PersistentFlags().Bool(publisherKafkaEnabledFlag, false, "Publish write events to kafka") - root.PersistentFlags().StringSlice(publisherKafkaBrokerFlag, []string{}, "Kafka address is kafka enabled") - root.PersistentFlags().StringSlice(publisherTopicMappingFlag, []string{}, "Define mapping between internal event types and topics") - root.PersistentFlags().Bool(publisherHttpEnabledFlag, false, "Sent write event to http endpoint") - root.PersistentFlags().Bool(publisherKafkaSASLEnabled, false, "Enable SASL authentication on kafka publisher") - root.PersistentFlags().String(publisherKafkaSASLUsername, "", "SASL username") - root.PersistentFlags().String(publisherKafkaSASLPassword, "", "SASL password") - root.PersistentFlags().String(publisherKafkaSASLMechanism, "", "SASL authentication mechanism") - root.PersistentFlags().Int(publisherKafkaSASLScramSHASize, 512, "SASL SCRAM 
SHA size") - root.PersistentFlags().Bool(publisherKafkaTLSEnabled, false, "Enable TLS to connect on kafka") - root.PersistentFlags().Bool(authBearerEnabledFlag, false, "Enable bearer auth") - root.PersistentFlags().String(authBearerIntrospectUrlFlag, "", "OAuth2 introspect URL") - root.PersistentFlags().StringSlice(authBearerAudienceFlag, []string{}, "Allowed audiences") - root.PersistentFlags().Bool(authBearerAudiencesWildcardFlag, false, "Don't check audience") - root.PersistentFlags().Bool(authBearerUseScopesFlag, false, "Use scopes as defined by rfc https://datatracker.ietf.org/doc/html/rfc8693") - root.PersistentFlags().String(commitPolicyFlag, "", "Transaction commit policy (default or allow-past-timestamps)") - - // 100 000 000 bytes is 100 MB - root.PersistentFlags().Int(cacheCapacityBytes, 100000000, "Capacity in bytes of the cache storing Numscript in RAM") - root.PersistentFlags().Int(cacheMaxNumKeys, 100, "Maximum number of Numscript to be stored in the cache in RAM") + root.PersistentFlags().Bool(service.DebugFlag, false, "Debug mode") + root.PersistentFlags().Bool(service.JsonFormattingLoggerFlag, true, "Json formatting mode for logger") + root.PersistentFlags().String(bindFlag, "0.0.0.0:3068", "API bind address") + otlpmetrics.InitOTLPMetricsFlags(root.PersistentFlags()) otlptraces.InitOTLPTracesFlags(root.PersistentFlags()) - internal.InitHTTPBasicFlags(root) internal.InitAnalyticsFlags(root, DefaultSegmentWriteKey) + publish.InitCLIFlags(root) + driver.InitCLIFlags(root) - if err = viper.BindPFlags(root.PersistentFlags()); err != nil { + if err := viper.BindPFlags(root.PersistentFlags()); err != nil { panic(err) } - viper.SetConfigName("numary") - viper.SetConfigType("yaml") - viper.AddConfigPath("$HOME/.numary") - viper.AddConfigPath("/etc/numary") - if err = viper.ReadInConfig(); err != nil { - fmt.Printf("loading config file: %s\n", err) - } - internal.BindEnv(viper.GetViper()) return root diff --git a/cmd/root_test.go b/cmd/root_test.go deleted 
file mode 100644 index 55ceb6ddf..000000000 --- a/cmd/root_test.go +++ /dev/null @@ -1,134 +0,0 @@ -package cmd - -import ( - "bytes" - "context" - "net/http" - "os" - "testing" - "time" - - "github.com/numary/ledger/internal/pgtesting" - "github.com/pborman/uuid" - "github.com/stretchr/testify/assert" -) - -func TestServer(t *testing.T) { - - pgServer, err := pgtesting.PostgresServer() - assert.NoError(t, err) - defer func(pgServer *pgtesting.PGServer) { - if err := pgServer.Close(); err != nil { - panic(err) - } - }(pgServer) - - type env struct { - key string - value string - } - - type testCase struct { - name string - args []string - env []env - } - - for _, tc := range []testCase{ - { - name: "default", - env: []env{ - { - key: "NUMARY_STORAGE_DRIVER", - value: "sqlite", - }, - }, - }, - { - name: "pg", - args: []string{"--storage.driver", "postgres", "--storage.postgres.conn_string", pgServer.ConnString()}, - }, - { - name: "pg-with-env-var", - env: []env{ - { - key: "NUMARY_STORAGE_DRIVER", - value: "postgres", - }, - { - key: "NUMARY_STORAGE_POSTGRES_CONN_STRING", - value: pgServer.ConnString(), - }, - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - for _, e := range tc.env { - oldValue := os.Getenv(e.key) - if err := os.Setenv(e.key, e.value); err != nil { - panic(err) - } - defer func(key, value string) { - if err := os.Setenv(key, value); err != nil { - panic(err) - } - }(e.key, oldValue) - } - args := []string{"server", "start", "--debug"} - args = append(args, tc.args...) 
- root := NewRootCommand() - root.SetArgs(args) - root.SetOut(os.Stdout) - root.SetIn(os.Stdin) - root.SetErr(os.Stdout) - - terminated := make(chan struct{}) - - defer func() { - <-terminated - }() - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - go func() { - assert.NoError(t, root.ExecuteContext(ctx)) - close(terminated) - }() - - counter := time.Duration(0) - timeout := 5 * time.Second - delay := 200 * time.Millisecond - for { - rsp, err := http.DefaultClient.Get("http://localhost:3068/_info") - if err != nil || rsp.StatusCode != http.StatusOK { - if counter*delay < timeout { - counter++ - <-time.After(delay) - continue - } - if assert.FailNow(t, err.Error()) { - return - } - } - break - } - - res, err := http.DefaultClient.Post("http://localhost:3068/"+uuid.New()+"/transactions", "application/json", bytes.NewBufferString(`{ - "postings": [{ - "source": "world", - "destination": "central_bank", - "asset": "USD", - "amount": 100 - }] - }`)) - if !assert.NoError(t, err) { - return - } - if !assert.Equal(t, http.StatusOK, res.StatusCode) { - return - } - }) - } - -} diff --git a/cmd/script_check.go b/cmd/script_check.go deleted file mode 100644 index 8e2387040..000000000 --- a/cmd/script_check.go +++ /dev/null @@ -1,30 +0,0 @@ -package cmd - -import ( - "fmt" - "os" - - "github.com/formancehq/machine/script/compiler" - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" -) - -func NewScriptCheck() *cobra.Command { - return &cobra.Command{ - Use: "check [script]", - Args: cobra.ExactArgs(1), - Run: func(cmd *cobra.Command, args []string) { - b, err := os.ReadFile(args[0]) - if err != nil { - logrus.Fatal(err) - } - - _, err = compiler.Compile(string(b)) - if err != nil { - logrus.Fatal(err) - } else { - fmt.Println("Script is correct ✅") - } - }, - } -} diff --git a/cmd/script_exec.go b/cmd/script_exec.go deleted file mode 100644 index a1a8d1322..000000000 --- a/cmd/script_exec.go +++ /dev/null @@ -1,106 +0,0 @@ -package cmd - 
-import ( - "bytes" - "encoding/json" - "fmt" - "net/http" - "net/url" - "os" - "regexp" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "github.com/spf13/viper" -) - -const ( - previewFlag = "preview" -) - -func NewScriptExec() *cobra.Command { - cmd := &cobra.Command{ - Use: "exec [ledger] [script]", - Args: cobra.ExactArgs(2), - Run: func(cmd *cobra.Command, args []string) { - b, err := os.ReadFile(args[1]) - if err != nil { - logrus.Fatal(err) - } - - r := regexp.MustCompile(`^\n`) - s := string(b) - s = r.ReplaceAllString(s, "") - - b, err = json.Marshal(gin.H{ - "plain": s, - }) - if err != nil { - logrus.Fatal(err) - } - - logrus.Debugln(string(b)) - - req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("http://%s/%s/script", - viper.Get(serverHttpBindAddressFlag), - args[0]), bytes.NewReader(b)) - if err != nil { - logrus.Fatal(err) - } - - values := url.Values{} - if viper.GetBool(previewFlag) { - values.Set("preview", "yes") - } - req.URL.RawQuery = values.Encode() - req.Header.Set("Content-Type", "application/json") - - res, err := http.DefaultClient.Do(req) - if err != nil { - logrus.Fatal(err) - } - - result := controllers.ScriptResponse{} - err = json.NewDecoder(res.Body).Decode(&result) - if err != nil { - logrus.Fatal(err) - } - - if result.ErrorCode != "" { - switch result.ErrorCode { - case "INTERNAL": - logrus.Fatal("unexpected error occured") - default: - logrus.Fatal(result.ErrorCode, result.ErrorMessage) - } - } - - fmt.Println("Script ran successfully ✅") - fmt.Println("Tx resume:") - fmt.Printf("ID: %d\r\n", result.Transaction.ID) - fmt.Println("Postings:") - for _, p := range result.Transaction.Postings { - fmt.Printf( - "\t Source: %s, Destination: %s, Amount: %s, Asset: %s\r\n", - p.Source, - p.Destination, - p.Amount, - p.Asset, - ) - } - if !viper.GetBool(previewFlag) { - fmt.Printf("Created transaction: http://%s/%s/transactions/%d\r\n", - 
viper.Get(serverHttpBindAddressFlag), - args[0], - result.Transaction.ID) - } - }, - } - cmd.Flags().Bool(previewFlag, false, "Preview mode (does not save transactions)") - if err := viper.BindPFlags(cmd.Flags()); err != nil { - panic(err) - } - return cmd -} diff --git a/cmd/serve.go b/cmd/serve.go new file mode 100644 index 000000000..aeb52f111 --- /dev/null +++ b/cmd/serve.go @@ -0,0 +1,86 @@ +package cmd + +import ( + "net/http" + "time" + + "github.com/formancehq/ledger/internal/storage/driver" + + "github.com/formancehq/ledger/internal/api" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/stack/libs/go-libs/ballast" + "github.com/formancehq/stack/libs/go-libs/httpserver" + "github.com/formancehq/stack/libs/go-libs/logging" + app "github.com/formancehq/stack/libs/go-libs/service" + "github.com/go-chi/chi/v5" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "go.uber.org/fx" +) + +const ( + ballastSizeInBytesFlag = "ballast-size" + numscriptCacheMaxCountFlag = "numscript-cache-max-count" + readOnlyFlag = "read-only" + autoUpgradeFlag = "auto-upgrade" +) + +func NewServe() *cobra.Command { + cmd := &cobra.Command{ + Use: "serve", + RunE: func(cmd *cobra.Command, args []string) error { + return app.New(cmd.OutOrStdout(), resolveOptions( + cmd.OutOrStdout(), + ballast.Module(viper.GetUint(ballastSizeInBytesFlag)), + api.Module(api.Config{ + Version: Version, + ReadOnly: viper.GetBool(readOnlyFlag), + }), + fx.Invoke(func(lc fx.Lifecycle, driver *driver.Driver) { + if viper.GetBool(autoUpgradeFlag) { + lc.Append(fx.Hook{ + OnStart: driver.UpgradeAllLedgersSchemas, + }) + } + }), + fx.Invoke(func(lc fx.Lifecycle, h chi.Router, logger logging.Logger) { + + wrappedRouter := chi.NewRouter() + wrappedRouter.Use(func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + r = r.WithContext(logging.ContextWithLogger(r.Context(), logger)) + handler.ServeHTTP(w, r) + }) + }) + 
wrappedRouter.Use(Log()) + wrappedRouter.Mount("/", h) + + lc.Append(httpserver.NewHook(viper.GetString(bindFlag), wrappedRouter)) + }), + )...).Run(cmd.Context()) + }, + } + cmd.Flags().Uint(ballastSizeInBytesFlag, 0, "Ballast size in bytes, default to 0") + cmd.Flags().Int(numscriptCacheMaxCountFlag, 1024, "Numscript cache max count") + cmd.Flags().Bool(readOnlyFlag, false, "Read only mode") + cmd.Flags().Bool(autoUpgradeFlag, false, "Automatically upgrade all schemas") + return cmd +} + +func Log() func(h http.Handler) http.Handler { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + start := ledger.Now() + h.ServeHTTP(w, r) + latency := time.Since(start.Time) + logging.FromContext(r.Context()).WithFields(map[string]interface{}{ + "method": r.Method, + "path": r.URL.Path, + "latency": latency, + "user_agent": r.UserAgent(), + "params": r.URL.Query().Encode(), + }).Debug("Request") + }) + } +} diff --git a/cmd/server.go b/cmd/server.go deleted file mode 100644 index 2e0cac606..000000000 --- a/cmd/server.go +++ /dev/null @@ -1,9 +0,0 @@ -package cmd - -import "github.com/spf13/cobra" - -func NewServer() *cobra.Command { - return &cobra.Command{ - Use: "server", - } -} diff --git a/cmd/server_start.go b/cmd/server_start.go deleted file mode 100644 index ac9d624d8..000000000 --- a/cmd/server_start.go +++ /dev/null @@ -1,61 +0,0 @@ -package cmd - -import ( - "context" - "net" - "net/http" - - "github.com/formancehq/go-libs/logging" - "github.com/numary/ledger/pkg/api" - "github.com/spf13/cobra" - "github.com/spf13/viper" - "go.uber.org/fx" -) - -func NewServerStart() *cobra.Command { - return &cobra.Command{ - Use: "start", - RunE: func(cmd *cobra.Command, args []string) error { - app := NewContainer( - viper.GetViper(), - fx.Invoke(func(lc fx.Lifecycle, h *api.API) { - var ( - err error - listener net.Listener - ) - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - listener, err = 
net.Listen("tcp", viper.GetString(serverHttpBindAddressFlag)) - if err != nil { - return err - } - go func() { - httpErr := http.Serve(listener, h) - logging.Errorf("http.Serve: %s", httpErr) - }() - return nil - }, - OnStop: func(ctx context.Context) error { - return listener.Close() - }, - }) - }), - ) - errCh := make(chan error, 1) - go func() { - err := app.Start(cmd.Context()) - if err != nil { - errCh <- err - } - }() - select { - case err := <-errCh: - return err - case <-cmd.Context().Done(): - return app.Stop(context.Background()) - case <-app.Done(): - return app.Err() - } - }, - } -} diff --git a/cmd/stickers.go b/cmd/stickers.go deleted file mode 100644 index a696cec83..000000000 --- a/cmd/stickers.go +++ /dev/null @@ -1,45 +0,0 @@ -package cmd - -import ( - "fmt" - "os/exec" - "runtime" - "time" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" -) - -func open(url string) { - var err error - - switch runtime.GOOS { - case "linux": - err = exec.Command("xdg-open", url).Start() - case "windows": - err = exec.Command("rundll32", "url.dll,FileProtocolHandler", url).Start() - case "darwin": - err = exec.Command("open", url).Start() - default: - fmt.Printf("you should head to: %s\n", url) - } - - if err != nil { - logrus.Fatal(err) - } -} - -var stickersCmd = &cobra.Command{ - Use: "stickers", - Run: func(cmd *cobra.Command, args []string) { - token := fmt.Sprintf("cli-%d", time.Now().Unix()) - url := fmt.Sprintf("https://airtable.com/shrp41dAnjv0LSlxW?prefill_Token=%s", token) - - fmt.Printf("You found a very special sub-command...\n\n") - fmt.Printf("Hit Enter to continue\n\n") - if _, err := fmt.Scanln(); err != nil { - panic(err) - } - open(url) - }, -} diff --git a/cmd/storage.go b/cmd/storage.go index bd306c69c..54cc5e249 100644 --- a/cmd/storage.go +++ b/cmd/storage.go @@ -2,14 +2,13 @@ package cmd import ( "context" - "database/sql" "errors" - "fmt" - "os" - "strings" - "github.com/numary/ledger/pkg/storage" - 
"github.com/numary/ledger/pkg/storage/sqlstorage" + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/service" "github.com/spf13/cobra" "github.com/spf13/viper" "go.uber.org/fx" @@ -25,35 +24,37 @@ func NewStorageInit() *cobra.Command { cmd := &cobra.Command{ Use: "init", RunE: func(cmd *cobra.Command, args []string) error { - app := NewContainer( - viper.GetViper(), - fx.Invoke(func(storageDriver storage.Driver[storage.LedgerStore], lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - name := viper.GetString("name") - if name == "" { - return errors.New("name is empty") - } - fmt.Printf("Creating ledger '%s'...", name) - s, created, err := storageDriver.GetLedgerStore(ctx, name, true) - if err != nil { - return err - } + app := service.New( + cmd.OutOrStdout(), + resolveOptions( + cmd.OutOrStdout(), + fx.Invoke(func(storageDriver *driver.Driver, lc fx.Lifecycle) { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + name := viper.GetString("name") + if name == "" { + return errors.New("name is empty") + } - if !created { - fmt.Printf("Already initialized!\r\n") - return nil - } + exists, err := storageDriver.GetSystemStore().Exists(ctx, name) + if err != nil { + return err + } + + if exists { + return errors.New("ledger already exists") + } - _, err = s.Initialize(ctx) - if err != nil { + store, err := storageDriver.CreateLedgerStore(ctx, name) + if err != nil { + return err + } + + _, err = store.Migrate(ctx) return err - } - fmt.Printf(" OK\r\n") - return nil - }, - }) - }), + }, + }) + }))..., ) return app.Start(cmd.Context()) }, @@ -69,27 +70,26 @@ func NewStorageList() *cobra.Command { cmd := &cobra.Command{ Use: "list", RunE: func(cmd *cobra.Command, args []string) error { - app := NewContainer( - 
viper.GetViper(), - fx.Invoke(func(storageDriver storage.Driver[storage.LedgerStore], lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - ledgers, err := storageDriver.GetSystemStore().ListLedgers(ctx) - if err != nil { - return err - } - if len(ledgers) == 0 { - fmt.Println("No ledger found.") + app := service.New(cmd.OutOrStdout(), + resolveOptions( + cmd.OutOrStdout(), + fx.Invoke(func(storageDriver *driver.Driver, lc fx.Lifecycle) { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + ledgers, err := storageDriver.GetSystemStore().ListLedgers(ctx) + if err != nil { + return err + } + if len(ledgers) == 0 { + logging.FromContext(ctx).Info("No ledger found.") + return nil + } + logging.FromContext(ctx).Infof("Ledgers: %v", ledgers) return nil - } - fmt.Println("Ledgers:") - for _, l := range ledgers { - fmt.Println("- " + l) - } - return nil - }, - }) - }), + }, + }) + }), + )..., ) return app.Start(cmd.Context()) }, @@ -97,146 +97,77 @@ func NewStorageList() *cobra.Command { return cmd } +func upgradeStore(ctx context.Context, store *ledgerstore.Store, name string) error { + modified, err := store.Migrate(ctx) + if err != nil { + return err + } + + if modified { + logging.FromContext(ctx).Infof("Storage '%s' upgraded", name) + } else { + logging.FromContext(ctx).Infof("Storage '%s' is up to date", name) + } + return nil +} + func NewStorageUpgrade() *cobra.Command { cmd := &cobra.Command{ - Use: "upgrade", - Args: cobra.ExactArgs(1), + Use: "upgrade", + Args: cobra.ExactArgs(1), + SilenceUsage: true, RunE: func(cmd *cobra.Command, args []string) error { - app := NewContainer( - viper.GetViper(), - fx.Invoke(func(storageDriver storage.Driver[storage.LedgerStore], lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - name := args[0] - store, _, err := storageDriver.GetLedgerStore(ctx, name, false) - if err != nil { - return err - } - modified, err := store.Initialize(ctx) - if err 
!= nil { - return err - } - if modified { - fmt.Printf("Storage '%s' migrated\r\n", name) - } else { - fmt.Printf("Storage '%s' left in place\r\n", name) - } - return nil - }, - }) - }), - ) - return app.Start(cmd.Context()) + + sqlDB, err := storage.OpenSQLDB(storage.ConnectionOptionsFromFlags(viper.GetViper(), cmd.OutOrStdout(), viper.GetBool(service.DebugFlag))) + if err != nil { + return err + } + defer sqlDB.Close() + + driver := driver.New(sqlDB) + if err := driver.Initialize(cmd.Context()); err != nil { + return err + } + + name := args[0] + store, err := driver.GetLedgerStore(cmd.Context(), name) + if err != nil { + return err + } + logger := service.GetDefaultLogger(cmd.OutOrStdout(), viper.GetBool(service.DebugFlag), false) + + return upgradeStore(logging.ContextWithLogger(cmd.Context(), logger), store, name) }, } return cmd } -func NewStorageScan() *cobra.Command { +func NewStorageUpgradeAll() *cobra.Command { cmd := &cobra.Command{ - Use: "scan", + Use: "upgrade-all", + Args: cobra.ExactArgs(0), + SilenceUsage: true, RunE: func(cmd *cobra.Command, args []string) error { - var opt fx.Option - - switch viper.GetString(storageDriverFlag) { - default: - return errors.New("Invalid storage driver: " + viper.GetString(storageDriverFlag)) - case "postgres": - opt = fx.Invoke(func(driver *sqlstorage.Driver, sqlDb *sql.DB, db sqlstorage.DB, lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rows, err := sqlDb.QueryContext(ctx, ` - SELECT s.schema_name - FROM information_schema.schemata s - JOIN pg_catalog.pg_tables t ON t.schemaname = s.schema_name AND t.tablename = 'transactions' - `) - if err != nil { - return err - } - defer func(rows *sql.Rows) { - if err := rows.Close(); err != nil { - panic(err) - } - }(rows) - for rows.Next() { - var ledgerName string - err := rows.Scan(&ledgerName) - if err != nil { - return err - } + logger := service.GetDefaultLogger(cmd.OutOrStdout(), viper.GetBool(service.DebugFlag), false) + ctx := 
logging.ContextWithLogger(cmd.Context(), logger) - if ledgerName == sqlstorage.SystemSchema { - continue - } - fmt.Printf("Registering ledger '%s'\r\n", ledgerName) - // This command is dedicated to upgrade ledger version before 1.4 - // It will be removed in a near future, so we can assert the system store type without risk - created, err := driver.GetSystemStore().(*sqlstorage.SystemStore). - Register(cmd.Context(), ledgerName) - if err != nil { - fmt.Printf("Error registering ledger '%s': %s\r\n", ledgerName, err) - continue - } - if created { - fmt.Printf("Ledger '%s' registered\r\n", ledgerName) - } else { - fmt.Printf("Ledger '%s' already registered\r\n", ledgerName) - } - } + sqlDB, err := storage.OpenSQLDB(storage.ConnectionOptionsFromFlags(viper.GetViper(), cmd.OutOrStdout(), viper.GetBool(service.DebugFlag))) + if err != nil { + return err + } + defer func() { + if err := sqlDB.Close(); err != nil { + logger.Errorf("Error closing database: %s", err) + } + }() - return nil - }, - }) - }) - case "sqlite": - opt = fx.Invoke(func(driver *sqlstorage.Driver, db sqlstorage.DB, lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - files, err := os.ReadDir(viper.GetString(storageDirFlag)) - if err != nil { - return err - } - for _, f := range files { - if !strings.HasSuffix(f.Name(), ".db") { - fmt.Println("Skip file " + f.Name()) - continue - } - f := strings.TrimSuffix(f.Name(), ".db") - parts := strings.SplitN(f, "_", 2) - if len(parts) != 2 { - fmt.Println("Skip file " + f + ".db : Bad name") - continue - } - if parts[0] != viper.GetString(storageSQLiteDBNameFlag) { - fmt.Println("Skip file " + f + ".db : DB name not mathing") - continue - } - ledgerName := parts[1] - if ledgerName == sqlstorage.SystemSchema { - continue - } - fmt.Printf("Registering ledger '%s'\r\n", ledgerName) - created, err := driver.GetSystemStore().(*sqlstorage.SystemStore). 
- Register(cmd.Context(), ledgerName) - if err != nil { - fmt.Printf("Error registering ledger '%s': %s\r\n", ledgerName, err) - continue - } - if created { - fmt.Printf("Ledger '%s' registered\r\n", ledgerName) - } else { - fmt.Printf("Ledger '%s' already registered\r\n", ledgerName) - } - } - return nil - }, - }) - }) + driver := driver.New(sqlDB) + if err := driver.Initialize(ctx); err != nil { + return err } - app := NewContainer(viper.GetViper(), opt) - return app.Start(cmd.Context()) + return driver.UpgradeAllLedgersSchemas(ctx) }, } return cmd @@ -247,24 +178,25 @@ func NewStorageDelete() *cobra.Command { Use: "delete", Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - app := NewContainer( - viper.GetViper(), - fx.Invoke(func(storageDriver storage.Driver[storage.LedgerStore], lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - name := args[0] - store, _, err := storageDriver.GetLedgerStore(ctx, name, false) - if err != nil { - return err - } - if err := store.Delete(ctx); err != nil { - return err - } - fmt.Println("Storage deleted!") - return nil - }, - }) - }), + app := service.New( + cmd.OutOrStdout(), + resolveOptions( + cmd.OutOrStdout(), + fx.Invoke(func(storageDriver *driver.Driver, lc fx.Lifecycle) { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + name := args[0] + store, err := storageDriver.GetLedgerStore(ctx, name) + if err != nil { + return err + } + if err := store.Delete(ctx); err != nil { + return err + } + return nil + }, + }) + }))..., ) return app.Start(cmd.Context()) }, diff --git a/cmd/ui.go b/cmd/ui.go deleted file mode 100644 index 0f75602b5..000000000 --- a/cmd/ui.go +++ /dev/null @@ -1,60 +0,0 @@ -package cmd - -import ( - "embed" - "fmt" - "net/http" - "os/exec" - "regexp" - "runtime" - - "github.com/formancehq/go-libs/logging" - "github.com/spf13/cobra" - "github.com/spf13/viper" -) - -//go:embed control -var uipath embed.FS - -func openuri(uri 
string) bool { - var err error - - switch runtime.GOOS { - case "linux": - err = exec.Command("xdg-open", uri).Start() - case "windows": - err = exec.Command("rundll32", "url.dll,FileProtocolHandler", uri).Start() - case "darwin": - err = exec.Command("open", uri).Start() - default: - err = fmt.Errorf("unsupported platform, open manually: %s", uri) - } - - return err != nil -} - -var UICmd = &cobra.Command{ - Use: "ui", - Run: func(cmd *cobra.Command, args []string) { - addr := viper.GetString("ui.http.bind_address") - - handler := http.FileServer(http.FS(uipath)) - - http.HandleFunc("/", func(rw http.ResponseWriter, r *http.Request) { - isFile := regexp.MustCompile(`\.[a-z]{2,}$`) - path := r.URL.Path - if !isFile.MatchString(path) { - path = "/" - } - r.URL.Path = fmt.Sprintf("/control%s", path) - - handler.ServeHTTP(rw, r) - }) - - openuri(addr) - fmt.Printf("Numary control is live on http://%s\n", addr) - - httpErr := http.ListenAndServe(addr, nil) - logging.Errorf("http.ListenAndServe: %s", httpErr) - }, -} diff --git a/codecov.yml b/codecov.yml deleted file mode 100644 index dad420a13..000000000 --- a/codecov.yml +++ /dev/null @@ -1,4 +0,0 @@ -ignore: - - "docs" - - ".github" - - ".devcontainer" diff --git a/docker-compose.override.yml b/docker-compose.override.yml deleted file mode 100644 index e2bfb5719..000000000 --- a/docker-compose.override.yml +++ /dev/null @@ -1,22 +0,0 @@ -services: - ledger: - image: golang:1.18-alpine - entrypoint: go run main.go server start - volumes: - - .:/src - working_dir: /src - environment: - CGO_ENABLED: 0 - NUMARY_DEBUG: "true" - NUMARY_OTEL_TRACES: "true" - NUMARY_OTEL_TRACES_EXPORTER: jaeger - NUMARY_OTEL_TRACES_EXPORTER_JAEGER_ENDPOINT: http://jaeger:14268/api/traces - NUMARY_OTEL_TRACES_EXPORTER_JAEGER_INSECURE: "true" - NUMARY_OTEL_METRICS: "false" - depends_on: - - jaeger - - jaeger: - image: jaegertracing/all-in-one:1.31 - ports: - - 16686:16686 diff --git a/docker-compose.release.yml b/docker-compose.release.yml 
new file mode 100644 index 000000000..d450c7631 --- /dev/null +++ b/docker-compose.release.yml @@ -0,0 +1,39 @@ +version: '3.8' +volumes: + postgres: +services: + postgres: + image: "postgres:13-alpine" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ledger"] + interval: 10s + timeout: 5s + retries: 5 + ports: + - "5432:5432" + command: + - -c + - max_connections=200 + environment: + POSTGRES_USER: "ledger" + POSTGRES_PASSWORD: "ledger" + POSTGRES_DB: "ledger" + PGDATA: /data/postgres + volumes: + - postgres:/data/postgres + + ledger: + image: "ghcr.io/formancehq/ledger:v1.10.4" + healthcheck: + test: ["CMD", "wget", "http://127.0.0.1:3068/_info", "-O", "-", "-q"] + interval: 10s + timeout: 5s + retries: 5 + depends_on: + postgres: + condition: service_healthy + ports: + - "3068:3068" + environment: + STORAGE_DRIVER: "postgres" + STORAGE_POSTGRES_CONN_STRING: "postgresql://ledger:ledger@postgres/ledger?sslmode=disable" diff --git a/docker-compose.yml b/docker-compose.yml index c5adfa8d4..5be7f5fc6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,52 +3,35 @@ volumes: postgres: services: postgres: - image: "postgres:13-alpine" + image: "postgres:15-alpine" healthcheck: test: ["CMD-SHELL", "pg_isready -U ledger"] interval: 10s timeout: 5s retries: 5 - ports: - - "5432:5432" + command: + - -c + - max_connections=200 environment: POSTGRES_USER: "ledger" POSTGRES_PASSWORD: "ledger" POSTGRES_DB: "ledger" PGDATA: /data/postgres volumes: - - postgres:/data/postgres + - postgres:/data/postgres ledger: - image: "ghcr.io/formancehq/ledger:latest" - healthcheck: - test: ["CMD", "wget", "http://127.0.0.1:3068/_info", "-O", "-", "-q"] - interval: 10s - timeout: 5s - retries: 5 - depends_on: - postgres: - condition: service_healthy - environment: - NUMARY_STORAGE_DRIVER: "postgres" - NUMARY_STORAGE_POSTGRES_CONN_STRING: "postgresql://ledger:ledger@postgres/ledger" - NUMARY_SERVER_HTTP_BIND_ADDRESS: "0.0.0.0:3068" - - proxy: - image: caddy:2 - depends_on: - - 
ledger - command: caddy reverse-proxy --from :3068 --to ledger:3068 + image: golang:1.19-alpine + entrypoint: go run main.go serve + volumes: + - .:/src ports: - - "3068:3068" - - control: - image: "ghcr.io/formancehq/ledger:latest" - command: ui + - 3068:3068 + working_dir: /src depends_on: - ledger: + postgres: condition: service_healthy - ports: - - "3078:3078" environment: - NUMARY_UI_HTTP_BIND_ADDRESS: "0.0.0.0:3078" + STORAGE_DRIVER: "postgres" + STORAGE_POSTGRES_CONN_STRING: "postgresql://ledger:ledger@postgres/ledger?sslmode=disable" + DEBUG: "true" diff --git a/examples/basic-auth/docker-compose.yml b/examples/basic-auth/docker-compose.yml index c394d93ad..9767eea25 100644 --- a/examples/basic-auth/docker-compose.yml +++ b/examples/basic-auth/docker-compose.yml @@ -12,8 +12,8 @@ services: service: ledger depends_on: - postgres - image: golang:1.18-alpine - entrypoint: go run main.go server start + image: golang:1.19-alpine + entrypoint: go run main.go serve volumes: - ../..:/src ports: @@ -21,6 +21,6 @@ services: working_dir: /src environment: CGO_ENABLED: 0 - NUMARY_DEBUG: "true" - NUMARY_AUTH_BASIC_ENABLED: "true" - NUMARY_AUTH_BASIC_CREDENTIALS: "user:password" + DEBUG: "true" + AUTH_BASIC_ENABLED: "true" + AUTH_BASIC_CREDENTIALS: "user:password" diff --git a/examples/jaeger-exporter/docker-compose.yml b/examples/jaeger-exporter/docker-compose.yml index d12eae910..746f1db65 100644 --- a/examples/jaeger-exporter/docker-compose.yml +++ b/examples/jaeger-exporter/docker-compose.yml @@ -17,8 +17,8 @@ services: depends_on: - postgres - jaeger - image: golang:1.18-alpine - entrypoint: go run main.go server start + image: golang:1.19-alpine + entrypoint: go run main.go serve volumes: - ../..:/src working_dir: /src @@ -26,8 +26,8 @@ services: - "3068:3068/tcp" environment: CGO_ENABLED: 0 - NUMARY_DEBUG: "true" - NUMARY_OTEL_TRACES: "true" - NUMARY_OTEL_TRACES_EXPORTER: jaeger - NUMARY_OTEL_TRACES_EXPORTER_JAEGER_ENDPOINT: http://jaeger:14268/api/traces - 
NUMARY_OTEL_SERVICE_NAME: ledger + DEBUG: "true" + OTEL_TRACES: "true" + OTEL_TRACES_EXPORTER: jaeger + OTEL_TRACES_EXPORTER_JAEGER_ENDPOINT: http://jaeger:14268/api/traces + OTEL_SERVICE_NAME: ledger diff --git a/examples/multi-node/docker-compose.yml b/examples/multi-node/docker-compose.yml deleted file mode 100644 index 59eae039a..000000000 --- a/examples/multi-node/docker-compose.yml +++ /dev/null @@ -1,42 +0,0 @@ ---- -volumes: - postgres: -services: - - postgres: - extends: - file: ../../docker-compose.yml - service: postgres - - redis: - image: redis - - ledger: - extends: - file: ../../docker-compose.yml - service: ledger - deploy: - replicas: 3 - depends_on: - - postgres - - redis - image: golang:1.18-alpine - entrypoint: go run main.go server start - volumes: - - ../..:/src - ports: [] - working_dir: /src - environment: - CGO_ENABLED: 0 - NUMARY_DEBUG: "true" - NUMARY_LOCK_STRATEGY: redis - NUMARY_LOCK_STRATEGY_REDIS_URL: redis://redis:6379 - NUMARY_LOCK_STRATEGY_REDIS_DURATION: 60s # Default value - NUMARY_LOCK_STRATEGY_REDIS_RETRY: 1s # Default value - NUMARY_LOCK_STRATEGY_REDIS_TLS_ENABLED: false - NUMARY_LOCK_STRATEGY_REDIS_TLS_INSECURE: false - - proxy: - extends: - file: ../../docker-compose.yml - service: proxy diff --git a/examples/otlp-exporter/docker-compose.yml b/examples/otlp-exporter/docker-compose.yml index 3416c26c7..3a6ea3b5b 100644 --- a/examples/otlp-exporter/docker-compose.yml +++ b/examples/otlp-exporter/docker-compose.yml @@ -6,28 +6,41 @@ services: extends: file: ../../docker-compose.yml service: postgres - otlp-exporter: - image: otel/opentelemetry-collector + prometheus: + image: prom/prometheus:latest + restart: always + volumes: + - ./prometheus.yaml:/etc/prometheus/prometheus.yml + ports: + - "9090:9090" + otel: + image: "otel/opentelemetry-collector-contrib:0.81.0" + command: [ "--config=/etc/otel-collector-config.yaml" ] + volumes: + - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml ledger: extends: file: 
../../docker-compose.yml service: ledger depends_on: - postgres - - otlp-exporter - image: golang:1.18-alpine - entrypoint: go run main.go server start + - otel + image: golang:1.19-alpine + entrypoint: go run main.go serve volumes: - ../..:/src working_dir: /src environment: CGO_ENABLED: 0 - NUMARY_DEBUG: "true" - NUMARY_OTEL_TRACES: "true" - NUMARY_OTEL_TRACES_EXPORTER: otlp - NUMARY_OTEL_TRACES_EXPORTER_OTLP_ENDPOINT: otlp-exporter:4317 - NUMARY_OTEL_TRACES_EXPORTER_OTLP_INSECURE: "true" - NUMARY_OTEL_METRICS: "true" - NUMARY_OTEL_METRICS_EXPORTER: otlp - NUMARY_OTEL_METRICS_EXPORTER_OTLP_ENDPOINT: otlp-exporter:4317 - NUMARY_OTEL_METRICS_EXPORTER_OTLP_INSECURE: "true" + DEBUG: "true" + OTEL_TRACES: "true" + OTEL_TRACES_EXPORTER: otlp + OTEL_TRACES_EXPORTER_OTLP_ENDPOINT: otel:4317 + OTEL_TRACES_EXPORTER_OTLP_INSECURE: "true" + OTEL_METRICS: "true" + OTEL_METRICS_EXPORTER: otlp + OTEL_METRICS_EXPORTER_OTLP_ENDPOINT: otel:4317 + OTEL_METRICS_EXPORTER_OTLP_INSECURE: "true" + OTEL_SERVICE_NAME: ledger + OTEL_RESOURCE_ATTRIBUTES: version=develop + OTEL_METRICS_RUNTIME: "true" diff --git a/examples/otlp-exporter/otel-collector-config.yaml b/examples/otlp-exporter/otel-collector-config.yaml new file mode 100644 index 000000000..052d0b27a --- /dev/null +++ b/examples/otlp-exporter/otel-collector-config.yaml @@ -0,0 +1,56 @@ +# https://uptrace.dev/opentelemetry/prometheus-metrics.html#prometheus-exporter + +receivers: + otlp: + protocols: + grpc: + +exporters: + prometheus: + endpoint: "0.0.0.0:8889" +# namespace: test-space +# const_labels: +# label1: value1 +# 'another label': spaced value + send_timestamps: true +# enable_open_metrics: true +# add_metric_suffixes: false +# metric_expiration: 180m + resource_to_telemetry_conversion: + enabled: true + logging: + +processors: + batch: + +extensions: + health_check: + pprof: + endpoint: :1888 + zpages: + endpoint: :55679 + +connectors: + spanmetrics: + namespace: span.metrics + histogram: + explicit: + buckets: [ 100us, 
1ms, 2ms, 6ms, 10ms, 100ms, 250ms ] + dimensions: + - name: http.status_code + - name: http.method + +service: + telemetry: + logs: + level: "debug" + extensions: [pprof, zpages, health_check] + pipelines: + traces: + receivers: [otlp] +# processors: [batch] + exporters: [spanmetrics] + metrics: + receivers: [otlp, spanmetrics] +# processors: [batch] + exporters: [prometheus] diff --git a/examples/otlp-exporter/prometheus.yaml b/examples/otlp-exporter/prometheus.yaml new file mode 100644 index 000000000..47b15efad --- /dev/null +++ b/examples/otlp-exporter/prometheus.yaml @@ -0,0 +1,5 @@ +scrape_configs: + - job_name: 'otel' + scrape_interval: 1s + static_configs: + - targets: ['otel:8889'] diff --git a/examples/publisher-http/docker-compose.yml b/examples/publisher-http/docker-compose.yml index bb0fabb78..94517820d 100644 --- a/examples/publisher-http/docker-compose.yml +++ b/examples/publisher-http/docker-compose.yml @@ -17,13 +17,13 @@ services: depends_on: - postgres - listener - image: golang:1.18-alpine - entrypoint: go run main.go server start + image: golang:1.19-alpine + entrypoint: go run main.go serve volumes: - ../..:/src working_dir: /src environment: CGO_ENABLED: 0 - NUMARY_DEBUG: "true" - NUMARY_PUBLISHER_HTTP_ENABLED: "true" - NUMARY_PUBLISHER_TOPIC_MAPPING: "*:http://listener:8080" + DEBUG: "true" + PUBLISHER_HTTP_ENABLED: "true" + PUBLISHER_TOPIC_MAPPING: "*:http://listener:8080" diff --git a/examples/publisher-kafka/docker-compose.yml b/examples/publisher-kafka/docker-compose.yml index 40df7f807..9bdff2276 100644 --- a/examples/publisher-kafka/docker-compose.yml +++ b/examples/publisher-kafka/docker-compose.yml @@ -27,14 +27,14 @@ services: depends_on: - postgres - kafka - image: golang:1.18-alpine - entrypoint: go run main.go server start + image: golang:1.19-alpine + entrypoint: go run main.go serve volumes: - ../..:/src working_dir: /src environment: CGO_ENABLED: 0 - NUMARY_DEBUG: "true" - NUMARY_PUBLISHER_KAFKA_ENABLED: "true" - 
NUMARY_PUBLISHER_KAFKA_BROKER: "kafka:9092" - NUMARY_PUBLISHER_TOPIC_MAPPING: "*:default" # Send all to 'default' topic + DEBUG: "true" + PUBLISHER_KAFKA_ENABLED: "true" + PUBLISHER_KAFKA_BROKER: "kafka:9092" + PUBLISHER_TOPIC_MAPPING: "*:default" # Send all to 'default' topic diff --git a/go.mod b/go.mod old mode 100755 new mode 100644 index bfe7c7a8e..cdcd2a1ce --- a/go.mod +++ b/go.mod @@ -1,84 +1,70 @@ -module github.com/numary/ledger +module github.com/formancehq/ledger -go 1.18 +go 1.20 require ( - github.com/DmitriyVTitov/size v1.5.0 github.com/Masterminds/semver/v3 v3.2.0 - github.com/Shopify/sarama v1.37.2 - github.com/ThreeDotsLabs/watermill v1.1.1 - github.com/buger/jsonparser v1.1.1 - github.com/dgraph-io/ristretto v0.1.1 - github.com/formancehq/go-libs v1.4.1 - github.com/formancehq/machine v1.4.5 - github.com/gin-contrib/cors v1.4.0 - github.com/gin-gonic/gin v1.8.1 - github.com/go-redis/redis/v8 v8.11.5 - github.com/go-redis/redismock/v8 v8.0.6 - github.com/golang-jwt/jwt v3.2.2+incompatible + github.com/ThreeDotsLabs/watermill v1.2.0 + github.com/alitto/pond v1.8.3 + github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 + github.com/bluele/gcache v0.0.2 + github.com/formancehq/stack/libs/go-libs v0.0.0-20230517212829-71aaaacfd130 + github.com/go-chi/chi/v5 v5.0.8 + github.com/go-chi/cors v1.2.1 + github.com/golang/mock v1.4.4 github.com/google/go-cmp v0.5.9 github.com/google/uuid v1.3.0 - github.com/huandu/go-sqlbuilder v1.17.0 - github.com/jackc/pgconn v1.13.0 - github.com/jackc/pgx/v4 v4.17.2 + github.com/jackc/pgx/v5 v5.3.0 github.com/lib/pq v1.10.7 - github.com/mattn/go-sqlite3 v1.14.16 - github.com/mitchellh/mapstructure v1.5.0 - github.com/ory/dockertest/v3 v3.9.1 + github.com/logrusorgru/aurora v2.0.3+incompatible github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 github.com/pborman/uuid v1.2.1 github.com/pkg/errors v0.9.1 - github.com/psanford/memfs v0.0.0-20210214183328-a001468d78ef + github.com/riandyrn/otelchi v0.5.1 
github.com/sirupsen/logrus v1.9.0 github.com/spf13/cobra v1.6.1 - github.com/spf13/viper v1.14.0 - github.com/stretchr/testify v1.8.1 - github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.17 - github.com/xdg-go/scram v1.1.2 - go.nhat.io/otelsql v0.7.0 - go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.36.4 - go.opentelemetry.io/otel v1.11.2 - go.opentelemetry.io/otel/sdk v1.11.2 - go.opentelemetry.io/otel/trace v1.11.2 - go.uber.org/fx v1.18.2 + github.com/spf13/viper v1.15.0 + github.com/stretchr/testify v1.8.3 + github.com/uptrace/bun v1.1.14 + github.com/uptrace/bun/dialect/pgdialect v1.1.14 + github.com/uptrace/bun/extra/bunotel v1.1.14 + go.nhat.io/otelsql v0.11.0 + go.opentelemetry.io/otel v1.16.0 + go.opentelemetry.io/otel/metric v1.16.0 + go.opentelemetry.io/otel/trace v1.16.0 + go.uber.org/fx v1.19.2 + go.uber.org/mock v0.3.0 gopkg.in/segmentio/analytics-go.v3 v3.1.0 - gopkg.in/yaml.v3 v3.0.1 ) require ( github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect - github.com/Microsoft/go-winio v0.5.2 // indirect + github.com/Microsoft/go-winio v0.6.0 // indirect github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect + github.com/Shopify/sarama v1.38.1 // indirect github.com/ThreeDotsLabs/watermill-http v1.1.4 // indirect github.com/ThreeDotsLabs/watermill-kafka/v2 v2.2.2 // indirect + github.com/ThreeDotsLabs/watermill-nats/v2 v2.0.0 // indirect github.com/ajg/form v1.5.1 // indirect - github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 // indirect - github.com/cenkalti/backoff/v4 v4.2.0 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect github.com/containerd/continuity v0.3.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect - github.com/docker/cli v20.10.14+incompatible // indirect - github.com/docker/docker v20.10.7+incompatible // 
indirect + github.com/docker/cli v20.10.17+incompatible // indirect + github.com/docker/docker v20.10.17+incompatible // indirect github.com/docker/go-connections v0.4.0 // indirect github.com/docker/go-units v0.4.0 // indirect - github.com/dustin/go-humanize v1.0.1 // indirect github.com/eapache/go-resiliency v1.3.0 // indirect - github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect + github.com/eapache/go-xerial-snappy v0.0.0-20230111030713-bf00bc1b83b6 // indirect github.com/eapache/queue v1.1.0 // indirect + github.com/felixge/httpsnoop v1.0.3 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect - github.com/gin-contrib/sse v0.1.0 // indirect github.com/go-chi/chi v4.1.2+incompatible // indirect github.com/go-chi/render v1.0.2 // indirect - github.com/go-logr/logr v1.2.3 // indirect + github.com/go-logr/logr v1.2.4 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/go-playground/locales v0.14.0 // indirect - github.com/go-playground/universal-translator v0.18.0 // indirect - github.com/go-playground/validator/v10 v10.11.1 // indirect - github.com/goccy/go-json v0.10.0 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/glog v1.0.0 // indirect - github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/protobuf v1.5.3 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.0 // indirect @@ -86,74 +72,94 @@ require ( github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/huandu/xstrings v1.3.3 // indirect - github.com/imdario/mergo v0.3.12 // indirect - github.com/inconshreveable/mousetrap v1.0.1 // indirect - github.com/jackc/chunkreader/v2 v2.0.1 // indirect - github.com/jackc/pgio v1.0.0 // indirect + 
github.com/imdario/mergo v0.3.13 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgproto3/v2 v2.3.1 // indirect - github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect - github.com/jackc/pgtype v1.12.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect github.com/jcmturner/gofork v1.7.6 // indirect github.com/jcmturner/gokrb5/v8 v8.4.3 // indirect github.com/jcmturner/rpc/v2 v2.0.3 // indirect - github.com/json-iterator/go v1.1.12 // indirect - github.com/klauspost/compress v1.15.13 // indirect - github.com/leodido/go-urn v1.2.1 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/klauspost/compress v1.15.15 // indirect github.com/lithammer/shortuuid/v3 v3.0.7 // indirect - github.com/logrusorgru/aurora v2.0.3+incompatible // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/magiconair/properties v1.8.7 // indirect - github.com/mattn/go-isatty v0.0.16 // indirect - github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae // indirect + github.com/nats-io/nats.go v1.23.0 // indirect + github.com/nats-io/nkeys v0.3.0 // indirect + github.com/nats-io/nuid v1.0.1 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.0.2 // indirect - github.com/opencontainers/runc v1.1.2 // indirect - github.com/pelletier/go-toml v1.9.5 // indirect - github.com/pelletier/go-toml/v2 v2.0.6 // indirect + 
github.com/opencontainers/runc v1.1.3 // indirect + github.com/ory/dockertest/v3 v3.9.1 // indirect + github.com/pelletier/go-toml/v2 v2.0.8 // indirect github.com/pierrec/lz4/v4 v4.1.17 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/segmentio/backo-go v1.0.1 // indirect + github.com/shirou/gopsutil/v3 v3.23.4 // indirect + github.com/shoenig/go-m1cpu v0.1.5 // indirect github.com/spf13/afero v1.9.3 // indirect github.com/spf13/cast v1.5.0 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - github.com/subosito/gotenv v1.4.1 // indirect - github.com/ugorji/go/codec v1.2.7 // indirect - github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.17 // indirect + github.com/subosito/gotenv v1.4.2 // indirect + github.com/tklauser/go-sysconf v0.3.11 // indirect + github.com/tklauser/numcpus v0.6.0 // indirect + github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect + github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.21 // indirect + github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.1 // indirect + github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.21 // indirect + github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect - github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect - 
go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.37.0 // indirect - go.opentelemetry.io/contrib/propagators/b3 v1.12.0 // indirect - go.opentelemetry.io/otel/exporters/jaeger v1.11.2 // indirect - go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.11.2 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.11.2 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.11.2 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.11.2 // indirect - go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.11.2 // indirect - go.opentelemetry.io/otel/metric v0.33.0 // indirect + github.com/yusufpapurcu/wmi v1.2.2 // indirect + go.opentelemetry.io/contrib v1.0.0 // indirect + go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.42.0 // indirect + go.opentelemetry.io/contrib/instrumentation/host v0.42.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 // indirect + go.opentelemetry.io/contrib/instrumentation/runtime v0.42.0 // indirect + go.opentelemetry.io/contrib/propagators/b3 v1.17.0 // indirect + go.opentelemetry.io/otel/exporters/jaeger v1.16.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.16.0 // indirect + go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0 // indirect + go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 // indirect + go.opentelemetry.io/otel/sdk 
v1.16.0 // indirect + go.opentelemetry.io/otel/sdk/metric v0.39.0 // indirect go.opentelemetry.io/proto/otlp v0.19.0 // indirect go.uber.org/atomic v1.10.0 // indirect - go.uber.org/dig v1.15.0 // indirect + go.uber.org/dig v1.16.1 // indirect go.uber.org/multierr v1.9.0 // indirect go.uber.org/zap v1.24.0 // indirect - golang.org/x/crypto v0.4.0 // indirect - golang.org/x/net v0.4.0 // indirect - golang.org/x/sys v0.4.0 // indirect - golang.org/x/text v0.5.0 // indirect - google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef // indirect - google.golang.org/grpc v1.51.0 // indirect - google.golang.org/protobuf v1.28.1 // indirect + golang.org/x/crypto v0.9.0 // indirect + golang.org/x/mod v0.11.0 // indirect + golang.org/x/net v0.10.0 // indirect + golang.org/x/sys v0.8.0 // indirect + golang.org/x/text v0.9.0 // indirect + golang.org/x/tools v0.6.0 // indirect + google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 // indirect + google.golang.org/grpc v1.55.0 // indirect + google.golang.org/protobuf v1.30.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) + +replace github.com/formancehq/stack/libs/go-libs => ./libs diff --git a/go.sum b/go.sum index 50c31866f..ae34c81ca 100644 --- a/go.sum +++ b/go.sum @@ -36,54 +36,52 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm 
v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= -github.com/DmitriyVTitov/size v1.5.0 h1:/PzqxYrOyOUX1BXj6J9OuVRVGe+66VL4D9FlUaW515g= -github.com/DmitriyVTitov/size v1.5.0/go.mod h1:le6rNI4CoLQV1b9gzp1+3d7hMAD/uu2QcJ+aYbNgiU0= -github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VMrpA= -github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.0 h1:slsWYD/zyx7lCXoZVlvQrj0hPTM1HI4+v1sIda2yDvg= +github.com/Microsoft/go-winio v0.6.0/go.mod h1:cTAf44im0RAYeL23bpB+fzCyDH2MJiz2BO69KH/soAE= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/Shopify/sarama v1.37.2 h1:LoBbU0yJPte0cE5TZCGdlzZRmMgMtZU/XgnUKZg9Cv4= -github.com/Shopify/sarama v1.37.2/go.mod h1:Nxye/E+YPru//Bpaorfhc3JsSGYwCaDDj+R4bK52U5o= +github.com/Shopify/sarama v1.38.1 h1:lqqPUPQZ7zPqYlWpTh+LQ9bhYNu2xJL6k1SJN4WVe2A= +github.com/Shopify/sarama v1.38.1/go.mod h1:iwv9a67Ha8VNa+TifujYoWGxWnu2kNVAQdSdZ4X2o5g= github.com/Shopify/toxiproxy/v2 v2.5.0 h1:i4LPT+qrSlKNtQf5QliVjdP08GyAH8+BUIc9gT0eahc= github.com/ThreeDotsLabs/watermill v1.1.0/go.mod 
h1:Qd1xNFxolCAHCzcMrm6RnjW0manbvN+DJVWc1MWRFlI= -github.com/ThreeDotsLabs/watermill v1.1.1 h1:+9NXqWQvplzxBru2CIInvVOZeKUnM+Nysg42fInl5sY= -github.com/ThreeDotsLabs/watermill v1.1.1/go.mod h1:Qd1xNFxolCAHCzcMrm6RnjW0manbvN+DJVWc1MWRFlI= +github.com/ThreeDotsLabs/watermill v1.2.0 h1:TU3TML1dnQ/ifK09F2+4JQk2EKhmhXe7Qv7eb5ZpTS8= +github.com/ThreeDotsLabs/watermill v1.2.0/go.mod h1:IuVxGk/kgCN0cex2S94BLglUiB0PwOm8hbUhm6g2Nx4= github.com/ThreeDotsLabs/watermill-http v1.1.4 h1:wRM54z/BPnIWjGbXMrOnwOlrCAESzoSNxTAHiLysFA4= github.com/ThreeDotsLabs/watermill-http v1.1.4/go.mod h1:mkQ9CC0pxTZerNwr281rBoOy355vYt/lePkmYSX/BRg= github.com/ThreeDotsLabs/watermill-kafka/v2 v2.2.2 h1:COB5neqVL8jGwoz1Y9dawQ7Xhxid1XXX8+1CI/PebVU= github.com/ThreeDotsLabs/watermill-kafka/v2 v2.2.2/go.mod h1:U001oyrHo+df3Q7hIXgKqxY2OW6woz64+GNuIxZokbM= +github.com/ThreeDotsLabs/watermill-nats/v2 v2.0.0 h1:ZbdQ+cHwOZmXByEoKUH8SS6qR/erNQfrsNpvH5z/gfk= +github.com/ThreeDotsLabs/watermill-nats/v2 v2.0.0/go.mod h1:X6pcl579pScj4mII3KM/WJ+bcOqORqiCToy92f4gqJ4= github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alitto/pond v1.8.3 h1:ydIqygCLVPqIX/USe5EaV/aSRXTRXDEI9JwuDdu+/xs= +github.com/alitto/pond v1.8.3/go.mod h1:CmvIIGd5jKLasGI3D87qDkQxjzChdKMmnXMg3fG6M6Q= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 h1:yL7+Jz0jTC6yykIK/Wh74gnTJnrGr5AyrNMXuA0gves= github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= github.com/beorn7/perks 
v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw= +github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bool64/shared v0.1.5 h1:fp3eUhBsrSjNCQPcSdQqZxxh9bBwrYiZ+zOKFkM0/2E= -github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= -github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= -github.com/cenkalti/backoff/v4 v4.2.0 h1:HN5dHm3WBOgndBH6E8V0q2jIYIR3s9yglV8k/+MN3u4= -github.com/cenkalti/backoff/v4 v4.2.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= @@ -98,45 +96,30 @@ github.com/cncf/xds/go 
v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= -github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= -github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= -github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= -github.com/docker/cli v20.10.14+incompatible h1:dSBKJOVesDgHo7rbxlYjYsXe7gPzrTT+/cKQgpDAazg= -github.com/docker/cli v20.10.14+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/docker v20.10.7+incompatible h1:Z6O9Nhsjv+ayUEeI1IojKbYcsGdgYSNqxe1s2MYzUhQ= -github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/cli v20.10.17+incompatible h1:eO2KS7ZFeov5UJeaDmIs1NFEDRf32PaqRpvoEkKBy5M= +github.com/docker/cli v20.10.17+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/docker v20.10.17+incompatible h1:JYCuMrWaVNophQTOrMMoSwudOVEfcegoZZrleKc1xwE= +github.com/docker/docker v20.10.17+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw= github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/dustin/go-humanize v1.0.0/go.mod 
h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= -github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/eapache/go-resiliency v1.3.0 h1:RRL0nge+cWGlxXbUzJ7yMcq6w2XBEr19dCN6HECGaT0= github.com/eapache/go-resiliency v1.3.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= -github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw= -github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/go-xerial-snappy v0.0.0-20230111030713-bf00bc1b83b6 h1:8yY/I9ndfrgrXUbOGObLHKBR4Fl3nZXwM2c7OYTT8hM= +github.com/eapache/go-xerial-snappy v0.0.0-20230111030713-bf00bc1b83b6/go.mod h1:YvSRo5mw33fLEx1+DlK6L2VV43tJt5Eyel9n9XBcR+0= github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -147,27 +130,23 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/formancehq/go-libs v1.4.1 h1:rUKfUyZFq9aid+JUIqKN8Mk80Lx06Bx7AreHj+9vyTo= -github.com/formancehq/go-libs v1.4.1/go.mod h1:IK1zDIGRPi/o8sSApIc1W0VC1Y0DGdADzxvpbqlD8fk= -github.com/formancehq/machine v1.4.5 h1:tN7ftSmyW8WZFK0GStA/cyFyBRIX3EU02OLvYj45AJ4= -github.com/formancehq/machine v1.4.5/go.mod h1:VJgYOFY7rCzs/MyfWVOKASAf5bJHyX3v35ZmSIl2yic= 
+github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk= +github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/gin-contrib/cors v1.4.0 h1:oJ6gwtUl3lqV0WEIwM/LxPF1QZ5qe2lGWdY2+bz7y0g= -github.com/gin-contrib/cors v1.4.0/go.mod h1:bs9pNM0x/UsmHPBWT2xZz9ROh8xYjYkiURUfmBoMlcs= -github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= -github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8= -github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= github.com/go-chi/chi v4.0.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/go-chi/chi/v5 v5.0.8 h1:lD+NLqFcAi1ovnVZpsnObHGW4xb4J8lNmoYVfECH1Y0= +github.com/go-chi/chi/v5 v5.0.8/go.mod 
h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= +github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= github.com/go-chi/render v1.0.1/go.mod h1:pq4Rr7HbnsdaeHagklXub+p6Wd16Af5l9koip1OvJns= github.com/go-chi/render v1.0.2 h1:4ER/udB0+fMWB2Jlf15RV3F4A2FDuYi/9f+lFttR/Lg= github.com/go-chi/render v1.0.2/go.mod h1:/gr3hVkmYR0YlEy3LxCuVRFzEu9Ruok+gFqbIofjao0= @@ -175,57 +154,38 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= -github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod 
h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= -github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= -github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= -github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= -github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= -github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ= -github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= -github.com/go-redis/redis/v8 v8.8.0/go.mod h1:F7resOH5Kdug49Otu24RjHWwgK7u9AmtqWMnCV1iP5Y= -github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= -github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= -github.com/go-redis/redismock/v8 v8.0.6 h1:rtuijPgGynsRB2Y7KDACm09WvjHWS4RaG44Nm7rcj4Y= -github.com/go-redis/redismock/v8 v8.0.6/go.mod h1:sDIF73OVsmaKzYe/1FJXGiCQ4+oHYbzjpaL9Vor0sS4= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= -github.com/goccy/go-json v0.10.0 h1:mXKd9Qw4NuzShiRlOXKews24ufknHO7gx30lsDyokKA= -github.com/goccy/go-json v0.10.0/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= 
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.6/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= -github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= -github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.0.0 h1:nfP3RFugxnNRyKgeWd4oI1nYvXpxrx8ck8ZrcizshdQ= github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= +github.com/golang/glog v1.1.0 h1:/d3pCKDPWNnvIWe0vVUpNP32qc8U3PDVxySP/y360qE= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod 
h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4 h1:l75CXGRSwbaYNpl/Z2X1XIIAMSCquvXgpVZDhwEIJsc= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -242,8 +202,9 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -261,7 +222,6 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= 
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -306,68 +266,20 @@ github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= -github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= -github.com/huandu/go-sqlbuilder v1.17.0 h1:G02PNvR6TOPiMzpOwQMKFNCiqGh19SwSbtymX7gEgpY= -github.com/huandu/go-sqlbuilder v1.17.0/go.mod h1:nUVmMitjOmn/zacMLXT0d3Yd3RHoO2K+vy906JzqxMI= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= -github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/orderedmap v0.2.0 h1:sq1N/TFpYH++aViPcaKjys3bDClUEU7s5B+z6jq8pNA= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= -github.com/imdario/mergo v0.3.12/go.mod 
h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= +github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= +github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= -github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= -github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= -github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= -github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= -github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= -github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= -github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= -github.com/jackc/pgconn v1.13.0 h1:3L1XMNV2Zvca/8BYhzcRFS70Lr0WlDg16Di6SFGAbys= -github.com/jackc/pgconn v1.13.0/go.mod h1:AnowpAqO4CMIIJNZl2VJp+KrkAZciAkhEl0W0JIobpI= -github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= -github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= -github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= -github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= 
-github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= -github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= -github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.3.1 h1:nwj7qwf0S+Q7ISFfBndqeLwSwxs+4DPsbRFjECT1Y4Y= -github.com/jackc/pgproto3/v2 v2.3.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= -github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= -github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= -github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= 
-github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= -github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= -github.com/jackc/pgtype v1.12.0 h1:Dlq8Qvcch7kiehm8wPGIW0W3KsCCHJnRacKW0UM8n5w= -github.com/jackc/pgtype v1.12.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= -github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= -github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= -github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= -github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= -github.com/jackc/pgx/v4 v4.17.2 h1:0Ut0rpeKwvIVbMQ1KbMBU4h6wxehBI535LK6Flheh8E= -github.com/jackc/pgx/v4 v4.17.2/go.mod h1:lcxIZN44yMIrWI78a5CpucdD14hX0SBDbNRvjDBItsw= -github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.3.0 h1:/NQi8KHMpKWHInxXesC8yD4DhkXPrVhmnwYkjp9AmBA= +github.com/jackc/pgx/v5 v5.3.0/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8= github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= github.com/jcmturner/aescts/v2 v2.0.0/go.mod 
h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= @@ -380,36 +292,26 @@ github.com/jcmturner/gokrb5/v8 v8.4.3 h1:iTonLeSJOn7MVUtyMT+arAn5AKAPrkilzhGw8wE github.com/jcmturner/gokrb5/v8 v8.4.3/go.mod h1:dqRwJGXznQrzw6cWmyo6kH+E7jksEQG/CyVWsJEsJO0= github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.15.13 h1:NFn1Wr8cfnenSJSA46lLq4wHCcBzKTSjnBIexDMMOV0= -github.com/klauspost/compress v1.15.13/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= +github.com/klauspost/compress v1.15.15 h1:EF27CXIuDsYJ6mmvtBRlEuB2UVOqHG1tAXgZ7yIO+lw= +github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod 
h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= -github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= -github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lithammer/shortuuid/v3 v3.0.4/go.mod h1:RviRjexKqIzx/7r1peoAITm6m7gnif/h+0zmolKJjzw= @@ -417,50 +319,39 @@ github.com/lithammer/shortuuid/v3 v3.0.7 
h1:trX0KTHy4Pbwo/6ia8fscyHoGA+mf1jWbPJV github.com/lithammer/shortuuid/v3 v3.0.7/go.mod h1:vMk8ke37EmiewwolSO1NLW8vP4ZaKlRuDIi8tWWmAts= github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8= github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y= -github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/minio/highwayhash v1.0.2 
h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU= -github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 h1:rzf0wL0CHVc8CEsgyygG0Mn9CNCCPZqOPaz8RiiHYQk= -github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae h1:O4SWKdcHVCvYqyDV+9CJA1fcDN2L11Bule0iFy3YlAI= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nats-io/jwt/v2 v2.3.0 h1:z2mA1a7tIf5ShggOFlR1oBPgd6hGqcDYsISxZByUzdI= +github.com/nats-io/nats-server/v2 v2.9.8 h1:jgxZsv+A3Reb3MgwxaINcNq/za8xZInKhDg9Q0cGN1o= +github.com/nats-io/nats.go v1.23.0 
h1:lR28r7IX44WjYgdiKz9GmUeW0uh/m33uD3yEjLZ2cOE= +github.com/nats-io/nats.go v1.23.0/go.mod h1:ki/Scsa23edbh8IRZbCuNXR9TDcbvfaSijKtaqQgw+Q= +github.com/nats-io/nkeys v0.3.0 h1:cgM5tL53EvYRU+2YLXIK0G2mJtK12Ft9oeooSZMA2G8= +github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4= +github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.15.0/go.mod h1:hF8qUzuuC8DJGygJH3726JnCZX4MYbRB8yFfISqnKUg= -github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.10.5/go.mod h1:gza4q3jKQJijlu05nKWRCW/GavJumGt8aNRxWg7mt48= -github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/runc v1.1.2 h1:2VSZwLx5k/BfsBxMMipG/LYUnmqOD/BPkIVgQUcTlLw= -github.com/opencontainers/runc v1.1.2/go.mod h1:Tj1hFw6eFWp/o33uxGf5yF2BX5yz2Z6iptFpuvbbKqc= +github.com/opencontainers/runc v1.1.3 h1:vIXrkId+0/J2Ymu2m7VjGvbSlAId9XNRPhn2p4b+d8w= +github.com/opencontainers/runc 
v1.1.3/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/selinux v1.10.0/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= github.com/ory/dockertest/v3 v3.9.1 h1:v4dkG+dlu76goxMiTT2j8zV7s4oPPEppKT8K8p2f1kY= @@ -469,14 +360,10 @@ github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2D github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= github.com/pborman/uuid v1.2.1 h1:+ZZIw58t/ozdjRaXh/3awHfmWRbzYxJoAdNJxe/3pvw= github.com/pborman/uuid v1.2.1/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= -github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= -github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= -github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU= -github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek= +github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= +github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -484,6 +371,8 @@ github.com/pkg/errors v0.9.1/go.mod 
h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= @@ -492,33 +381,27 @@ github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1: github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/psanford/memfs v0.0.0-20210214183328-a001468d78ef h1:NKxTG6GVGbfMXc2mIk+KphcH6hagbVXhcFkbTgYleTI= -github.com/psanford/memfs v0.0.0-20210214183328-a001468d78ef/go.mod h1:tcaRap0jS3eifrEEllL6ZMd9dg8IlDpi2S1oARrQ+NI= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/riandyrn/otelchi v0.5.1 h1:0/45omeqpP7f/cvdL16GddQBfAEmZvUyl2QzLSE6uYo= +github.com/riandyrn/otelchi v0.5.1/go.mod h1:ZxVxNEl+jQ9uHseRYIxKWRb3OY8YXFEu+EkNiiSNUEA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= 
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= -github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= -github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= -github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= +github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= github.com/segmentio/backo-go v1.0.1 h1:68RQccglxZeyURy93ASB/2kc9QudzgIDexJ927N++y4= github.com/segmentio/backo-go v1.0.1/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc= -github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= -github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= -github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= +github.com/shirou/gopsutil/v3 v3.23.4 h1:hZwmDxZs7Ewt75DV81r4pFMqbq+di2cbt9FsQBqLD2o= +github.com/shirou/gopsutil/v3 v3.23.4/go.mod h1:ZcGxyfzAMRevhUR2+cfhXDH6gQdFYE/t8j1nsU4mPI8= 
+github.com/shoenig/go-m1cpu v0.1.5 h1:LF57Z/Fpb/WdGLjt2HZilNnmZOxg/q2bSKTQhgbrLrQ= +github.com/shoenig/go-m1cpu v0.1.5/go.mod h1:Wwvst4LR89UxjeFtLRMrpgRiyY4xPsejnVZym39dbAQ= +github.com/shoenig/test v0.6.3 h1:GVXWJFk9PiOjN0KoJ7VrJGH6uLPnqxR7/fe3HUPfE0c= +github.com/shoenig/test v0.6.3/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= @@ -534,11 +417,10 @@ github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.14.0 h1:Rg7d3Lo706X9tHsJMUjdiwMpHB7W8WnSVOssIY+JElU= -github.com/spf13/viper v1.14.0/go.mod h1:WT//axPky3FdvXHzGw33dNdXXXfFQqmEalje+egj8As= +github.com/spf13/viper v1.15.0 h1:js3yy885G8xwJa6iOISGFwd+qlUo5AvyXb7CiihdtiU= +github.com/spf13/viper v1.15.0/go.mod h1:fFcTBJxvhhzSJiZy8n+PeW6t8l+KeT/uTARa0jHOQLA= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0/go.mod 
h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= @@ -550,30 +432,48 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= -github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= -github.com/swaggest/assertjson v1.7.0 h1:SKw5Rn0LQs6UvmGrIdaKQbMR1R3ncXm5KNon+QJ7jtw= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= +github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/swaggest/assertjson v1.8.1 h1:Be2EHY9S2qwKWV+xWZB747Cd7Y79YK6JLdeyrgFvyMo= github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= -github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= -github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= -github.com/ugorji/go/codec v1.2.7/go.mod 
h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= -github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.17 h1:FeCTrRenM5ucXWMpq3u4Wh2nWov9Co68aM2gINGlJRU= -github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.17/go.mod h1:CXKQH9iiW89FahjDENpC7ES9iUQTIyTE2V2aQlLQme8= -github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.17 h1:fcKgoKi1dGCFr1zTP0mKzZDGcMliY2hBmBjpGVf/ee4= -github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.17/go.mod h1:wl/W+O/95rYcMa67D9qQ+8/IJEztbyYSUkdT7L6t+p4= +github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM= +github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI= +github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms= +github.com/tklauser/numcpus v0.6.0/go.mod h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4= +github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo= +github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs= +github.com/uptrace/bun v1.1.14 h1:S5vvNnjEynJ0CvnrBOD7MIRW7q/WbtvFXrdfy0lddAM= +github.com/uptrace/bun v1.1.14/go.mod h1:RHk6DrIisO62dv10pUOJCz5MphXThuOTpVNYEYv7NI8= +github.com/uptrace/bun/dialect/pgdialect v1.1.14 h1:b7+V1KDJPQSFYgkG/6YLXCl2uvwEY3kf/GSM7hTHRDY= +github.com/uptrace/bun/dialect/pgdialect v1.1.14/go.mod h1:v6YiaXmnKQ2FlhRD2c0ZfKd+QXH09pYn4H8ojaavkKk= +github.com/uptrace/bun/extra/bundebug v1.1.14 h1:9OCGfP9ZDlh41u6OLerWdhBtJAVGXHr0xtxO4xWi6t0= +github.com/uptrace/bun/extra/bunotel v1.1.14 h1:jKA1zNfD2/Y/O3eFP15ao+V0cMigXN+ReNbsVUqrOhg= +github.com/uptrace/bun/extra/bunotel v1.1.14/go.mod h1:BBuePZ4ciMqoeyRfef4GL7Z75FsiOm3Q3fvNt0z4sQk= +github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.21 h1:OXsouNDvuET5o1A4uvoCnAXuuNke8JlfZWceciyUlC8= +github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.21/go.mod h1:Xm3wlRGm5xzdAGPOvqydXPiGj0Da1q0OlUNm7Utoda4= 
+github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.1 h1:sCYkntVVoSMuQuyRBaEkedb1qS1KeJJaqKbdtNfTsfM= +github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.1/go.mod h1:1frv9RN1rlTq0jzCq+mVuEQisubZCQ4OU6S/8CaHzGY= +github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.21 h1:HCqo51kNF8wxDMDhxcN5S6DlfZXigMtptRpkvjBCeVc= +github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.21/go.mod h1:2MNqrUmDrt5E0glMuoJI/9FyGVpBKo1FqjSH60UOZFg= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= +github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU= +github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= +github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= +github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= 
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= @@ -587,90 +487,90 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= -go.nhat.io/otelsql v0.7.0 h1:TBxa7dbHokzEZdo1m4ZGbgjrO822DDJVHbIKvaVRDBI= -go.nhat.io/otelsql v0.7.0/go.mod h1:eSIg4NPdvODcSUUCnypyMpi7CXDurAT0a4JvJBuPJTE= +github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg= +github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.nhat.io/otelsql v0.11.0 h1:wUrIKv6+YioYbArKLrfs3txmOjNGUNws2/eE0FzlVCo= +go.nhat.io/otelsql v0.11.0/go.mod h1:sh4rZG+McDPlmGy1hTGchPHIkMqmLpuy37Bnu3cmqhU= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.37.0 
h1:fv05KgzYZm0lDIqIfXAuktVpd7WgKuRO05kJjsDNFC0= -go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.37.0/go.mod h1:SOPIANa+vHjORB2YtPx5IttHBQ6vtvfbVk8kCgIJ70g= -go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.36.4 h1:3aFKDyPT5wE26maD84lCkyVBsrKMVS4auOlwE41vNc4= -go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.36.4/go.mod h1:nrb8m/ngG1kcySp71EVtDZSjUG90MOow7YAbzQxCcDo= -go.opentelemetry.io/contrib/propagators/b3 v1.12.0 h1:OtfTF8bneN8qTeo/j92kcvc0iDDm4bm/c3RzaUJfiu0= -go.opentelemetry.io/contrib/propagators/b3 v1.12.0/go.mod h1:0JDB4elfPUWGsCH/qhaMkDzP1l8nB0ANVx8zXuAYEwg= -go.opentelemetry.io/otel v0.19.0/go.mod h1:j9bF567N9EfomkSidSfmMwIwIBuP37AMAIzVW85OxSg= -go.opentelemetry.io/otel v1.11.2 h1:YBZcQlsVekzFsFbjygXMOXSs6pialIZxcjfO/mBDmR0= -go.opentelemetry.io/otel v1.11.2/go.mod h1:7p4EUV+AqgdlNV9gL97IgUZiVR3yrFXYo53f9BM3tRI= -go.opentelemetry.io/otel/exporters/jaeger v1.11.2 h1:ES8/j2+aB+3/BUw51ioxa50V9btN1eew/2J7N7n1tsE= -go.opentelemetry.io/otel/exporters/jaeger v1.11.2/go.mod h1:nwcF/DK4Hk0auZ/a5vw20uMsaJSXbzeeimhN5f9d0Lc= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.11.2 h1:htgM8vZIF8oPSCxa341e3IZ4yr/sKxgu8KZYllByiVY= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.11.2/go.mod h1:rqbht/LlhVBgn5+k3M5QK96K5Xb0DvXpMJ5SFQpY6uw= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.11.2 h1:fqR1kli93643au1RKo0Uma3d2aPQKT+WBKfTSBaKbOc= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.11.2/go.mod h1:5Qn6qvgkMsLDX+sYK64rHb1FPhpn0UtxF+ouX1uhyJE= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.11.2 h1:ERwKPn9Aer7Gxsc0+ZlutlH1bEEAUXAUhqm3Y45ABbk= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.11.2/go.mod h1:jWZUM2MWhWCJ9J9xVbRx7tzK1mXKpAlze4CeulycwVY= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.11.2 h1:Us8tbCmuN16zAnK5TC69AtODLycKbwnskQzaB6DfFhc= 
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.11.2/go.mod h1:GZWSQQky8AgdJj50r1KJm8oiQiIPaAX7uZCFQX9GzC8= -go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.33.0 h1:hlnyYcK61UzruaUssIZvCHl72qSxGB1R55RexLKjFs8= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.11.2 h1:BhEVgvuE1NWLLuMLvC6sif791F45KFHi5GhOs1KunZU= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.11.2/go.mod h1:bx//lU66dPzNT+Y0hHA12ciKoMOH9iixEwCqC1OeQWQ= -go.opentelemetry.io/otel/metric v0.19.0/go.mod h1:8f9fglJPRnXuskQmKpnad31lcLJ2VmNNqIsx/uIwBSc= -go.opentelemetry.io/otel/metric v0.33.0 h1:xQAyl7uGEYvrLAiV/09iTJlp1pZnQ9Wl793qbVvED1E= -go.opentelemetry.io/otel/metric v0.33.0/go.mod h1:QlTYc+EnYNq/M2mNk1qDDMRLpqCOj2f/r5c7Fd5FYaI= -go.opentelemetry.io/otel/oteltest v0.19.0/go.mod h1:tI4yxwh8U21v7JD6R3BcA/2+RBoTKFexE/PJ/nSO7IA= -go.opentelemetry.io/otel/sdk v1.11.2 h1:GF4JoaEx7iihdMFu30sOyRx52HDHOkl9xQ8SMqNXUiU= -go.opentelemetry.io/otel/sdk v1.11.2/go.mod h1:wZ1WxImwpq+lVRo4vsmSOxdd+xwoUJ6rqyLc3SyX9aU= -go.opentelemetry.io/otel/sdk/metric v0.33.0 h1:oTqyWfksgKoJmbrs2q7O7ahkJzt+Ipekihf8vhpa9qo= -go.opentelemetry.io/otel/trace v0.19.0/go.mod h1:4IXiNextNOpPnRlI4ryK69mn5iC84bjBWZQA5DXz/qg= -go.opentelemetry.io/otel/trace v1.11.2 h1:Xf7hWSF2Glv0DE3MH7fBHvtpSBsjcBUe5MYAmZM/+y0= -go.opentelemetry.io/otel/trace v1.11.2/go.mod h1:4N+yC7QEz7TTsG9BSRLNAa63eg5E06ObSbKPmxQ/pKA= +go.opentelemetry.io/contrib v1.0.0 h1:khwDCxdSspjOLmFnvMuSHd/5rPzbTx0+l6aURwtQdfE= +go.opentelemetry.io/contrib v1.0.0/go.mod h1:EH4yDYeNoaTqn/8yCWQmfNB78VHfGX2Jt2bvnvzBlGM= +go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.42.0 h1:bdoQBGxf0fxHaJo/Ry+RCi8k2ug7T5pqD/NdrwnbyyE= +go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.42.0/go.mod h1:TDCzwfNfOr8Of31UVortnNAvRrgMyMS0zAXw9I3hoSc= +go.opentelemetry.io/contrib/instrumentation/host v0.42.0 h1:/GMlvboQJd4LWxNX/oGYLv06J5a/M/flauLruM/3U2g= 
+go.opentelemetry.io/contrib/instrumentation/host v0.42.0/go.mod h1:w6v1mVemRjTTdfejACjf+LgVA6zKtHOWmdAIf3icx7A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0/go.mod h1:XiYsayHc36K3EByOO6nbAXnAWbrUxdjUROCEeeROOH8= +go.opentelemetry.io/contrib/instrumentation/runtime v0.42.0 h1:EbmAUG9hEAMXyfWEasIt2kmh/WmXUznUksChApTgBGc= +go.opentelemetry.io/contrib/instrumentation/runtime v0.42.0/go.mod h1:rD9feqRYP24P14t5kmhNMqsqm1jvKmpx2H2rKVw52V8= +go.opentelemetry.io/contrib/propagators/b3 v1.17.0 h1:ImOVvHnku8jijXqkwCSyYKRDt2YrnGXD4BbhcpfbfJo= +go.opentelemetry.io/contrib/propagators/b3 v1.17.0/go.mod h1:IkfUfMpKWmynvvE0264trz0sf32NRTZL4nuAN9AbWRc= +go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= +go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s= +go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4= +go.opentelemetry.io/otel/exporters/jaeger v1.16.0 h1:YhxxmXZ011C0aDZKoNw+juVWAmEfv/0W2XBOv9aHTaA= +go.opentelemetry.io/otel/exporters/jaeger v1.16.0/go.mod h1:grYbBo/5afWlPpdPZYhyn78Bk04hnvxn2+hvxQhKIQM= +go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 h1:t4ZwRPU+emrcvM2e9DHd0Fsf0JTPVcbfa/BhTDF03d0= +go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0/go.mod h1:vLarbg68dH2Wa77g71zmKQqlQ8+8Rq3GRG31uc0WcWI= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0 h1:f6BwB2OACc3FCbYVznctQ9V6KK7Vq6CjmYXJ7DeSs4E= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0/go.mod h1:UqL5mZ3qs6XYhDnZaW1Ps4upD+PX6LipH40AoeuIlwU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0 h1:rm+Fizi7lTM2UefJ1TO347fSRcwmIsUAaZmYmIGBRAo= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0/go.mod h1:sWFbI3jJ+6JdjOVepA5blpv/TJ20Hw+26561iMbWcwU= 
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0 h1:IZXpCEtI7BbX01DRQEWTGDkvjMB6hEhiEZXS+eg2YqY= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0/go.mod h1:xY111jIZtWb+pUUgT4UiiSonAaY2cD2Ts5zvuKLki3o= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0 h1:cbsD4cUcviQGXdw8+bo5x2wazq10SKz8hEbtCRPcU78= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0/go.mod h1:JgXSGah17croqhJfhByOLVY719k1emAXC8MVhCIJlRs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0 h1:TVQp/bboR4mhZSav+MdgXB8FaRho1RC8UwVn3T0vjVc= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0/go.mod h1:I33vtIe0sR96wfrUcilIzLoA3mLHhRmz9S9Te0S3gDo= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.16.0 h1:iqjq9LAB8aK++sKVcELezzn655JnBNdsDhghU4G/So8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.16.0/go.mod h1:hGXzO5bhhSHZnKvrDaXB82Y9DRFour0Nz/KrBh7reWw= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0 h1:fl2WmyenEf6LYYlfHAtCUEDyGcpwJNqD4dHGO7PVm4w= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0/go.mod h1:csyQxQ0UHHKVA8KApS7eUO/klMO5sd/av5CNZNU4O6w= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 h1:+XWJd3jf75RXJq29mxbuXhCXFDG3S3R4vBUeSI2P7tE= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0/go.mod h1:hqgzBPTf4yONMFgdZvL/bK42R/iinTyVQtiWihs3SZc= +go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo= +go.opentelemetry.io/otel/metric v1.16.0/go.mod h1:QE47cpOmkwipPiefDwo2wDzwJrlfxxNYodqc4xnGCo4= +go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= +go.opentelemetry.io/otel/sdk v1.16.0 h1:Z1Ok1YsijYL0CSJpHt4cS3wDDh7p572grzNrBMiMWgE= +go.opentelemetry.io/otel/sdk v1.16.0/go.mod h1:tMsIuKXuuIWPBAOrH+eHtvhTL+SntFtXF9QD68aP6p4= +go.opentelemetry.io/otel/sdk/metric v0.39.0 h1:Kun8i1eYf48kHH83RucG93ffz0zGV1sh46FAScOTuDI= 
+go.opentelemetry.io/otel/sdk/metric v0.39.0/go.mod h1:piDIRgjcK7u0HCL5pCA4e74qpK/jk3NiUoAHATVAmiI= +go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= +go.opentelemetry.io/otel/trace v1.16.0 h1:8JRpaObFoW0pxuVPapkgH8UhHQj+bJW8jJsCZEu5MQs= +go.opentelemetry.io/otel/trace v1.16.0/go.mod h1:Yt9vYq1SdNz3xdjZZK7wcXv1qv2pwLkqr2QVwea0ef0= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.19.0 h1:IVN6GR+mhC4s5yfcTbmzHYODqvWAp3ZedA2SJPI1Nnw= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/dig v1.15.0 h1:vq3YWr8zRj1eFGC7Gvf907hE0eRjPTZ1d3xHadD6liE= -go.uber.org/dig v1.15.0/go.mod h1:pKHs0wMynzL6brANhB2hLMro+zalv1osARTviTcqHLM= -go.uber.org/fx v1.18.2 h1:bUNI6oShr+OVFQeU8cDNbnN7VFsu+SsjHzUF51V/GAU= -go.uber.org/fx v1.18.2/go.mod h1:g0V1KMQ66zIRk8bLu3Ea5Jt2w/cHlOIp4wdRsgh0JaY= -go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/dig v1.16.1 h1:+alNIBsl0qfY0j6epRubp/9obgtrObRAc5aD+6jbWY8= +go.uber.org/dig v1.16.1/go.mod h1:557JTAUZT5bUK0SvCwikmLPPtdQhfvLYtO5tJgQSbnk= +go.uber.org/fx v1.19.2 h1:SyFgYQFr1Wl0AYstE8vyYIzP4bFz2URrScjwC4cwUvY= 
+go.uber.org/fx v1.19.2/go.mod h1:43G1VcqSzbIv77y00p1DRAsyZS8WdzuYdhZXmEUkMyQ= +go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= +go.uber.org/mock v0.3.0 h1:3mUxI1No2/60yUYax92Pt8eNOEecx2D3lcXZh2NEZJo= +go.uber.org/mock v0.3.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= -go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= -go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto 
v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8= -golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80= +golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= +golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -705,9 +605,10 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU= +golang.org/x/mod v0.11.0/go.mod 
h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -719,7 +620,6 @@ golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -730,14 +630,12 @@ golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net 
v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= @@ -745,8 +643,8 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220725212005-46097bf591d3/go.mod h1:AaygXjzTFtRAg2ttMY5RMuhpJ3cNnI0XpyFJD1iQRSM= -golang.org/x/net v0.4.0 h1:Q5QPcMlvfxFTAPV0+07Xz/MpK9NTXu2VDUuy0FeMfaU= -golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -771,31 +669,23 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -808,13 +698,12 @@ golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -824,20 +713,17 @@ golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys 
v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= -golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -850,11 +736,12 @@ golang.org/x/text 
v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= -golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.2.0 h1:52I/1L54xyEQAYdtcSuxtiT84KGYTBGXwayxmIpNJhE= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -863,7 +750,6 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= 
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= @@ -871,11 +757,8 @@ golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -883,7 +766,6 @@ golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools 
v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -907,14 +789,13 @@ golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82u golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -983,8 +864,8 @@ google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef h1:uQ2vjV/sHTsWSqdKeLqmwitzgvjMl7o4IdtHwUDXSJY= -google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 h1:DdoeryqhaXp1LtT/emMP1BRJPHHKFi5akj/nbx/zNTA= +google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -1005,8 +886,8 @@ google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAG google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.51.0 h1:E1eGv1FTqoLIdnBCZufiSHgKjlqG6fKFf6pPWtMTh8U= -google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= +google.golang.org/grpc v1.55.0 h1:3Oj82/tFSCeUrRTg/5E/7d/W5A1tj6Ky1ABAuZuv5ag= +google.golang.org/grpc 
v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -1020,32 +901,24 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= +google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod 
h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/segmentio/analytics-go.v3 v3.1.0 h1:UzxH1uaGZRpMKDhJyBz0pexz6yUoBU3x8bJsRk/HV6U= gopkg.in/segmentio/analytics-go.v3 v3.1.0/go.mod h1:4QqqlTlSSpVlWA9/9nDcPw+FkM2yv1NQoYjUbL9/JAw= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= diff --git a/internal/account.go b/internal/account.go new file mode 100644 index 000000000..0f4a3726f --- /dev/null +++ b/internal/account.go @@ -0,0 +1,69 @@ +package ledger + +import ( + "encoding/json" + "regexp" + + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/uptrace/bun" +) + +const ( + WORLD = "world" 
+) + +type Account struct { + bun.BaseModel `bun:"table:accounts,alias:accounts"` + + Address string `json:"address"` + Metadata metadata.Metadata `json:"metadata"` +} + +func (a Account) copy() Account { + a.Metadata = a.Metadata.Copy() + return a +} + +func NewAccount(address string) Account { + return Account{ + Address: address, + Metadata: metadata.Metadata{}, + } +} + +type ExpandedAccount struct { + Account `bun:",extend"` + Volumes VolumesByAssets `json:"volumes,omitempty" bun:"volumes,type:jsonb"` + EffectiveVolumes VolumesByAssets `json:"effectiveVolumes,omitempty" bun:"effective_volumes,type:jsonb"` +} + +func NewExpandedAccount(address string) ExpandedAccount { + return ExpandedAccount{ + Account: Account{ + Address: address, + Metadata: metadata.Metadata{}, + }, + Volumes: map[string]*Volumes{}, + } +} + +func (v ExpandedAccount) MarshalJSON() ([]byte, error) { + type aux ExpandedAccount + return json.Marshal(struct { + aux + Balances BalancesByAssets `json:"balances"` + }{ + aux: aux(v), + Balances: v.Volumes.Balances(), + }) +} + +func (v ExpandedAccount) Copy() ExpandedAccount { + v.Account = v.Account.copy() + v.Volumes = v.Volumes.copy() + return v +} + +const AccountPattern = "^[a-zA-Z_]+[a-zA-Z0-9_:]*$" + +var AccountRegexp = regexp.MustCompile(AccountPattern) diff --git a/internal/analytics/analytics.go b/internal/analytics/analytics.go new file mode 100644 index 000000000..91a051339 --- /dev/null +++ b/internal/analytics/analytics.go @@ -0,0 +1,149 @@ +package analytics + +import ( + "context" + "crypto/sha256" + "encoding/base64" + "runtime" + "time" + + ledger "github.com/formancehq/ledger/internal" + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/pbnjay/memory" + "gopkg.in/segmentio/analytics-go.v3" +) + +const ( + ApplicationStats = "Application stats" + + VersionProperty = "version" + AccountsProperty = "accounts" + TransactionsProperty = "transactions" + 
LedgersProperty = "ledgers" + OSProperty = "os" + ArchProperty = "arch" + TimeZoneProperty = "tz" + CPUCountProperty = "cpuCount" + TotalMemoryProperty = "totalMemory" +) + +type heartbeat struct { + version string + interval time.Duration + client analytics.Client + stopChan chan chan struct{} + backend Backend +} + +func (m *heartbeat) Run(ctx context.Context) error { + + enqueue := func() { + err := m.enqueue(ctx) + if err != nil { + logging.FromContext(ctx).WithFields(map[string]interface{}{ + "error": err, + }).Error("enqueuing analytics") + } + } + + enqueue() + for { + select { + case ch := <-m.stopChan: + ch <- struct{}{} + return nil + case <-ctx.Done(): + return ctx.Err() + case <-time.After(m.interval): + enqueue() + } + } +} + +func (m *heartbeat) Stop(ctx context.Context) error { + ch := make(chan struct{}) + m.stopChan <- ch + select { + case <-ctx.Done(): + return ctx.Err() + case <-ch: + return nil + } +} + +func (m *heartbeat) enqueue(ctx context.Context) error { + + appID, err := m.backend.AppID(ctx) + if err != nil { + return err + } + + tz, _ := ledger.Now().Local().Zone() + + properties := analytics.NewProperties(). + Set(VersionProperty, m.version). + Set(OSProperty, runtime.GOOS). + Set(ArchProperty, runtime.GOARCH). + Set(TimeZoneProperty, tz). + Set(CPUCountProperty, runtime.NumCPU()). 
+ Set(TotalMemoryProperty, memory.TotalMemory()/1024/1024) + + ledgers, err := m.backend.ListLedgers(ctx) + if err != nil { + return err + } + + ledgersProperty := map[string]any{} + + for _, l := range ledgers { + stats := map[string]any{} + if err := func() error { + store, err := m.backend.GetLedgerStore(ctx, l) + if err != nil && err != storageerrors.ErrStoreNotFound { + return err + } + + transactions, err := store.CountTransactions(ctx) + if err != nil { + return err + } + + accounts, err := store.CountAccounts(ctx) + if err != nil { + return err + } + stats[TransactionsProperty] = transactions + stats[AccountsProperty] = accounts + + return nil + }(); err != nil { + return err + } + + digest := sha256.New() + digest.Write([]byte(l)) + ledgerHash := base64.RawURLEncoding.EncodeToString(digest.Sum(nil)) + + ledgersProperty[ledgerHash] = stats + } + if len(ledgersProperty) > 0 { + properties.Set(LedgersProperty, ledgersProperty) + } + + return m.client.Enqueue(&analytics.Track{ + AnonymousId: appID, + Event: ApplicationStats, + Properties: properties, + }) +} + +func newHeartbeat(backend Backend, client analytics.Client, version string, interval time.Duration) *heartbeat { + return &heartbeat{ + version: version, + interval: interval, + client: client, + backend: backend, + stopChan: make(chan chan struct{}, 1), + } +} diff --git a/internal/analytics/analytics_test.go b/internal/analytics/analytics_test.go new file mode 100644 index 000000000..4fbbae14e --- /dev/null +++ b/internal/analytics/analytics_test.go @@ -0,0 +1,189 @@ +package analytics + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "io" + "net/http" + "sync" + "testing" + "time" + + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" + "gopkg.in/segmentio/analytics-go.v3" +) + +type transport func(*http.Request) (*http.Response, error) + +func (fn transport) RoundTrip(req *http.Request) (*http.Response, error) { + return fn(req) +} + +type Queue[ITEM any] struct { 
+ mu sync.Mutex + items []ITEM +} + +func (s *Queue[ITEM]) Get() (ret ITEM, ok bool) { + s.mu.Lock() + defer s.mu.Unlock() + + if len(s.items) == 0 { + return + } + ret = s.items[0] + ok = true + if len(s.items) == 1 { + s.items = make([]ITEM, 0) + return + } + s.items = s.items[1:] + return +} + +func (s *Queue[ITEM]) Put(i ITEM) *Queue[ITEM] { + s.mu.Lock() + defer s.mu.Unlock() + + s.items = append(s.items, i) + return s +} + +func (s *Queue[ITEM]) Empty() bool { + s.mu.Lock() + defer s.mu.Unlock() + return len(s.items) == 0 +} + +func NewQueue[ITEM any]() *Queue[ITEM] { + return &Queue[ITEM]{} +} + +type segmentBatch struct { + Batch []analytics.Track `json:"batch"` +} + +const ( + interval = 10 * time.Millisecond + version = "100.0.0" + applicationId = "foo" + writeKey = "key" +) + +func EventuallyQueueNotEmpty[ITEM any](t *testing.T, queue *Queue[ITEM]) { + require.Eventually(t, func() bool { + return !queue.Empty() + }, 10*interval, interval) +} + +var emptyHttpResponse = &http.Response{ + Body: io.NopCloser(bytes.NewReader([]byte{})), + StatusCode: http.StatusOK, +} + +func TestAnalytics(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + transport http.RoundTripper + } + queue := NewQueue[*http.Request]() + firstCallChan := make(chan struct{}) + testCases := []testCase{ + { + name: "nominal", + transport: transport(func(request *http.Request) (*http.Response, error) { + queue.Put(request) + return emptyHttpResponse, nil + }), + }, + { + name: "with error on backend", + transport: transport(func(request *http.Request) (*http.Response, error) { + select { + case <-firstCallChan: // Enter this case only if the chan is closed + queue.Put(request) + return emptyHttpResponse, nil + default: + close(firstCallChan) + return nil, errors.New("general error") + } + }), + }, + } + + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + ctrl := gomock.NewController(t) + mockLedger := 
NewMockLedger(ctrl) + backend := NewMockBackend(ctrl) + backend. + EXPECT(). + ListLedgers(gomock.Any()). + AnyTimes(). + Return([]string{"default"}, nil) + backend. + EXPECT(). + AppID(gomock.Any()). + AnyTimes(). + Return(applicationId, nil) + backend. + EXPECT(). + GetLedgerStore(gomock.Any(), "default"). + AnyTimes(). + Return(mockLedger, nil) + t.Cleanup(func() { + ctrl.Finish() + }) + analyticsClient, err := analytics.NewWithConfig(writeKey, analytics.Config{ + BatchSize: 1, + Transport: testCase.transport, + }) + require.NoError(t, err) + + mockLedger. + EXPECT(). + CountTransactions(gomock.Any()). + AnyTimes(). + Return(uint64(10), nil) + mockLedger. + EXPECT(). + CountAccounts(gomock.Any()). + AnyTimes(). + Return(uint64(20), nil) + + h := newHeartbeat(backend, analyticsClient, version, interval) + go func() { + require.NoError(t, h.Run(context.Background())) + }() + t.Cleanup(func() { + require.NoError(t, h.Stop(context.Background())) + }) + + for i := 0; i < 10; i++ { + EventuallyQueueNotEmpty(t, queue) + request, ok := queue.Get() + require.True(t, ok) + + username, password, ok := request.BasicAuth() + require.True(t, ok) + require.Equal(t, writeKey, username) + require.Empty(t, password) + + batch := &segmentBatch{} + require.NoError(t, json.NewDecoder(request.Body).Decode(batch)) + require.Len(t, batch.Batch, 1) + + track := batch.Batch[0] + require.Equal(t, ApplicationStats, track.Event) + require.Equal(t, version, track.Properties[VersionProperty]) + require.Equal(t, applicationId, track.AnonymousId) + } + }) + } +} diff --git a/internal/analytics/backend.go b/internal/analytics/backend.go new file mode 100644 index 000000000..951803d1a --- /dev/null +++ b/internal/analytics/backend.go @@ -0,0 +1,83 @@ +package analytics + +import ( + "context" + + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/driver" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledgerstore" + 
"github.com/google/uuid" + "github.com/pkg/errors" +) + +//go:generate mockgen -source backend.go -destination backend_test.go -package analytics . Ledger + +type Ledger interface { + CountTransactions(ctx context.Context) (uint64, error) + CountAccounts(ctx context.Context) (uint64, error) +} + +type defaultLedger struct { + store *ledgerstore.Store +} + +func (d defaultLedger) CountTransactions(ctx context.Context) (uint64, error) { + return d.store.CountTransactions(ctx, ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) +} + +func (d defaultLedger) CountAccounts(ctx context.Context) (uint64, error) { + return d.store.CountAccounts(ctx, ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) +} + +var _ Ledger = (*defaultLedger)(nil) + +type Backend interface { + AppID(ctx context.Context) (string, error) + ListLedgers(ctx context.Context) ([]string, error) + GetLedgerStore(ctx context.Context, l string) (Ledger, error) +} + +type defaultBackend struct { + driver *driver.Driver + appID string +} + +func (d defaultBackend) AppID(ctx context.Context) (string, error) { + var err error + if d.appID == "" { + d.appID, err = d.driver.GetSystemStore().GetConfiguration(ctx, "appId") + if err != nil && !errors.Is(err, storageerrors.ErrNotFound) { + return "", err + } + if errors.Is(err, storageerrors.ErrNotFound) { + d.appID = uuid.NewString() + if err := d.driver.GetSystemStore().InsertConfiguration(ctx, "appId", d.appID); err != nil { + return "", err + } + } + } + return d.appID, nil +} + +func (d defaultBackend) ListLedgers(ctx context.Context) ([]string, error) { + return d.driver.GetSystemStore().ListLedgers(ctx) +} + +func (d defaultBackend) GetLedgerStore(ctx context.Context, name string) (Ledger, error) { + ledgerStore, err := d.driver.GetLedgerStore(ctx, name) + if err != nil { + return nil, err + } + return &defaultLedger{ + store: ledgerStore, + }, 
nil +} + +var _ Backend = (*defaultBackend)(nil) + +func newDefaultBackend(driver *driver.Driver, appID string) *defaultBackend { + return &defaultBackend{ + driver: driver, + appID: appID, + } +} diff --git a/internal/analytics/backend_test.go b/internal/analytics/backend_test.go new file mode 100644 index 000000000..f91aa5326 --- /dev/null +++ b/internal/analytics/backend_test.go @@ -0,0 +1,133 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: backend.go + +// Package analytics is a generated GoMock package. +package analytics + +import ( + context "context" + reflect "reflect" + + gomock "github.com/golang/mock/gomock" +) + +// MockLedger is a mock of Ledger interface. +type MockLedger struct { + ctrl *gomock.Controller + recorder *MockLedgerMockRecorder +} + +// MockLedgerMockRecorder is the mock recorder for MockLedger. +type MockLedgerMockRecorder struct { + mock *MockLedger +} + +// NewMockLedger creates a new mock instance. +func NewMockLedger(ctrl *gomock.Controller) *MockLedger { + mock := &MockLedger{ctrl: ctrl} + mock.recorder = &MockLedgerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockLedger) EXPECT() *MockLedgerMockRecorder { + return m.recorder +} + +// CountAccounts mocks base method. +func (m *MockLedger) CountAccounts(ctx context.Context) (uint64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAccounts", ctx) + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAccounts indicates an expected call of CountAccounts. +func (mr *MockLedgerMockRecorder) CountAccounts(ctx interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*MockLedger)(nil).CountAccounts), ctx) +} + +// CountTransactions mocks base method. 
+func (m *MockLedger) CountTransactions(ctx context.Context) (uint64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountTransactions", ctx) + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountTransactions indicates an expected call of CountTransactions. +func (mr *MockLedgerMockRecorder) CountTransactions(ctx interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*MockLedger)(nil).CountTransactions), ctx) +} + +// MockBackend is a mock of Backend interface. +type MockBackend struct { + ctrl *gomock.Controller + recorder *MockBackendMockRecorder +} + +// MockBackendMockRecorder is the mock recorder for MockBackend. +type MockBackendMockRecorder struct { + mock *MockBackend +} + +// NewMockBackend creates a new mock instance. +func NewMockBackend(ctrl *gomock.Controller) *MockBackend { + mock := &MockBackend{ctrl: ctrl} + mock.recorder = &MockBackendMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockBackend) EXPECT() *MockBackendMockRecorder { + return m.recorder +} + +// AppID mocks base method. +func (m *MockBackend) AppID(ctx context.Context) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AppID", ctx) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// AppID indicates an expected call of AppID. +func (mr *MockBackendMockRecorder) AppID(ctx interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AppID", reflect.TypeOf((*MockBackend)(nil).AppID), ctx) +} + +// GetLedgerStore mocks base method. 
+func (m *MockBackend) GetLedgerStore(ctx context.Context, l string) (Ledger, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedgerStore", ctx, l) + ret0, _ := ret[0].(Ledger) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedgerStore indicates an expected call of GetLedgerStore. +func (mr *MockBackendMockRecorder) GetLedgerStore(ctx, l interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedgerStore", reflect.TypeOf((*MockBackend)(nil).GetLedgerStore), ctx, l) +} + +// ListLedgers mocks base method. +func (m *MockBackend) ListLedgers(ctx context.Context) ([]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLedgers", ctx) + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLedgers indicates an expected call of ListLedgers. +func (mr *MockBackendMockRecorder) ListLedgers(ctx interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLedgers", reflect.TypeOf((*MockBackend)(nil).ListLedgers), ctx) +} diff --git a/internal/analytics/module.go b/internal/analytics/module.go new file mode 100644 index 000000000..bef19428e --- /dev/null +++ b/internal/analytics/module.go @@ -0,0 +1,48 @@ +package analytics + +import ( + "context" + "time" + + "github.com/formancehq/ledger/internal/storage/driver" + "go.uber.org/fx" + "gopkg.in/segmentio/analytics-go.v3" +) + +func NewHeartbeatModule(version, writeKey, appID string, interval time.Duration) fx.Option { + return fx.Options( + fx.Supply(analytics.Config{}), // Provide empty config to be able to replace (use fx.Replace) if necessary + fx.Provide(func(cfg analytics.Config) (analytics.Client, error) { + return analytics.NewWithConfig(writeKey, cfg) + }), + fx.Provide(func(client analytics.Client, backend Backend) *heartbeat { + return newHeartbeat(backend, client, version, interval) + }), + fx.Provide(func(driver *driver.Driver) Backend 
{ + return newDefaultBackend(driver, appID) + }), + fx.Invoke(func(m *heartbeat, lc fx.Lifecycle) { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + go func() { + err := m.Run(context.Background()) + if err != nil { + panic(err) + } + }() + return nil + }, + OnStop: func(ctx context.Context) error { + return m.Stop(ctx) + }, + }) + }), + fx.Invoke(func(lc fx.Lifecycle, client analytics.Client) { + lc.Append(fx.Hook{ + OnStop: func(ctx context.Context) error { + return client.Close() + }, + }) + }), + ) +} diff --git a/internal/api/backend/backend.go b/internal/api/backend/backend.go new file mode 100644 index 000000000..16bbb0782 --- /dev/null +++ b/internal/api/backend/backend.go @@ -0,0 +1,71 @@ +package backend + +import ( + "context" + "math/big" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/engine" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/formancehq/stack/libs/go-libs/migrations" +) + +//go:generate mockgen -source backend.go -destination backend_generated.go -package backend . 
Ledger + +type Ledger interface { + GetAccountWithVolumes(ctx context.Context, query ledgerstore.GetAccountQuery) (*ledger.ExpandedAccount, error) + GetAccountsWithVolumes(ctx context.Context, query *ledgerstore.GetAccountsQuery) (*api.Cursor[ledger.ExpandedAccount], error) + CountAccounts(ctx context.Context, query *ledgerstore.GetAccountsQuery) (uint64, error) + GetAggregatedBalances(ctx context.Context, q *ledgerstore.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) + GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) + Stats(ctx context.Context) (engine.Stats, error) + GetLogs(ctx context.Context, query *ledgerstore.GetLogsQuery) (*api.Cursor[ledger.ChainedLog], error) + CountTransactions(ctx context.Context, query *ledgerstore.GetTransactionsQuery) (uint64, error) + GetTransactions(ctx context.Context, query *ledgerstore.GetTransactionsQuery) (*api.Cursor[ledger.ExpandedTransaction], error) + GetTransactionWithVolumes(ctx context.Context, query ledgerstore.GetTransactionQuery) (*ledger.ExpandedTransaction, error) + + CreateTransaction(ctx context.Context, parameters command.Parameters, data ledger.RunScript) (*ledger.Transaction, error) + RevertTransaction(ctx context.Context, parameters command.Parameters, id *big.Int) (*ledger.Transaction, error) + SaveMeta(ctx context.Context, parameters command.Parameters, targetType string, targetID any, m metadata.Metadata) error + DeleteMetadata(ctx context.Context, parameters command.Parameters, targetType string, targetID any, key string) error + + IsDatabaseUpToDate(ctx context.Context) (bool, error) +} + +type Backend interface { + GetLedger(ctx context.Context, name string) (Ledger, error) + ListLedgers(ctx context.Context) ([]string, error) + GetVersion() string +} + +type DefaultBackend struct { + storageDriver *driver.Driver + resolver *engine.Resolver + version string +} + +func (d DefaultBackend) GetLedger(ctx context.Context, name string) (Ledger, error) { + return 
d.resolver.GetLedger(ctx, name) +} + +func (d DefaultBackend) ListLedgers(ctx context.Context) ([]string, error) { + return d.storageDriver.GetSystemStore().ListLedgers(ctx) +} + +func (d DefaultBackend) GetVersion() string { + return d.version +} + +var _ Backend = (*DefaultBackend)(nil) + +func NewDefaultBackend(driver *driver.Driver, version string, resolver *engine.Resolver) *DefaultBackend { + return &DefaultBackend{ + storageDriver: driver, + resolver: resolver, + version: version, + } +} diff --git a/internal/api/backend/backend_generated.go b/internal/api/backend/backend_generated.go new file mode 100644 index 000000000..7744d4ca9 --- /dev/null +++ b/internal/api/backend/backend_generated.go @@ -0,0 +1,337 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: backend.go +// +// Generated by this command: +// +// mockgen -source backend.go -destination backend_generated.go -package backend . Ledger +// +// Package backend is a generated GoMock package. +package backend + +import ( + context "context" + big "math/big" + reflect "reflect" + + ledger "github.com/formancehq/ledger/internal" + engine "github.com/formancehq/ledger/internal/engine" + command "github.com/formancehq/ledger/internal/engine/command" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledgerstore" + api "github.com/formancehq/stack/libs/go-libs/api" + metadata "github.com/formancehq/stack/libs/go-libs/metadata" + migrations "github.com/formancehq/stack/libs/go-libs/migrations" + gomock "go.uber.org/mock/gomock" +) + +// MockLedger is a mock of Ledger interface. +type MockLedger struct { + ctrl *gomock.Controller + recorder *MockLedgerMockRecorder +} + +// MockLedgerMockRecorder is the mock recorder for MockLedger. +type MockLedgerMockRecorder struct { + mock *MockLedger +} + +// NewMockLedger creates a new mock instance. 
+func NewMockLedger(ctrl *gomock.Controller) *MockLedger { + mock := &MockLedger{ctrl: ctrl} + mock.recorder = &MockLedgerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockLedger) EXPECT() *MockLedgerMockRecorder { + return m.recorder +} + +// CountAccounts mocks base method. +func (m *MockLedger) CountAccounts(ctx context.Context, query *ledgerstore.GetAccountsQuery) (uint64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAccounts", ctx, query) + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAccounts indicates an expected call of CountAccounts. +func (mr *MockLedgerMockRecorder) CountAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*MockLedger)(nil).CountAccounts), ctx, query) +} + +// CountTransactions mocks base method. +func (m *MockLedger) CountTransactions(ctx context.Context, query *ledgerstore.GetTransactionsQuery) (uint64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountTransactions", ctx, query) + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountTransactions indicates an expected call of CountTransactions. +func (mr *MockLedgerMockRecorder) CountTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*MockLedger)(nil).CountTransactions), ctx, query) +} + +// CreateTransaction mocks base method. 
+func (m *MockLedger) CreateTransaction(ctx context.Context, parameters command.Parameters, data ledger.RunScript) (*ledger.Transaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateTransaction", ctx, parameters, data) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateTransaction indicates an expected call of CreateTransaction. +func (mr *MockLedgerMockRecorder) CreateTransaction(ctx, parameters, data any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateTransaction", reflect.TypeOf((*MockLedger)(nil).CreateTransaction), ctx, parameters, data) +} + +// DeleteMetadata mocks base method. +func (m *MockLedger) DeleteMetadata(ctx context.Context, parameters command.Parameters, targetType string, targetID any, key string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteMetadata", ctx, parameters, targetType, targetID, key) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteMetadata indicates an expected call of DeleteMetadata. +func (mr *MockLedgerMockRecorder) DeleteMetadata(ctx, parameters, targetType, targetID, key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteMetadata", reflect.TypeOf((*MockLedger)(nil).DeleteMetadata), ctx, parameters, targetType, targetID, key) +} + +// GetAccountWithVolumes mocks base method. +func (m *MockLedger) GetAccountWithVolumes(ctx context.Context, query ledgerstore.GetAccountQuery) (*ledger.ExpandedAccount, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccountWithVolumes", ctx, query) + ret0, _ := ret[0].(*ledger.ExpandedAccount) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccountWithVolumes indicates an expected call of GetAccountWithVolumes. 
+func (mr *MockLedgerMockRecorder) GetAccountWithVolumes(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountWithVolumes", reflect.TypeOf((*MockLedger)(nil).GetAccountWithVolumes), ctx, query) +} + +// GetAccountsWithVolumes mocks base method. +func (m *MockLedger) GetAccountsWithVolumes(ctx context.Context, query *ledgerstore.GetAccountsQuery) (*api.Cursor[ledger.ExpandedAccount], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccountsWithVolumes", ctx, query) + ret0, _ := ret[0].(*api.Cursor[ledger.ExpandedAccount]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccountsWithVolumes indicates an expected call of GetAccountsWithVolumes. +func (mr *MockLedgerMockRecorder) GetAccountsWithVolumes(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountsWithVolumes", reflect.TypeOf((*MockLedger)(nil).GetAccountsWithVolumes), ctx, query) +} + +// GetAggregatedBalances mocks base method. +func (m *MockLedger) GetAggregatedBalances(ctx context.Context, q *ledgerstore.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAggregatedBalances", ctx, q) + ret0, _ := ret[0].(ledger.BalancesByAssets) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAggregatedBalances indicates an expected call of GetAggregatedBalances. +func (mr *MockLedgerMockRecorder) GetAggregatedBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAggregatedBalances", reflect.TypeOf((*MockLedger)(nil).GetAggregatedBalances), ctx, q) +} + +// GetLogs mocks base method. 
+func (m *MockLedger) GetLogs(ctx context.Context, query *ledgerstore.GetLogsQuery) (*api.Cursor[ledger.ChainedLog], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLogs", ctx, query) + ret0, _ := ret[0].(*api.Cursor[ledger.ChainedLog]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLogs indicates an expected call of GetLogs. +func (mr *MockLedgerMockRecorder) GetLogs(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLogs", reflect.TypeOf((*MockLedger)(nil).GetLogs), ctx, query) +} + +// GetMigrationsInfo mocks base method. +func (m *MockLedger) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrationsInfo", ctx) + ret0, _ := ret[0].([]migrations.Info) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrationsInfo indicates an expected call of GetMigrationsInfo. +func (mr *MockLedgerMockRecorder) GetMigrationsInfo(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsInfo", reflect.TypeOf((*MockLedger)(nil).GetMigrationsInfo), ctx) +} + +// GetTransactionWithVolumes mocks base method. +func (m *MockLedger) GetTransactionWithVolumes(ctx context.Context, query ledgerstore.GetTransactionQuery) (*ledger.ExpandedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTransactionWithVolumes", ctx, query) + ret0, _ := ret[0].(*ledger.ExpandedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTransactionWithVolumes indicates an expected call of GetTransactionWithVolumes. +func (mr *MockLedgerMockRecorder) GetTransactionWithVolumes(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactionWithVolumes", reflect.TypeOf((*MockLedger)(nil).GetTransactionWithVolumes), ctx, query) +} + +// GetTransactions mocks base method. 
+func (m *MockLedger) GetTransactions(ctx context.Context, query *ledgerstore.GetTransactionsQuery) (*api.Cursor[ledger.ExpandedTransaction], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTransactions", ctx, query) + ret0, _ := ret[0].(*api.Cursor[ledger.ExpandedTransaction]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTransactions indicates an expected call of GetTransactions. +func (mr *MockLedgerMockRecorder) GetTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactions", reflect.TypeOf((*MockLedger)(nil).GetTransactions), ctx, query) +} + +// IsDatabaseUpToDate mocks base method. +func (m *MockLedger) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsDatabaseUpToDate", ctx) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsDatabaseUpToDate indicates an expected call of IsDatabaseUpToDate. +func (mr *MockLedgerMockRecorder) IsDatabaseUpToDate(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsDatabaseUpToDate", reflect.TypeOf((*MockLedger)(nil).IsDatabaseUpToDate), ctx) +} + +// RevertTransaction mocks base method. +func (m *MockLedger) RevertTransaction(ctx context.Context, parameters command.Parameters, id *big.Int) (*ledger.Transaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevertTransaction", ctx, parameters, id) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RevertTransaction indicates an expected call of RevertTransaction. +func (mr *MockLedgerMockRecorder) RevertTransaction(ctx, parameters, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertTransaction", reflect.TypeOf((*MockLedger)(nil).RevertTransaction), ctx, parameters, id) +} + +// SaveMeta mocks base method. 
+func (m_2 *MockLedger) SaveMeta(ctx context.Context, parameters command.Parameters, targetType string, targetID any, m metadata.Metadata) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "SaveMeta", ctx, parameters, targetType, targetID, m) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveMeta indicates an expected call of SaveMeta. +func (mr *MockLedgerMockRecorder) SaveMeta(ctx, parameters, targetType, targetID, m any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveMeta", reflect.TypeOf((*MockLedger)(nil).SaveMeta), ctx, parameters, targetType, targetID, m) +} + +// Stats mocks base method. +func (m *MockLedger) Stats(ctx context.Context) (engine.Stats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Stats", ctx) + ret0, _ := ret[0].(engine.Stats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Stats indicates an expected call of Stats. +func (mr *MockLedgerMockRecorder) Stats(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stats", reflect.TypeOf((*MockLedger)(nil).Stats), ctx) +} + +// MockBackend is a mock of Backend interface. +type MockBackend struct { + ctrl *gomock.Controller + recorder *MockBackendMockRecorder +} + +// MockBackendMockRecorder is the mock recorder for MockBackend. +type MockBackendMockRecorder struct { + mock *MockBackend +} + +// NewMockBackend creates a new mock instance. +func NewMockBackend(ctrl *gomock.Controller) *MockBackend { + mock := &MockBackend{ctrl: ctrl} + mock.recorder = &MockBackendMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockBackend) EXPECT() *MockBackendMockRecorder { + return m.recorder +} + +// GetLedger mocks base method. 
+func (m *MockBackend) GetLedger(ctx context.Context, name string) (Ledger, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedger", ctx, name) + ret0, _ := ret[0].(Ledger) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedger indicates an expected call of GetLedger. +func (mr *MockBackendMockRecorder) GetLedger(ctx, name any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedger", reflect.TypeOf((*MockBackend)(nil).GetLedger), ctx, name) +} + +// GetVersion mocks base method. +func (m *MockBackend) GetVersion() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetVersion") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetVersion indicates an expected call of GetVersion. +func (mr *MockBackendMockRecorder) GetVersion() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVersion", reflect.TypeOf((*MockBackend)(nil).GetVersion)) +} + +// ListLedgers mocks base method. +func (m *MockBackend) ListLedgers(ctx context.Context) ([]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLedgers", ctx) + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLedgers indicates an expected call of ListLedgers. 
+func (mr *MockBackendMockRecorder) ListLedgers(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLedgers", reflect.TypeOf((*MockBackend)(nil).ListLedgers), ctx) +} diff --git a/internal/api/module.go b/internal/api/module.go new file mode 100644 index 000000000..484a035c8 --- /dev/null +++ b/internal/api/module.go @@ -0,0 +1,40 @@ +package api + +import ( + _ "embed" + + "github.com/go-chi/chi/v5" + + "github.com/formancehq/ledger/internal/api/backend" + "github.com/formancehq/ledger/internal/engine" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/stack/libs/go-libs/health" + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/metric/noop" + "go.uber.org/fx" +) + +type Config struct { + Version string + ReadOnly bool +} + +func Module(cfg Config) fx.Option { + return fx.Options( + fx.Provide(func( + backend backend.Backend, + healthController *health.HealthController, + globalMetricsRegistry metrics.GlobalRegistry) chi.Router { + return NewRouter(backend, healthController, globalMetricsRegistry, cfg.ReadOnly) + }), + fx.Provide(func(storageDriver *driver.Driver, resolver *engine.Resolver) backend.Backend { + return backend.NewDefaultBackend(storageDriver, cfg.Version, resolver) + }), + fx.Provide(fx.Annotate(noop.NewMeterProvider, fx.As(new(metric.MeterProvider)))), + fx.Decorate(fx.Annotate(func(meterProvider metric.MeterProvider) (metrics.GlobalRegistry, error) { + return metrics.RegisterGlobalRegistry(meterProvider) + }, fx.As(new(metrics.GlobalRegistry)))), + health.Module(), + ) +} diff --git a/internal/api/read_only.go b/internal/api/read_only.go new file mode 100644 index 000000000..d2e7ee458 --- /dev/null +++ b/internal/api/read_only.go @@ -0,0 +1,18 @@ +package api + +import ( + "net/http" + + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/pkg/errors" +) + +func ReadOnly(h 
http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet && r.Method != http.MethodOptions && r.Method != http.MethodHead { + api.BadRequest(w, "READ_ONLY", errors.New("Read only mode")) + return + } + h.ServeHTTP(w, r) + }) +} diff --git a/internal/api/router.go b/internal/api/router.go new file mode 100644 index 000000000..110fcbed8 --- /dev/null +++ b/internal/api/router.go @@ -0,0 +1,32 @@ +package api + +import ( + "net/http" + + "github.com/formancehq/ledger/internal/api/backend" + v1 "github.com/formancehq/ledger/internal/api/v1" + v2 "github.com/formancehq/ledger/internal/api/v2" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/stack/libs/go-libs/health" + "github.com/go-chi/chi/v5" +) + +func NewRouter( + backend backend.Backend, + healthController *health.HealthController, + globalMetricsRegistry metrics.GlobalRegistry, + readOnly bool, +) chi.Router { + mux := chi.NewRouter() + if readOnly { + mux.Use(ReadOnly) + } + v2Router := v2.NewRouter(backend, healthController, globalMetricsRegistry) + mux.Handle("/v2/*", http.StripPrefix("/v2", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + chi.RouteContext(r.Context()).Reset() + v2Router.ServeHTTP(w, r) + }))) + mux.Handle("/*", v1.NewRouter(backend, healthController, globalMetricsRegistry)) + + return mux +} diff --git a/internal/api/shared/context.go b/internal/api/shared/context.go new file mode 100644 index 000000000..a4a99e744 --- /dev/null +++ b/internal/api/shared/context.go @@ -0,0 +1,19 @@ +package shared + +import ( + "context" + + "github.com/formancehq/ledger/internal/api/backend" +) + +type ledgerKey struct{} + +var _ledgerKey = ledgerKey{} + +func ContextWithLedger(ctx context.Context, ledger backend.Ledger) context.Context { + return context.WithValue(ctx, _ledgerKey, ledger) +} + +func LedgerFromContext(ctx context.Context) backend.Ledger { + return 
ctx.Value(_ledgerKey).(backend.Ledger) +} diff --git a/internal/api/shared/errors.go b/internal/api/shared/errors.go new file mode 100644 index 000000000..50b8be75f --- /dev/null +++ b/internal/api/shared/errors.go @@ -0,0 +1,110 @@ +package shared + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/machine/vm" + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/pkg/errors" +) + +const ( + ErrInternal = "INTERNAL" + ErrConflict = "CONFLICT" + ErrInsufficientFund = "INSUFFICIENT_FUND" + ErrValidation = "VALIDATION" + ErrContextCancelled = "CONTEXT_CANCELLED" + ErrStore = "STORE" + ErrNotFound = "NOT_FOUND" + ErrScriptCompilationFailed = "COMPILATION_FAILED" + ErrScriptNoScript = "NO_SCRIPT" + ErrScriptMetadataOverride = "METADATA_OVERRIDE" + ScriptErrorInsufficientFund = "INSUFFICIENT_FUND" + ScriptErrorCompilationFailed = "COMPILATION_FAILED" + ScriptErrorNoScript = "NO_SCRIPT" + ScriptErrorMetadataOverride = "METADATA_OVERRIDE" + ResourceResolutionError = "RESOURCE_RESOLUTION_ERROR" +) + +func ResponseError(w http.ResponseWriter, r *http.Request, err error) { + status, code, details := coreErrorToErrorCode(err) + + baseError := errors.Cause(err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + if status < 500 { + err := json.NewEncoder(w).Encode(api.ErrorResponse{ + ErrorCode: code, + ErrorMessage: baseError.Error(), + Details: details, + }) + if err != nil { + panic(err) + } + } else { + logging.FromContext(r.Context()).Errorf("internal server error: %s", err) + } +} + +func coreErrorToErrorCode(err error) (int, string, string) { + switch { + case command.IsConflictError(err): + return http.StatusConflict, ErrConflict, "" + case + command.IsValidationError(err), + 
command.IsPastTransactionError(err), + command.IsNoPostingsError(err), + errors.Is(err, command.ErrAlreadyReverted), + errors.Is(err, command.ErrRevertOccurring): + return http.StatusBadRequest, ErrValidation, "" + case storageerrors.IsNotFoundError(err): + return http.StatusNotFound, ErrNotFound, "" + case command.IsNoScriptError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorNoScript, EncodeLink(baseError.Error()) + case vm.IsInsufficientFundError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorInsufficientFund, EncodeLink(baseError.Error()) + case command.IsCompilationFailedError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorCompilationFailed, EncodeLink(baseError.Error()) + case vm.IsMetadataOverrideError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorMetadataOverride, EncodeLink(baseError.Error()) + case vm.IsResourceResolutionInvalidTypeFromExtSourcesError(err), + vm.IsResourceResolutionMissingMetadataError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ResourceResolutionError, EncodeLink(baseError.Error()) + case errors.Is(err, context.Canceled): + return http.StatusInternalServerError, ErrContextCancelled, "" + case storageerrors.IsStorageError(err): + return http.StatusServiceUnavailable, ErrStore, "" + default: + return http.StatusInternalServerError, ErrInternal, "" + } +} + +func EncodeLink(errStr string) string { + if errStr == "" { + return "" + } + + errStr = strings.ReplaceAll(errStr, "\n", "\r\n") + payload, err := json.Marshal(map[string]string{ + "error": errStr, + }) + if err != nil { + panic(err) + } + payloadB64 := base64.StdEncoding.EncodeToString(payload) + return fmt.Sprintf("https://play.numscript.org/?payload=%v", payloadB64) +} diff --git a/internal/api/shared/resolver.go b/internal/api/shared/resolver.go new file mode 100644 index 000000000..a0a8082d5 --- /dev/null +++ 
b/internal/api/shared/resolver.go @@ -0,0 +1,97 @@ +package shared + +import ( + "math/rand" + "net/http" + "strings" + "sync" + "time" + + "github.com/pkg/errors" + + "github.com/formancehq/ledger/internal/api/backend" + "github.com/formancehq/ledger/internal/opentelemetry/tracer" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/go-chi/chi/v5" +) + +var ( + r *rand.Rand + mu sync.Mutex +) + +func init() { + r = rand.New(rand.NewSource(time.Now().UnixNano())) +} + +var letterRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") + +func randomTraceID(n int) string { + mu.Lock() + defer mu.Unlock() + + b := make([]rune, n) + for i := range b { + b[i] = letterRunes[r.Intn(len(letterRunes))] + } + return string(b) +} + +func LedgerMiddleware( + resolver backend.Backend, + excludePathFromSchemaCheck []string, +) func(handler http.Handler) http.Handler { + return func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + name := chi.URLParam(r, "ledger") + if name == "" { + w.WriteHeader(http.StatusNotFound) + return + } + + ctx, span := tracer.Start(r.Context(), name) + defer span.End() + + r = r.WithContext(ctx) + + loggerFields := map[string]any{ + "ledger": name, + } + if span.SpanContext().TraceID().IsValid() { + loggerFields["trace-id"] = span.SpanContext().TraceID().String() + } else { + loggerFields["trace-id"] = randomTraceID(10) + } + + r = r.WithContext(logging.ContextWithFields(r.Context(), loggerFields)) + + l, err := resolver.GetLedger(r.Context(), name) + if err != nil { + ResponseError(w, r, err) + return + } + + excluded := false + for _, path := range excludePathFromSchemaCheck { + if strings.HasSuffix(r.URL.Path, path) { + excluded = true + break + } + } + + if !excluded { + isUpToDate, err := l.IsDatabaseUpToDate(ctx) + if err != nil { + ResponseError(w, r, err) + return + } + if !isUpToDate { + ResponseError(w, r, errors.New("outdated schema")) + 
return + } + } + + handler.ServeHTTP(w, r.WithContext(ContextWithLedger(r.Context(), l))) + }) + } +} diff --git a/internal/api/v1/api_utils_test.go b/internal/api/v1/api_utils_test.go new file mode 100644 index 000000000..77896d3c6 --- /dev/null +++ b/internal/api/v1/api_utils_test.go @@ -0,0 +1,28 @@ +package v1_test + +import ( + "testing" + + "github.com/formancehq/ledger/internal/api/backend" + "go.uber.org/mock/gomock" +) + +func newTestingBackend(t *testing.T, expectedSchemaCheck bool) (*backend.MockBackend, *backend.MockLedger) { + ctrl := gomock.NewController(t) + mockLedger := backend.NewMockLedger(ctrl) + backend := backend.NewMockBackend(ctrl) + backend. + EXPECT(). + GetLedger(gomock.Any(), gomock.Any()). + MinTimes(0). + Return(mockLedger, nil) + t.Cleanup(func() { + ctrl.Finish() + }) + if expectedSchemaCheck { + mockLedger.EXPECT(). + IsDatabaseUpToDate(gomock.Any()). + Return(true, nil) + } + return backend, mockLedger +} diff --git a/internal/api/v1/controllers_accounts.go b/internal/api/v1/controllers_accounts.go new file mode 100644 index 000000000..38fafcc80 --- /dev/null +++ b/internal/api/v1/controllers_accounts.go @@ -0,0 +1,182 @@ +package v1 + +import ( + "encoding/json" + "fmt" + "net/http" + "strconv" + "strings" + + "github.com/formancehq/ledger/internal/api/shared" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/go-chi/chi/v5" + "github.com/pkg/errors" +) + +func buildAccountsFilterQuery(r *http.Request) (query.Builder, error) { + clauses := make([]query.Builder, 
0) + + if balance := r.URL.Query().Get("balance"); balance != "" { + if _, err := strconv.ParseInt(balance, 10, 64); err != nil { + return nil, err + } + + balanceOperator, err := getBalanceOperator(r) + if err != nil { + return nil, err + } + + switch balanceOperator { + case "e": + clauses = append(clauses, query.Match("balance", balance)) + case "ne": + clauses = append(clauses, query.Not(query.Match("balance", balance))) + case "lt": + clauses = append(clauses, query.Lt("balance", balance)) + case "lte": + clauses = append(clauses, query.Lte("balance", balance)) + case "gt": + clauses = append(clauses, query.Gt("balance", balance)) + case "gte": + clauses = append(clauses, query.Gte("balance", balance)) + default: + return nil, errors.New("invalid balance operator") + } + } + + if address := r.URL.Query().Get("address"); address != "" { + clauses = append(clauses, query.Match("address", address)) + } + + for elem, value := range r.URL.Query() { + if strings.HasPrefix(elem, "metadata") { + clauses = append(clauses, query.Match(elem, value[0])) + } + } + + if len(clauses) == 0 { + return nil, nil + } + + return query.And(clauses...), nil +} + +func countAccounts(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + + count, err := l.CountAccounts(r.Context(), ledgerstore.NewGetAccountsQuery(*options)) + if err != nil { + ResponseError(w, r, err) + return + } + + w.Header().Set("Count", fmt.Sprint(count)) + sharedapi.NoContent(w) +} + +func getAccounts(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + q := &ledgerstore.GetAccountsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := paginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), q) + if err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + 
errors.Errorf("invalid '%s' query param", QueryKeyCursor))) + return + } + } else { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + if options.QueryBuilder, err = buildAccountsFilterQuery(r); err != nil { sharedapi.BadRequest(w, ErrValidation, err); return } + q = ledgerstore.NewGetAccountsQuery(*options) + } + + cursor, err := l.GetAccountsWithVolumes(r.Context(), q) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.RenderCursor(w, *cursor) +} + +func getAccount(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + query := ledgerstore.NewGetAccountQuery(chi.URLParam(r, "address")) + if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { + query = query.WithExpandVolumes() + } + if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { + query = query.WithExpandEffectiveVolumes() + } + + acc, err := l.GetAccountWithVolumes(r.Context(), query) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, acc) +} + +func postAccountMetadata(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + if !ledger.ValidateAddress(chi.URLParam(r, "address")) { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid account address format"))) + return + } + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid metadata format"))) + return + } + + err := l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeAccount, chi.URLParam(r, "address"), m) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} + +func deleteAccountMetadata(w http.ResponseWriter, r *http.Request) { + if err := shared.LedgerFromContext(r.Context()). 
+ DeleteMetadata( + r.Context(), + getCommandParameters(r), + ledger.MetaTargetTypeAccount, + chi.URLParam(r, "address"), + chi.URLParam(r, "key"), + ); err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} diff --git a/internal/api/v1/controllers_accounts_test.go b/internal/api/v1/controllers_accounts_test.go new file mode 100644 index 000000000..a6ed93cdf --- /dev/null +++ b/internal/api/v1/controllers_accounts_test.go @@ -0,0 +1,238 @@ +package v1_test + +import ( + "net/http" + "net/http/httptest" + "net/url" + "testing" + + ledger "github.com/formancehq/ledger/internal" + v1 "github.com/formancehq/ledger/internal/api/v1" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "go.uber.org/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetAccounts(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithPageSize(v1.DefaultPageSize), + }, + { + name: "using metadata", + queryParams: url.Values{ + "metadata[roles]": []string{"admin"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.And(query.Match("metadata[roles]", "admin"))). 
+ WithPageSize(v1.DefaultPageSize), + }, + { + name: "using address", + queryParams: url.Values{ + "address": []string{"foo"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.And(query.Match("address", "foo"))). + WithPageSize(v1.DefaultPageSize), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{paginate.EncodeCursor(ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + { + name: "page size over maximum", + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithPageSize(v1.MaxPageSize), + }, + { + name: "using balance filter", + queryParams: url.Values{ + "balance": []string{"100"}, + "balanceOperator": []string{"e"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.And(query.Match("balance", "100"))). 
+ WithPageSize(v1.DefaultPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := sharedapi.Cursor[ledger.ExpandedAccount]{ + Data: []ledger.ExpandedAccount{ + { + Account: ledger.Account{ + Address: "world", + Metadata: metadata.Metadata{}, + }, + }, + }, + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + GetAccountsWithVolumes(gomock.Any(), ledgerstore.NewGetAccountsQuery(testCase.expectQuery)). + Return(&expectedCursor, nil) + } + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/accounts", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedAccount](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestGetAccount(t *testing.T) { + t.Parallel() + + account := ledger.ExpandedAccount{ + Account: ledger.Account{ + Address: "foo", + Metadata: metadata.Metadata{}, + }, + } + + backend, mock := newTestingBackend(t, true) + mock.EXPECT(). + GetAccountWithVolumes(gomock.Any(), ledgerstore.NewGetAccountQuery("foo")). 
+ Return(&account, nil) + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/accounts/foo", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedAccount](t, rec.Body) + require.Equal(t, account, response) +} + +func TestPostAccountMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + account string + body any + } + + testCases := []testCase{ + { + name: "nominal", + account: "world", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "invalid account address format", + account: "invalid-acc", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + { + name: "invalid body", + account: "world", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + backend, mock := newTestingBackend(t, true) + if testCase.expectStatusCode == http.StatusNoContent { + mock.EXPECT(). + SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, testCase.account, testCase.body). 
+ Return(nil) + } + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/accounts/"+testCase.account+"/metadata", sharedapi.Buffer(t, testCase.body)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_balances.go b/internal/api/v1/controllers_balances.go new file mode 100644 index 000000000..5d5bea8cb --- /dev/null +++ b/internal/api/v1/controllers_balances.go @@ -0,0 +1,90 @@ +package v1 + +import ( + "math/big" + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/pkg/errors" +) + +func buildAggregatedBalancesQuery(r *http.Request) (query.Builder, error) { + if address := r.URL.Query().Get("address"); address != "" { + return query.Match("address", address), nil + } + + return nil, nil +} + +func getBalancesAggregated(w http.ResponseWriter, r *http.Request) { + options, err := getPaginatedQueryOptionsOfPITFilter(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + + query := ledgerstore.NewGetAggregatedBalancesQuery(*options) + if query.Options.QueryBuilder, err = buildAggregatedBalancesQuery(r); err != nil { sharedapi.BadRequest(w, ErrValidation, err); return } + + balances, err := shared.LedgerFromContext(r.Context()).GetAggregatedBalances(r.Context(), query) + if err != nil { + 
ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, balances) +} + +func getBalances(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + q := &ledgerstore.GetAccountsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := paginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), q) + if err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, errors.Errorf("invalid '%s' query param", QueryKeyCursor))) + return + } + } else { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + if options.QueryBuilder, err = buildAccountsFilterQuery(r); err != nil { sharedapi.BadRequest(w, ErrValidation, err); return } + q = ledgerstore.NewGetAccountsQuery(*options) + } + + cursor, err := l.GetAccountsWithVolumes(r.Context(), q) + if err != nil { + ResponseError(w, r, err) + return + } + + ret := make([]map[string]map[string]*big.Int, 0) + for _, item := range cursor.Data { + e := map[string]map[string]*big.Int{ + item.Address: {}, + } + for asset, volumes := range item.Volumes { + e[item.Address][asset] = volumes.Balance() + } + ret = append(ret, e) + } + + sharedapi.RenderCursor(w, sharedapi.Cursor[map[string]map[string]*big.Int]{ + PageSize: cursor.PageSize, + HasMore: cursor.HasMore, + Previous: cursor.Previous, + Next: cursor.Next, + Data: ret, + }) +} diff --git a/internal/api/v1/controllers_balances_test.go b/internal/api/v1/controllers_balances_test.go new file mode 100644 index 000000000..bab78c5a7 --- /dev/null +++ b/internal/api/v1/controllers_balances_test.go @@ -0,0 +1,69 @@ +package v1_test + +import ( + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + ledger "github.com/formancehq/ledger/internal" + v1 "github.com/formancehq/ledger/internal/api/v1" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi 
"github.com/formancehq/stack/libs/go-libs/api" + "go.uber.org/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetBalancesAggregated(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilter] + } + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilter{}), + }, + { + name: "using address", + queryParams: url.Values{ + "address": []string{"foo"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilter{}). + WithQueryBuilder(query.Match("address", "foo")), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + expectedBalances := ledger.BalancesByAssets{ + "world": big.NewInt(-100), + } + backend, mock := newTestingBackend(t, true) + mock.EXPECT(). + GetAggregatedBalances(gomock.Any(), ledgerstore.NewGetAggregatedBalancesQuery(testCase.expectQuery)). 
+ Return(expectedBalances, nil) + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/aggregate/balances", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + balances, ok := sharedapi.DecodeSingleResponse[ledger.BalancesByAssets](t, rec.Body) + require.True(t, ok) + require.Equal(t, expectedBalances, balances) + }) + } +} diff --git a/internal/api/v1/controllers_config.go b/internal/api/v1/controllers_config.go new file mode 100644 index 000000000..f0036becd --- /dev/null +++ b/internal/api/v1/controllers_config.go @@ -0,0 +1,44 @@ +package v1 + +import ( + _ "embed" + "net/http" + + "github.com/formancehq/ledger/internal/api/backend" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" +) + +type ConfigInfo struct { + Server string `json:"server"` + Version string `json:"version"` + Config *LedgerConfig `json:"config"` +} + +type LedgerConfig struct { + LedgerStorage *LedgerStorage `json:"storage"` +} + +type LedgerStorage struct { + Driver string `json:"driver"` + Ledgers []string `json:"ledgers"` +} + +func getInfo(backend backend.Backend) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + ledgers, err := backend.ListLedgers(r.Context()) + if err != nil { + panic(err) + } + + sharedapi.Ok(w, ConfigInfo{ + Server: "ledger", + Version: backend.GetVersion(), + Config: &LedgerConfig{ + LedgerStorage: &LedgerStorage{ + Driver: "postgres", + Ledgers: ledgers, + }, + }, + }) + } +} diff --git a/internal/api/v1/controllers_config_test.go b/internal/api/v1/controllers_config_test.go new file mode 100644 index 000000000..698bc1167 --- /dev/null +++ b/internal/api/v1/controllers_config_test.go @@ -0,0 +1,51 @@ +package v1_test + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + v2 
"github.com/formancehq/ledger/internal/api/v1" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "go.uber.org/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetInfo(t *testing.T) { + t.Parallel() + + backend, _ := newTestingBackend(t, false) + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + backend. + EXPECT(). + ListLedgers(gomock.Any()). + Return([]string{"a", "b"}, nil) + + backend. + EXPECT(). + GetVersion(). + Return("latest") + + req := httptest.NewRequest(http.MethodGet, "/_info", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + info := v2.ConfigInfo{} + require.NoError(t, json.NewDecoder(rec.Body).Decode(&info)) + + require.EqualValues(t, v2.ConfigInfo{ + Server: "ledger", + Version: "latest", + Config: &v2.LedgerConfig{ + LedgerStorage: &v2.LedgerStorage{ + Driver: "postgres", + Ledgers: []string{"a", "b"}, + }, + }, + }, info) +} diff --git a/internal/api/v1/controllers_info.go b/internal/api/v1/controllers_info.go new file mode 100644 index 000000000..434da171a --- /dev/null +++ b/internal/api/v1/controllers_info.go @@ -0,0 +1,120 @@ +package v1 + +import ( + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/formancehq/stack/libs/go-libs/migrations" + "github.com/go-chi/chi/v5" + "github.com/pkg/errors" +) + +type Info struct { + Name string `json:"name"` + Storage StorageInfo `json:"storage"` +} + +type StorageInfo struct { + Migrations []migrations.Info `json:"migrations"` +} + +func getLedgerInfo(w http.ResponseWriter, r *http.Request) { + ledger 
:= shared.LedgerFromContext(r.Context()) + + var err error + res := Info{ + Name: chi.URLParam(r, "ledger"), + Storage: StorageInfo{}, + } + res.Storage.Migrations, err = ledger.GetMigrationsInfo(r.Context()) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, res) +} + +func getStats(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + stats, err := l.Stats(r.Context()) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, stats) +} + +func buildGetLogsQuery(r *http.Request) (query.Builder, error) { + clauses := make([]query.Builder, 0) + if after := r.URL.Query().Get("after"); after != "" { + clauses = append(clauses, query.Lt("id", after)) + } + + if startTime := r.URL.Query().Get("start_time"); startTime != "" { + clauses = append(clauses, query.Gte("date", startTime)) + } + if endTime := r.URL.Query().Get("end_time"); endTime != "" { + clauses = append(clauses, query.Lt("date", endTime)) + } + + if len(clauses) == 0 { + return nil, nil + } + if len(clauses) == 1 { + return clauses[0], nil + } + + return query.And(clauses...), nil +} + +func getLogs(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + query := &ledgerstore.GetLogsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := paginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), query) + if err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.Errorf("invalid '%s' query param", QueryKeyCursor))) + return + } + } else { + var err error + + pageSize, err := getPageSize(r) + if err != nil { + ResponseError(w, r, err) + return + } + + qb, err := buildGetLogsQuery(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + + query = ledgerstore.NewGetLogsQuery(ledgerstore.PaginatedQueryOptions[any]{ + QueryBuilder: qb, + PageSize: uint64(pageSize), + }) + } + + cursor, err := l.GetLogs(r.Context(), query) + if err 
!= nil { + ResponseError(w, r, err) + return + } + + sharedapi.RenderCursor(w, *cursor) +} diff --git a/internal/api/v1/controllers_info_test.go b/internal/api/v1/controllers_info_test.go new file mode 100644 index 000000000..ffd7f8b6c --- /dev/null +++ b/internal/api/v1/controllers_info_test.go @@ -0,0 +1,198 @@ +package v1_test + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "testing" + "time" + + ledger "github.com/formancehq/ledger/internal" + v1 "github.com/formancehq/ledger/internal/api/v1" + "github.com/formancehq/ledger/internal/engine" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/formancehq/stack/libs/go-libs/migrations" + "go.uber.org/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetLedgerInfo(t *testing.T) { + t.Parallel() + + backend, mock := newTestingBackend(t, false) + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + migrationInfo := []migrations.Info{ + { + Version: "1", + Name: "init", + State: "ready", + Date: time.Now().Add(-2 * time.Minute).Round(time.Second).UTC(), + }, + { + Version: "2", + Name: "fix", + State: "ready", + Date: time.Now().Add(-time.Minute).Round(time.Second).UTC(), + }, + } + + mock.EXPECT(). + GetMigrationsInfo(gomock.Any()). 
+ Return(migrationInfo, nil) + + req := httptest.NewRequest(http.MethodGet, "/xxx/_info", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + info, ok := sharedapi.DecodeSingleResponse[v1.Info](t, rec.Body) + require.True(t, ok) + + require.EqualValues(t, v1.Info{ + Name: "xxx", + Storage: v1.StorageInfo{ + Migrations: migrationInfo, + }, + }, info) +} + +func TestGetStats(t *testing.T) { + t.Parallel() + + backend, mock := newTestingBackend(t, true) + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + expectedStats := engine.Stats{ + Transactions: 10, + Accounts: 5, + } + + mock.EXPECT(). + Stats(gomock.Any()). + Return(expectedStats, nil) + + req := httptest.NewRequest(http.MethodGet, "/xxx/stats", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + stats, ok := sharedapi.DecodeSingleResponse[engine.Stats](t, rec.Body) + require.True(t, ok) + + require.EqualValues(t, expectedStats, stats) +} + +func TestGetLogs(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgerstore.PaginatedQueryOptions[any] + expectStatusCode int + expectedErrorCode string + } + + now := ledger.Now() + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), + }, + { + name: "using start time", + queryParams: url.Values{ + "start_time": []string{now.Format(ledger.DateFormat)}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil).WithQueryBuilder(query.Gte("date", now.Format(ledger.DateFormat))), + }, + { + name: "using end time", + queryParams: url.Values{ + "end_time": []string{now.Format(ledger.DateFormat)}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil). 
+ WithQueryBuilder(query.Lt("date", now.Format(ledger.DateFormat))), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{paginate.EncodeCursor(ledgerstore.NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil)))}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"xxx"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := sharedapi.Cursor[ledger.ChainedLog]{ + Data: []ledger.ChainedLog{ + *ledger.NewTransactionLog(ledger.NewTransaction(), map[string]metadata.Metadata{}). + ChainLog(nil), + }, + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + GetLogs(gomock.Any(), ledgerstore.NewGetLogsQuery(testCase.expectQuery)). 
+ Return(&expectedCursor, nil) + } + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/logs", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := sharedapi.DecodeCursorResponse[ledger.ChainedLog](t, rec.Body) + + cursorData, err := json.Marshal(cursor) + require.NoError(t, err) + + cursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(cursorData, &cursorAsMap)) + + expectedCursorData, err := json.Marshal(expectedCursor) + require.NoError(t, err) + + expectedCursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(expectedCursorData, &expectedCursorAsMap)) + + require.Equal(t, expectedCursorAsMap, cursorAsMap) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_transactions.go b/internal/api/v1/controllers_transactions.go new file mode 100644 index 000000000..919c60cdb --- /dev/null +++ b/internal/api/v1/controllers_transactions.go @@ -0,0 +1,318 @@ +package v1 + +import ( + "encoding/json" + "fmt" + "math/big" + "net/http" + "strconv" + "strings" + + "github.com/formancehq/ledger/internal/api/shared" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + 
"github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/go-chi/chi/v5" + "github.com/pkg/errors" +) + +func mapTransactionToV1(tx ledger.Transaction) any { + return struct { + ledger.Transaction + TxID *big.Int `json:"txid"` + ID *big.Int `json:"-"` + }{ + Transaction: tx, + TxID: tx.ID, + } +} + +func mapExpandedTransactionToV1(tx ledger.ExpandedTransaction) any { + return struct { + ledger.ExpandedTransaction + TxID *big.Int `json:"txid"` + ID *big.Int `json:"-"` + }{ + ExpandedTransaction: tx, + TxID: tx.ID, + } +} + +func buildGetTransactionsQuery(r *http.Request) (query.Builder, error) { + clauses := make([]query.Builder, 0) + if after := r.URL.Query().Get("after"); after != "" { + clauses = append(clauses, query.Lt("id", after)) + } + + if startTime := r.URL.Query().Get("start_time"); startTime != "" { + clauses = append(clauses, query.Gte("date", startTime)) + } + if endTime := r.URL.Query().Get("end_time"); endTime != "" { + clauses = append(clauses, query.Lt("date", endTime)) + } + + if reference := r.URL.Query().Get("reference"); reference != "" { + clauses = append(clauses, query.Match("reference", reference)) + } + if source := r.URL.Query().Get("source"); source != "" { + clauses = append(clauses, query.Match("source", source)) + } + if destination := r.URL.Query().Get("destination"); destination != "" { + clauses = append(clauses, query.Match("destination", destination)) + } + if address := r.URL.Query().Get("account"); address != "" { + clauses = append(clauses, query.Match("account", address)) + } + for elem, value := range r.URL.Query() { + if strings.HasPrefix(elem, "metadata") { + clauses = append(clauses, query.Match(elem, value[0])) + } + } + + if len(clauses) == 0 { + return nil, nil + } + if len(clauses) == 1 { + return clauses[0], nil + } + + return query.And(clauses...), nil +} + +func countTransactions(w http.ResponseWriter, r *http.Request) { + + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { 
+ sharedapi.BadRequest(w, ErrValidation, err) + return + } + options.QueryBuilder, err = buildGetTransactionsQuery(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + + count, err := shared.LedgerFromContext(r.Context()). + CountTransactions(r.Context(), ledgerstore.NewGetTransactionsQuery(*options)) + if err != nil { + ResponseError(w, r, err) + return + } + + w.Header().Set("Count", fmt.Sprint(count)) + sharedapi.NoContent(w) +} + +func getTransactions(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + query := &ledgerstore.GetTransactionsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := paginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), &query) + if err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.Errorf("invalid '%s' query param", QueryKeyCursor))) + return + } + } else { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + options.QueryBuilder, err = buildGetTransactionsQuery(r) + if err != nil { + sharedapi.BadRequest(w, ErrValidation, err) + return + } + query = ledgerstore.NewGetTransactionsQuery(*options) + } + + cursor, err := l.GetTransactions(r.Context(), query) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.RenderCursor(w, *sharedapi.MapCursor(cursor, mapExpandedTransactionToV1)) +} + +type Script struct { + ledger.Script + Vars map[string]any `json:"vars"` +} + +func (s Script) ToCore() ledger.Script { + s.Script.Vars = map[string]string{} + for k, v := range s.Vars { + switch v := v.(type) { + case string: + s.Script.Vars[k] = v + case map[string]any: + s.Script.Vars[k] = fmt.Sprintf("%s %v", v["asset"], v["amount"]) + default: + s.Script.Vars[k] = fmt.Sprint(v) + } + } + return s.Script +} + +type PostTransactionRequest struct { + Postings ledger.Postings `json:"postings"` + Script Script 
`json:"script"` + Timestamp ledger.Time `json:"timestamp"` + Reference string `json:"reference"` + Metadata metadata.Metadata `json:"metadata" swaggertype:"object"` +} + +func postTransaction(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + payload := PostTransactionRequest{} + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + ResponseError(w, r, + errorsutil.NewError(command.ErrValidation, + errors.New("invalid transaction format"))) + return + } + + if len(payload.Postings) > 0 && payload.Script.Plain != "" || + len(payload.Postings) == 0 && payload.Script.Plain == "" { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid payload: should contain either postings or script"))) + return + } else if len(payload.Postings) > 0 { + if i, err := payload.Postings.Validate(); err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, errors.Wrap(err, + fmt.Sprintf("invalid posting %d", i)))) + return + } + txData := ledger.TransactionData{ + Postings: payload.Postings, + Timestamp: payload.Timestamp, + Reference: payload.Reference, + Metadata: payload.Metadata, + } + + res, err := l.CreateTransaction(r.Context(), getCommandParameters(r), ledger.TxToScriptData(txData)) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, []any{mapTransactionToV1(*res)}) + return + } + + script := ledger.RunScript{ + Script: payload.Script.ToCore(), + Timestamp: payload.Timestamp, + Reference: payload.Reference, + Metadata: payload.Metadata, + } + + res, err := l.CreateTransaction(r.Context(), getCommandParameters(r), script) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, []any{mapTransactionToV1(*res)}) +} + +func getTransaction(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + txId, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) + if !ok { + ResponseError(w, r, 
errorsutil.NewError(command.ErrValidation, + errors.New("invalid transaction ID"))) + return + } + + query := ledgerstore.NewGetTransactionQuery(txId) + if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { + query = query.WithExpandVolumes() + } + if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { + query = query.WithExpandEffectiveVolumes() + } + + tx, err := l.GetTransactionWithVolumes(r.Context(), query) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, mapExpandedTransactionToV1(*tx)) +} + +func revertTransaction(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + transactionID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) + if !ok { + sharedapi.NotFound(w) + return + } + + tx, err := l.RevertTransaction(r.Context(), getCommandParameters(r), transactionID) + if err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.Created(w, mapTransactionToV1(*tx)) +} + +func postTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid metadata format"))) + return + } + + txID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) + if !ok { + sharedapi.NotFound(w) + return + } + + if err := l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, txID, m); err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} + +func deleteTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + transactionID, err := strconv.ParseUint(chi.URLParam(r, "id"), 10, 64) + if err != nil { + ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid transaction ID"))) + return + } + + metadataKey := chi.URLParam(r, 
"key") + + if err := l.DeleteMetadata(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, transactionID, metadataKey); err != nil { + ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} diff --git a/internal/api/v1/controllers_transactions_test.go b/internal/api/v1/controllers_transactions_test.go new file mode 100644 index 000000000..153c57195 --- /dev/null +++ b/internal/api/v1/controllers_transactions_test.go @@ -0,0 +1,625 @@ +package v1_test + +import ( + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + ledger "github.com/formancehq/ledger/internal" + v1 "github.com/formancehq/ledger/internal/api/v1" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestPostTransactions(t *testing.T) { + type testCase struct { + name string + expectedPreview bool + expectedRunScript ledger.RunScript + payload any + expectedStatusCode int + expectedErrorCode string + queryParams url.Values + } + + testCases := []testCase{ + { + name: "using plain numscript", + payload: v1.PostTransactionRequest{ + Script: v1.Script{ + Script: ledger.Script{ + Plain: `XXX`, + }, + }, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `XXX`, + Vars: map[string]string{}, + }, + }, + }, + { + name: "using plain numscript with variables", + payload: v1.PostTransactionRequest{ + Script: v1.Script{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]any{ + "val": "USD/2 100", + }, + 
}, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript with variables (legacy format)", + payload: v1.PostTransactionRequest{ + Script: v1.Script{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]any{ + "val": map[string]any{ + "asset": "USD/2", + "amount": 100, + }, + }, + }, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript and dry run", + payload: v1.PostTransactionRequest{ + Script: v1.Script{ + Script: ledger.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + }, + }, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + Vars: map[string]string{}, + }, + }, + expectedPreview: true, + queryParams: url.Values{ + "preview": []string{"true"}, + }, + }, + { + name: "using JSON postings", + payload: v1.PostTransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + )), + }, + { + name: "using JSON postings and dry run", + queryParams: url.Values{ + "preview": []string{"true"}, + }, + payload: v1.PostTransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + expectedPreview: true, + expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( + 
ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + )), + }, + { + name: "no postings or script", + payload: v1.PostTransactionRequest{}, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + { + name: "postings and script", + payload: v1.PostTransactionRequest{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + Script: v1.Script{ + Script: ledger.Script{ + Plain: ` + send [COIN 100] ( + source = @world + destination = @bob + )`, + }, + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + { + name: "using invalid body", + payload: "not a valid payload", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + } + + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + if testCase.expectedStatusCode == 0 { + testCase.expectedStatusCode = http.StatusOK + } + + expectedTx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { + mockLedger.EXPECT(). + CreateTransaction(gomock.Any(), command.Parameters{ + DryRun: tc.expectedPreview, + }, testCase.expectedRunScript). 
+ Return(expectedTx, nil) + } + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions", sharedapi.Buffer(t, testCase.payload)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectedStatusCode, rec.Code) + if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { + tx, ok := sharedapi.DecodeSingleResponse[[]ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, *expectedTx, tx[0]) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestPostTransactionMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + body any + } + + testCases := []testCase{ + { + name: "nominal", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "invalid body", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + backend, mock := newTestingBackend(t, true) + if testCase.expectStatusCode == http.StatusNoContent { + mock.EXPECT(). + SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeTransaction, big.NewInt(0), testCase.body). 
+ Return(nil) + } + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/metadata", sharedapi.Buffer(t, testCase.body)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestGetTransaction(t *testing.T) { + t.Parallel() + + tx := ledger.ExpandTransaction( + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + nil, + ) + + backend, mock := newTestingBackend(t, true) + mock.EXPECT(). + GetTransactionWithVolumes(gomock.Any(), ledgerstore.NewGetTransactionQuery(big.NewInt(0))). + Return(&tx, nil) + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions/0", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedTransaction](t, rec.Body) + require.Equal(t, tx, response) +} + +func TestGetTransactions(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + now := ledger.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using metadata", + queryParams: url.Values{ + "metadata[roles]": []string{"admin"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("metadata[roles]", "admin")), + }, + { + name: "using startTime", + queryParams: url.Values{ + "start_time": []string{now.Format(ledger.DateFormat)}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gte("date", now.Format(ledger.DateFormat))), + }, + { + name: "using endTime", + queryParams: url.Values{ + "end_time": []string{now.Format(ledger.DateFormat)}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lt("date", now.Format(ledger.DateFormat))), + }, + { + name: "using account", + queryParams: url.Values{ + "account": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "xxx")), + }, + { + name: "using reference", + queryParams: url.Values{ + "reference": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reference", "xxx")), + }, + { + name: "using destination", + queryParams: url.Values{ + "destination": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("destination", "xxx")), + }, + { + name: "using source", + queryParams: url.Values{ + "source": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("source", "xxx")), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{paginate.EncodeCursor(ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: v1.ErrValidation, + }, + { + name: "page size over maximum", + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithPageSize(v1.MaxPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := sharedapi.Cursor[ledger.ExpandedTransaction]{ + Data: []ledger.ExpandedTransaction{ + ledger.ExpandTransaction( + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + nil, + ), + }, + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + GetTransactions(gomock.Any(), ledgerstore.NewGetTransactionsQuery(testCase.expectQuery)). 
+ Return(&expectedCursor, nil) + } + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedTransaction](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestCountTransactions(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + now := ledger.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using metadata", + queryParams: url.Values{ + "metadata[roles]": []string{"admin"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[roles]", "admin")), + }, + { + name: "using startTime", + queryParams: url.Values{ + "start_time": []string{now.Format(ledger.DateFormat)}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gte("date", now.Format(ledger.DateFormat))), + }, + { + name: "using endTime", + queryParams: url.Values{ + "end_time": []string{now.Format(ledger.DateFormat)}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Lt("date", now.Format(ledger.DateFormat))), + }, + { + name: "using account", + queryParams: url.Values{ + "account": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "xxx")), + }, + { + name: "using reference", + queryParams: url.Values{ + "reference": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reference", "xxx")), + }, + { + name: "using destination", + queryParams: url.Values{ + "destination": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("destination", "xxx")), + }, + { + name: "using source", + queryParams: url.Values{ + "source": []string{"xxx"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("source", "xxx")), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + CountTransactions(gomock.Any(), ledgerstore.NewGetTransactionsQuery(testCase.expectQuery)). 
+ Return(uint64(10), nil) + } + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodHead, "/xxx/transactions", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + require.Equal(t, "10", rec.Header().Get("Count")) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestRevertTransaction(t *testing.T) { + + expectedTx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + backend, mockLedger := newTestingBackend(t, true) + mockLedger. + EXPECT(). + RevertTransaction(gomock.Any(), command.Parameters{}, big.NewInt(0)). + Return(expectedTx, nil) + + router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/revert", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusCreated, rec.Code) + tx, ok := sharedapi.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, *expectedTx, tx) +} diff --git a/internal/api/v1/errors.go b/internal/api/v1/errors.go new file mode 100644 index 000000000..5d4615107 --- /dev/null +++ b/internal/api/v1/errors.go @@ -0,0 +1,110 @@ +package v1 + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/machine/vm" + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/pkg/errors" +) + +const ( + 
ErrInternal = "INTERNAL" + ErrConflict = "CONFLICT" + ErrInsufficientFund = "INSUFFICIENT_FUND" + ErrValidation = "VALIDATION" + ErrContextCancelled = "CONTEXT_CANCELLED" + ErrStore = "STORE" + ErrNotFound = "NOT_FOUND" + ErrScriptCompilationFailed = "COMPILATION_FAILED" + ErrScriptNoScript = "NO_SCRIPT" + ErrScriptMetadataOverride = "METADATA_OVERRIDE" + ScriptErrorInsufficientFund = "INSUFFICIENT_FUND" + ScriptErrorCompilationFailed = "COMPILATION_FAILED" + ScriptErrorNoScript = "NO_SCRIPT" + ScriptErrorMetadataOverride = "METADATA_OVERRIDE" + ResourceResolutionError = "RESOURCE_RESOLUTION_ERROR" +) + +func ResponseError(w http.ResponseWriter, r *http.Request, err error) { + status, code, details := coreErrorToErrorCode(err) + + baseError := errors.Cause(err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + if status < 500 { + err := json.NewEncoder(w).Encode(api.ErrorResponse{ + ErrorCode: code, + ErrorMessage: baseError.Error(), + Details: details, + }) + if err != nil { + panic(err) + } + } else { + logging.FromContext(r.Context()).Errorf("internal server error: %s", err) + } +} + +func coreErrorToErrorCode(err error) (int, string, string) { + switch { + case command.IsConflictError(err): + return http.StatusConflict, ErrConflict, "" + case + command.IsValidationError(err), + command.IsPastTransactionError(err), + command.IsNoPostingsError(err), + errors.Is(err, command.ErrAlreadyReverted), + errors.Is(err, command.ErrRevertOccurring): + return http.StatusBadRequest, ErrValidation, "" + case storageerrors.IsNotFoundError(err): + return http.StatusNotFound, ErrNotFound, "" + case command.IsNoScriptError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorNoScript, EncodeLink(baseError.Error()) + case vm.IsInsufficientFundError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorInsufficientFund, EncodeLink(baseError.Error()) + case command.IsCompilationFailedError(err): 
+ baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorCompilationFailed, EncodeLink(baseError.Error()) + case vm.IsMetadataOverrideError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ScriptErrorMetadataOverride, EncodeLink(baseError.Error()) + case vm.IsResourceResolutionInvalidTypeFromExtSourcesError(err), + vm.IsResourceResolutionMissingMetadataError(err): + baseError := errors.Cause(err) + return http.StatusBadRequest, ResourceResolutionError, EncodeLink(baseError.Error()) + case errors.Is(err, context.Canceled): + return http.StatusInternalServerError, ErrContextCancelled, "" + case storageerrors.IsStorageError(err): + return http.StatusServiceUnavailable, ErrStore, "" + default: + return http.StatusInternalServerError, ErrInternal, "" + } +} + +func EncodeLink(errStr string) string { + if errStr == "" { + return "" + } + + errStr = strings.ReplaceAll(errStr, "\n", "\r\n") + payload, err := json.Marshal(map[string]string{ + "error": errStr, + }) + if err != nil { + panic(err) + } + payloadB64 := base64.StdEncoding.EncodeToString(payload) + return fmt.Sprintf("https://play.numscript.org/?payload=%v", payloadB64) +} diff --git a/internal/api/v1/middlewares_metrics.go b/internal/api/v1/middlewares_metrics.go new file mode 100644 index 000000000..c552ad2e3 --- /dev/null +++ b/internal/api/v1/middlewares_metrics.go @@ -0,0 +1,54 @@ +package v1 + +import ( + "net/http" + "time" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/go-chi/chi/v5" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric" +) + +type statusRecorder struct { + http.ResponseWriter + Status int +} + +func newStatusRecorder(w http.ResponseWriter) *statusRecorder { + return &statusRecorder{ResponseWriter: w} +} + +func (r *statusRecorder) WriteHeader(status int) { + r.Status = status + r.ResponseWriter.WriteHeader(status) +} + +func 
MetricsMiddleware(globalMetricsRegistry metrics.GlobalRegistry) func(h http.Handler) http.Handler { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + attrs := []attribute.KeyValue{} + + ctx := r.Context() + name := chi.URLParam(r, "ledger") + if name != "" { + attrs = append(attrs, attribute.String("ledger", name)) + } + + recorder := newStatusRecorder(w) + + start := ledger.Now() + h.ServeHTTP(recorder, r) + latency := time.Since(start.Time) + + attrs = append(attrs, + attribute.String("route", chi.RouteContext(r.Context()).RoutePattern())) + + globalMetricsRegistry.APILatencies().Record(ctx, latency.Milliseconds(), metric.WithAttributes(attrs...)) + + attrs = append(attrs, attribute.Int("status", recorder.Status)) + globalMetricsRegistry.StatusCodes().Add(ctx, 1, metric.WithAttributes(attrs...)) + }) + } +} diff --git a/internal/api/v1/query.go b/internal/api/v1/query.go new file mode 100644 index 000000000..6807bfa17 --- /dev/null +++ b/internal/api/v1/query.go @@ -0,0 +1,50 @@ +package v1 + +import ( + "errors" + "net/http" + "strconv" + + "github.com/formancehq/ledger/internal/storage/paginate" +) + +const ( + MaxPageSize = 1000 + DefaultPageSize = paginate.QueryDefaultPageSize + + QueryKeyCursor = "cursor" + QueryKeyPageSize = "pageSize" + QueryKeyBalanceOperator = "balanceOperator" +) + +func getPageSize(c *http.Request) (uint, error) { + pageSizeParam := c.URL.Query().Get(QueryKeyPageSize) + if pageSizeParam == "" { + return DefaultPageSize, nil + } + + var pageSize uint64 + var err error + if pageSizeParam != "" { + pageSize, err = strconv.ParseUint(pageSizeParam, 10, 32) + if err != nil { + return 0, errors.New("invalid page size") + } + } + + if pageSize > MaxPageSize { + return MaxPageSize, nil + } + + return uint(pageSize), nil +} + +func getBalanceOperator(c *http.Request) (string, error) { + balanceOperator := "eq" + balanceOperatorStr := c.URL.Query().Get(QueryKeyBalanceOperator) 
+ if balanceOperatorStr != "" { + return balanceOperatorStr, nil + } + + return balanceOperator, nil +} diff --git a/internal/api/v1/routes.go b/internal/api/v1/routes.go new file mode 100644 index 000000000..4316a3e9b --- /dev/null +++ b/internal/api/v1/routes.go @@ -0,0 +1,77 @@ +package v1 + +import ( + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + "github.com/formancehq/ledger/internal/api/backend" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/stack/libs/go-libs/health" + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + "github.com/go-chi/cors" + "github.com/riandyrn/otelchi" +) + +func NewRouter(backend backend.Backend, healthController *health.HealthController, globalMetricsRegistry metrics.GlobalRegistry) chi.Router { + router := chi.NewMux() + + router.Use( + cors.New(cors.Options{ + AllowOriginFunc: func(r *http.Request, origin string) bool { + return true + }, + AllowCredentials: true, + }).Handler, + MetricsMiddleware(globalMetricsRegistry), + middleware.Recoverer, + ) + + router.Get("/_healthcheck", healthController.Check) + + router.Group(func(router chi.Router) { + router.Use(otelchi.Middleware("ledger")) + router.Get("/_info", getInfo(backend)) + + router.Route("/{ledger}", func(router chi.Router) { + router.Use(func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + handler.ServeHTTP(w, r) + }) + }) + router.Use(shared.LedgerMiddleware(backend, []string{"/_info"})) + + // LedgerController + router.Get("/_info", getLedgerInfo) + router.Get("/stats", getStats) + router.Get("/logs", getLogs) + + // AccountController + router.Get("/accounts", getAccounts) + router.Head("/accounts", countAccounts) + router.Get("/accounts/{address}", getAccount) + router.Post("/accounts/{address}/metadata", postAccountMetadata) + router.Delete("/accounts/{address}/metadata/{key}", deleteAccountMetadata) + + // 
TransactionController + router.Get("/transactions", getTransactions) + router.Head("/transactions", countTransactions) + + router.Post("/transactions", postTransaction) + router.Post("/transactions/batch", func(w http.ResponseWriter, r *http.Request) { + http.Error(w, "not supported", http.StatusBadRequest) + }) + + router.Get("/transactions/{id}", getTransaction) + router.Post("/transactions/{id}/revert", revertTransaction) + router.Post("/transactions/{id}/metadata", postTransactionMetadata) + router.Delete("/transactions/{id}/metadata/{key}", deleteTransactionMetadata) + + router.Get("/balances", getBalances) + router.Get("/aggregate/balances", getBalancesAggregated) + }) + }) + + return router +} diff --git a/internal/api/v1/utils.go b/internal/api/v1/utils.go new file mode 100644 index 000000000..592ee757f --- /dev/null +++ b/internal/api/v1/utils.go @@ -0,0 +1,97 @@ +package v1 + +import ( + "net/http" + "strings" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/pointer" +) + +func getPITFilter(r *http.Request) (*ledgerstore.PITFilter, error) { + pitString := r.URL.Query().Get("pit") + if pitString == "" { + return &ledgerstore.PITFilter{}, nil + } + pit, err := ledger.ParseTime(pitString) + if err != nil { + return nil, err + } + return &ledgerstore.PITFilter{ + PIT: &pit, + }, nil +} + +func getPITFilterWithVolumes(r *http.Request) (*ledgerstore.PITFilterWithVolumes, error) { + pit, err := getPITFilter(r) + if err != nil { + return nil, err + } + return &ledgerstore.PITFilterWithVolumes{ + PITFilter: *pit, + ExpandVolumes: collectionutils.Contains(r.URL.Query()["expand"], "volumes"), + ExpandEffectiveVolumes: collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes"), + 
}, nil +} + +func getQueryBuilder(r *http.Request) (query.Builder, error) { + return query.ParseJSON(r.URL.Query().Get("query")) +} + +func getPaginatedQueryOptionsOfPITFilterWithVolumes(r *http.Request) (*ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes], error) { + qb, err := getQueryBuilder(r) + if err != nil { + return nil, err + } + + pitFilter, err := getPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + + pageSize, err := getPageSize(r) + if err != nil { + return nil, err + } + + return pointer.For(ledgerstore.NewPaginatedQueryOptions(*pitFilter). + WithQueryBuilder(qb). + WithPageSize(uint64(pageSize))), nil +} + +func getPaginatedQueryOptionsOfPITFilter(r *http.Request) (*ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilter], error) { + qb, err := getQueryBuilder(r) + if err != nil { + return nil, err + } + + pitFilter, err := getPITFilter(r) + if err != nil { + return nil, err + } + + pageSize, err := getPageSize(r) + if err != nil { + return nil, err + } + + return pointer.For(ledgerstore.NewPaginatedQueryOptions(*pitFilter). + WithQueryBuilder(qb). 
+ WithPageSize(uint64(pageSize))), nil +} + +func getCommandParameters(r *http.Request) command.Parameters { + dryRunAsString := r.URL.Query().Get("preview") + dryRun := strings.ToUpper(dryRunAsString) == "YES" || strings.ToUpper(dryRunAsString) == "TRUE" || dryRunAsString == "1" + + idempotencyKey := r.Header.Get("Idempotency-Key") + + return command.Parameters{ + DryRun: dryRun, + IdempotencyKey: idempotencyKey, + } +} diff --git a/internal/api/v2/api_utils_test.go b/internal/api/v2/api_utils_test.go new file mode 100644 index 000000000..81e6e6639 --- /dev/null +++ b/internal/api/v2/api_utils_test.go @@ -0,0 +1,29 @@ +package v2_test + +import ( + "testing" + + "go.uber.org/mock/gomock" + + "github.com/formancehq/ledger/internal/api/backend" +) + +func newTestingBackend(t *testing.T, expectedSchemaCheck bool) (*backend.MockBackend, *backend.MockLedger) { + ctrl := gomock.NewController(t) + mockLedger := backend.NewMockLedger(ctrl) + backend := backend.NewMockBackend(ctrl) + backend. + EXPECT(). + GetLedger(gomock.Any(), gomock.Any()). + MinTimes(0). + Return(mockLedger, nil) + t.Cleanup(func() { + ctrl.Finish() + }) + if expectedSchemaCheck { + mockLedger.EXPECT(). + IsDatabaseUpToDate(gomock.Any()). 
+ Return(true, nil) + } + return backend, mockLedger +} diff --git a/internal/api/v2/controllers_accounts.go b/internal/api/v2/controllers_accounts.go new file mode 100644 index 000000000..1f7101459 --- /dev/null +++ b/internal/api/v2/controllers_accounts.go @@ -0,0 +1,136 @@ +package v2 + +import ( + "encoding/json" + "fmt" + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/go-chi/chi/v5" + "github.com/pkg/errors" +) + +func countAccounts(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + return + } + + count, err := l.CountAccounts(r.Context(), ledgerstore.NewGetAccountsQuery(*options)) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + w.Header().Set("Count", fmt.Sprint(count)) + sharedapi.NoContent(w) +} + +func getAccounts(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + query := &ledgerstore.GetAccountsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := paginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), query) + if err != nil { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.Errorf("invalid '%s' query param", QueryKeyCursor))) + return + } + } else { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + 
return + } + query = ledgerstore.NewGetAccountsQuery(*options) + } + + cursor, err := l.GetAccountsWithVolumes(r.Context(), query) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.RenderCursor(w, *cursor) +} + +func getAccount(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + query := ledgerstore.NewGetAccountQuery(chi.URLParam(r, "address")) + if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { + query = query.WithExpandVolumes() + } + if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { + query = query.WithExpandEffectiveVolumes() + } + pitFilter, err := getPITFilter(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + return + } + query.PITFilter = *pitFilter + + acc, err := l.GetAccountWithVolumes(r.Context(), query) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, acc) +} + +func postAccountMetadata(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + if !ledger.ValidateAddress(chi.URLParam(r, "address")) { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid account address format"))) + return + } + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid metadata format"))) + return + } + + err := l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeAccount, chi.URLParam(r, "address"), m) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} + +func deleteAccountMetadata(w http.ResponseWriter, r *http.Request) { + if err := shared.LedgerFromContext(r.Context()). 
+ DeleteMetadata( + r.Context(), + getCommandParameters(r), + ledger.MetaTargetTypeAccount, + chi.URLParam(r, "address"), + chi.URLParam(r, "key"), + ); err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} diff --git a/internal/api/v2/controllers_accounts_test.go b/internal/api/v2/controllers_accounts_test.go new file mode 100644 index 000000000..ba294ab0d --- /dev/null +++ b/internal/api/v2/controllers_accounts_test.go @@ -0,0 +1,237 @@ +package v2_test + +import ( + "bytes" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + "github.com/formancehq/ledger/internal/api/shared" + + ledger "github.com/formancehq/ledger/internal" + v2 "github.com/formancehq/ledger/internal/api/v2" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetAccounts(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithPageSize(v2.DefaultPageSize), + }, + { + name: "using metadata", + body: `{"$match": { "metadata[roles]": "admin" }}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[roles]", "admin")). 
+ WithPageSize(v2.DefaultPageSize), + }, + { + name: "using address", + body: `{"$match": { "address": "foo" }}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("address", "foo")). + WithPageSize(v2.DefaultPageSize), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{paginate.EncodeCursor(ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + { + name: "page size over maximum", + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithPageSize(v2.MaxPageSize), + }, + { + name: "using balance filter", + body: `{"$lt": { "balance[USD/2]": 100 }}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lt("balance[USD/2]", float64(100))). 
+ WithPageSize(v2.DefaultPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := sharedapi.Cursor[ledger.ExpandedAccount]{ + Data: []ledger.ExpandedAccount{ + { + Account: ledger.Account{ + Address: "world", + Metadata: metadata.Metadata{}, + }, + }, + }, + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + GetAccountsWithVolumes(gomock.Any(), ledgerstore.NewGetAccountsQuery(testCase.expectQuery)). + Return(&expectedCursor, nil) + } + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/accounts", bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + if testCase.queryParams != nil { + req.URL.RawQuery = testCase.queryParams.Encode() + } + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedAccount](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestGetAccount(t *testing.T) { + t.Parallel() + + account := ledger.ExpandedAccount{ + Account: ledger.Account{ + Address: "foo", + Metadata: metadata.Metadata{}, + }, + } + + backend, mock := newTestingBackend(t, true) + mock.EXPECT(). + GetAccountWithVolumes(gomock.Any(), ledgerstore.NewGetAccountQuery("foo")). 
+ Return(&account, nil) + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/accounts/foo", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedAccount](t, rec.Body) + require.Equal(t, account, response) +} + +func TestPostAccountMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + account string + body any + } + + testCases := []testCase{ + { + name: "nominal", + account: "world", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "invalid account address format", + account: "invalid-acc", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + { + name: "invalid body", + account: "world", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + backend, mock := newTestingBackend(t, true) + if testCase.expectStatusCode == http.StatusNoContent { + mock.EXPECT(). + SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, testCase.account, testCase.body). 
+ Return(nil) + } + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/accounts/"+testCase.account+"/metadata", sharedapi.Buffer(t, testCase.body)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_balances.go b/internal/api/v2/controllers_balances.go new file mode 100644 index 000000000..d2063667e --- /dev/null +++ b/internal/api/v2/controllers_balances.go @@ -0,0 +1,27 @@ +package v2 + +import ( + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + "github.com/formancehq/ledger/internal/storage/ledgerstore" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" +) + +func getBalancesAggregated(w http.ResponseWriter, r *http.Request) { + options, err := getPaginatedQueryOptionsOfPITFilter(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + return + } + + balances, err := shared.LedgerFromContext(r.Context()). 
+ GetAggregatedBalances(r.Context(), ledgerstore.NewGetAggregatedBalancesQuery(*options)) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, balances) +} diff --git a/internal/api/v2/controllers_balances_test.go b/internal/api/v2/controllers_balances_test.go new file mode 100644 index 000000000..3e5620a9d --- /dev/null +++ b/internal/api/v2/controllers_balances_test.go @@ -0,0 +1,71 @@ +package v2_test + +import ( + "bytes" + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + ledger "github.com/formancehq/ledger/internal" + v2 "github.com/formancehq/ledger/internal/api/v2" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetBalancesAggregated(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilter] + } + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilter{}), + }, + { + name: "using address", + body: `{"$match": {"address": "foo"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilter{}). + WithQueryBuilder(query.Match("address", "foo")), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + expectedBalances := ledger.BalancesByAssets{ + "world": big.NewInt(-100), + } + backend, mock := newTestingBackend(t, true) + mock.EXPECT(). + GetAggregatedBalances(gomock.Any(), ledgerstore.NewGetAggregatedBalancesQuery(testCase.expectQuery)). 
+ Return(expectedBalances, nil) + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/aggregate/balances", bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + if testCase.queryParams != nil { + req.URL.RawQuery = testCase.queryParams.Encode() + } + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + balances, ok := sharedapi.DecodeSingleResponse[ledger.BalancesByAssets](t, rec.Body) + require.True(t, ok) + require.Equal(t, expectedBalances, balances) + }) + } +} diff --git a/internal/api/v2/controllers_config.go b/internal/api/v2/controllers_config.go new file mode 100644 index 000000000..a159dd632 --- /dev/null +++ b/internal/api/v2/controllers_config.go @@ -0,0 +1,44 @@ +package v2 + +import ( + _ "embed" + "net/http" + + "github.com/formancehq/ledger/internal/api/backend" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" +) + +type ConfigInfo struct { + Server string `json:"server"` + Version string `json:"version"` + Config *LedgerConfig `json:"config"` +} + +type LedgerConfig struct { + LedgerStorage *LedgerStorage `json:"storage"` +} + +type LedgerStorage struct { + Driver string `json:"driver"` + Ledgers []string `json:"ledgers"` +} + +func getInfo(backend backend.Backend) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + ledgers, err := backend.ListLedgers(r.Context()) + if err != nil { + panic(err) + } + + sharedapi.RawOk(w, ConfigInfo{ + Server: "ledger", + Version: backend.GetVersion(), + Config: &LedgerConfig{ + LedgerStorage: &LedgerStorage{ + Driver: "postgres", + Ledgers: ledgers, + }, + }, + }) + } +} diff --git a/internal/api/v2/controllers_config_test.go b/internal/api/v2/controllers_config_test.go new file mode 100644 index 000000000..c2fe56c3b --- /dev/null +++ b/internal/api/v2/controllers_config_test.go @@ -0,0 +1,51 @@ +package v2_test + +import ( + "encoding/json" + 
"net/http" + "net/http/httptest" + "testing" + + v2 "github.com/formancehq/ledger/internal/api/v2" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetInfo(t *testing.T) { + t.Parallel() + + backend, _ := newTestingBackend(t, false) + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + backend. + EXPECT(). + ListLedgers(gomock.Any()). + Return([]string{"a", "b"}, nil) + + backend. + EXPECT(). + GetVersion(). + Return("latest") + + req := httptest.NewRequest(http.MethodGet, "/_info", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + info := v2.ConfigInfo{} + require.NoError(t, json.NewDecoder(rec.Body).Decode(&info)) + + require.EqualValues(t, v2.ConfigInfo{ + Server: "ledger", + Version: "latest", + Config: &v2.LedgerConfig{ + LedgerStorage: &v2.LedgerStorage{ + Driver: "postgres", + Ledgers: []string{"a", "b"}, + }, + }, + }, info) +} diff --git a/internal/api/v2/controllers_info.go b/internal/api/v2/controllers_info.go new file mode 100644 index 000000000..d5801e0fd --- /dev/null +++ b/internal/api/v2/controllers_info.go @@ -0,0 +1,96 @@ +package v2 + +import ( + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/formancehq/stack/libs/go-libs/migrations" + "github.com/go-chi/chi/v5" + "github.com/pkg/errors" +) + +type Info struct { + Name string `json:"name"` + Storage StorageInfo `json:"storage"` +} + +type StorageInfo struct { + Migrations []migrations.Info `json:"migrations"` +} + +func getLedgerInfo(w http.ResponseWriter, r *http.Request) { + ledger := 
shared.LedgerFromContext(r.Context()) + + var err error + res := Info{ + Name: chi.URLParam(r, "ledger"), + Storage: StorageInfo{}, + } + res.Storage.Migrations, err = ledger.GetMigrationsInfo(r.Context()) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, res) +} + +func getStats(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + stats, err := l.Stats(r.Context()) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, stats) +} + +func getLogs(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + query := &ledgerstore.GetLogsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := paginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), query) + if err != nil { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.Errorf("invalid '%s' query param", QueryKeyCursor))) + return + } + } else { + var err error + + pageSize, err := getPageSize(r) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + qb, err := getQueryBuilder(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + return + } + + query = ledgerstore.NewGetLogsQuery(ledgerstore.PaginatedQueryOptions[any]{ + QueryBuilder: qb, + PageSize: pageSize, + }) + } + + cursor, err := l.GetLogs(r.Context(), query) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.RenderCursor(w, *cursor) +} diff --git a/internal/api/v2/controllers_info_test.go b/internal/api/v2/controllers_info_test.go new file mode 100644 index 000000000..39c9fdde9 --- /dev/null +++ b/internal/api/v2/controllers_info_test.go @@ -0,0 +1,201 @@ +package v2_test + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "testing" + "time" + + "github.com/formancehq/ledger/internal/api/shared" + + ledger "github.com/formancehq/ledger/internal" + v2 
"github.com/formancehq/ledger/internal/api/v2" + "github.com/formancehq/ledger/internal/engine" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/formancehq/stack/libs/go-libs/migrations" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestGetLedgerInfo(t *testing.T) { + t.Parallel() + + backend, mock := newTestingBackend(t, false) + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + migrationInfo := []migrations.Info{ + { + Version: "1", + Name: "init", + State: "ready", + Date: time.Now().Add(-2 * time.Minute).Round(time.Second).UTC(), + }, + { + Version: "2", + Name: "fix", + State: "ready", + Date: time.Now().Add(-time.Minute).Round(time.Second).UTC(), + }, + } + + mock.EXPECT(). + GetMigrationsInfo(gomock.Any()). + Return(migrationInfo, nil) + + req := httptest.NewRequest(http.MethodGet, "/xxx/_info", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + info, ok := sharedapi.DecodeSingleResponse[v2.Info](t, rec.Body) + require.True(t, ok) + + require.EqualValues(t, v2.Info{ + Name: "xxx", + Storage: v2.StorageInfo{ + Migrations: migrationInfo, + }, + }, info) +} + +func TestGetStats(t *testing.T) { + t.Parallel() + + backend, mock := newTestingBackend(t, true) + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + expectedStats := engine.Stats{ + Transactions: 10, + Accounts: 5, + } + + mock.EXPECT(). + Stats(gomock.Any()). 
+ Return(expectedStats, nil) + + req := httptest.NewRequest(http.MethodGet, "/xxx/stats", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + stats, ok := sharedapi.DecodeSingleResponse[engine.Stats](t, rec.Body) + require.True(t, ok) + + require.EqualValues(t, expectedStats, stats) +} + +func TestGetLogs(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgerstore.PaginatedQueryOptions[any] + expectStatusCode int + expectedErrorCode string + } + + now := ledger.Now() + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), + }, + { + name: "using start time", + body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(ledger.DateFormat)), + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil).WithQueryBuilder(query.Gte("date", now.Format(ledger.DateFormat))), + }, + { + name: "using end time", + body: fmt.Sprintf(`{"$lt": {"date": "%s"}}`, now.Format(ledger.DateFormat)), + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil). 
+ WithQueryBuilder(query.Lt("date", now.Format(ledger.DateFormat))), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{paginate.EncodeCursor(ledgerstore.NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil)))}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"xxx"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := sharedapi.Cursor[ledger.ChainedLog]{ + Data: []ledger.ChainedLog{ + *ledger.NewTransactionLog(ledger.NewTransaction(), map[string]metadata.Metadata{}). + ChainLog(nil), + }, + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + GetLogs(gomock.Any(), ledgerstore.NewGetLogsQuery(testCase.expectQuery)). 
+ Return(&expectedCursor, nil) + } + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/logs", bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + if testCase.queryParams != nil { + req.URL.RawQuery = testCase.queryParams.Encode() + } + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := sharedapi.DecodeCursorResponse[ledger.ChainedLog](t, rec.Body) + + cursorData, err := json.Marshal(cursor) + require.NoError(t, err) + + cursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(cursorData, &cursorAsMap)) + + expectedCursorData, err := json.Marshal(expectedCursor) + require.NoError(t, err) + + expectedCursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(expectedCursorData, &expectedCursorAsMap)) + + require.Equal(t, expectedCursorAsMap, cursorAsMap) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_transactions.go b/internal/api/v2/controllers_transactions.go new file mode 100644 index 000000000..7b3c5c38b --- /dev/null +++ b/internal/api/v2/controllers_transactions.go @@ -0,0 +1,249 @@ +package v2 + +import ( + "encoding/json" + "fmt" + "math/big" + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/formancehq/stack/libs/go-libs/metadata" + 
"github.com/go-chi/chi/v5" + "github.com/pkg/errors" +) + +func countTransactions(w http.ResponseWriter, r *http.Request) { + + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + return + } + + count, err := shared.LedgerFromContext(r.Context()). + CountTransactions(r.Context(), ledgerstore.NewGetTransactionsQuery(*options)) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + w.Header().Set("Count", fmt.Sprint(count)) + sharedapi.NoContent(w) +} + +func getTransactions(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + query := &ledgerstore.GetTransactionsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := paginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), &query) + if err != nil { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.Errorf("invalid '%s' query param", QueryKeyCursor))) + return + } + } else { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + return + } + query = ledgerstore.NewGetTransactionsQuery(*options) + } + + cursor, err := l.GetTransactions(r.Context(), query) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.RenderCursor(w, *cursor) +} + +type Script struct { + ledger.Script + Vars map[string]any `json:"vars"` +} + +func (s Script) ToCore() ledger.Script { + s.Script.Vars = map[string]string{} + for k, v := range s.Vars { + switch v := v.(type) { + case string: + s.Script.Vars[k] = v + case map[string]any: + s.Script.Vars[k] = fmt.Sprintf("%s %v", v["asset"], v["amount"]) + default: + s.Script.Vars[k] = fmt.Sprint(v) + } + } + return s.Script +} + +type PostTransactionRequest struct { + Postings ledger.Postings `json:"postings"` + Script Script `json:"script"` + Timestamp ledger.Time `json:"timestamp"` + Reference string 
`json:"reference"` + Metadata metadata.Metadata `json:"metadata" swaggertype:"object"` +} + +func postTransaction(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + payload := PostTransactionRequest{} + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + shared.ResponseError(w, r, + errorsutil.NewError(command.ErrValidation, + errors.New("invalid transaction format"))) + return + } + + if len(payload.Postings) > 0 && payload.Script.Plain != "" || + len(payload.Postings) == 0 && payload.Script.Plain == "" { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid payload: should contain either postings or script"))) + return + } else if len(payload.Postings) > 0 { + if i, err := payload.Postings.Validate(); err != nil { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, errors.Wrap(err, + fmt.Sprintf("invalid posting %d", i)))) + return + } + txData := ledger.TransactionData{ + Postings: payload.Postings, + Timestamp: payload.Timestamp, + Reference: payload.Reference, + Metadata: payload.Metadata, + } + + res, err := l.CreateTransaction(r.Context(), getCommandParameters(r), ledger.TxToScriptData(txData)) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, res) + return + } + + script := ledger.RunScript{ + Script: payload.Script.ToCore(), + Timestamp: payload.Timestamp, + Reference: payload.Reference, + Metadata: payload.Metadata, + } + + res, err := l.CreateTransaction(r.Context(), getCommandParameters(r), script) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, res) +} + +func getTransaction(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + txId, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) + if !ok { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid transaction ID"))) + return + } + + 
query := ledgerstore.NewGetTransactionQuery(txId) + if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { + query = query.WithExpandVolumes() + } + if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { + query = query.WithExpandEffectiveVolumes() + } + + pitFilter, err := getPITFilter(r) + if err != nil { + sharedapi.BadRequest(w, shared.ErrValidation, err) + return + } + query.PITFilter = *pitFilter + + tx, err := l.GetTransactionWithVolumes(r.Context(), query) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Ok(w, tx) +} + +func revertTransaction(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + transactionID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) + if !ok { + sharedapi.NotFound(w) + return + } + + tx, err := l.RevertTransaction(r.Context(), getCommandParameters(r), transactionID) + if err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.Created(w, tx) +} + +func postTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid metadata format"))) + return + } + + txID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) + if !ok { + sharedapi.NotFound(w) + return + } + + if err := l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, txID, m); err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} + +func deleteTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := shared.LedgerFromContext(r.Context()) + + transactionID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) + if !ok { + shared.ResponseError(w, r, errorsutil.NewError(command.ErrValidation, + errors.New("invalid transaction ID"))) + return + } + + 
metadataKey := chi.URLParam(r, "key") + + if err := l.DeleteMetadata(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, transactionID, metadataKey); err != nil { + shared.ResponseError(w, r, err) + return + } + + sharedapi.NoContent(w) +} diff --git a/internal/api/v2/controllers_transactions_test.go b/internal/api/v2/controllers_transactions_test.go new file mode 100644 index 000000000..2a49490e7 --- /dev/null +++ b/internal/api/v2/controllers_transactions_test.go @@ -0,0 +1,607 @@ +package v2_test + +import ( + "bytes" + "fmt" + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + "github.com/formancehq/ledger/internal/api/shared" + + ledger "github.com/formancehq/ledger/internal" + v2 "github.com/formancehq/ledger/internal/api/v2" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + sharedapi "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestPostTransactions(t *testing.T) { + type testCase struct { + name string + expectedDryRun bool + expectedRunScript ledger.RunScript + payload any + expectedStatusCode int + expectedErrorCode string + queryParams url.Values + } + + testCases := []testCase{ + { + name: "using plain numscript", + payload: v2.PostTransactionRequest{ + Script: v2.Script{ + Script: ledger.Script{ + Plain: `XXX`, + }, + }, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `XXX`, + Vars: map[string]string{}, + }, + }, + }, + { + name: "using plain numscript with variables", + payload: v2.PostTransactionRequest{ + Script: v2.Script{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send 
$val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]any{ + "val": "USD/2 100", + }, + }, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript with variables (legacy format)", + payload: v2.PostTransactionRequest{ + Script: v2.Script{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]any{ + "val": map[string]any{ + "asset": "USD/2", + "amount": 100, + }, + }, + }, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript and dry run", + payload: v2.PostTransactionRequest{ + Script: v2.Script{ + Script: ledger.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + }, + }, + }, + expectedRunScript: ledger.RunScript{ + Script: ledger.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + Vars: map[string]string{}, + }, + }, + expectedDryRun: true, + queryParams: url.Values{ + "dryRun": []string{"true"}, + }, + }, + { + name: "using JSON postings", + payload: v2.PostTransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + )), + }, + { + name: "using JSON postings and dry run", + queryParams: url.Values{ + "dryRun": []string{"true"}, + }, + payload: v2.PostTransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + 
expectedDryRun: true, + expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + )), + }, + { + name: "no postings or script", + payload: v2.PostTransactionRequest{}, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + { + name: "postings and script", + payload: v2.PostTransactionRequest{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + Script: v2.Script{ + Script: ledger.Script{ + Plain: ` + send [COIN 100] ( + source = @world + destination = @bob + )`, + }, + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + { + name: "using invalid body", + payload: "not a valid payload", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + } + + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + if testCase.expectedStatusCode == 0 { + testCase.expectedStatusCode = http.StatusOK + } + + expectedTx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { + mockLedger.EXPECT(). + CreateTransaction(gomock.Any(), command.Parameters{ + DryRun: tc.expectedDryRun, + }, testCase.expectedRunScript). 
+ Return(expectedTx, nil) + } + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions", sharedapi.Buffer(t, testCase.payload)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectedStatusCode, rec.Code) + if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { + tx, ok := sharedapi.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, *expectedTx, tx) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestPostTransactionMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + body any + } + + testCases := []testCase{ + { + name: "nominal", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "invalid body", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + backend, mock := newTestingBackend(t, true) + if testCase.expectStatusCode == http.StatusNoContent { + mock.EXPECT(). + SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeTransaction, big.NewInt(0), testCase.body). 
+ Return(nil) + } + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/metadata", sharedapi.Buffer(t, testCase.body)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestGetTransaction(t *testing.T) { + t.Parallel() + + tx := ledger.ExpandTransaction( + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + nil, + ) + + backend, mock := newTestingBackend(t, true) + mock.EXPECT(). + GetTransactionWithVolumes(gomock.Any(), ledgerstore.NewGetTransactionQuery(big.NewInt(0))). + Return(&tx, nil) + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions/0", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedTransaction](t, rec.Body) + require.Equal(t, tx, response) +} + +func TestGetTransactions(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + now := ledger.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using metadata", + body: `{"$match": {"metadata[roles]": "admin"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("metadata[roles]", "admin")), + }, + { + name: "using startTime", + body: fmt.Sprintf(`{"$gte": {"start_time": "%s"}}`, now.Format(ledger.DateFormat)), + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gte("start_time", now.Format(ledger.DateFormat))), + }, + { + name: "using endTime", + body: fmt.Sprintf(`{"$lte": {"end_time": "%s"}}`, now.Format(ledger.DateFormat)), + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lte("end_time", now.Format(ledger.DateFormat))), + }, + { + name: "using account", + body: `{"$match": {"account": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "xxx")), + }, + { + name: "using reference", + body: `{"$match": {"reference": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reference", "xxx")), + }, + { + name: "using destination", + body: `{"$match": {"destination": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("destination", "xxx")), + }, + { + name: "using source", + body: `{"$match": {"source": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("source", "xxx")), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{paginate.EncodeCursor(ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: shared.ErrValidation, + }, + { + name: "page size over maximum", + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithPageSize(v2.MaxPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := sharedapi.Cursor[ledger.ExpandedTransaction]{ + Data: []ledger.ExpandedTransaction{ + ledger.ExpandTransaction( + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + nil, + ), + }, + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + GetTransactions(gomock.Any(), ledgerstore.NewGetTransactionsQuery(testCase.expectQuery)). 
+ Return(&expectedCursor, nil) + } + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions", bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + if testCase.queryParams != nil { + req.URL.RawQuery = testCase.queryParams.Encode() + } + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedTransaction](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestCountTransactions(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + now := ledger.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + }, + { + name: "using metadata", + body: `{"$match": {"metadata[roles]": "admin"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[roles]", "admin")), + }, + { + name: "using startTime", + body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(ledger.DateFormat)), + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gte("date", now.Format(ledger.DateFormat))), + }, + { + name: "using endTime", + body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(ledger.DateFormat)), + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Gte("date", now.Format(ledger.DateFormat))), + }, + { + name: "using account", + body: `{"$match": {"account": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "xxx")), + }, + { + name: "using reference", + body: `{"$match": {"reference": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reference", "xxx")), + }, + { + name: "using destination", + body: `{"$match": {"destination": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("destination", "xxx")), + }, + { + name: "using source", + body: `{"$match": {"source": "xxx"}}`, + expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("source", "xxx")), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + backend, mockLedger := newTestingBackend(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + mockLedger.EXPECT(). + CountTransactions(gomock.Any(), ledgerstore.NewGetTransactionsQuery(testCase.expectQuery)). 
+ Return(uint64(10), nil) + } + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodHead, "/xxx/transactions", bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + if testCase.queryParams != nil { + req.URL.RawQuery = testCase.queryParams.Encode() + } + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + require.Equal(t, "10", rec.Header().Get("Count")) + } else { + err := sharedapi.ErrorResponse{} + sharedapi.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} + +func TestRevertTransaction(t *testing.T) { + + expectedTx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + backend, mockLedger := newTestingBackend(t, true) + mockLedger. + EXPECT(). + RevertTransaction(gomock.Any(), command.Parameters{}, big.NewInt(0)). 
+ Return(expectedTx, nil) + + router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry()) + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/revert", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusCreated, rec.Code) + tx, ok := sharedapi.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, *expectedTx, tx) +} diff --git a/internal/api/v2/middlewares_metrics.go b/internal/api/v2/middlewares_metrics.go new file mode 100644 index 000000000..40ec7814d --- /dev/null +++ b/internal/api/v2/middlewares_metrics.go @@ -0,0 +1,54 @@ +package v2 + +import ( + "net/http" + "time" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/go-chi/chi/v5" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric" +) + +type statusRecorder struct { + http.ResponseWriter + Status int +} + +func newStatusRecorder(w http.ResponseWriter) *statusRecorder { + return &statusRecorder{ResponseWriter: w} +} + +func (r *statusRecorder) WriteHeader(status int) { + r.Status = status + r.ResponseWriter.WriteHeader(status) +} + +func MetricsMiddleware(globalMetricsRegistry metrics.GlobalRegistry) func(h http.Handler) http.Handler { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + attrs := []attribute.KeyValue{} + + ctx := r.Context() + name := chi.URLParam(r, "ledger") + if name != "" { + attrs = append(attrs, attribute.String("ledger", name)) + } + + recorder := newStatusRecorder(w) + + start := ledger.Now() + h.ServeHTTP(recorder, r) + latency := time.Since(start.Time) + + attrs = append(attrs, + attribute.String("route", chi.RouteContext(r.Context()).RoutePattern())) + + globalMetricsRegistry.APILatencies().Record(ctx, latency.Milliseconds(), metric.WithAttributes(attrs...)) + + attrs = append(attrs, attribute.Int("status", 
recorder.Status)) + globalMetricsRegistry.StatusCodes().Add(ctx, 1, metric.WithAttributes(attrs...)) + }) + } +} diff --git a/internal/api/v2/query.go b/internal/api/v2/query.go new file mode 100644 index 000000000..a055051c5 --- /dev/null +++ b/internal/api/v2/query.go @@ -0,0 +1,62 @@ +package v2 + +import ( + "net/http" + "strconv" + "strings" + + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/pkg/errors" +) + +const ( + MaxPageSize = 100 + DefaultPageSize = paginate.QueryDefaultPageSize + + QueryKeyCursor = "cursor" + QueryKeyPageSize = "pageSize" +) + +var ( + ErrInvalidPageSize = errors.New("invalid 'pageSize' query param") + ErrInvalidBalanceOperator = errors.New( + "invalid parameter 'balanceOperator', should be one of 'e, ne, gt, gte, lt, lte'") + ErrInvalidStartTime = errors.New("invalid 'startTime' query param") + ErrInvalidEndTime = errors.New("invalid 'endTime' query param") +) + +func getPageSize(r *http.Request) (uint64, error) { + pageSizeParam := r.URL.Query().Get(QueryKeyPageSize) + if pageSizeParam == "" { + return DefaultPageSize, nil + } + + var pageSize uint64 + var err error + if pageSizeParam != "" { + pageSize, err = strconv.ParseUint(pageSizeParam, 10, 32) + if err != nil { + return 0, errorsutil.NewError(command.ErrValidation, ErrInvalidPageSize) + } + } + + if pageSize > MaxPageSize { + return MaxPageSize, nil + } + + return pageSize, nil +} + +func getCommandParameters(r *http.Request) command.Parameters { + dryRunAsString := r.URL.Query().Get("dryRun") + dryRun := strings.ToUpper(dryRunAsString) == "YES" || strings.ToUpper(dryRunAsString) == "TRUE" || dryRunAsString == "1" + + idempotencyKey := r.Header.Get("Idempotency-Key") + + return command.Parameters{ + DryRun: dryRun, + IdempotencyKey: idempotencyKey, + } +} diff --git a/internal/api/v2/routes.go b/internal/api/v2/routes.go new file mode 100644 
index 000000000..6bc5a652d --- /dev/null +++ b/internal/api/v2/routes.go @@ -0,0 +1,73 @@ +package v2 + +import ( + "net/http" + + "github.com/formancehq/ledger/internal/api/shared" + + "github.com/formancehq/ledger/internal/api/backend" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/stack/libs/go-libs/health" + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + "github.com/go-chi/cors" + "github.com/riandyrn/otelchi" +) + +func NewRouter( + backend backend.Backend, + healthController *health.HealthController, + globalMetricsRegistry metrics.GlobalRegistry, +) chi.Router { + router := chi.NewMux() + + router.Use( + cors.New(cors.Options{ + AllowOriginFunc: func(r *http.Request, origin string) bool { + return true + }, + AllowCredentials: true, + }).Handler, + MetricsMiddleware(globalMetricsRegistry), + middleware.Recoverer, + ) + + router.Get("/_healthcheck", healthController.Check) + + router.Group(func(router chi.Router) { + router.Use(otelchi.Middleware("ledger")) + router.Get("/_info", getInfo(backend)) + + router.Route("/{ledger}", func(router chi.Router) { + router.Use(shared.LedgerMiddleware(backend, []string{"/_info"})) + + // LedgerController + router.Get("/_info", getLedgerInfo) + router.Get("/stats", getStats) + router.Get("/logs", getLogs) + + // AccountController + router.Get("/accounts", getAccounts) + router.Head("/accounts", countAccounts) + router.Get("/accounts/{address}", getAccount) + router.Post("/accounts/{address}/metadata", postAccountMetadata) + router.Delete("/accounts/{address}/metadata/{key}", deleteAccountMetadata) + + // TransactionController + router.Get("/transactions", getTransactions) + router.Head("/transactions", countTransactions) + + router.Post("/transactions", postTransaction) + + router.Get("/transactions/{id}", getTransaction) + router.Post("/transactions/{id}/revert", revertTransaction) + router.Post("/transactions/{id}/metadata", postTransactionMetadata) + 
router.Delete("/transactions/{id}/metadata/{key}", deleteTransactionMetadata) + + // TODO: Rename to /aggregatedBalances + router.Get("/aggregate/balances", getBalancesAggregated) + }) + }) + + return router +} diff --git a/internal/api/v2/utils.go b/internal/api/v2/utils.go new file mode 100644 index 000000000..cc73fb3e8 --- /dev/null +++ b/internal/api/v2/utils.go @@ -0,0 +1,92 @@ +package v2 + +import ( + "io" + "net/http" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/pointer" +) + +func getPITFilter(r *http.Request) (*ledgerstore.PITFilter, error) { + pitString := r.URL.Query().Get("pit") + if pitString == "" { + return &ledgerstore.PITFilter{}, nil + } + pit, err := ledger.ParseTime(pitString) + if err != nil { + return nil, err + } + return &ledgerstore.PITFilter{ + PIT: &pit, + }, nil +} + +func getPITFilterWithVolumes(r *http.Request) (*ledgerstore.PITFilterWithVolumes, error) { + pit, err := getPITFilter(r) + if err != nil { + return nil, err + } + return &ledgerstore.PITFilterWithVolumes{ + PITFilter: *pit, + ExpandVolumes: collectionutils.Contains(r.URL.Query()["expand"], "volumes"), + ExpandEffectiveVolumes: collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes"), + }, nil +} + +func getQueryBuilder(r *http.Request) (query.Builder, error) { + data, err := io.ReadAll(r.Body) + if err != nil { + return nil, err + } + + if len(data) > 0 { + return query.ParseJSON(string(data)) + } + return nil, nil +} + +func getPaginatedQueryOptionsOfPITFilterWithVolumes(r *http.Request) (*ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes], error) { + qb, err := getQueryBuilder(r) + if err != nil { + return nil, err + } + + pitFilter, err := getPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + + 
pageSize, err := getPageSize(r) + if err != nil { + return nil, err + } + + return pointer.For(ledgerstore.NewPaginatedQueryOptions(*pitFilter). + WithQueryBuilder(qb). + WithPageSize(pageSize)), nil +} + +func getPaginatedQueryOptionsOfPITFilter(r *http.Request) (*ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilter], error) { + qb, err := getQueryBuilder(r) + if err != nil { + return nil, err + } + + pitFilter, err := getPITFilter(r) + if err != nil { + return nil, err + } + + pageSize, err := getPageSize(r) + if err != nil { + return nil, err + } + + return pointer.For(ledgerstore.NewPaginatedQueryOptions(*pitFilter). + WithQueryBuilder(qb). + WithPageSize(pageSize)), nil +} diff --git a/internal/asset.go b/internal/asset.go new file mode 100644 index 000000000..bca09f3f0 --- /dev/null +++ b/internal/asset.go @@ -0,0 +1,13 @@ +package ledger + +import ( + "regexp" +) + +const AssetPattern = `[A-Z][A-Z0-9]{0,16}(\/\d{1,6})?` + +var AssetRegexp = regexp.MustCompile("^" + AssetPattern + "$") + +func AssetIsValid(v string) bool { + return AssetRegexp.Match([]byte(v)) +} diff --git a/internal/bigint.go b/internal/bigint.go new file mode 100644 index 000000000..d0bd3f2cf --- /dev/null +++ b/internal/bigint.go @@ -0,0 +1,7 @@ +package ledger + +import ( + "math/big" +) + +var Zero = big.NewInt(0) diff --git a/internal/bus/message.go b/internal/bus/message.go new file mode 100644 index 000000000..883f7822b --- /dev/null +++ b/internal/bus/message.go @@ -0,0 +1,81 @@ +package bus + +import ( + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/pkg/events" + "github.com/formancehq/stack/libs/go-libs/metadata" +) + +type EventMessage struct { + Date ledger.Time `json:"date"` + App string `json:"app"` + Version string `json:"version"` + Type string `json:"type"` + Payload any `json:"payload"` +} + +type CommittedTransactions struct { + Ledger string `json:"ledger"` + Transactions []ledger.Transaction `json:"transactions"` + AccountMetadata 
map[string]metadata.Metadata `json:"accountMetadata"` +} + +func newEventCommittedTransactions(txs CommittedTransactions) EventMessage { + return EventMessage{ + Date: ledger.Now(), + App: events.EventApp, + Version: events.EventVersion, + Type: events.EventTypeCommittedTransactions, + Payload: txs, + } +} + +type SavedMetadata struct { + Ledger string `json:"ledger"` + TargetType string `json:"targetType"` + TargetID string `json:"targetId"` + Metadata metadata.Metadata `json:"metadata"` +} + +func newEventSavedMetadata(metadata SavedMetadata) EventMessage { + return EventMessage{ + Date: ledger.Now(), + App: events.EventApp, + Version: events.EventVersion, + Type: events.EventTypeSavedMetadata, + Payload: metadata, + } +} + +type RevertedTransaction struct { + Ledger string `json:"ledger"` + RevertedTransaction ledger.Transaction `json:"revertedTransaction"` + RevertTransaction ledger.Transaction `json:"revertTransaction"` +} + +func newEventRevertedTransaction(tx RevertedTransaction) EventMessage { + return EventMessage{ + Date: ledger.Now(), + App: events.EventApp, + Version: events.EventVersion, + Type: events.EventTypeRevertedTransaction, + Payload: tx, + } +} + +type DeletedMetadata struct { + Ledger string `json:"ledger"` + TargetType string `json:"targetType"` + TargetID any `json:"targetId"` + Key string `json:"key"` +} + +func newEventDeletedMetadata(tx DeletedMetadata) EventMessage { + return EventMessage{ + Date: ledger.Now(), + App: events.EventApp, + Version: events.EventVersion, + Type: events.EventTypeDeletedMetadata, + Payload: tx, + } +} diff --git a/internal/bus/monitor.go b/internal/bus/monitor.go new file mode 100644 index 000000000..88a6cf974 --- /dev/null +++ b/internal/bus/monitor.go @@ -0,0 +1,97 @@ +package bus + +import ( + "context" + + "github.com/ThreeDotsLabs/watermill/message" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/pkg/events" + "github.com/formancehq/stack/libs/go-libs/logging" + 
"github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/formancehq/stack/libs/go-libs/publish" +) + +type Monitor interface { + CommittedTransactions(ctx context.Context, res ledger.Transaction, accountMetadata map[string]metadata.Metadata) + SavedMetadata(ctx context.Context, targetType, id string, metadata metadata.Metadata) + RevertedTransaction(ctx context.Context, reverted, revert *ledger.Transaction) + DeletedMetadata(ctx context.Context, targetType string, targetID any, key string) +} + +type noOpMonitor struct{} + +func (n noOpMonitor) DeletedMetadata(ctx context.Context, targetType string, targetID any, key string) { +} + +func (n noOpMonitor) CommittedTransactions(ctx context.Context, res ledger.Transaction, accountMetadata map[string]metadata.Metadata) { +} +func (n noOpMonitor) SavedMetadata(ctx context.Context, targetType string, id string, metadata metadata.Metadata) { +} +func (n noOpMonitor) RevertedTransaction(ctx context.Context, reverted, revert *ledger.Transaction) { +} + +var _ Monitor = &noOpMonitor{} + +func NewNoOpMonitor() *noOpMonitor { + return &noOpMonitor{} +} + +type ledgerMonitor struct { + publisher message.Publisher + ledgerName string +} + +var _ Monitor = &ledgerMonitor{} + +func NewLedgerMonitor(publisher message.Publisher, ledgerName string) *ledgerMonitor { + m := &ledgerMonitor{ + publisher: publisher, + ledgerName: ledgerName, + } + return m +} + +func (l *ledgerMonitor) CommittedTransactions(ctx context.Context, txs ledger.Transaction, accountMetadata map[string]metadata.Metadata) { + l.publish(ctx, events.EventTypeCommittedTransactions, + newEventCommittedTransactions(CommittedTransactions{ + Ledger: l.ledgerName, + Transactions: []ledger.Transaction{txs}, + AccountMetadata: accountMetadata, + })) +} + +func (l *ledgerMonitor) SavedMetadata(ctx context.Context, targetType, targetID string, metadata metadata.Metadata) { + l.publish(ctx, events.EventTypeSavedMetadata, + newEventSavedMetadata(SavedMetadata{ + Ledger: 
l.ledgerName, + TargetType: targetType, + TargetID: targetID, + Metadata: metadata, + })) +} + +func (l *ledgerMonitor) RevertedTransaction(ctx context.Context, reverted, revert *ledger.Transaction) { + l.publish(ctx, events.EventTypeRevertedTransaction, + newEventRevertedTransaction(RevertedTransaction{ + Ledger: l.ledgerName, + RevertedTransaction: *reverted, + RevertTransaction: *revert, + })) +} + +func (l *ledgerMonitor) DeletedMetadata(ctx context.Context, targetType string, targetID any, key string) { + l.publish(ctx, events.EventTypeDeletedMetadata, + newEventDeletedMetadata(DeletedMetadata{ + Ledger: l.ledgerName, + TargetType: targetType, + TargetID: targetID, + Key: key, + })) +} + +func (l *ledgerMonitor) publish(ctx context.Context, topic string, ev EventMessage) { + if err := l.publisher.Publish(topic, publish.NewMessage(ctx, ev)); err != nil { + logging.FromContext(ctx).Errorf("publishing message: %s", err) + return + } +} diff --git a/pkg/bus/monitor_test.go b/internal/bus/monitor_test.go similarity index 55% rename from pkg/bus/monitor_test.go rename to internal/bus/monitor_test.go index a7935a85f..c1fa881dd 100644 --- a/pkg/bus/monitor_test.go +++ b/internal/bus/monitor_test.go @@ -5,11 +5,13 @@ import ( "testing" "time" + ledger "github.com/formancehq/ledger/internal" + "github.com/ThreeDotsLabs/watermill" "github.com/ThreeDotsLabs/watermill/pubsub/gochannel" - "github.com/formancehq/go-libs/publish" + "github.com/formancehq/stack/libs/go-libs/publish" "github.com/pborman/uuid" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestMonitor(t *testing.T) { @@ -18,17 +20,15 @@ func TestMonitor(t *testing.T) { gochannel.Config{ BlockPublishUntilSubscriberAck: true, }, - watermill.NewStdLogger(false, false), + watermill.NewStdLogger(testing.Verbose(), testing.Verbose()), ) messages, err := pubSub.Subscribe(context.Background(), "testing") - if !assert.NoError(t, err) { - return - } - p := 
publish.NewTopicMapperPublisher(pubSub, map[string]string{ + require.NoError(t, err) + p := publish.NewTopicMapperPublisherDecorator(pubSub, map[string]string{ "*": "testing", }) - m := newLedgerMonitor(p) - go m.CommittedTransactions(context.Background(), uuid.New()) + m := NewLedgerMonitor(p, uuid.New()) + go m.CommittedTransactions(context.Background(), ledger.Transaction{}, nil) select { case m := <-messages: diff --git a/internal/engine/command/commander.go b/internal/engine/command/commander.go new file mode 100644 index 000000000..f7bc459f2 --- /dev/null +++ b/internal/engine/command/commander.go @@ -0,0 +1,334 @@ +package command + +import ( + "context" + "fmt" + "math/big" + "sync" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/bus" + "github.com/formancehq/ledger/internal/engine/utils/batching" + "github.com/formancehq/ledger/internal/machine" + "github.com/formancehq/ledger/internal/machine/vm" + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/pkg/errors" +) + +type Parameters struct { + DryRun bool + IdempotencyKey string +} + +type Commander struct { + *batching.Batcher[*ledger.ChainedLog] + store Store + locker Locker + compiler *Compiler + running sync.WaitGroup + lastTXID *big.Int + referencer *Referencer + mu sync.Mutex + + lastLog *ledger.ChainedLog + monitor bus.Monitor +} + +func New(store Store, locker Locker, compiler *Compiler, referencer *Referencer, monitor bus.Monitor) *Commander { + return &Commander{ + store: store, + locker: locker, + compiler: compiler, + lastTXID: big.NewInt(-1), + referencer: referencer, + Batcher: batching.NewBatcher(store.InsertLogs, 1, 4096), + monitor: monitor, + } +} + +func (commander *Commander) Init(ctx context.Context) error { + lastTx, err := 
commander.store.GetLastTransaction(ctx) + if err != nil && !storageerrors.IsNotFoundError(err) { + return err + } + if lastTx != nil { + commander.lastTXID = lastTx.ID + } + + commander.lastLog, err = commander.store.GetLastLog(ctx) + if err != nil && !storageerrors.IsNotFoundError(err) { + return err + } + return nil +} + +func (commander *Commander) GetLedgerStore() Store { + return commander.store +} + +func (commander *Commander) exec(ctx context.Context, parameters Parameters, script ledger.RunScript, + logComputer func(tx *ledger.Transaction, accountMetadata map[string]metadata.Metadata) *ledger.Log) (*ledger.ChainedLog, error) { + + if script.Script.Plain == "" { + return nil, ErrNoScript + } + + if script.Timestamp.IsZero() { + script.Timestamp = ledger.Now() + } + + execContext := newExecutionContext(commander, parameters) + return execContext.run(ctx, func(executionContext *executionContext) (*ledger.ChainedLog, chan struct{}, error) { + if script.Reference != "" { + if err := commander.referencer.take(referenceTxReference, script.Reference); err != nil { + return nil, nil, ErrConflictError + } + defer commander.referencer.release(referenceTxReference, script.Reference) + + _, err := commander.store.GetTransactionByReference(ctx, script.Reference) + if err == nil { + return nil, nil, ErrConflictError + } + if err != storageerrors.ErrNotFound && err != nil { + return nil, nil, err + } + } + + program, err := commander.compiler.Compile(ctx, script.Plain) + if err != nil { + return nil, nil, errorsutil.NewError(ErrCompilationFailed, errors.Wrap(err, "compiling numscript")) + } + + m := vm.NewMachine(*program) + + if err := m.SetVarsFromJSON(script.Vars); err != nil { + return nil, nil, errorsutil.NewError(ErrCompilationFailed, + errors.Wrap(err, "could not set variables")) + } + + involvedAccounts, involvedSources, err := m.ResolveResources(ctx, commander.store) + if err != nil { + return nil, nil, errorsutil.NewError(ErrCompilationFailed, + errors.Wrap(err, 
"could not resolve program resources")) + } + + worldFilter := collectionutils.FilterNot(collectionutils.FilterEq("world")) + lockAccounts := Accounts{ + Read: collectionutils.Filter(involvedAccounts, worldFilter), + Write: collectionutils.Filter(involvedSources, worldFilter), + } + + unlock, err := commander.locker.Lock(ctx, lockAccounts) + if err != nil { + return nil, nil, errors.Wrap(err, "locking accounts for tx processing") + } + unlock(ctx) + + err = m.ResolveBalances(ctx, commander.store) + if err != nil { + return nil, nil, errorsutil.NewError(ErrCompilationFailed, + errors.Wrap(err, "could not resolve balances")) + } + + result, err := machine.Run(m, script) + if err != nil { + return nil, nil, errors.Wrap(err, "running numscript") + } + + if len(result.Postings) == 0 { + return nil, nil, ErrNoPostings + } + + tx := ledger.NewTransaction(). + WithPostings(result.Postings...). + WithMetadata(result.Metadata). + WithDate(script.Timestamp). + WithID(commander.nextTXID()). + WithReference(script.Reference) + + log := logComputer(tx, result.AccountMetadata) + if parameters.IdempotencyKey != "" { + log = log.WithIdempotencyKey(parameters.IdempotencyKey) + } + + return executionContext.AppendLog(ctx, log) + }) +} + +func (commander *Commander) CreateTransaction(ctx context.Context, parameters Parameters, script ledger.RunScript) (*ledger.Transaction, error) { + log, err := commander.exec(ctx, parameters, script, ledger.NewTransactionLog) + if err != nil { + return nil, err + } + + commander.monitor.CommittedTransactions(ctx, *log.Data.(ledger.NewTransactionLogPayload).Transaction, log.Data.(ledger.NewTransactionLogPayload).AccountMetadata) + + return log.Data.(ledger.NewTransactionLogPayload).Transaction, nil +} + +func (commander *Commander) SaveMeta(ctx context.Context, parameters Parameters, targetType string, targetID interface{}, m metadata.Metadata) error { + if m == nil { + return nil + } + + if targetType == "" { + return 
errorsutil.NewError(ErrValidation, errors.New("empty target type")) + } + if targetID == "" { + return errorsutil.NewError(ErrValidation, errors.New("empty target id")) + } + + execContext := newExecutionContext(commander, parameters) + _, err := execContext.run(ctx, func(executionContext *executionContext) (*ledger.ChainedLog, chan struct{}, error) { + var ( + log *ledger.Log + at = ledger.Now() + ) + switch targetType { + case ledger.MetaTargetTypeTransaction: + _, err := commander.store.GetTransaction(ctx, targetID.(*big.Int)) + if err != nil { + return nil, nil, err + } + log = ledger.NewSetMetadataLog(at, ledger.SetMetadataLogPayload{ + TargetType: ledger.MetaTargetTypeTransaction, + TargetID: targetID.(*big.Int), + Metadata: m, + }) + case ledger.MetaTargetTypeAccount: + log = ledger.NewSetMetadataLog(at, ledger.SetMetadataLogPayload{ + TargetType: ledger.MetaTargetTypeAccount, + TargetID: targetID.(string), + Metadata: m, + }) + default: + return nil, nil, errorsutil.NewError(ErrValidation, errors.Errorf("unknown target type '%s'", targetType)) + } + + return executionContext.AppendLog(ctx, log) + }) + if err != nil { + return err + } + + commander.monitor.SavedMetadata(ctx, targetType, fmt.Sprint(targetID), m) + return nil +} + +func (commander *Commander) RevertTransaction(ctx context.Context, parameters Parameters, id *big.Int) (*ledger.Transaction, error) { + + if err := commander.referencer.take(referenceReverts, id); err != nil { + return nil, ErrRevertOccurring + } + defer commander.referencer.release(referenceReverts, id) + + tx, err := commander.store.GetTransaction(ctx, id) + if err != nil { + if errors.Is(err, storageerrors.ErrNotFound) { + return nil, errors.New("tx not found") + } + return nil, err + } + if tx.Reverted { + return nil, ErrAlreadyReverted + } + + transactionToRevert, err := commander.store.GetTransaction(ctx, id) + if storageerrors.IsNotFoundError(err) { + return nil, errorsutil.NewError(err, errors.Errorf("transaction %d not 
found", id)) + } + if err != nil { + return nil, err + } + + rt := transactionToRevert.Reverse() + rt.Metadata = ledger.MarkReverts(metadata.Metadata{}, transactionToRevert.ID) + + log, err := commander.exec(ctx, parameters, + ledger.TxToScriptData(ledger.TransactionData{ + Postings: rt.Postings, + Metadata: rt.Metadata, + }), + func(tx *ledger.Transaction, accountMetadata map[string]metadata.Metadata) *ledger.Log { + return ledger.NewRevertedTransactionLog(tx.Timestamp, transactionToRevert.ID, tx) + }) + if err != nil { + return nil, err + } + + commander.monitor.RevertedTransaction(ctx, log.Data.(ledger.RevertedTransactionLogPayload).RevertTransaction, tx) + + return log.Data.(ledger.RevertedTransactionLogPayload).RevertTransaction, nil +} + +func (commander *Commander) Close() { + commander.Batcher.Close() + commander.running.Wait() +} + +func (commander *Commander) chainLog(log *ledger.Log) *ledger.ChainedLog { + commander.mu.Lock() + defer commander.mu.Unlock() + + commander.lastLog = log.ChainLog(commander.lastLog) + return commander.lastLog +} + +func (commander *Commander) nextTXID() *big.Int { + commander.mu.Lock() + defer commander.mu.Unlock() + + ret := big.NewInt(0).Add(commander.lastTXID, big.NewInt(1)) + commander.lastTXID = ret + + return ret +} + +func (commander *Commander) DeleteMetadata(ctx context.Context, parameters Parameters, targetType string, targetID any, key string) error { + if targetType == "" { + return errorsutil.NewError(ErrValidation, errors.New("empty target type")) + } + if targetID == "" { + return errorsutil.NewError(ErrValidation, errors.New("empty target id")) + } + + execContext := newExecutionContext(commander, parameters) + _, err := execContext.run(ctx, func(executionContext *executionContext) (*ledger.ChainedLog, chan struct{}, error) { + var ( + log *ledger.Log + at = ledger.Now() + ) + switch targetType { + case ledger.MetaTargetTypeTransaction: + _, err := commander.store.GetTransaction(ctx, targetID.(*big.Int)) + if 
err != nil { + return nil, nil, err + } + log = ledger.NewDeleteMetadataLog(at, ledger.DeleteMetadataLogPayload{ + TargetType: ledger.MetaTargetTypeTransaction, + TargetID: targetID.(*big.Int), + Key: key, + }) + case ledger.MetaTargetTypeAccount: + log = ledger.NewDeleteMetadataLog(at, ledger.DeleteMetadataLogPayload{ + TargetType: ledger.MetaTargetTypeAccount, + TargetID: targetID.(string), + Key: key, + }) + default: + return nil, nil, errorsutil.NewError(ErrValidation, errors.Errorf("unknown target type '%s'", targetType)) + } + + return executionContext.AppendLog(ctx, log) + }) + if err != nil { + return err + } + + commander.monitor.DeletedMetadata(ctx, targetType, targetID, key) + + return nil +} diff --git a/internal/engine/command/commander_test.go b/internal/engine/command/commander_test.go new file mode 100644 index 000000000..43abf1639 --- /dev/null +++ b/internal/engine/command/commander_test.go @@ -0,0 +1,253 @@ +package command + +import ( + "context" + "math/big" + "testing" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/bus" + storageerrors "github.com/formancehq/ledger/internal/storage" + internaltesting "github.com/formancehq/ledger/internal/testing" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/pkg/errors" + "github.com/stretchr/testify/require" +) + +var ( + now = ledger.Now() +) + +type testCase struct { + name string + setup func(t *testing.T, r Store) + script string + reference string + expectedError error + expectedTx *ledger.Transaction + expectedLogs []*ledger.Log + parameters Parameters +} + +var testCases = []testCase{ + { + name: "nominal", + script: ` + send [GEM 100] ( + source = @world + destination = @mint + )`, + expectedTx: ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), + ), + expectedLogs: []*ledger.Log{ + ledger.NewTransactionLog( + 
ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "mint", "GEM", big.NewInt(100))), + map[string]metadata.Metadata{}, + ), + }, + }, + { + name: "no script", + script: ``, + expectedError: ErrNoScript, + }, + { + name: "invalid script", + script: `XXX`, + expectedError: ErrCompilationFailed, + }, + { + name: "set reference conflict", + setup: func(t *testing.T, store Store) { + tx := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "mint", "GEM", big.NewInt(100))). + WithReference("tx_ref") + log := ledger.NewTransactionLog(tx, nil) + err := store.InsertLogs(context.Background(), log.ChainLog(nil)) + require.NoError(t, err) + }, + script: ` + send [GEM 100] ( + source = @world + destination = @mint + )`, + reference: "tx_ref", + expectedError: ErrConflictError, + }, + { + name: "set reference", + script: ` + send [GEM 100] ( + source = @world + destination = @mint + )`, + reference: "tx_ref", + expectedTx: ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), + ). + WithReference("tx_ref"), + expectedLogs: []*ledger.Log{ + ledger.NewTransactionLog( + ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), + ). + WithReference("tx_ref"), + map[string]metadata.Metadata{}, + ), + }, + }, + { + name: "using idempotency", + script: ` + send [GEM 100] ( + source = @world + destination = @mint + )`, + reference: "tx_ref", + expectedTx: ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), + ), + expectedLogs: []*ledger.Log{ + ledger.NewTransactionLog( + ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), + ), + map[string]metadata.Metadata{}, + ).WithIdempotencyKey("testing"), + }, + setup: func(t *testing.T, r Store) { + log := ledger.NewTransactionLog( + ledger.NewTransaction(). 
+ WithPostings( + ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), + ). + WithDate(now), + map[string]metadata.Metadata{}, + ).WithIdempotencyKey("testing") + err := r.InsertLogs(context.Background(), log.ChainLog(nil)) + require.NoError(t, err) + }, + parameters: Parameters{ + IdempotencyKey: "testing", + }, + }, +} + +func TestCreateTransaction(t *testing.T) { + t.Parallel() + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + + store := storageerrors.NewInMemoryStore() + ctx := logging.TestingContext() + + commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor()) + go commander.Run(ctx) + defer commander.Close() + + if tc.setup != nil { + tc.setup(t, store) + } + ret, err := commander.CreateTransaction(ctx, tc.parameters, ledger.RunScript{ + Script: ledger.Script{ + Plain: tc.script, + }, + Timestamp: now, + Reference: tc.reference, + }) + + if tc.expectedError != nil { + require.True(t, errors.Is(err, tc.expectedError)) + } else { + require.NoError(t, err) + require.NotNil(t, ret) + tc.expectedTx.Timestamp = now + internaltesting.RequireEqual(t, tc.expectedTx, ret) + + for ind := range tc.expectedLogs { + expectedLog := tc.expectedLogs[ind] + switch v := expectedLog.Data.(type) { + case ledger.NewTransactionLogPayload: + v.Transaction.Timestamp = now + expectedLog.Data = v + } + expectedLog.Date = now + } + } + }) + } +} + +func TestRevert(t *testing.T) { + txID := big.NewInt(0) + store := storageerrors.NewInMemoryStore() + ctx := logging.TestingContext() + + log := ledger.NewTransactionLog( + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + map[string]metadata.Metadata{}, + ).ChainLog(nil) + err := store.InsertLogs(context.Background(), log) + require.NoError(t, err) + + commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor()) + go commander.Run(ctx) + defer commander.Close() + + _, err = 
commander.RevertTransaction(ctx, Parameters{}, txID) + require.NoError(t, err) +} + +func TestRevertWithAlreadyReverted(t *testing.T) { + + store := storageerrors.NewInMemoryStore() + ctx := logging.TestingContext() + + tx := ledger.NewTransaction().WithPostings(ledger.NewPosting("world", "bank", "USD", big.NewInt(100))) + err := store.InsertLogs(context.Background(), + ledger.NewTransactionLog(tx, map[string]metadata.Metadata{}).ChainLog(nil), + ledger.NewRevertedTransactionLog(ledger.Now(), tx.ID, ledger.NewTransaction()).ChainLog(nil), + ) + require.NoError(t, err) + + commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor()) + go commander.Run(ctx) + defer commander.Close() + + _, err = commander.RevertTransaction(context.Background(), Parameters{}, tx.ID) + require.True(t, errors.Is(err, ErrAlreadyReverted)) +} + +func TestRevertWithRevertOccurring(t *testing.T) { + + store := storageerrors.NewInMemoryStore() + ctx := logging.TestingContext() + + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + log := ledger.NewTransactionLog(tx, map[string]metadata.Metadata{}) + err := store.InsertLogs(ctx, log.ChainLog(nil)) + require.NoError(t, err) + + referencer := NewReferencer() + commander := New(store, NoOpLocker, NewCompiler(1024), referencer, bus.NewNoOpMonitor()) + go commander.Run(ctx) + defer commander.Close() + + referencer.take(referenceReverts, big.NewInt(0)) + + _, err = commander.RevertTransaction(ctx, Parameters{}, tx.ID) + require.True(t, errors.Is(err, ErrRevertOccurring)) +} diff --git a/internal/engine/command/compiler.go b/internal/engine/command/compiler.go new file mode 100644 index 000000000..832879c66 --- /dev/null +++ b/internal/engine/command/compiler.go @@ -0,0 +1,48 @@ +package command + +import ( + "context" + "crypto/sha256" + "encoding/base64" + + "github.com/bluele/gcache" + "github.com/formancehq/ledger/internal/machine/script/compiler" + 
"github.com/formancehq/ledger/internal/machine/vm" + "github.com/formancehq/ledger/internal/machine/vm/program" + "github.com/formancehq/stack/libs/go-libs/errorsutil" +) + +type Compiler struct { + cache gcache.Cache +} + +func (c *Compiler) Compile(ctx context.Context, script string) (*program.Program, error) { + + digest := sha256.New() + _, err := digest.Write([]byte(script)) + if err != nil { + return nil, errorsutil.NewError(vm.ErrCompilationFailed, err) + } + + cacheKey := base64.StdEncoding.EncodeToString(digest.Sum(nil)) + v, err := c.cache.Get(cacheKey) + if err == nil { + return v.(*program.Program), nil + } + + program, err := compiler.Compile(script) + if err != nil { + return nil, errorsutil.NewError(vm.ErrCompilationFailed, err) + } + _ = c.cache.Set(cacheKey, program) + + return program, nil +} + +func NewCompiler(maxCacheCount int) *Compiler { + return &Compiler{ + cache: gcache.New(maxCacheCount). + LFU(). + Build(), + } +} diff --git a/internal/engine/command/compiler_test.go b/internal/engine/command/compiler_test.go new file mode 100644 index 000000000..20f41379e --- /dev/null +++ b/internal/engine/command/compiler_test.go @@ -0,0 +1,25 @@ +package command + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestCompiler(t *testing.T) { + + script := `send [USD/2 100] ( + source = @world + destination = @bank +)` + + compiler := NewCompiler(1024) + p1, err := compiler.Compile(context.Background(), script) + require.NoError(t, err) + + p2, err := compiler.Compile(context.Background(), script) + require.NoError(t, err) + + require.Equal(t, p1, p2) +} diff --git a/internal/engine/command/context.go b/internal/engine/command/context.go new file mode 100644 index 000000000..dd2a5ea75 --- /dev/null +++ b/internal/engine/command/context.go @@ -0,0 +1,66 @@ +package command + +import ( + "context" + + ledger "github.com/formancehq/ledger/internal" + storageerrors "github.com/formancehq/ledger/internal/storage" + 
"github.com/formancehq/stack/libs/go-libs/logging" +) + +type executionContext struct { + commander *Commander + parameters Parameters +} + +func (e *executionContext) AppendLog(ctx context.Context, log *ledger.Log) (*ledger.ChainedLog, chan struct{}, error) { + if e.parameters.DryRun { + ret := make(chan struct{}) + close(ret) + return log.ChainLog(nil), ret, nil + } + + chainedLog := e.commander.chainLog(log) + logging.FromContext(ctx).WithFields(map[string]any{ + "id": chainedLog.ID, + }).Debugf("Appending log") + done := make(chan struct{}) + e.commander.Append(chainedLog, func() { + close(done) + }) + return chainedLog, done, nil +} + +func (e *executionContext) run(ctx context.Context, executor func(e *executionContext) (*ledger.ChainedLog, chan struct{}, error)) (*ledger.ChainedLog, error) { + if ik := e.parameters.IdempotencyKey; ik != "" { + if err := e.commander.referencer.take(referenceIks, ik); err != nil { + return nil, err + } + defer e.commander.referencer.release(referenceIks, ik) + + chainedLog, err := e.commander.store.ReadLogWithIdempotencyKey(ctx, ik) + if err == nil { + return chainedLog, nil + } + if err != nil && !storageerrors.IsNotFoundError(err) { + return nil, err + } + } + chainedLog, done, err := executor(e) + if err != nil { + return nil, err + } + <-done + logger := logging.FromContext(ctx).WithFields(map[string]any{ + "id": chainedLog.ID, + }) + logger.Debugf("Log inserted in database") + return chainedLog, nil +} + +func newExecutionContext(commander *Commander, parameters Parameters) *executionContext { + return &executionContext{ + commander: commander, + parameters: parameters, + } +} diff --git a/internal/engine/command/errors.go b/internal/engine/command/errors.go new file mode 100644 index 000000000..36cda0d55 --- /dev/null +++ b/internal/engine/command/errors.go @@ -0,0 +1,43 @@ +package command + +import ( + "github.com/pkg/errors" +) + +var ( + ErrNoPostings = errors.New("transaction has no postings") + ErrNoScript = 
errors.New("no script") + ErrCompilationFailed = errors.New("compilation failed") + ErrVM = errors.New("vm error") + ErrState = errors.New("state error") + ErrValidation = errors.New("validation error") + ErrAlreadyReverted = errors.New("transaction already reverted") + ErrRevertOccurring = errors.New("revert already occurring") + ErrPastTransaction = errors.New("cannot pass a timestamp prior to the last transaction") + ErrConflictError = errors.New("conflict error") + ErrIKOccurring = errors.New("a request with the same IK is already occurring") +) + +func IsNoScriptError(err error) bool { + return errors.Is(err, ErrNoScript) +} + +func IsNoPostingsError(err error) bool { + return errors.Is(err, ErrNoPostings) +} + +func IsCompilationFailedError(err error) bool { + return errors.Is(err, ErrCompilationFailed) +} + +func IsValidationError(err error) bool { + return errors.Is(err, ErrValidation) +} + +func IsPastTransactionError(err error) bool { + return errors.Is(err, ErrPastTransaction) +} + +func IsConflictError(err error) bool { + return errors.Is(err, ErrConflictError) +} diff --git a/internal/engine/command/lock.go b/internal/engine/command/lock.go new file mode 100644 index 000000000..b5f7113ac --- /dev/null +++ b/internal/engine/command/lock.go @@ -0,0 +1,160 @@ +package command + +import ( + "context" + "sync" + "sync/atomic" + + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/pkg/errors" +) + +type Unlock func(ctx context.Context) + +type Locker interface { + Lock(ctx context.Context, accounts Accounts) (Unlock, error) +} +type LockerFn func(ctx context.Context, accounts Accounts) (Unlock, error) + +func (fn LockerFn) Lock(ctx context.Context, accounts Accounts) (Unlock, error) { + return fn(ctx, accounts) +} + +var NoOpLocker = LockerFn(func(ctx context.Context, accounts Accounts) (Unlock, error) { + return func(ctx context.Context) {}, nil +}) + +type Accounts struct { + Read 
[]string + Write []string +} + +type lockIntent struct { + accounts Accounts + acquired chan struct{} +} + +func (intent *lockIntent) tryLock(ctx context.Context, chain *DefaultLocker) bool { + + for _, account := range intent.accounts.Read { + _, ok := chain.writeLocks[account] + if ok { + return false + } + } + + for _, account := range intent.accounts.Write { + _, ok := chain.readLocks[account] + if ok { + return false + } + _, ok = chain.writeLocks[account] + if ok { + return false + } + } + + logging.FromContext(ctx).Debugf("Lock acquired") + + for _, account := range intent.accounts.Read { + atomicValue, ok := chain.readLocks[account] + if !ok { + atomicValue = &atomic.Int64{} + chain.readLocks[account] = atomicValue + } + atomicValue.Add(1) + } + for _, account := range intent.accounts.Write { + chain.writeLocks[account] = struct{}{} + } + + return true +} + +func (intent *lockIntent) unlock(ctx context.Context, chain *DefaultLocker) { + logging.FromContext(ctx).Debugf("Unlock accounts") + for _, account := range intent.accounts.Read { + atomicValue := chain.readLocks[account] + if atomicValue.Add(-1) == 0 { + delete(chain.readLocks, account) + } + } + for _, account := range intent.accounts.Write { + delete(chain.writeLocks, account) + } +} + +type DefaultLocker struct { + intents *collectionutils.LinkedList[*lockIntent] + mu sync.Mutex + readLocks map[string]*atomic.Int64 + writeLocks map[string]struct{} +} + +func (defaultLocker *DefaultLocker) Lock(ctx context.Context, accounts Accounts) (Unlock, error) { + defaultLocker.mu.Lock() + + logger := logging.FromContext(ctx).WithFields(map[string]any{ + "read": accounts.Read, + "write": accounts.Write, + }) + ctx = logging.ContextWithLogger(ctx, logger) + + logger.Debugf("Intent lock") + intent := &lockIntent{ + accounts: accounts, + acquired: make(chan struct{}), + } + + recheck := func() { + node := defaultLocker.intents.FirstNode() + for { + if node == nil { + break + } + if node.Value().tryLock(ctx, 
defaultLocker) { + node.Remove() + close(node.Value().acquired) + } + node = node.Next() + } + } + + releaseIntent := func(ctx context.Context) { + defaultLocker.mu.Lock() + defer defaultLocker.mu.Unlock() + + intent.unlock(logging.ContextWithLogger(ctx, logger), defaultLocker) + + recheck() + } + + acquired := intent.tryLock(ctx, defaultLocker) + if acquired { + defaultLocker.mu.Unlock() + logger.Debugf("Lock directly acquired") + + return releaseIntent, nil + } + + logger.Debugf("Lock not acquired, some accounts are already used, putting in queue") + defaultLocker.intents.Append(intent) + defaultLocker.mu.Unlock() + + select { + case <-ctx.Done(): + defaultLocker.intents.RemoveValue(intent) + return nil, errors.Wrapf(ctx.Err(), "locking accounts: %s as read, and %s as write", accounts.Read, accounts.Write) + case <-intent.acquired: + return releaseIntent, nil + } +} + +func NewDefaultLocker() *DefaultLocker { + return &DefaultLocker{ + intents: collectionutils.NewLinkedList[*lockIntent](), + readLocks: map[string]*atomic.Int64{}, + writeLocks: map[string]struct{}{}, + } +} diff --git a/internal/engine/command/lock_test.go b/internal/engine/command/lock_test.go new file mode 100644 index 000000000..2e7fb1cfc --- /dev/null +++ b/internal/engine/command/lock_test.go @@ -0,0 +1,44 @@ +package command + +import ( + "fmt" + "math/rand" + "sync" + "testing" + "time" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/stretchr/testify/require" +) + +func TestLock(t *testing.T) { + locker := NewDefaultLocker() + var accounts []string + for i := 0; i < 10; i++ { + accounts = append(accounts, fmt.Sprintf("accounts:%d", i)) + } + + r := rand.New(rand.NewSource(time.Now().Unix())) + ctx := logging.TestingContext() + + const nbLoop = 1000 + wg := sync.WaitGroup{} + wg.Add(nbLoop) + + for i := 0; i < nbLoop; i++ { + go func() { + unlock, err := locker.Lock(ctx, Accounts{ + Read: []string{accounts[r.Int31n(10)]}, + Write: []string{accounts[r.Int31n(10)]}, + }) + 
require.NoError(t, err) + defer unlock(ctx) + + <-time.After(10 * time.Millisecond) + wg.Add(-1) + }() + } + + wg.Wait() + +} diff --git a/internal/engine/command/reference.go b/internal/engine/command/reference.go new file mode 100644 index 000000000..86d4a10c6 --- /dev/null +++ b/internal/engine/command/reference.go @@ -0,0 +1,42 @@ +package command + +import ( + "fmt" + "sync" + + "github.com/pkg/errors" +) + +type Reference int + +const ( + referenceReverts = iota + referenceIks + referenceTxReference +) + +type Referencer struct { + references map[Reference]*sync.Map +} + +func (r *Referencer) take(ref Reference, key any) error { + _, loaded := r.references[ref].LoadOrStore(fmt.Sprintf("%d/%s", ref, key), struct{}{}) + if loaded { + return errors.New("already taken") + } + return nil +} + +func (r *Referencer) release(ref Reference, key any) { + r.references[ref].Delete(fmt.Sprintf("%d/%s", ref, key)) +} + +func NewReferencer() *Referencer { + return &Referencer{ + references: map[Reference]*sync.Map{ + referenceReverts: {}, + referenceIks: {}, + referenceTxReference: {}, + }, + } +} diff --git a/internal/engine/command/store.go b/internal/engine/command/store.go new file mode 100644 index 000000000..25569e56f --- /dev/null +++ b/internal/engine/command/store.go @@ -0,0 +1,19 @@ +package command + +import ( + "context" + "math/big" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/machine/vm" +) + +type Store interface { + vm.Store + InsertLogs(ctx context.Context, logs ...*ledger.ChainedLog) error + GetLastLog(ctx context.Context) (*ledger.ChainedLog, error) + GetLastTransaction(ctx context.Context) (*ledger.ExpandedTransaction, error) + ReadLogWithIdempotencyKey(ctx context.Context, key string) (*ledger.ChainedLog, error) + GetTransactionByReference(ctx context.Context, ref string) (*ledger.ExpandedTransaction, error) + GetTransaction(ctx context.Context, txID *big.Int) (*ledger.Transaction, error) +} diff --git 
a/internal/engine/ledger.go b/internal/engine/ledger.go new file mode 100644 index 000000000..75abda917 --- /dev/null +++ b/internal/engine/ledger.go @@ -0,0 +1,114 @@ +package engine + +import ( + "context" + "math/big" + + "github.com/ThreeDotsLabs/watermill/message" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/bus" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/pkg/errors" +) + +type Ledger struct { + commander *command.Commander + store *ledgerstore.Store +} + +func New( + store *ledgerstore.Store, + publisher message.Publisher, + compiler *command.Compiler, +) *Ledger { + var monitor bus.Monitor = bus.NewNoOpMonitor() + if publisher != nil { + monitor = bus.NewLedgerMonitor(publisher, store.Name()) + } + return &Ledger{ + commander: command.New( + store, + command.NewDefaultLocker(), + compiler, + command.NewReferencer(), + monitor, + ), + store: store, + } +} + +func (l *Ledger) Start(ctx context.Context) { + if err := l.commander.Init(ctx); err != nil { + panic(err) + } + go l.commander.Run(logging.ContextWithField(ctx, "component", "commander")) +} + +func (l *Ledger) Close(ctx context.Context) { + logging.FromContext(ctx).Debugf("Close commander") + l.commander.Close() +} + +func (l *Ledger) GetTransactions(ctx context.Context, q *ledgerstore.GetTransactionsQuery) (*api.Cursor[ledger.ExpandedTransaction], error) { + txs, err := l.store.GetTransactions(ctx, q) + return txs, errors.Wrap(err, "getting transactions") +} + +func (l *Ledger) CountTransactions(ctx context.Context, q *ledgerstore.GetTransactionsQuery) (uint64, error) { + count, err := l.store.CountTransactions(ctx, q) + return count, errors.Wrap(err, "counting transactions") +} + +func (l *Ledger) 
GetTransactionWithVolumes(ctx context.Context, query ledgerstore.GetTransactionQuery) (*ledger.ExpandedTransaction, error) { + tx, err := l.store.GetTransactionWithVolumes(ctx, query) + return tx, errors.Wrap(err, "getting transaction") +} + +func (l *Ledger) CountAccounts(ctx context.Context, a *ledgerstore.GetAccountsQuery) (uint64, error) { + count, err := l.store.CountAccounts(ctx, a) + return count, errors.Wrap(err, "counting accounts") +} + +func (l *Ledger) GetAccountsWithVolumes(ctx context.Context, a *ledgerstore.GetAccountsQuery) (*api.Cursor[ledger.ExpandedAccount], error) { + accounts, err := l.store.GetAccountsWithVolumes(ctx, a) + return accounts, errors.Wrap(err, "getting accounts") +} + +func (l *Ledger) GetAccountWithVolumes(ctx context.Context, q ledgerstore.GetAccountQuery) (*ledger.ExpandedAccount, error) { + accounts, err := l.store.GetAccountWithVolumes(ctx, q) + return accounts, errors.Wrap(err, "getting account") +} + +func (l *Ledger) GetAggregatedBalances(ctx context.Context, q *ledgerstore.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + balances, err := l.store.GetAggregatedBalances(ctx, q) + return balances, errors.Wrap(err, "getting balances aggregated") +} + +func (l *Ledger) GetLogs(ctx context.Context, q *ledgerstore.GetLogsQuery) (*api.Cursor[ledger.ChainedLog], error) { + logs, err := l.store.GetLogs(ctx, q) + return logs, errors.Wrap(err, "getting logs") +} + +func (l *Ledger) CreateTransaction(ctx context.Context, parameters command.Parameters, data ledger.RunScript) (*ledger.Transaction, error) { + return l.commander.CreateTransaction(ctx, parameters, data) +} + +func (l *Ledger) RevertTransaction(ctx context.Context, parameters command.Parameters, id *big.Int) (*ledger.Transaction, error) { + return l.commander.RevertTransaction(ctx, parameters, id) +} + +func (l *Ledger) SaveMeta(ctx context.Context, parameters command.Parameters, targetType string, targetID any, m metadata.Metadata) error { + return 
l.commander.SaveMeta(ctx, parameters, targetType, targetID, m) +} + +func (l *Ledger) DeleteMetadata(ctx context.Context, parameters command.Parameters, targetType string, targetID any, key string) error { + return l.commander.DeleteMetadata(ctx, parameters, targetType, targetID, key) +} + +func (l *Ledger) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + return l.store.IsSchemaUpToDate(ctx) +} diff --git a/internal/engine/migrations.go b/internal/engine/migrations.go new file mode 100644 index 000000000..6d8810893 --- /dev/null +++ b/internal/engine/migrations.go @@ -0,0 +1,11 @@ +package engine + +import ( + "context" + + "github.com/formancehq/stack/libs/go-libs/migrations" +) + +func (l *Ledger) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + return l.store.GetMigrationsInfo(ctx) +} diff --git a/internal/engine/module.go b/internal/engine/module.go new file mode 100644 index 000000000..74ab1b299 --- /dev/null +++ b/internal/engine/module.go @@ -0,0 +1,52 @@ +package engine + +import ( + "context" + + "github.com/ThreeDotsLabs/watermill/message" + "github.com/formancehq/ledger/internal/bus" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/stack/libs/go-libs/logging" + "go.uber.org/fx" +) + +type NumscriptCacheConfiguration struct { + MaxCount int +} + +type Configuration struct { + NumscriptCache NumscriptCacheConfiguration +} + +func Module(configuration Configuration) fx.Option { + return fx.Options( + fx.Provide(func( + storageDriver *driver.Driver, + publisher message.Publisher, + metricsRegistry metrics.GlobalRegistry, + logger logging.Logger, + ) *Resolver { + options := []option{ + WithMessagePublisher(publisher), + WithMetricsRegistry(metricsRegistry), + WithLogger(logger), + } + if configuration.NumscriptCache.MaxCount != 0 { + options = append(options, 
WithCompiler(command.NewCompiler(configuration.NumscriptCache.MaxCount))) + } + return NewResolver(storageDriver, options...) + }), + fx.Provide(fx.Annotate(bus.NewNoOpMonitor, fx.As(new(bus.Monitor)))), + fx.Provide(fx.Annotate(metrics.NewNoOpRegistry, fx.As(new(metrics.GlobalRegistry)))), + //TODO(gfyrag): Move in pkg/ledger package + fx.Invoke(func(lc fx.Lifecycle, resolver *Resolver) { + lc.Append(fx.Hook{ + OnStop: func(ctx context.Context) error { + return resolver.CloseLedgers(ctx) + }, + }) + }), + ) +} diff --git a/internal/engine/resolver.go b/internal/engine/resolver.go new file mode 100644 index 000000000..70cfc1fbe --- /dev/null +++ b/internal/engine/resolver.go @@ -0,0 +1,112 @@ +package engine + +import ( + "context" + "sync" + + "github.com/ThreeDotsLabs/watermill/message" + "github.com/formancehq/ledger/internal/engine/command" + "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/sirupsen/logrus" +) + +type option func(r *Resolver) + +func WithMessagePublisher(publisher message.Publisher) option { + return func(r *Resolver) { + r.publisher = publisher + } +} + +func WithMetricsRegistry(registry metrics.GlobalRegistry) option { + return func(r *Resolver) { + r.metricsRegistry = registry + } +} + +func WithCompiler(compiler *command.Compiler) option { + return func(r *Resolver) { + r.compiler = compiler + } +} + +func WithLogger(logger logging.Logger) option { + return func(r *Resolver) { + r.logger = logger + } +} + +var defaultOptions = []option{ + WithMetricsRegistry(metrics.NewNoOpRegistry()), + WithCompiler(command.NewCompiler(1024)), + WithLogger(logging.NewLogrus(logrus.New())), +} + +type Resolver struct { + storageDriver *driver.Driver + lock sync.RWMutex + metricsRegistry metrics.GlobalRegistry + //TODO(gfyrag): add a routine to clean old ledger + ledgers map[string]*Ledger + compiler *command.Compiler + 
logger logging.Logger + publisher message.Publisher +} + +func NewResolver(storageDriver *driver.Driver, options ...option) *Resolver { + r := &Resolver{ + storageDriver: storageDriver, + ledgers: map[string]*Ledger{}, + } + for _, opt := range append(defaultOptions, options...) { + opt(r) + } + + return r +} + +func (r *Resolver) GetLedger(ctx context.Context, name string) (*Ledger, error) { + r.lock.RLock() + ledger, ok := r.ledgers[name] + r.lock.RUnlock() + + if !ok { + r.lock.Lock() + defer r.lock.Unlock() + + logging.FromContext(ctx).Infof("Initialize new ledger") + + ledger, ok = r.ledgers[name] + if ok { + return ledger, nil + } + + store, err := r.storageDriver.GetLedgerStore(ctx, name) + if err != nil { + return nil, err + } + + ledger = New(store, r.publisher, r.compiler) + ledger.Start(logging.ContextWithLogger(context.Background(), r.logger)) + r.ledgers[name] = ledger + r.metricsRegistry.ActiveLedgers().Add(ctx, +1) + } + + return ledger, nil +} + +func (r *Resolver) CloseLedgers(ctx context.Context) error { + r.logger.Info("Close all ledgers") + defer func() { + r.logger.Info("All ledgers closed") + }() + for name, ledger := range r.ledgers { + r.logger.Infof("Close ledger %s", name) + ledger.Close(logging.ContextWithLogger(ctx, r.logger.WithField("ledger", name))) + delete(r.ledgers, name) + } + + return nil +} diff --git a/internal/engine/stats.go b/internal/engine/stats.go new file mode 100644 index 000000000..7df6d12b2 --- /dev/null +++ b/internal/engine/stats.go @@ -0,0 +1,32 @@ +package engine + +import ( + "context" + + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/pkg/errors" +) + +type Stats struct { + Transactions uint64 `json:"transactions"` + Accounts uint64 `json:"accounts"` +} + +func (l *Ledger) Stats(ctx context.Context) (Stats, error) { + var stats Stats + + transactions, err := l.store.CountTransactions(ctx, 
ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) + if err != nil { + return stats, errors.Wrap(err, "counting transactions") + } + + accounts, err := l.store.CountAccounts(ctx, ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) + if err != nil { + return stats, errors.Wrap(err, "counting accounts") + } + + return Stats{ + Transactions: transactions, + Accounts: accounts, + }, nil +} diff --git a/internal/engine/utils/batching/batcher.go b/internal/engine/utils/batching/batcher.go new file mode 100644 index 000000000..055f62e7d --- /dev/null +++ b/internal/engine/utils/batching/batcher.go @@ -0,0 +1,85 @@ +package batching + +import ( + "context" + "fmt" + "sync" + + "github.com/formancehq/ledger/internal/engine/utils/job" + "github.com/formancehq/stack/libs/go-libs/collectionutils" +) + +type OnBatchProcessed[T any] func(...T) + +func NoOpOnBatchProcessed[T any]() func(...T) { + return func(t ...T) {} +} + +type pending[T any] struct { + object T + callback func() +} + +type batcherJob[T any] struct { + items []*pending[T] +} + +func (b batcherJob[T]) String() string { + return fmt.Sprintf("processing %d items", len(b.items)) +} + +func (b batcherJob[T]) Terminated() { + for _, v := range b.items { + v.callback() + } +} + +type Batcher[T any] struct { + *job.Runner[batcherJob[T]] + pending []*pending[T] + mu sync.Mutex + maxBatchSize int +} + +func (s *Batcher[T]) Append(object T, callback func()) { + s.mu.Lock() + s.pending = append(s.pending, &pending[T]{ + callback: callback, + object: object, + }) + s.mu.Unlock() + s.Runner.Next() +} + +func (s *Batcher[T]) nextBatch() *batcherJob[T] { + s.mu.Lock() + defer s.mu.Unlock() + + if len(s.pending) == 0 { + return nil + } + if len(s.pending) > s.maxBatchSize { + batch := s.pending[:s.maxBatchSize] + s.pending = s.pending[s.maxBatchSize:] + return &batcherJob[T]{ + items: batch, + } + } + batch := 
s.pending + s.pending = make([]*pending[T], 0) + return &batcherJob[T]{ + items: batch, + } +} + +func NewBatcher[T any](runner func(context.Context, ...T) error, nbWorkers, maxBatchSize int) *Batcher[T] { + ret := &Batcher[T]{ + maxBatchSize: maxBatchSize, + } + ret.Runner = job.NewJobRunner[batcherJob[T]](func(ctx context.Context, job *batcherJob[T]) error { + return runner(ctx, collectionutils.Map(job.items, func(from *pending[T]) T { + return from.object + })...) + }, ret.nextBatch, nbWorkers) + return ret +} diff --git a/internal/engine/utils/job/jobs.go b/internal/engine/utils/job/jobs.go new file mode 100644 index 000000000..5538d81c5 --- /dev/null +++ b/internal/engine/utils/job/jobs.go @@ -0,0 +1,143 @@ +package job + +import ( + "context" + "fmt" + "runtime/debug" + "sync/atomic" + + "github.com/alitto/pond" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/pkg/errors" +) + +type Job interface { + Terminated() +} + +type builtJob struct { + terminatedFn func() +} + +func (j builtJob) Terminated() { + j.terminatedFn() +} + +func newJob(terminatedFn func()) *builtJob { + return &builtJob{ + terminatedFn: terminatedFn, + } +} + +type Runner[JOB Job] struct { + stopChan chan chan struct{} + runner func(context.Context, *JOB) error + nbWorkers int + parkedWorkers atomic.Int64 + nextJob func() *JOB + jobs chan *JOB + newJobsAvailable chan struct{} +} + +func (r *Runner[JOB]) Next() { + r.newJobsAvailable <- struct{}{} +} + +func (r *Runner[JOB]) Close() { + done := make(chan struct{}) + r.stopChan <- done + <-done +} + +func (r *Runner[JOB]) Run(ctx context.Context) { + + logger := logging.FromContext(ctx) + logger.Infof("Start worker") + + defer func() { + if e := recover(); e != nil { + logger.Error(e) + debug.PrintStack() + panic(e) + } + }() + + terminatedJobs := make(chan *JOB, r.nbWorkers) + jobsErrors := make(chan error, r.nbWorkers) + + w := pond.New(r.nbWorkers, r.nbWorkers) + for i := 0; i < r.nbWorkers; i++ { + i := i + 
w.Submit(func() { + defer func() { + if e := recover(); e != nil { + if err, isError := e.(error); isError { + jobsErrors <- errors.WithStack(err) + return + } + jobsErrors <- errors.WithStack(fmt.Errorf("%s", e)) + } + }() + logger := logger.WithFields(map[string]any{ + "worker": i, + }) + for { + select { + case job, ok := <-r.jobs: + if !ok { + logger.Debugf("Worker %d stopped", i) + return + } + logger := logger.WithField("job", job) + logger.Debugf("Got new job") + if err := r.runner(ctx, job); err != nil { + panic(err) + } + logger.Debugf("Job terminated") + terminatedJobs <- job + } + } + }) + } + + for { + select { + case jobError := <-jobsErrors: + panic(jobError) + case done := <-r.stopChan: + close(r.jobs) + w.StopAndWait() + close(terminatedJobs) + close(done) + return + case <-r.newJobsAvailable: + if r.parkedWorkers.Load() > 0 { + if job := r.nextJob(); job != nil { + r.jobs <- job + r.parkedWorkers.Add(-1) + } + } + case job := <-terminatedJobs: + (*job).Terminated() + if job := r.nextJob(); job != nil { + r.jobs <- job + } else { + r.parkedWorkers.Add(1) + } + } + } +} + +func NewJobRunner[JOB Job](runner func(context.Context, *JOB) error, nextJob func() *JOB, nbWorkers int) *Runner[JOB] { + parkedWorkers := atomic.Int64{} + parkedWorkers.Add(int64(nbWorkers)) + return &Runner[JOB]{ + stopChan: make(chan chan struct{}), + runner: runner, + nbWorkers: nbWorkers, + parkedWorkers: parkedWorkers, + nextJob: nextJob, + jobs: make(chan *JOB, nbWorkers), + newJobsAvailable: make(chan struct{}), + } +} diff --git a/internal/engine/utils/job/jobs_test.go b/internal/engine/utils/job/jobs_test.go new file mode 100644 index 000000000..1b875d71a --- /dev/null +++ b/internal/engine/utils/job/jobs_test.go @@ -0,0 +1,44 @@ +package job + +import ( + "context" + "sync/atomic" + "testing" + "time" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/stretchr/testify/require" +) + +func TestWorkerPool(t *testing.T) { + t.Parallel() + + const countJobs 
= 10000 + createdJobs := atomic.Int64{} + terminatedJobs := atomic.Int64{} + nextJob := func() *builtJob { + if createdJobs.Load() == 10000 { + return nil + } + createdJobs.Add(1) + return newJob(func() { + terminatedJobs.Add(1) + }) + } + runner := func(ctx context.Context, job *builtJob) error { + return nil + } + ctx := logging.TestingContext() + + pool := NewJobRunner[builtJob](runner, nextJob, 5) + go pool.Run(ctx) + defer pool.Close() + + for i := 0; i < 100; i++ { + go pool.Next() // Simulate random input + } + + require.Eventually(t, func() bool { + return countJobs == createdJobs.Load() + }, 5*time.Second, time.Millisecond*100) +} diff --git a/internal/log.go b/internal/log.go new file mode 100644 index 000000000..24369c96e --- /dev/null +++ b/internal/log.go @@ -0,0 +1,308 @@ +package ledger + +import ( + "crypto/sha256" + "encoding/json" + "math/big" + "reflect" + "strconv" + "strings" + + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/pkg/errors" +) + +type LogType int16 + +const ( + // TODO(gfyrag): Create dedicated log type for account and metadata + SetMetadataLogType LogType = iota // "SET_METADATA" + NewTransactionLogType // "NEW_TRANSACTION" + RevertedTransactionLogType // "REVERTED_TRANSACTION" + DeleteMetadataLogType +) + +func (l LogType) String() string { + switch l { + case SetMetadataLogType: + return "SET_METADATA" + case NewTransactionLogType: + return "NEW_TRANSACTION" + case RevertedTransactionLogType: + return "REVERTED_TRANSACTION" + case DeleteMetadataLogType: + return "DELETE_METADATA" + } + + return "" +} + +func LogTypeFromString(logType string) LogType { + switch logType { + case "SET_METADATA": + return SetMetadataLogType + case "NEW_TRANSACTION": + return NewTransactionLogType + case "REVERTED_TRANSACTION": + return RevertedTransactionLogType + case "DELETE_METADATA": + return DeleteMetadataLogType + } + + panic(errors.New("invalid log type")) +} + +// Needed in order to keep the compatibility with the openapi 
response for +// ListLogs. +func (lt LogType) MarshalJSON() ([]byte, error) { + return json.Marshal(lt.String()) +} + +func (lt *LogType) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + + *lt = LogTypeFromString(s) + + return nil +} + +type ChainedLog struct { + Log + ID *big.Int `json:"id"` + Projected bool `json:"-"` + Hash []byte `json:"hash"` +} + +func (l *ChainedLog) WithID(id uint64) *ChainedLog { + l.ID = big.NewInt(int64(id)) + return l +} + +func (l *ChainedLog) UnmarshalJSON(data []byte) error { + type auxLog ChainedLog + type log struct { + auxLog + Data json.RawMessage `json:"data"` + } + rawLog := log{} + if err := json.Unmarshal(data, &rawLog); err != nil { + return err + } + + var err error + rawLog.auxLog.Data, err = HydrateLog(rawLog.Type, rawLog.Data) + if err != nil { + return err + } + *l = ChainedLog(rawLog.auxLog) + return err +} + +func (l *ChainedLog) ComputeHash(previous *ChainedLog) { + digest := sha256.New() + enc := json.NewEncoder(digest) + if previous != nil { + if err := enc.Encode(previous.Hash); err != nil { + panic(err) + } + } + if err := enc.Encode(l); err != nil { + panic(err) + } + + l.Hash = digest.Sum(nil) +} + +type Log struct { + Type LogType `json:"type"` + Data any `json:"data"` + Date Time `json:"date"` + IdempotencyKey string `json:"idempotencyKey"` +} + +func (l *Log) WithDate(date Time) *Log { + l.Date = date + return l +} + +func (l *Log) WithIdempotencyKey(key string) *Log { + l.IdempotencyKey = key + return l +} + +func (l *Log) ChainLog(previous *ChainedLog) *ChainedLog { + ret := &ChainedLog{ + Log: *l, + ID: big.NewInt(0), + } + ret.ComputeHash(previous) + if previous != nil { + ret.ID = ret.ID.Add(previous.ID, big.NewInt(1)) + } + return ret +} + +type AccountMetadata map[string]metadata.Metadata + +type NewTransactionLogPayload struct { + Transaction *Transaction `json:"transaction"` + AccountMetadata AccountMetadata 
`json:"accountMetadata"` +} + +func NewTransactionLogWithDate(tx *Transaction, accountMetadata map[string]metadata.Metadata, time Time) *Log { + // Since the id is unique and the hash is a hash of the previous log, they + // will be filled at insertion time during the batch process. + return &Log{ + Type: NewTransactionLogType, + Date: time, + Data: NewTransactionLogPayload{ + Transaction: tx, + AccountMetadata: accountMetadata, + }, + } +} + +func NewTransactionLog(tx *Transaction, accountMetadata map[string]metadata.Metadata) *Log { + return NewTransactionLogWithDate(tx, accountMetadata, Now()) +} + +type SetMetadataLogPayload struct { + TargetType string `json:"targetType"` + TargetID any `json:"targetId"` + Metadata metadata.Metadata `json:"metadata"` +} + +func (s *SetMetadataLogPayload) UnmarshalJSON(data []byte) error { + type X struct { + TargetType string `json:"targetType"` + TargetID json.RawMessage `json:"targetId"` + Metadata metadata.Metadata `json:"metadata"` + } + x := X{} + err := json.Unmarshal(data, &x) + if err != nil { + return err + } + var id interface{} + switch strings.ToUpper(x.TargetType) { + case strings.ToUpper(MetaTargetTypeAccount): + id = "" + err = json.Unmarshal(x.TargetID, &id) + case strings.ToUpper(MetaTargetTypeTransaction): + id, err = strconv.ParseUint(string(x.TargetID), 10, 64) + default: + panic("unknown type") + } + if err != nil { + return err + } + + *s = SetMetadataLogPayload{ + TargetType: x.TargetType, + TargetID: id, + Metadata: x.Metadata, + } + return nil +} + +func NewSetMetadataLog(at Time, metadata SetMetadataLogPayload) *Log { + // Since the id is unique and the hash is a hash of the previous log, they + // will be filled at insertion time during the batch process. 
+ return &Log{ + Type: SetMetadataLogType, + Date: at, + Data: metadata, + } +} + +type DeleteMetadataLogPayload struct { + TargetType string `json:"targetType"` + TargetID any `json:"targetId"` + Key string `json:"key"` +} + +func NewDeleteMetadataLog(at Time, payload DeleteMetadataLogPayload) *Log { + // Since the id is unique and the hash is a hash of the previous log, they + // will be filled at insertion time during the batch process. + return &Log{ + Type: DeleteMetadataLogType, + Date: at, + Data: payload, + } +} + +func NewSetMetadataOnAccountLog(at Time, account string, metadata metadata.Metadata) *Log { + return &Log{ + Type: SetMetadataLogType, + Date: at, + Data: SetMetadataLogPayload{ + TargetType: MetaTargetTypeAccount, + TargetID: account, + Metadata: metadata, + }, + } +} + +func NewSetMetadataOnTransactionLog(at Time, txID *big.Int, metadata metadata.Metadata) *Log { + return &Log{ + Type: SetMetadataLogType, + Date: at, + Data: SetMetadataLogPayload{ + TargetType: MetaTargetTypeTransaction, + TargetID: txID, + Metadata: metadata, + }, + } +} + +type RevertedTransactionLogPayload struct { + RevertedTransactionID *big.Int `json:"revertedTransactionID"` + RevertTransaction *Transaction `json:"transaction"` +} + +func NewRevertedTransactionLog(at Time, revertedTxID *big.Int, tx *Transaction) *Log { + return &Log{ + Type: RevertedTransactionLogType, + Date: at, + Data: RevertedTransactionLogPayload{ + RevertedTransactionID: revertedTxID, + RevertTransaction: tx, + }, + } +} + +func HydrateLog(_type LogType, data []byte) (any, error) { + var payload any + switch _type { + case NewTransactionLogType: + payload = &NewTransactionLogPayload{} + case SetMetadataLogType: + payload = &SetMetadataLogPayload{} + case RevertedTransactionLogType: + payload = &RevertedTransactionLogPayload{} + default: + panic("unknown type " + _type.String()) + } + err := json.Unmarshal(data, &payload) + if err != nil { + return nil, err + } + + return 
reflect.ValueOf(payload).Elem().Interface(), nil +} + +type Accounts map[string]Account + +func ChainLogs(logs ...*Log) []*ChainedLog { + var previous *ChainedLog + ret := make([]*ChainedLog, 0) + for _, log := range logs { + next := log.ChainLog(previous) + ret = append(ret, next) + previous = next + } + return ret +} diff --git a/internal/machine/docs/instructions.md b/internal/machine/docs/instructions.md new file mode 100644 index 000000000..17ee360b6 --- /dev/null +++ b/internal/machine/docs/instructions.md @@ -0,0 +1,27 @@ +# Formance Machine Instruction Set + +``` +INIT +LOAD address +BEGIN +BALANCE asset, address +IPUSH value +MPUSH value +RPUSH value +GET register +SET register +IADD +ISUB +IMUL +MADD +MSUB +RMUL +RDD +RSUB +TXSTART size +TXEND +SEND source, destination, value +FLUSH +COMMIT +ABORT +``` diff --git a/internal/machine/docs/types.md b/internal/machine/docs/types.md new file mode 100644 index 000000000..195c2e897 --- /dev/null +++ b/internal/machine/docs/types.md @@ -0,0 +1,9 @@ +# Formance Machine Types + +``` +# Integers +unsigned 128 bits integers + +# Monetary values +{USD/2 100} +``` diff --git a/internal/machine/examples/basic.go b/internal/machine/examples/basic.go new file mode 100644 index 000000000..24cd3b7ad --- /dev/null +++ b/internal/machine/examples/basic.go @@ -0,0 +1,81 @@ +package main + +import ( + "context" + "fmt" + "math/big" + + "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/machine/script/compiler" + vm2 "github.com/formancehq/ledger/internal/machine/vm" + "github.com/formancehq/stack/libs/go-libs/metadata" +) + +func main() { + program, err := compiler.Compile(` + // This is a comment + vars { + account $dest + } + send [COIN 99] ( + source = { + 15% from { + @alice + @bob + } + remaining from @bob + } + destination = $dest + )`) + if err != nil { + panic(err) + } + fmt.Print(program) + + m := vm2.NewMachine(*program) + m.Debug = true + + if err = m.SetVarsFromJSON(map[string]string{ + 
"dest": "charlie", + }); err != nil { + panic(err) + } + + initialVolumes := map[string]map[string]*big.Int{ + "alice": { + "COIN": big.NewInt(10), + }, + "bob": { + "COIN": big.NewInt(100), + }, + } + + store := vm2.StaticStore{} + for account, balances := range initialVolumes { + store[account] = &vm2.AccountWithBalances{ + Account: ledger.Account{ + Address: account, + Metadata: metadata.Metadata{}, + }, + Balances: balances, + } + } + + _, _, err = m.ResolveResources(context.Background(), vm2.EmptyStore) + if err != nil { + panic(err) + } + + err = m.ResolveBalances(context.Background(), store) + if err != nil { + panic(err) + } + + err = m.Execute() + if err != nil { + panic(err) + } + + fmt.Println(m.Postings) + fmt.Println(m.TxMeta) +} diff --git a/internal/machine/internal/account.go b/internal/machine/internal/account.go new file mode 100644 index 000000000..b7618b286 --- /dev/null +++ b/internal/machine/internal/account.go @@ -0,0 +1,21 @@ +package internal + +import ( + "fmt" + + ledger "github.com/formancehq/ledger/internal" +) + +type AccountAddress string + +func (AccountAddress) GetType() Type { return TypeAccount } +func (a AccountAddress) String() string { + return fmt.Sprintf("@%v", string(a)) +} + +func ValidateAccountAddress(acc AccountAddress) error { + if !ledger.AccountRegexp.MatchString(string(acc)) { + return fmt.Errorf("accounts should respect pattern %s", ledger.AccountPattern) + } + return nil +} diff --git a/internal/machine/internal/address.go b/internal/machine/internal/address.go new file mode 100644 index 000000000..1915fc1b7 --- /dev/null +++ b/internal/machine/internal/address.go @@ -0,0 +1,31 @@ +package internal + +import "encoding/binary" + +// Address represents an address in the machine's resources, which include +// constants (literals) and variables passed to the program +type Address uint16 + +func NewAddress(x uint16) Address { + return Address(x) +} + +func (a Address) ToBytes() []byte { + bytes := make([]byte, 2) + 
binary.LittleEndian.PutUint16(bytes, uint16(a)) + return bytes +} + +type Addresses []Address + +func (a Addresses) Len() int { + return len(a) +} + +func (a Addresses) Less(i, j int) bool { + return a[i] < a[j] +} + +func (a Addresses) Swap(i, j int) { + a[i], a[j] = a[j], a[i] +} diff --git a/internal/machine/internal/allotment.go b/internal/machine/internal/allotment.go new file mode 100644 index 000000000..351955262 --- /dev/null +++ b/internal/machine/internal/allotment.go @@ -0,0 +1,75 @@ +package internal + +import ( + "errors" + "fmt" + "math/big" +) + +type Allotment []big.Rat + +func (Allotment) GetType() Type { return TypeAllotment } + +func NewAllotment(portions []Portion) (*Allotment, error) { + n := len(portions) + total := big.NewRat(0, 1) + var remainingIdx *int + allotment := make([]big.Rat, n) + for i := 0; i < n; i++ { + if portions[i].Remaining { + if remainingIdx != nil { + return nil, errors.New("two uses of `remaining` in the same allotment") + } + allotment[i] = big.Rat{} // temporary + idx := i + remainingIdx = &idx + } else { + rat := *portions[i].Specific + allotment[i] = rat + total.Add(total, &rat) + } + } + if total.Cmp(big.NewRat(1, 1)) == 1 { + return nil, errors.New("sum of portions exceeded 100%") + } + if remainingIdx != nil { + remaining := big.NewRat(1, 1) + remaining.Sub(remaining, total) + allotment[*remainingIdx] = *remaining + } + result := Allotment(allotment) + return &result, nil +} + +func (a Allotment) String() string { + out := "{ " + for i, ratio := range a { + out += fmt.Sprintf("%v", &ratio) + if i != len(a)-1 { + out += " : " + } + } + return out + " }" +} + +func (a Allotment) Allocate(amount *MonetaryInt) []*MonetaryInt { + amtBigint := big.Int(*amount) + parts := make([]*MonetaryInt, len(a)) + totalAllocated := Zero + // for every part in the allotment, calculate the floored value + for i, allot := range a { + var res big.Int + res.Mul(&amtBigint, allot.Num()) + res.Div(&res, allot.Denom()) + mi := 
MonetaryInt(res) + parts[i] = &mi + totalAllocated = totalAllocated.Add(parts[i]) + } + for i := range parts { + if totalAllocated.Lt(amount) { + parts[i] = parts[i].Add(NewMonetaryInt(1)) + totalAllocated = totalAllocated.Add(NewMonetaryInt(1)) + } + } + return parts +} diff --git a/internal/machine/internal/allotment_test.go b/internal/machine/internal/allotment_test.go new file mode 100644 index 000000000..e991e584c --- /dev/null +++ b/internal/machine/internal/allotment_test.go @@ -0,0 +1,66 @@ +package internal + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAllocate(t *testing.T) { + allotment, err := NewAllotment([]Portion{ + {Specific: big.NewRat(4, 5)}, + {Specific: big.NewRat(2, 25)}, + {Specific: big.NewRat(3, 25)}, + }) + require.NoError(t, err) + + parts := allotment.Allocate(NewMonetaryInt(15)) + expectedParts := []*MonetaryInt{NewMonetaryInt(13), NewMonetaryInt(1), NewMonetaryInt(1)} + if len(parts) != len(expectedParts) { + t.Fatalf("unexpected output %v != %v", parts, expectedParts) + } + for i := range parts { + if !parts[i].Equal(expectedParts[i]) { + t.Fatalf("unexpected output %v != %v", parts, expectedParts) + } + } +} + +func TestAllocateEmptyRemainder(t *testing.T) { + allotment, err := NewAllotment([]Portion{ + {Specific: big.NewRat(1, 2)}, + {Specific: big.NewRat(1, 2)}, + {Remaining: true}, + }) + require.NoError(t, err) + + parts := allotment.Allocate(NewMonetaryInt(15)) + expectedParts := []*MonetaryInt{NewMonetaryInt(8), NewMonetaryInt(7), NewMonetaryInt(0)} + if len(parts) != len(expectedParts) { + t.Fatalf("unexpected output %v != %v", parts, expectedParts) + } + for i := range parts { + if !parts[i].Equal(expectedParts[i]) { + t.Fatalf("unexpected output %v != %v", parts, expectedParts) + } + } + +} + +func TestInvalidAllotments(t *testing.T) { + _, err := NewAllotment([]Portion{ + {Remaining: true}, + {Specific: big.NewRat(2, 25)}, + {Remaining: 
true}, + }) + assert.Errorf(t, err, "allowed two remainings") + + _, err = NewAllotment([]Portion{ + {Specific: big.NewRat(1, 2)}, + {Specific: big.NewRat(1, 2)}, + {Specific: big.NewRat(1, 2)}, + }) + assert.Errorf(t, err, "allowed more than 100%") +} diff --git a/internal/machine/internal/asset.go b/internal/machine/internal/asset.go new file mode 100644 index 000000000..29f46fcde --- /dev/null +++ b/internal/machine/internal/asset.go @@ -0,0 +1,27 @@ +package internal + +import ( + "fmt" + + ledger "github.com/formancehq/ledger/internal" +) + +type Asset string + +func (Asset) GetType() Type { return TypeAsset } +func (a Asset) String() string { + return fmt.Sprintf("%v", string(a)) +} + +type HasAsset interface { + GetAsset() Asset +} + +func (a Asset) GetAsset() Asset { return a } + +func ValidateAsset(ass Asset) error { + if !ledger.AssetRegexp.MatchString(string(ass)) { + return fmt.Errorf("asset should respect pattern '%s'", ledger.AssetPattern) + } + return nil +} diff --git a/internal/machine/internal/funding.go b/internal/machine/internal/funding.go new file mode 100644 index 000000000..c66a9fefb --- /dev/null +++ b/internal/machine/internal/funding.go @@ -0,0 +1,169 @@ +package internal + +import ( + "errors" + "fmt" +) + +type FundingPart struct { + Amount *MonetaryInt + Account AccountAddress +} + +func (Funding) GetType() Type { return TypeFunding } + +func (f Funding) GetAsset() Asset { return f.Asset } + +func (lhs FundingPart) Equals(rhs FundingPart) bool { + return lhs.Account == rhs.Account && lhs.Amount.Equal(rhs.Amount) +} + +type Funding struct { + Asset Asset + Parts []FundingPart +} + +func (lhs Funding) Equals(rhs Funding) bool { + if lhs.Asset != rhs.Asset { + return false + } + if len(lhs.Parts) != len(rhs.Parts) { + return false + } + for i := range lhs.Parts { + if !lhs.Parts[i].Equals(rhs.Parts[i]) { + return false + } + } + return true +} + +func (f Funding) String() string { + out := fmt.Sprintf("[%v", string(f.Asset)) + for _, part 
:= range f.Parts { + out += fmt.Sprintf(" %v %v", part.Account, part.Amount) + } + return out + "]" +} + +func (f Funding) Take(amount *MonetaryInt) (Funding, Funding, error) { + result := Funding{ + Asset: f.Asset, + } + remainder := Funding{ + Asset: f.Asset, + } + + if amount.Eq(Zero) && len(f.Parts) > 0 { + result.Parts = append(result.Parts, FundingPart{ + Account: f.Parts[0].Account, + Amount: amount, + }) + } + + remainingToWithdraw := amount + i := 0 + for remainingToWithdraw.Gt(Zero) && i < len(f.Parts) { + amtToWithdraw := f.Parts[i].Amount + // if this part has excess balance, put it in the remainder & only take what's needed + if amtToWithdraw.Gt(remainingToWithdraw) { + rem := amtToWithdraw.Sub(remainingToWithdraw) + amtToWithdraw = remainingToWithdraw + remainder.Parts = append(remainder.Parts, FundingPart{ + Account: f.Parts[i].Account, + Amount: rem, + }) + } + remainingToWithdraw = remainingToWithdraw.Sub(amtToWithdraw) + result.Parts = append(result.Parts, FundingPart{ + Account: f.Parts[i].Account, + Amount: amtToWithdraw, + }) + i++ + } + for i < len(f.Parts) { + remainder.Parts = append(remainder.Parts, FundingPart{ + Account: f.Parts[i].Account, + Amount: f.Parts[i].Amount, + }) + i++ + } + if !remainingToWithdraw.Eq(Zero) { + return Funding{}, Funding{}, errors.New("account had insufficient funds") + } + return result, remainder, nil +} + +func (f Funding) TakeMax(amount *MonetaryInt) (Funding, Funding) { + result := Funding{ + Asset: f.Asset, + } + remainder := Funding{ + Asset: f.Asset, + } + remainingToWithdraw := amount + i := 0 + for remainingToWithdraw.Gt(Zero) && i < len(f.Parts) { + amtToWithdraw := f.Parts[i].Amount + // if this part has excess balance, put it in the remainder & only take what's needed + if amtToWithdraw.Gt(remainingToWithdraw) { + rem := amtToWithdraw.Sub(remainingToWithdraw) + amtToWithdraw = remainingToWithdraw + remainder.Parts = append(remainder.Parts, FundingPart{ + Account: f.Parts[i].Account, + Amount: rem, + 
}) + } + remainingToWithdraw = remainingToWithdraw.Sub(amtToWithdraw) + result.Parts = append(result.Parts, FundingPart{ + Account: f.Parts[i].Account, + Amount: amtToWithdraw, + }) + i++ + } + for i < len(f.Parts) { + remainder.Parts = append(remainder.Parts, FundingPart{ + Account: f.Parts[i].Account, + Amount: f.Parts[i].Amount, + }) + i++ + } + return result, remainder +} + +func (f Funding) Concat(other Funding) (Funding, error) { + if f.Asset != other.Asset { + return Funding{}, errors.New("tried to concat different assets") + } + res := Funding{ + Asset: f.Asset, + Parts: f.Parts, + } + if len(res.Parts) > 0 && len(other.Parts) > 0 && res.Parts[len(res.Parts)-1].Account == other.Parts[0].Account { + res.Parts[len(res.Parts)-1].Amount = res.Parts[len(res.Parts)-1].Amount.Add(other.Parts[0].Amount) + res.Parts = append(res.Parts, other.Parts[1:]...) + } else { + res.Parts = append(res.Parts, other.Parts...) + } + return res, nil +} + +func (f Funding) Total() *MonetaryInt { + total := Zero + for _, part := range f.Parts { + total = total.Add(part.Amount) + } + return total +} + +func (f Funding) Reverse() Funding { + newParts := []FundingPart{} + for i := len(f.Parts) - 1; i >= 0; i-- { + newParts = append(newParts, f.Parts[i]) + } + newFunding := Funding{ + Asset: f.Asset, + Parts: newParts, + } + return newFunding +} diff --git a/internal/machine/internal/funding_test.go b/internal/machine/internal/funding_test.go new file mode 100644 index 000000000..90ebad93a --- /dev/null +++ b/internal/machine/internal/funding_test.go @@ -0,0 +1,165 @@ +package internal + +import ( + "testing" +) + +func TestFundingTake(t *testing.T) { + f := Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(70), + }, + { + Account: "bbb", + Amount: NewMonetaryInt(30), + }, + { + Account: "ccc", + Amount: NewMonetaryInt(50), + }, + }, + } + result, remainder, err := f.Take(NewMonetaryInt(80)) + if err != nil { + t.Fatal(err) + } + 
expectedResult := Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(70), + }, + { + Account: "bbb", + Amount: NewMonetaryInt(10), + }, + }, + } + if !ValueEquals(result, expectedResult) { + t.Fatalf("unexpected result: %v", result) + } + expectedRemainder := Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "bbb", + Amount: NewMonetaryInt(20), + }, + { + Account: "ccc", + Amount: NewMonetaryInt(50), + }, + }, + } + if !ValueEquals(remainder, expectedRemainder) { + t.Fatalf("unexpected remainder: %v", remainder) + } +} + +func TestFundingTakeMaxUnder(t *testing.T) { + f := Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(30), + }, + }, + } + result, remainder := f.TakeMax(NewMonetaryInt(80)) + if !ValueEquals(result, Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(30), + }, + }, + }) { + t.Fatalf("unexpected result: %v", result) + } + if !ValueEquals(remainder, Funding{ + Asset: "COIN", + }) { + t.Fatalf("unexpected remainder: %v", remainder) + } +} + +func TestFundingTakeMaxAbove(t *testing.T) { + f := Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(90), + }, + }, + } + result, remainder := f.TakeMax(NewMonetaryInt(80)) + if !ValueEquals(result, Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(80), + }, + }, + }) { + t.Fatalf("unexpected result: %v", result) + } + if !ValueEquals(remainder, Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(10), + }, + }, + }) { + t.Fatalf("unexpected remainder: %v", remainder) + } +} + +func TestFundingReversal(t *testing.T) { + f := Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "aaa", + Amount: NewMonetaryInt(10), + }, + { + Account: "bbb", + Amount: NewMonetaryInt(20), + }, + { + Account: "ccc", + Amount: 
NewMonetaryInt(30), + }, + }, + } + rev := f.Reverse() + if !ValueEquals(rev, Funding{ + Asset: "COIN", + Parts: []FundingPart{ + { + Account: "ccc", + Amount: NewMonetaryInt(30), + }, + { + Account: "bbb", + Amount: NewMonetaryInt(20), + }, + { + Account: "aaa", + Amount: NewMonetaryInt(10), + }, + }, + }) { + t.Fatalf("unexpected result: %v", rev) + } +} diff --git a/internal/machine/internal/json.go b/internal/machine/internal/json.go new file mode 100644 index 000000000..83c3deb0a --- /dev/null +++ b/internal/machine/internal/json.go @@ -0,0 +1,85 @@ +package internal + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/pkg/errors" +) + +type ValueJSON struct { + Type string `json:"type"` + Value json.RawMessage `json:"value"` +} + +func NewValueFromString(typ Type, data string) (Value, error) { + var value Value + switch typ { + case TypeAccount: + if err := ValidateAccountAddress(AccountAddress(data)); err != nil { + return nil, errors.Wrapf(err, "value %s", data) + } + value = AccountAddress(data) + case TypeAsset: + if err := ValidateAsset(Asset(data)); err != nil { + return nil, errors.Wrapf(err, "value %s", data) + } + value = Asset(data) + case TypeNumber: + var number Number + if err := json.Unmarshal([]byte(data), &number); err != nil { + return nil, err + } + value = number + case TypeMonetary: + parts := strings.SplitN(data, " ", 2) + if len(parts) != 2 { + return nil, errors.New("monetary must have two parts") + } + mi, err := ParseMonetaryInt(parts[1]) + if err != nil { + return nil, err + } + mon := Monetary{ + Asset: Asset(parts[0]), + Amount: mi, + } + if err := ParseMonetary(mon); err != nil { + return nil, errors.Wrapf(err, "value %s", mon.String()) + } + value = mon + case TypePortion: + res, err := ParsePortionSpecific(data) + if err != nil { + return nil, err + } + value = *res + case TypeString: + value = String(data) + default: + return nil, fmt.Errorf("invalid type '%v'", typ) + } + + return value, nil +} + +func 
NewStringFromValue(value Value) (string, error) { + switch value.GetType() { + case TypeAccount: + return string(value.(AccountAddress)), nil + case TypeAsset: + return string(value.(Asset)), nil + case TypeString: + return string(value.(String)), nil + case TypeNumber: + return value.(*MonetaryInt).String(), nil + case TypeMonetary: + m := value.(Monetary) + return fmt.Sprintf("%s %s", m.Asset, m.Amount), nil + case TypePortion: + return value.(Portion).String(), nil + default: + return "", fmt.Errorf("invalid type '%v'", value.GetType()) + } +} diff --git a/internal/machine/internal/json_test.go b/internal/machine/internal/json_test.go new file mode 100644 index 000000000..62b8f1e4a --- /dev/null +++ b/internal/machine/internal/json_test.go @@ -0,0 +1,111 @@ +package internal + +import ( + "encoding/json" + "math/big" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAccountTypedJSON(t *testing.T) { + j := "users:001" + value, err := NewValueFromString(TypeAccount, j) + require.NoError(t, err) + + if !ValueEquals(value, AccountAddress("users:001")) { + t.Fatalf("unexpected value: %v", value) + } +} + +func TestAssetTypedJSON(t *testing.T) { + j := "EUR/2" + value, err := NewValueFromString(TypeAsset, j) + require.NoError(t, err) + + if !ValueEquals(value, Asset("EUR/2")) { + t.Fatalf("unexpected value: %v", value) + } +} + +func TestNumberTypedJSON(t *testing.T) { + j := "89849865111111111111111111111111111555555555555555555555555555555555555555555555555999999999999999999999" + value, err := NewValueFromString(TypeNumber, j) + require.NoError(t, err) + + num, err := ParseNumber("89849865111111111111111111111111111555555555555555555555555555555555555555555555555999999999999999999999") + require.NoError(t, err) + + if !ValueEquals(value, num) { + t.Fatalf("unexpected value: %v", value) + } +} + +func TestMonetaryTypedJSON(t *testing.T) { + j := "EUR/2 123456" + value, err := NewValueFromString(TypeMonetary, 
j) + require.NoError(t, err) + + if !ValueEquals(value, Monetary{ + Asset: "EUR/2", + Amount: NewMonetaryInt(123456), + }) { + t.Fatalf("unexpected value: %v", value) + } +} + +func TestPortionTypedJSON(t *testing.T) { + j := "90%" + value, err := NewValueFromString(TypePortion, j) + require.NoError(t, err) + + portion, err := NewPortionSpecific(*big.NewRat(90, 100)) + require.NoError(t, err) + + if !ValueEquals(value, *portion) { + t.Fatalf("unexpected value: %v", value) + } +} + +func TestMarshalJSON(t *testing.T) { + t.Run("account", func(t *testing.T) { + by, err := json.Marshal(AccountAddress("platform")) + require.NoError(t, err) + assert.Equal(t, `"platform"`, string(by)) + }) + t.Run("asset", func(t *testing.T) { + by, err := json.Marshal(Asset("COIN")) + require.NoError(t, err) + assert.Equal(t, `"COIN"`, string(by)) + }) + t.Run("number", func(t *testing.T) { + by, err := json.Marshal( + Number(big.NewInt(42))) + require.NoError(t, err) + assert.Equal(t, `42`, string(by)) + }) + t.Run("string", func(t *testing.T) { + by, err := json.Marshal(String("test")) + require.NoError(t, err) + assert.Equal(t, `"test"`, string(by)) + }) + t.Run("monetary", func(t *testing.T) { + by, err := json.Marshal( + Monetary{ + Asset: "COIN", + Amount: NewMonetaryInt(42), + }) + require.NoError(t, err) + assert.Equal(t, `{"asset":"COIN","amount":42}`, string(by)) + }) + t.Run("portion", func(t *testing.T) { + by, err := json.Marshal( + Portion{ + Remaining: true, + Specific: big.NewRat(10, 12), + }) + require.NoError(t, err) + assert.Equal(t, `{"remaining":true,"specific":"5/6"}`, string(by)) + }) +} diff --git a/pkg/core/monetary.go b/internal/machine/internal/monetary.go similarity index 59% rename from pkg/core/monetary.go rename to internal/machine/internal/monetary.go index 36b7889ab..14dfd759e 100644 --- a/pkg/core/monetary.go +++ b/internal/machine/internal/monetary.go @@ -1,38 +1,74 @@ -package core +package internal import ( - "errors" + "fmt" "math/big" + + 
"github.com/pkg/errors" ) +type Monetary struct { + Asset Asset `json:"asset"` + Amount *MonetaryInt `json:"amount"` +} + +func (Monetary) GetType() Type { return TypeMonetary } + +func (m Monetary) String() string { + if m.Amount == nil { + return fmt.Sprintf("[%s nil]", m.Asset) + } + amt := *m.Amount + return fmt.Sprintf("[%v %s]", m.Asset, amt.String()) +} + +func (m Monetary) GetAsset() Asset { return m.Asset } + +var Zero = NewMonetaryInt(0) + +func ParseMonetary(mon Monetary) error { + if err := ValidateAsset(mon.Asset); err != nil { + return errors.Wrapf(err, "asset '%s'", mon.Asset) + } + if mon.Amount == nil { + return errors.Errorf("nil amount") + } + if mon.Amount.Ltz() { + return errors.Errorf("negative amount") + } + return nil +} + type MonetaryInt big.Int +func (MonetaryInt) GetType() Type { return TypeNumber } + func (a *MonetaryInt) Add(b *MonetaryInt) *MonetaryInt { if a == nil { - a = NewMonetaryInt(0) + a = (*MonetaryInt)(&big.Int{}) } if b == nil { - b = NewMonetaryInt(0) + b = (*MonetaryInt)(&big.Int{}) } - return (*MonetaryInt)(big.NewInt(0).Add((*big.Int)(a), (*big.Int)(b))) + return (*MonetaryInt)((&big.Int{}).Add((*big.Int)(a), (*big.Int)(b))) } func (a *MonetaryInt) Sub(b *MonetaryInt) *MonetaryInt { if a == nil { - a = NewMonetaryInt(0) + a = (*MonetaryInt)(&big.Int{}) } if b == nil { - b = NewMonetaryInt(0) + b = (*MonetaryInt)(&big.Int{}) } - return (*MonetaryInt)(big.NewInt(0).Sub((*big.Int)(a), (*big.Int)(b))) + return (*MonetaryInt)((&big.Int{}).Sub((*big.Int)(a), (*big.Int)(b))) } func (a *MonetaryInt) Neg() *MonetaryInt { - return (*MonetaryInt)(big.NewInt(0).Neg((*big.Int)(a))) + return (*MonetaryInt)((&big.Int{}).Neg((*big.Int)(a))) } func (a *MonetaryInt) OrZero() *MonetaryInt { @@ -56,7 +92,7 @@ func (a *MonetaryInt) Lt(b *MonetaryInt) bool { } func (a *MonetaryInt) Ltz() bool { - return (*big.Int)(a).Cmp(big.NewInt(0)) < 0 + return (*big.Int)(a).Cmp(new(big.Int)) < 0 } func (a *MonetaryInt) Gt(b *MonetaryInt) bool { @@ 
-110,8 +146,12 @@ func NewMonetaryInt(i int64) *MonetaryInt { return (*MonetaryInt)(big.NewInt(i)) } +func NewMonetaryIntFromBigInt(v *big.Int) *MonetaryInt { + return (*MonetaryInt)(v) +} + func ParseMonetaryInt(s string) (*MonetaryInt, error) { - i, ok := big.NewInt(0).SetString(s, 10) + i, ok := (&big.Int{}).SetString(s, 10) if !ok { return nil, errors.New("invalid monetary int") } diff --git a/internal/machine/internal/number.go b/internal/machine/internal/number.go new file mode 100644 index 000000000..97acb7244 --- /dev/null +++ b/internal/machine/internal/number.go @@ -0,0 +1,11 @@ +package internal + +type Number = *MonetaryInt + +func NewNumber(i int64) Number { + return NewMonetaryInt(i) +} + +func ParseNumber(s string) (Number, error) { + return ParseMonetaryInt(s) +} diff --git a/internal/machine/internal/portion.go b/internal/machine/internal/portion.go new file mode 100644 index 000000000..2c242a1f9 --- /dev/null +++ b/internal/machine/internal/portion.go @@ -0,0 +1,94 @@ +package internal + +import ( + "errors" + "fmt" + "math/big" + "regexp" +) + +type Portion struct { + Remaining bool `json:"remaining"` + Specific *big.Rat `json:"specific"` +} + +func (Portion) GetType() Type { return TypePortion } + +func NewPortionRemaining() Portion { + return Portion{ + Remaining: true, + Specific: nil, + } +} + +func NewPortionSpecific(r big.Rat) (*Portion, error) { + if r.Cmp(big.NewRat(0, 1)) == -1 || r.Cmp(big.NewRat(1, 1)) == 1 { + return nil, errors.New("portion must be between 0% and 100% inclusive") + } + return &Portion{ + Remaining: false, + Specific: &r, + }, nil +} + +func ValidatePortionSpecific(p Portion) error { + if p.Remaining { + return errors.New("remaining should not be true for a specific portion") + } + if p.Specific == nil { + return errors.New("specific portion should not be nil") + } + if p.Specific.Cmp(big.NewRat(0, 1)) == -1 || p.Specific.Cmp(big.NewRat(1, 1)) == 1 { + return errors.New("specific portion must be between 0% and 100% 
inclusive") + } + return nil +} + +func (lhs Portion) Equals(rhs Portion) bool { + if lhs.Remaining != rhs.Remaining { + return false + } + if !lhs.Remaining && lhs.Specific.Cmp(rhs.Specific) != 0 { + return false + } + return true +} + +func ParsePortionSpecific(input string) (*Portion, error) { + var res *big.Rat + var ok bool + + re := regexp.MustCompile(`^([0-9]+)(?:[.]([0-9]+))?[%]$`) + percentMatch := re.FindStringSubmatch(input) + if len(percentMatch) != 0 { + integral := percentMatch[1] + fractional := percentMatch[2] + res, ok = new(big.Rat).SetString(integral + "." + fractional) + if !ok { + return nil, errors.New("invalid percent format") + } + res.Mul(res, big.NewRat(1, 100)) + } else { + re = regexp.MustCompile(`^([0-9]+)\s?[/]\s?([0-9]+)$`) + fractionMatch := re.FindStringSubmatch(input) + if len(fractionMatch) != 0 { + numerator := fractionMatch[1] + denominator := fractionMatch[2] + res, ok = new(big.Rat).SetString(numerator + "/" + denominator) + if !ok { + return nil, errors.New("invalid fractional format") + } + } + } + if res == nil { + return nil, errors.New("invalid format") + } + return NewPortionSpecific(*res) +} + +func (p Portion) String() string { + if p.Remaining { + return "remaining" + } + return fmt.Sprintf("%v", p.Specific) +} diff --git a/internal/machine/internal/portion_test.go b/internal/machine/internal/portion_test.go new file mode 100644 index 000000000..ce047cd1b --- /dev/null +++ b/internal/machine/internal/portion_test.go @@ -0,0 +1,129 @@ +package internal + +import ( + "math/big" + "strings" + "testing" +) + +func TestBetween0And1Inclusive(t *testing.T) { + tests := []struct { + in string + want *big.Rat + wantErr bool + }{ + { + in: "0%", + want: big.NewRat(0, 1), + }, + { + in: "0.0%", + want: big.NewRat(0, 1), + }, + { + in: "0/1", + want: big.NewRat(0, 1), + }, + { + in: "0/25", + want: big.NewRat(0, 1), + }, + { + in: "0/100", + want: big.NewRat(0, 1), + }, + { + in: "1%", + want: big.NewRat(1, 100), + }, + { + in: 
"1/100", + want: big.NewRat(1, 100), + }, + { + in: "10/1000", + want: big.NewRat(1, 100), + }, + { + in: "50/100", + want: big.NewRat(50, 100), + }, + { + in: "50%", + want: big.NewRat(50, 100), + }, + { + in: "50.0%", + want: big.NewRat(50, 100), + }, + { + in: "1/1", + want: big.NewRat(1, 1), + }, + { + in: "100/100", + want: big.NewRat(1, 1), + }, + { + in: "100.0%", + want: big.NewRat(1, 1), + }, + { + in: "100%", + want: big.NewRat(1, 1), + }, + // Now for the failures. We don't check negative numbers in this test because + // those are a parsing failure, not a range failure. + { + in: "100.1%", + wantErr: true, + }, + { + in: "101%", + wantErr: true, + }, + { + in: "101/100", + wantErr: true, + }, + { + in: "2/1", + wantErr: true, + }, + { + in: "3/2", + wantErr: true, + }, + } + + for _, test := range tests { + t.Run(test.in, func(t *testing.T) { + got, err := ParsePortionSpecific(test.in) + if test.wantErr { + if err == nil { + t.Fatal("should have errored") + } + if !strings.Contains(err.Error(), "between") { + t.Fatal("wrong error") + } + return + } + if err != nil { + t.Fatalf("ParsePortionSpecific(%q): %v", test.in, err) + } + if test.want.Cmp(got.Specific) != 0 { + t.Fatalf("ParsePortionSpecific(%q) = %q, want %q", test.in, got, test.want) + } + }) + } +} + +func TestInvalidFormat(t *testing.T) { + _, err := ParsePortionSpecific("this is not a portion") + if err == nil { + t.Fatal("should have errored") + } + if !strings.Contains(err.Error(), "format") { + t.Fatal("wrong error") + } +} diff --git a/internal/machine/internal/value.go b/internal/machine/internal/value.go new file mode 100644 index 000000000..00f7e586e --- /dev/null +++ b/internal/machine/internal/value.go @@ -0,0 +1,86 @@ +package internal + +import ( + "fmt" + "reflect" +) + +type Type byte + +const ( + TypeAccount = Type(iota + 1) // address of an account + TypeAsset // name of an asset + TypeNumber // 64bit unsigned integer + TypeString // string + TypeMonetary // [asset number] + 
TypePortion // rational number between 0 and 1 both inclusive + TypeAllotment // list of portions + TypeAmount // either ALL or a SPECIFIC number + TypeFunding // (asset, []{amount, account}) +) + +func (t Type) String() string { + switch t { + case TypeAccount: + return "account" + case TypeAsset: + return "asset" + case TypeNumber: + return "number" + case TypeString: + return "string" + case TypeMonetary: + return "monetary" + case TypePortion: + return "portion" + case TypeAllotment: + return "allotment" + case TypeAmount: + return "amount" + default: + return "invalid type" + } +} + +type Value interface { + GetType() Type +} + +type String string + +func (String) GetType() Type { return TypeString } +func (s String) String() string { + return fmt.Sprintf("\"%v\"", string(s)) +} + +func ValueEquals(lhs, rhs Value) bool { + if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) { + return false + } + if lhsn, ok := lhs.(*MonetaryInt); ok { + rhsn := rhs.(*MonetaryInt) + return lhsn.Equal(rhsn) + } else if lhsm, ok := lhs.(Monetary); ok { + rhsm := rhs.(Monetary) + return lhsm.Asset == rhsm.Asset && lhsm.Amount.Equal(rhsm.Amount) + } else if lhsa, ok := lhs.(Allotment); ok { + rhsa := rhs.(Allotment) + if len(lhsa) != len(rhsa) { + return false + } + for i := range lhsa { + if lhsa[i].Cmp(&rhsa[i]) != 0 { + return false + } + } + } else if lhsp, ok := lhs.(Portion); ok { + rhsp := rhs.(Portion) + return lhsp.Equals(rhsp) + } else if lhsf, ok := lhs.(Funding); ok { + rhsf := rhs.(Funding) + return lhsf.Equals(rhsf) + } else if lhs != rhs { + return false + } + return true +} diff --git a/internal/machine/machine.go b/internal/machine/machine.go new file mode 100644 index 000000000..7ca08dd33 --- /dev/null +++ b/internal/machine/machine.go @@ -0,0 +1,50 @@ +package machine + +import ( + "math/big" + + ledger "github.com/formancehq/ledger/internal" + vm2 "github.com/formancehq/ledger/internal/machine/vm" + "github.com/formancehq/stack/libs/go-libs/errorsutil" + 
"github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/pkg/errors" +) + +type Result struct { + Postings ledger.Postings + Metadata metadata.Metadata + AccountMetadata map[string]metadata.Metadata +} + +func Run(m *vm2.Machine, script ledger.RunScript) (*Result, error) { + err := m.Execute() + if err != nil { + return nil, errors.Wrap(err, "script execution failed") + } + + result := Result{ + Postings: make([]ledger.Posting, len(m.Postings)), + Metadata: m.GetTxMetaJSON(), + AccountMetadata: m.GetAccountsMetaJSON(), + } + + for j, posting := range m.Postings { + result.Postings[j] = ledger.Posting{ + Source: posting.Source, + Destination: posting.Destination, + Amount: (*big.Int)(posting.Amount), + Asset: posting.Asset, + } + } + + for k, v := range script.Metadata { + _, ok := result.Metadata[k] + if ok { + return nil, errorsutil.NewError(vm2.ErrMetadataOverride, + errors.New("cannot override metadata from script")) + } + result.Metadata[k] = v + } + + return &result, nil +} diff --git a/internal/machine/machine_test.go b/internal/machine/machine_test.go new file mode 100644 index 000000000..5b6c45bc4 --- /dev/null +++ b/internal/machine/machine_test.go @@ -0,0 +1,407 @@ +package machine + +import ( + "context" + "errors" + "math/big" + "testing" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/machine/script/compiler" + vm2 "github.com/formancehq/ledger/internal/machine/vm" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/stretchr/testify/require" +) + +type testCase struct { + name string + script string + vars map[string]string + expectErrorCode error + expectResult Result + store vm2.Store + metadata metadata.Metadata +} + +var testCases = []testCase{ + { + name: "nominal", + script: ` + send [USD/2 99] ( + source = @world + destination = @user:001 + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "user:001", "USD/2", big.NewInt(99)), + }, + 
Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "not enough funds", + script: ` + send [USD/2 99] ( + source = @bank + destination = @user:001 + )`, + expectErrorCode: vm2.ErrInsufficientFund, + }, + { + name: "send $0", + script: ` + send [USD/2 0] ( + source = @alice + destination = @user:001 + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("alice", "user:001", "USD/2", big.NewInt(0)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "send $0 world", + script: ` + send [USD/2 0] ( + source = @world + destination = @user:001 + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "user:001", "USD/2", big.NewInt(0)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "send all available", + script: ` + send [USD/2 *] ( + source = @alice + destination = @user:001 + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("alice", "user:001", "USD/2", big.NewInt(0)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "with variable", + script: ` + vars { + account $dest + } + + send [CAD/2 42] ( + source = @world + destination = $dest + )`, + vars: map[string]string{ + "dest": "user:001", + }, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "user:001", "CAD/2", big.NewInt(42)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "using metadata", + store: vm2.StaticStore{ + "sales:001": &vm2.AccountWithBalances{ + Account: ledger.Account{ + Address: "sales:001", + Metadata: metadata.Metadata{ + "seller": "users:001", + }, + }, + Balances: map[string]*big.Int{ + "COIN": big.NewInt(100), + }, + }, + "users:001": &vm2.AccountWithBalances{ + Account: 
ledger.Account{ + Address: "sales:001", + Metadata: metadata.Metadata{ + "commission": "15.5%", + }, + }, + Balances: map[string]*big.Int{}, + }, + }, + script: ` + vars { + account $sale + account $seller = meta($sale, "seller") + portion $commission = meta($seller, "commission") + } + + send [COIN *] ( + source = $sale + destination = { + remaining to $seller + $commission to @platform + } + ) + `, + vars: map[string]string{ + "sale": "sales:001", + }, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("sales:001", "users:001", "COIN", big.NewInt(85)), + ledger.NewPosting("sales:001", "platform", "COIN", big.NewInt(15)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "defining metadata from input", + script: ` + send [USD/2 99] ( + source = @world + destination = @users:001 + )`, + metadata: metadata.Metadata{ + "priority": "low", + }, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "users:001", "USD/2", big.NewInt(99)), + }, + Metadata: metadata.Metadata{ + "priority": "low", + }, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "defining metadata from script", + script: ` + set_tx_meta("priority", "low") + send [USD/2 99] ( + source = @world + destination = @users:001 + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "users:001", "USD/2", big.NewInt(99)), + }, + Metadata: metadata.Metadata{ + "priority": "low", + }, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "override metadata from script", + script: ` + set_tx_meta("priority", "low") + send [USD/2 99] ( + source = @world + destination = @users:001 + )`, + metadata: metadata.Metadata{ + "priority": "low", + }, + expectErrorCode: vm2.ErrMetadataOverride, + }, + { + name: "set account meta", + script: ` + send [USD/2 99] ( + source = @world + destination = @users:001 + ) + set_account_meta(@alice, 
"aaa", "string meta") + set_account_meta(@alice, "bbb", 42) + set_account_meta(@alice, "ccc", COIN) + set_account_meta(@alice, "ddd", [COIN 30]) + set_account_meta(@alice, "eee", @bob) + `, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "users:001", "USD/2", big.NewInt(99)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{ + "alice": { + "aaa": "string meta", + "bbb": "42", + "ccc": "COIN", + "ddd": "COIN 30", + "eee": "bob", + }, + }, + }, + }, + { + name: "balance function", + store: vm2.StaticStore{ + "users:001": { + Account: ledger.Account{ + Address: "users:001", + Metadata: metadata.Metadata{}, + }, + Balances: map[string]*big.Int{ + "COIN": big.NewInt(100), + }, + }, + }, + script: ` + vars { + monetary $bal = balance(@users:001, COIN) + } + send $bal ( + source = @users:001 + destination = @world + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("users:001", "world", "COIN", big.NewInt(100)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "overdraft", + script: ` + send [USD/2 100] ( + source = @users:001 allowing unbounded overdraft + destination = @users:002 + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("users:001", "users:002", "USD/2", big.NewInt(100)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "send amount 0", + store: vm2.StaticStore{ + "alice": { + Account: ledger.Account{ + Address: "alice", + Metadata: metadata.Metadata{}, + }, + Balances: map[string]*big.Int{}, + }, + }, + script: ` + send [USD 0] ( + source = @alice + destination = @bob + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("alice", "bob", "USD", big.NewInt(0)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "send all with 
balance 0", + store: vm2.StaticStore{ + "alice": { + Account: ledger.Account{ + Address: "alice", + Metadata: metadata.Metadata{}, + }, + Balances: map[string]*big.Int{}, + }, + }, + script: ` + send [USD *] ( + source = @alice + destination = @bob + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("alice", "bob", "USD", big.NewInt(0)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, + { + name: "send account balance of 0", + store: vm2.StaticStore{ + "alice": { + Account: ledger.Account{ + Address: "alice", + Metadata: metadata.Metadata{}, + }, + Balances: map[string]*big.Int{}, + }, + }, + script: ` + vars { + monetary $bal = balance(@alice, USD) + } + send $bal ( + source = @alice + destination = @bob + )`, + expectResult: Result{ + Postings: []ledger.Posting{ + ledger.NewPosting("alice", "bob", "USD", big.NewInt(0)), + }, + Metadata: metadata.Metadata{}, + AccountMetadata: map[string]metadata.Metadata{}, + }, + }, +} + +func TestMachine(t *testing.T) { + t.Parallel() + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + + if tc.store == nil { + tc.store = vm2.StaticStore{} + } + + program, err := compiler.Compile(tc.script) + require.NoError(t, err) + + m := vm2.NewMachine(*program) + require.NoError(t, m.SetVarsFromJSON(tc.vars)) + + _, _, err = m.ResolveResources(context.Background(), tc.store) + require.NoError(t, err) + require.NoError(t, m.ResolveBalances(context.Background(), tc.store)) + + result, err := Run(m, ledger.RunScript{ + Script: ledger.Script{ + Plain: tc.script, + Vars: tc.vars, + }, + Metadata: tc.metadata, + }) + if tc.expectErrorCode != nil { + require.True(t, errors.Is(err, tc.expectErrorCode)) + } else { + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, tc.expectResult, *result) + } + }) + } +} diff --git a/internal/machine/script/NumScript.g4 b/internal/machine/script/NumScript.g4 new file mode 100644 
index 000000000..489759bd2 --- /dev/null +++ b/internal/machine/script/NumScript.g4 @@ -0,0 +1,174 @@ +grammar NumScript; + +NEWLINE: [\r\n]+; +WHITESPACE: [ \t]+ -> skip; + +MULTILINE_COMMENT: '/*' (MULTILINE_COMMENT|.)*? '*/' -> skip; +LINE_COMMENT: '//' .*? NEWLINE -> skip; +VARS: 'vars'; +META: 'meta'; +SET_TX_META: 'set_tx_meta'; +SET_ACCOUNT_META: 'set_account_meta'; +PRINT: 'print'; +FAIL: 'fail'; +SEND: 'send'; +SOURCE: 'source'; +FROM: 'from'; +MAX: 'max'; +DESTINATION: 'destination'; +TO: 'to'; +ALLOCATE: 'allocate'; +OP_ADD: '+'; +OP_SUB: '-'; +LPAREN: '('; +RPAREN: ')'; +LBRACK: '['; +RBRACK: ']'; +LBRACE: '{'; +RBRACE: '}'; +EQ: '='; +TY_ACCOUNT: 'account'; +TY_ASSET: 'asset'; +TY_NUMBER: 'number'; +TY_MONETARY: 'monetary'; +TY_PORTION: 'portion'; +TY_STRING: 'string'; +STRING: '"' [a-zA-Z0-9_\- ]* '"'; +PORTION: + ( [0-9]+ [ ]? '/' [ ]? [0-9]+ + | [0-9]+ ('.' [0-9]+)? '%' + ); +REMAINING: 'remaining'; +KEPT: 'kept'; +BALANCE: 'balance'; +SAVE: 'save'; +NUMBER: [0-9]+; +PERCENT: '%'; +VARIABLE_NAME: '$' [a-z_]+ [a-z0-9_]*; +ACCOUNT: '@' [a-zA-Z_]+ [a-zA-Z0-9_:]*; +ASSET: [A-Z/0-9]+; + +monetary: LBRACK asset=expression amt=NUMBER RBRACK; + +monetaryAll: LBRACK asset=expression '*' RBRACK; + +literal + : ACCOUNT # LitAccount + | ASSET # LitAsset + | NUMBER # LitNumber + | STRING # LitString + | PORTION # LitPortion + | monetary # LitMonetary + ; + +variable: VARIABLE_NAME; + +expression + : lhs=expression op=(OP_ADD|OP_SUB) rhs=expression # ExprAddSub + | lit=literal # ExprLiteral + | var_=variable # ExprVariable + ; + +allotmentPortion + : PORTION # AllotmentPortionConst + | por=variable # AllotmentPortionVar + | REMAINING # AllotmentPortionRemaining + ; + +destinationInOrder + : LBRACE NEWLINE + (MAX amounts+=expression dests+=keptOrDestination NEWLINE)+ + REMAINING remainingDest=keptOrDestination NEWLINE + RBRACE + ; + +destinationAllotment + : LBRACE NEWLINE + (portions+=allotmentPortion dests+=keptOrDestination NEWLINE)+ + RBRACE + ; + 
+keptOrDestination + : TO destination # IsDestination + | KEPT # IsKept + ; + +destination + : expression # DestAccount + | destinationInOrder # DestInOrder + | destinationAllotment # DestAllotment + ; + +sourceAccountOverdraft + : 'allowing overdraft up to' specific=expression # SrcAccountOverdraftSpecific + | 'allowing unbounded overdraft' # SrcAccountOverdraftUnbounded + ; + +sourceAccount: account=expression (overdraft=sourceAccountOverdraft)?; + +sourceInOrder + : LBRACE NEWLINE + (sources+=source NEWLINE)+ + RBRACE + ; + +sourceMaxed: MAX max=expression FROM src=source; + +source + : sourceAccount # SrcAccount + | sourceMaxed # SrcMaxed + | sourceInOrder # SrcInOrder + ; + +sourceAllotment + : LBRACE NEWLINE + (portions+=allotmentPortion FROM sources+=source NEWLINE)+ + RBRACE + ; + +valueAwareSource + : source # Src + | sourceAllotment # SrcAllotment + ; + +statement + : PRINT expr=expression # Print + | SAVE (mon=expression | monAll=monetaryAll) FROM acc=expression # SaveFromAccount + | SET_TX_META '(' key=STRING ',' value=expression ')' # SetTxMeta + | SET_ACCOUNT_META '(' acc=expression ',' key=STRING ',' value=expression ')' # SetAccountMeta + | FAIL # Fail + | SEND (mon=expression | monAll=monetaryAll) LPAREN NEWLINE + ( SOURCE '=' src=valueAwareSource NEWLINE DESTINATION '=' dest=destination + | DESTINATION '=' dest=destination NEWLINE SOURCE '=' src=valueAwareSource) NEWLINE RPAREN # Send + ; + +type_ + : TY_ACCOUNT + | TY_ASSET + | TY_NUMBER + | TY_STRING + | TY_MONETARY + | TY_PORTION + ; + +origin + : META '(' account=expression ',' key=STRING ')' # OriginAccountMeta + | BALANCE '(' account=expression ',' asset=expression ')' # OriginAccountBalance + ; + +varDecl: ty=type_ name=variable (EQ orig=origin)?; + +varListDecl + : VARS LBRACE NEWLINE + (v+=varDecl NEWLINE+)+ + RBRACE NEWLINE + ; + +script: + NEWLINE* + vars=varListDecl? 
+ stmts+=statement + (NEWLINE stmts+=statement)* + NEWLINE* + EOF + ; diff --git a/internal/machine/script/compiler/allotment.go b/internal/machine/script/compiler/allotment.go new file mode 100644 index 000000000..9e7a0e4fa --- /dev/null +++ b/internal/machine/script/compiler/allotment.go @@ -0,0 +1,84 @@ +package compiler + +import ( + "errors" + "fmt" + "math/big" + + "github.com/antlr/antlr4/runtime/Go/antlr" + internal2 "github.com/formancehq/ledger/internal/machine/internal" + "github.com/formancehq/ledger/internal/machine/script/parser" + program2 "github.com/formancehq/ledger/internal/machine/vm/program" +) + +func (p *parseVisitor) VisitAllotment(c antlr.ParserRuleContext, portions []parser.IAllotmentPortionContext) *CompileError { + total := big.NewRat(0, 1) + hasVariable := false + hasRemaining := false + for i := len(portions) - 1; i >= 0; i-- { + c := portions[i] + switch c := c.(type) { + case *parser.AllotmentPortionConstContext: + portion, err := internal2.ParsePortionSpecific(c.GetText()) + if err != nil { + return LogicError(c, err) + } + rat := *portion.Specific + total.Add(&rat, total) + addr, err := p.AllocateResource(program2.Constant{Inner: *portion}) + if err != nil { + return LogicError(c, err) + } + p.PushAddress(*addr) + case *parser.AllotmentPortionVarContext: + ty, _, err := p.VisitVariable(c.GetPor(), true) + if err != nil { + return err + } + if ty != internal2.TypePortion { + return LogicError(c, + fmt.Errorf("wrong type: expected type portion for variable: %v", ty), + ) + } + hasVariable = true + case *parser.AllotmentPortionRemainingContext: + if hasRemaining { + return LogicError(c, + errors.New("two uses of `remaining` in the same allocation"), + ) + } + addr, err := p.AllocateResource(program2.Constant{Inner: internal2.NewPortionRemaining()}) + if err != nil { + return LogicError(c, err) + } + p.PushAddress(*addr) + hasRemaining = true + } + } + if total.Cmp(big.NewRat(1, 1)) == 1 { + return LogicError(c, + errors.New("the sum 
of known portions is greater than 100%"), + ) + } + if total.Cmp(big.NewRat(1, 1)) == -1 && !hasRemaining { + return LogicError(c, + errors.New("the sum of portions might be less than 100%"), + ) + } + if total.Cmp(big.NewRat(1, 1)) == 0 && hasVariable { + return LogicError(c, + errors.New("the sum of portions might be greater than 100%"), + ) + } + if total.Cmp(big.NewRat(1, 1)) == 0 && hasRemaining { + return LogicError(c, + errors.New("known portions are already equal to 100%"), + ) + } + err := p.PushInteger(internal2.NewNumber(int64(len(portions)))) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program2.OP_MAKE_ALLOTMENT) + return nil +} diff --git a/internal/machine/script/compiler/compiler.go b/internal/machine/script/compiler/compiler.go new file mode 100644 index 000000000..0f72697f9 --- /dev/null +++ b/internal/machine/script/compiler/compiler.go @@ -0,0 +1,697 @@ +package compiler + +import ( + "fmt" + "sort" + "strings" + + "github.com/antlr/antlr4/runtime/Go/antlr" + internal2 "github.com/formancehq/ledger/internal/machine/internal" + parser2 "github.com/formancehq/ledger/internal/machine/script/parser" + program2 "github.com/formancehq/ledger/internal/machine/vm/program" + "github.com/pkg/errors" +) + +type parseVisitor struct { + errListener *ErrorListener + instructions []byte + // resources must not exceed 65536 elements + resources []program2.Resource + // sources store all source accounts + // a source can be also a destination of another posting + sources map[internal2.Address]struct{} + // varIdx maps name to resource index + varIdx map[string]internal2.Address + // needBalances store for each account, the set of assets needed + neededBalances map[internal2.Address]map[internal2.Address]struct{} +} + +// Allocates constants if it hasn't already been, +// and returns its resource address. 
+func (p *parseVisitor) findConstant(constant program2.Constant) (*internal2.Address, bool) { + for i := 0; i < len(p.resources); i++ { + if c, ok := p.resources[i].(program2.Constant); ok { + if internal2.ValueEquals(c.Inner, constant.Inner) { + addr := internal2.Address(i) + return &addr, true + } + } + } + return nil, false +} + +func (p *parseVisitor) AllocateResource(res program2.Resource) (*internal2.Address, error) { + if c, ok := res.(program2.Constant); ok { + idx, ok := p.findConstant(c) + if ok { + return idx, nil + } + } + if len(p.resources) >= 65536 { + return nil, errors.New("number of unique constants exceeded 65536") + } + p.resources = append(p.resources, res) + addr := internal2.NewAddress(uint16(len(p.resources) - 1)) + return &addr, nil +} + +func (p *parseVisitor) isWorld(addr internal2.Address) bool { + idx := int(addr) + if idx < len(p.resources) { + if c, ok := p.resources[idx].(program2.Constant); ok { + if acc, ok := c.Inner.(internal2.AccountAddress); ok { + if string(acc) == "world" { + return true + } + } + } + } + return false +} + +func (p *parseVisitor) VisitVariable(c parser2.IVariableContext, push bool) (internal2.Type, *internal2.Address, *CompileError) { + name := c.GetText()[1:] // strip '$' prefix + if idx, ok := p.varIdx[name]; ok { + res := p.resources[idx] + if push { + p.PushAddress(idx) + } + return res.GetType(), &idx, nil + } else { + return 0, nil, LogicError(c, errors.New("variable not declared")) + } +} + +func (p *parseVisitor) VisitExpr(c parser2.IExpressionContext, push bool) (internal2.Type, *internal2.Address, *CompileError) { + switch c := c.(type) { + case *parser2.ExprAddSubContext: + lhsType, lhsAddr, err := p.VisitExpr(c.GetLhs(), push) + if err != nil { + return 0, nil, err + } + switch lhsType { + case internal2.TypeNumber: + rhsType, _, err := p.VisitExpr(c.GetRhs(), push) + if err != nil { + return 0, nil, err + } + if rhsType != internal2.TypeNumber { + return 0, nil, LogicError(c, fmt.Errorf( + "tried 
to do an arithmetic operation with incompatible left and right-hand side operand types: %s and %s", + lhsType, rhsType)) + } + if push { + switch c.GetOp().GetTokenType() { + case parser2.NumScriptLexerOP_ADD: + p.AppendInstruction(program2.OP_IADD) + case parser2.NumScriptLexerOP_SUB: + p.AppendInstruction(program2.OP_ISUB) + } + } + return internal2.TypeNumber, nil, nil + case internal2.TypeMonetary: + rhsType, _, err := p.VisitExpr(c.GetRhs(), push) + if err != nil { + return 0, nil, err + } + if rhsType != internal2.TypeMonetary { + return 0, nil, LogicError(c, fmt.Errorf( + "tried to do an arithmetic operation with incompatible left and right-hand side operand types: %s and %s", + lhsType, rhsType)) + } + if push { + switch c.GetOp().GetTokenType() { + case parser2.NumScriptLexerOP_ADD: + p.AppendInstruction(program2.OP_MONETARY_ADD) + case parser2.NumScriptLexerOP_SUB: + p.AppendInstruction(program2.OP_MONETARY_SUB) + } + } + return internal2.TypeMonetary, lhsAddr, nil + default: + return 0, nil, LogicError(c, fmt.Errorf( + "tried to do an arithmetic operation with unsupported left-hand side operand type: %s", + lhsType)) + } + case *parser2.ExprLiteralContext: + return p.VisitLit(c.GetLit(), push) + case *parser2.ExprVariableContext: + return p.VisitVariable(c.GetVar_(), push) + default: + return 0, nil, InternalError(c) + } +} + +func (p *parseVisitor) VisitLit(c parser2.ILiteralContext, push bool) (internal2.Type, *internal2.Address, *CompileError) { + switch c := c.(type) { + case *parser2.LitAccountContext: + account := internal2.AccountAddress(c.GetText()[1:]) + addr, err := p.AllocateResource(program2.Constant{Inner: account}) + if err != nil { + return 0, nil, LogicError(c, err) + } + if push { + p.PushAddress(*addr) + } + return internal2.TypeAccount, addr, nil + case *parser2.LitAssetContext: + asset := internal2.Asset(c.GetText()) + addr, err := p.AllocateResource(program2.Constant{Inner: asset}) + if err != nil { + return 0, nil, LogicError(c, 
err) + } + if push { + p.PushAddress(*addr) + } + return internal2.TypeAsset, addr, nil + case *parser2.LitNumberContext: + number, err := internal2.ParseNumber(c.GetText()) + if err != nil { + return 0, nil, LogicError(c, err) + } + addr, err := p.AllocateResource(program2.Constant{Inner: number}) + if err != nil { + return 0, nil, LogicError(c, err) + } + if push { + p.PushAddress(*addr) + } + return internal2.TypeNumber, addr, nil + case *parser2.LitStringContext: + addr, err := p.AllocateResource(program2.Constant{ + Inner: internal2.String(strings.Trim(c.GetText(), `"`)), + }) + if err != nil { + return 0, nil, LogicError(c, err) + } + if push { + p.PushAddress(*addr) + } + return internal2.TypeString, addr, nil + case *parser2.LitPortionContext: + portion, err := internal2.ParsePortionSpecific(c.GetText()) + if err != nil { + return 0, nil, LogicError(c, err) + } + addr, err := p.AllocateResource(program2.Constant{Inner: *portion}) + if err != nil { + return 0, nil, LogicError(c, err) + } + if push { + p.PushAddress(*addr) + } + return internal2.TypePortion, addr, nil + case *parser2.LitMonetaryContext: + typ, assetAddr, compErr := p.VisitExpr(c.Monetary().GetAsset(), false) + if compErr != nil { + return 0, nil, compErr + } + if typ != internal2.TypeAsset { + return 0, nil, LogicError(c, fmt.Errorf( + "the expression in monetary literal should be of type '%s' instead of '%s'", + internal2.TypeAsset, typ)) + } + + amt, err := internal2.ParseMonetaryInt(c.Monetary().GetAmt().GetText()) + if err != nil { + return 0, nil, LogicError(c, err) + } + + var ( + monAddr *internal2.Address + alreadyAllocated bool + ) + for i, r := range p.resources { + switch v := r.(type) { + case program2.Monetary: + if v.Asset == *assetAddr && v.Amount.Equal(amt) { + alreadyAllocated = true + tmp := internal2.Address(uint16(i)) + monAddr = &tmp + break + } + } + } + if !alreadyAllocated { + monAddr, err = p.AllocateResource(program2.Monetary{ + Asset: *assetAddr, + Amount: amt, + }) 
+ if err != nil { + return 0, nil, LogicError(c, err) + } + } + if push { + p.PushAddress(*monAddr) + } + return internal2.TypeMonetary, monAddr, nil + default: + return 0, nil, InternalError(c) + } +} + +func (p *parseVisitor) VisitMonetaryAll(c *parser2.SendContext, monAll parser2.IMonetaryAllContext) *CompileError { + assetType, assetAddr, compErr := p.VisitExpr(monAll.GetAsset(), false) + if compErr != nil { + return compErr + } + if assetType != internal2.TypeAsset { + return LogicError(c, fmt.Errorf( + "send monetary all: the expression should be of type 'asset' instead of '%s'", assetType)) + } + + switch c := c.GetSrc().(type) { + case *parser2.SrcContext: + accounts, _, _, compErr := p.VisitSource(c.Source(), func() { + p.PushAddress(*assetAddr) + }, true) + if compErr != nil { + return compErr + } + p.setNeededBalances(accounts, assetAddr) + + case *parser2.SrcAllotmentContext: + return LogicError(c, errors.New("cannot take all balance of an allotment source")) + } + return nil +} + +func (p *parseVisitor) VisitMonetary(c *parser2.SendContext, mon parser2.IExpressionContext) *CompileError { + monType, monAddr, compErr := p.VisitExpr(mon, false) + if compErr != nil { + return compErr + } + if monType != internal2.TypeMonetary { + return LogicError(c, fmt.Errorf( + "send monetary: the expression should be of type 'monetary' instead of '%s'", monType)) + } + + switch c := c.GetSrc().(type) { + case *parser2.SrcContext: + accounts, _, fallback, compErr := p.VisitSource(c.Source(), func() { + p.PushAddress(*monAddr) + p.AppendInstruction(program2.OP_ASSET) + }, false) + if compErr != nil { + return compErr + } + p.setNeededBalances(accounts, monAddr) + + if _, _, err := p.VisitExpr(mon, true); err != nil { + return err + } + + if err := p.TakeFromSource(fallback); err != nil { + return LogicError(c, err) + } + case *parser2.SrcAllotmentContext: + if _, _, err := p.VisitExpr(mon, true); err != nil { + return err + } + p.VisitAllotment(c.SourceAllotment(), 
c.SourceAllotment().GetPortions()) + p.AppendInstruction(program2.OP_ALLOC) + + sources := c.SourceAllotment().GetSources() + n := len(sources) + for i := 0; i < n; i++ { + accounts, _, fallback, compErr := p.VisitSource(sources[i], func() { + p.PushAddress(*monAddr) + p.AppendInstruction(program2.OP_ASSET) + }, false) + if compErr != nil { + return compErr + } + p.setNeededBalances(accounts, monAddr) + + if err := p.Bump(int64(i + 1)); err != nil { + return LogicError(c, err) + } + + if err := p.TakeFromSource(fallback); err != nil { + return LogicError(c, err) + } + } + + if err := p.PushInteger(internal2.NewNumber(int64(n))); err != nil { + return LogicError(c, err) + } + + p.AppendInstruction(program2.OP_FUNDING_ASSEMBLE) + } + return nil +} + +func (p *parseVisitor) setNeededBalances(accounts map[internal2.Address]struct{}, addr *internal2.Address) { + for acc := range accounts { + if b, ok := p.neededBalances[acc]; ok { + b[*addr] = struct{}{} + } else { + p.neededBalances[acc] = map[internal2.Address]struct{}{ + *addr: {}, + } + } + } +} + +func (p *parseVisitor) VisitSend(c *parser2.SendContext) *CompileError { + if monAll := c.GetMonAll(); monAll != nil { + if err := p.VisitMonetaryAll(c, monAll); err != nil { + return err + } + } else if mon := c.GetMon(); mon != nil { + if err := p.VisitMonetary(c, mon); err != nil { + return err + } + } + + if err := p.VisitDestination(c.GetDest()); err != nil { + return err + } + + return nil +} + +func (p *parseVisitor) VisitSetTxMeta(ctx *parser2.SetTxMetaContext) *CompileError { + _, _, compErr := p.VisitExpr(ctx.GetValue(), true) + if compErr != nil { + return compErr + } + + keyAddr, err := p.AllocateResource(program2.Constant{ + Inner: internal2.String(strings.Trim(ctx.GetKey().GetText(), `"`)), + }) + if err != nil { + return LogicError(ctx, err) + } + p.PushAddress(*keyAddr) + + p.AppendInstruction(program2.OP_TX_META) + + return nil +} + +func (p *parseVisitor) VisitSetAccountMeta(ctx 
*parser2.SetAccountMetaContext) *CompileError { + _, _, compErr := p.VisitExpr(ctx.GetValue(), true) + if compErr != nil { + return compErr + } + + keyAddr, err := p.AllocateResource(program2.Constant{ + Inner: internal2.String(strings.Trim(ctx.GetKey().GetText(), `"`)), + }) + if err != nil { + return LogicError(ctx, err) + } + p.PushAddress(*keyAddr) + + ty, accAddr, compErr := p.VisitExpr(ctx.GetAcc(), false) + if compErr != nil { + return compErr + } + if ty != internal2.TypeAccount { + return LogicError(ctx, fmt.Errorf( + "set_account_meta: expression is of type %s, and should be of type account", ty)) + } + p.PushAddress(*accAddr) + + p.AppendInstruction(program2.OP_ACCOUNT_META) + + return nil +} + +func (p *parseVisitor) VisitSaveFromAccount(c *parser2.SaveFromAccountContext) *CompileError { + var ( + typ internal2.Type + addr *internal2.Address + compErr *CompileError + ) + if monAll := c.GetMonAll(); monAll != nil { + typ, addr, compErr = p.VisitExpr(monAll.GetAsset(), false) + if compErr != nil { + return compErr + } + if typ != internal2.TypeAsset { + return LogicError(c, fmt.Errorf( + "save monetary all from account: the first expression should be of type 'asset' instead of '%s'", typ)) + } + } else if mon := c.GetMon(); mon != nil { + typ, addr, compErr = p.VisitExpr(mon, false) + if compErr != nil { + return compErr + } + if typ != internal2.TypeMonetary { + return LogicError(c, fmt.Errorf( + "save monetary from account: the first expression should be of type 'monetary' instead of '%s'", typ)) + } + } + p.PushAddress(*addr) + + typ, addr, compErr = p.VisitExpr(c.GetAcc(), false) + if compErr != nil { + return compErr + } + if typ != internal2.TypeAccount { + return LogicError(c, fmt.Errorf( + "save monetary from account: the second expression should be of type 'account' instead of '%s'", typ)) + } + p.PushAddress(*addr) + + p.AppendInstruction(program2.OP_SAVE) + + return nil +} + +func (p *parseVisitor) VisitPrint(ctx *parser2.PrintContext) 
*CompileError { + _, _, err := p.VisitExpr(ctx.GetExpr(), true) + if err != nil { + return err + } + + p.AppendInstruction(program2.OP_PRINT) + + return nil +} + +func (p *parseVisitor) VisitVars(c *parser2.VarListDeclContext) *CompileError { + if len(c.GetV()) > 32768 { + return LogicError(c, fmt.Errorf("number of variables exceeded %v", 32768)) + } + + for _, v := range c.GetV() { + name := v.GetName().GetText()[1:] + if _, ok := p.varIdx[name]; ok { + return LogicError(c, fmt.Errorf("duplicate variable $%s", name)) + } + var ty internal2.Type + switch v.GetTy().GetText() { + case "account": + ty = internal2.TypeAccount + case "asset": + ty = internal2.TypeAsset + case "number": + ty = internal2.TypeNumber + case "string": + ty = internal2.TypeString + case "monetary": + ty = internal2.TypeMonetary + case "portion": + ty = internal2.TypePortion + default: + return InternalError(c) + } + + var addr *internal2.Address + var err error + if v.GetOrig() == nil { + addr, err = p.AllocateResource(program2.Variable{Typ: ty, Name: name}) + if err != nil { + return &CompileError{ + Msg: errors.Wrap(err, + "allocating variable resource").Error(), + } + } + p.varIdx[name] = *addr + continue + } + + switch c := v.GetOrig().(type) { + case *parser2.OriginAccountMetaContext: + srcTy, src, compErr := p.VisitExpr(c.GetAccount(), false) + if compErr != nil { + return compErr + } + if srcTy != internal2.TypeAccount { + return LogicError(c, fmt.Errorf( + "variable $%s: type should be 'account' to pull account metadata", name)) + } + key := strings.Trim(c.GetKey().GetText(), `"`) + addr, err = p.AllocateResource(program2.VariableAccountMetadata{ + Typ: ty, + Name: name, + Account: *src, + Key: key, + }) + case *parser2.OriginAccountBalanceContext: + if ty != internal2.TypeMonetary { + return LogicError(c, fmt.Errorf( + "variable $%s: type should be 'monetary' to pull account balance", name)) + } + accTy, accAddr, compErr := p.VisitExpr(c.GetAccount(), false) + if compErr != nil { + 
return compErr + } + if accTy != internal2.TypeAccount { + return LogicError(c, fmt.Errorf( + "variable $%s: the first argument to pull account balance should be of type 'account'", name)) + } + + assTy, assAddr, compErr := p.VisitExpr(c.GetAsset(), false) + if compErr != nil { + return compErr + } + if assTy != internal2.TypeAsset { + return LogicError(c, fmt.Errorf( + "variable $%s: the second argument to pull account balance should be of type 'asset'", name)) + } + + addr, err = p.AllocateResource(program2.VariableAccountBalance{ + Name: name, + Account: *accAddr, + Asset: *assAddr, + }) + if err != nil { + return LogicError(c, err) + } + } + if err != nil { + return LogicError(c, err) + } + + p.varIdx[name] = *addr + } + + return nil +} + +func (p *parseVisitor) VisitScript(c parser2.IScriptContext) *CompileError { + switch c := c.(type) { + case *parser2.ScriptContext: + vars := c.GetVars() + if vars != nil { + switch c := vars.(type) { + case *parser2.VarListDeclContext: + if err := p.VisitVars(c); err != nil { + return err + } + default: + return InternalError(c) + } + } + + for _, stmt := range c.GetStmts() { + var err *CompileError + switch c := stmt.(type) { + case *parser2.PrintContext: + err = p.VisitPrint(c) + case *parser2.FailContext: + p.AppendInstruction(program2.OP_FAIL) + case *parser2.SendContext: + err = p.VisitSend(c) + case *parser2.SetTxMetaContext: + err = p.VisitSetTxMeta(c) + case *parser2.SetAccountMetaContext: + err = p.VisitSetAccountMeta(c) + case *parser2.SaveFromAccountContext: + err = p.VisitSaveFromAccount(c) + default: + return InternalError(c) + } + if err != nil { + return err + } + } + default: + return InternalError(c) + } + + return nil +} + +type CompileArtifacts struct { + Source string + Tokens []antlr.Token + Errors []CompileError + Program *program2.Program +} + +func CompileFull(input string) CompileArtifacts { + artifacts := CompileArtifacts{ + Source: input, + } + + errListener := &ErrorListener{} + + is := 
antlr.NewInputStream(input) + lexer := parser2.NewNumScriptLexer(is) + lexer.RemoveErrorListeners() + lexer.AddErrorListener(errListener) + + stream := antlr.NewCommonTokenStream(lexer, antlr.LexerDefaultTokenChannel) + p := parser2.NewNumScriptParser(stream) + p.RemoveErrorListeners() + p.AddErrorListener(errListener) + + p.BuildParseTrees = true + + tree := p.Script() + + artifacts.Tokens = stream.GetAllTokens() + artifacts.Errors = append(artifacts.Errors, errListener.Errors...) + + if len(errListener.Errors) != 0 { + return artifacts + } + + visitor := parseVisitor{ + errListener: errListener, + instructions: make([]byte, 0), + resources: make([]program2.Resource, 0), + varIdx: make(map[string]internal2.Address), + neededBalances: make(map[internal2.Address]map[internal2.Address]struct{}), + sources: map[internal2.Address]struct{}{}, + } + + err := visitor.VisitScript(tree) + if err != nil { + artifacts.Errors = append(artifacts.Errors, *err) + return artifacts + } + + sources := make(internal2.Addresses, 0) + for address := range visitor.sources { + sources = append(sources, address) + } + sort.Stable(sources) + + artifacts.Program = &program2.Program{ + Instructions: visitor.instructions, + Resources: visitor.resources, + NeededBalances: visitor.neededBalances, + Sources: sources, + } + + return artifacts +} + +func Compile(input string) (*program2.Program, error) { + artifacts := CompileFull(input) + if len(artifacts.Errors) > 0 { + err := CompileErrorList{ + Errors: artifacts.Errors, + Source: artifacts.Source, + } + return nil, &err + } + + return artifacts.Program, nil +} diff --git a/internal/machine/script/compiler/compiler_test.go b/internal/machine/script/compiler/compiler_test.go new file mode 100644 index 000000000..a6b3868a7 --- /dev/null +++ b/internal/machine/script/compiler/compiler_test.go @@ -0,0 +1,1711 @@ +package compiler + +import ( + "bytes" + "fmt" + "math/big" + "reflect" + "testing" + + internal2 
"github.com/formancehq/ledger/internal/machine/internal" + program2 "github.com/formancehq/ledger/internal/machine/vm/program" + "github.com/stretchr/testify/require" +) + +type TestCase struct { + Case string + Expected CaseResult +} + +type CaseResult struct { + Instructions []byte + Resources []program2.Resource + Variables []string + Error string +} + +func test(t *testing.T, c TestCase) { + p, err := Compile(c.Case) + if c.Expected.Error != "" { + require.Error(t, err) + require.NotEmpty(t, err.Error()) + require.ErrorContains(t, err, c.Expected.Error) + return + } + require.NoError(t, err) + require.NotNil(t, p) + + if len(c.Expected.Instructions) > 0 && !bytes.Equal(p.Instructions, c.Expected.Instructions) { + t.Error(fmt.Errorf( + "unexpected instructions:\n%v\nhas: %+v\nwant:%+v", + *p, p.Instructions, c.Expected.Instructions)) + return + } else if len(p.Resources) != len(c.Expected.Resources) { + t.Error(fmt.Errorf( + "unexpected resources\n%v\nhas: \n%+v\nwant:\n%+v", + *p, p.Resources, c.Expected.Resources)) + return + } + + for i, expected := range c.Expected.Resources { + if !checkResourcesEqual(p.Resources[i], c.Expected.Resources[i]) { + t.Error(fmt.Errorf("%v: %v is not %v: %v", + p.Resources[i], reflect.TypeOf(p.Resources[i]).Name(), + expected, reflect.TypeOf(expected).Name(), + )) + t.Error(fmt.Errorf( + "unexpected resources\n%v\nhas: \n%+v\nwant:\n%+v", + *p, p.Resources, c.Expected.Resources)) + return + } + } +} + +func checkResourcesEqual(actual, expected program2.Resource) bool { + if reflect.TypeOf(actual) != reflect.TypeOf(expected) { + return false + } + switch res := actual.(type) { + case program2.Constant: + return internal2.ValueEquals(res.Inner, expected.(program2.Constant).Inner) + case program2.Variable: + e := expected.(program2.Variable) + return res.Typ == e.Typ && res.Name == e.Name + case program2.VariableAccountMetadata: + e := expected.(program2.VariableAccountMetadata) + return res.Account == e.Account && + res.Key == 
e.Key && + res.Typ == e.Typ + case program2.VariableAccountBalance: + e := expected.(program2.VariableAccountBalance) + return res.Account == e.Account && + res.Asset == e.Asset + case program2.Monetary: + e := expected.(program2.Monetary) + return res.Amount.Equal(e.Amount) && res.Asset == e.Asset + default: + panic(fmt.Errorf("invalid resource of type '%T'", res)) + } +} + +func TestSimplePrint(t *testing.T) { + test(t, TestCase{ + Case: "print 1", + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_PRINT, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + }, + }, + }) +} + +func TestCompositeExpr(t *testing.T) { + test(t, TestCase{ + Case: "print 29 + 15 - 2", + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 01, 00, + program2.OP_IADD, + program2.OP_APUSH, 02, 00, + program2.OP_ISUB, + program2.OP_PRINT, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.NewMonetaryInt(29)}, + program2.Constant{Inner: internal2.NewMonetaryInt(15)}, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + }, + }, + }) +} + +func TestFail(t *testing.T) { + test(t, TestCase{ + Case: "fail", + Expected: CaseResult{ + Instructions: []byte{program2.OP_FAIL}, + Resources: []program2.Resource{}, + }, + }) +} + +func TestCRLF(t *testing.T) { + test(t, TestCase{ + Case: "print @a\r\nprint @b", + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_PRINT, + program2.OP_APUSH, 01, 00, + program2.OP_PRINT, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.AccountAddress("a")}, + program2.Constant{Inner: internal2.AccountAddress("b")}, + }, + }, + }) +} + +func TestConstant(t *testing.T) { + user := internal2.AccountAddress("user:U001") + test(t, TestCase{ + Case: "print @user:U001", + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + 
program2.OP_PRINT, + }, + Resources: []program2.Resource{program2.Constant{Inner: user}}, + }, + }) +} + +func TestSetTxMeta(t *testing.T) { + test(t, TestCase{ + Case: ` + set_tx_meta("aaa", @platform) + set_tx_meta("bbb", GEM) + set_tx_meta("ccc", 42) + set_tx_meta("ddd", "test") + set_tx_meta("eee", [COIN 30]) + set_tx_meta("fff", 15%) + `, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 01, 00, + program2.OP_TX_META, + program2.OP_APUSH, 02, 00, + program2.OP_APUSH, 03, 00, + program2.OP_TX_META, + program2.OP_APUSH, 04, 00, + program2.OP_APUSH, 05, 00, + program2.OP_TX_META, + program2.OP_APUSH, 06, 00, + program2.OP_APUSH, 07, 00, + program2.OP_TX_META, + program2.OP_APUSH, 9, 00, + program2.OP_APUSH, 10, 00, + program2.OP_TX_META, + program2.OP_APUSH, 11, 00, + program2.OP_APUSH, 12, 00, + program2.OP_TX_META, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.AccountAddress("platform")}, + program2.Constant{Inner: internal2.String("aaa")}, + program2.Constant{Inner: internal2.Asset("GEM")}, + program2.Constant{Inner: internal2.String("bbb")}, + program2.Constant{Inner: internal2.NewNumber(42)}, + program2.Constant{Inner: internal2.String("ccc")}, + program2.Constant{Inner: internal2.String("test")}, + program2.Constant{Inner: internal2.String("ddd")}, + program2.Constant{Inner: internal2.Asset("COIN")}, + program2.Monetary{Asset: 8, Amount: internal2.NewMonetaryInt(30)}, + program2.Constant{Inner: internal2.String("eee")}, + program2.Constant{Inner: internal2.Portion{ + Remaining: false, + Specific: big.NewRat(15, 100), + }}, + program2.Constant{Inner: internal2.String("fff")}, + }, + }, + }) +} + +func TestSetTxMetaVars(t *testing.T) { + test(t, TestCase{ + Case: ` + vars { + portion $commission + } + set_tx_meta("fee", $commission) + `, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 01, 00, + program2.OP_TX_META, + }, + Resources: 
[]program2.Resource{ + program2.Variable{Typ: internal2.TypePortion, Name: "commission"}, + program2.Constant{Inner: internal2.String("fee")}, + }, + }, + }) +} + +func TestComments(t *testing.T) { + test(t, TestCase{ + Case: ` + /* This is a multi-line comment, it spans multiple lines + and /* doesn't choke on nested comments */ ! */ + vars { + account $a + } + // this is a single-line comment + print $a + `, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_PRINT, + }, + Resources: []program2.Resource{ + program2.Variable{Typ: internal2.TypeAccount, Name: "a"}, + }, + }, + }) +} + +func TestUndeclaredVariable(t *testing.T) { + test(t, TestCase{ + Case: "print $nope", + Expected: CaseResult{ + Error: "declared", + }, + }) +} + +func TestInvalidTypeInSendValue(t *testing.T) { + test(t, TestCase{ + Case: ` + send @a ( + source = { + @a + [GEM 2] + } + destination = @b + )`, + Expected: CaseResult{ + Error: "send monetary: the expression should be of type 'monetary' instead of 'account'", + }, + }) +} + +func TestInvalidTypeInSource(t *testing.T) { + test(t, TestCase{ + Case: ` + send [USD/2 99] ( + source = { + @a + [GEM 2] + } + destination = @b + )`, + Expected: CaseResult{ + Error: "wrong type", + }, + }) +} + +func TestDestinationAllotment(t *testing.T) { + test(t, TestCase{ + Case: `send [EUR/2 43] ( + source = @foo + destination = { + 1/8 to @bar + 7/8 to @baz + } + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 02, 00, // @foo + program2.OP_APUSH, 01, 00, // @foo, [EUR/2 43] + program2.OP_ASSET, // @foo, EUR/2 + program2.OP_APUSH, 03, 00, // @foo, EUR/2, 0 + program2.OP_MONETARY_NEW, // @foo, [EUR/2 0] + program2.OP_TAKE_ALL, // [EUR/2 @foo ] + program2.OP_APUSH, 01, 00, // [EUR/2 @foo ], [EUR/2 43] + program2.OP_TAKE, // [EUR/2 @foo ], [EUR/2 @foo 43] + program2.OP_APUSH, 04, 00, // [EUR/2 @foo ], [EUR/2 @foo 43] 1 + program2.OP_BUMP, // [EUR/2 @foo 43], [EUR/2 @foo ] + program2.OP_REPAY, // 
[EUR/2 @foo 43] + program2.OP_FUNDING_SUM, // [EUR/2 @foo 43], [EUR/2 43] + program2.OP_APUSH, 05, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8 + program2.OP_APUSH, 06, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8, 1/8 + program2.OP_APUSH, 07, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8, 1/8, 2 + program2.OP_MAKE_ALLOTMENT, // [EUR/2 @foo 43], [EUR/2 43], {1/8 : 7/8} + program2.OP_ALLOC, // [EUR/2 @foo 43], [EUR/2 37], [EUR/2 6] + program2.OP_APUSH, 07, 00, // [EUR/2 @foo 43], [EUR/2 37] [EUR/2 6], 2 + program2.OP_BUMP, // [EUR/2 37], [EUR/2 6], [EUR/2 @foo 43] + program2.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 6], [EUR/2 @foo 43] 1 + program2.OP_BUMP, // [EUR/2 37], [EUR/2 @foo 43], [EUR/2 6] + program2.OP_TAKE, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2 @foo 6] + program2.OP_FUNDING_SUM, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2 @foo 6] [EUR/2 6] + program2.OP_TAKE, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] [EUR/2 @foo 6] + program2.OP_APUSH, 8, 00, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] [EUR/2 @foo 6], @bar + program2.OP_SEND, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] + program2.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] 1 + program2.OP_BUMP, // [EUR/2 37], [EUR/2], [EUR/2 @foo 37] + program2.OP_APUSH, 07, 00, // [EUR/2 37], [EUR/2], [EUR/2 @foo 37] 2 + program2.OP_FUNDING_ASSEMBLE, // [EUR/2 37], [EUR/2 @foo 37] + program2.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 @foo 37], 1 + program2.OP_BUMP, // [EUR/2 @foo 37], [EUR/2 37] + program2.OP_TAKE, // [EUR/2], [EUR/2 @foo 37] + program2.OP_FUNDING_SUM, // [EUR/2], [EUR/2 @foo 37], [EUR/2 37] + program2.OP_TAKE, // [EUR/2], [EUR/2], [EUR/2 @foo 37] + program2.OP_APUSH, 9, 00, // [EUR/2], [EUR/2], [EUR/2 @foo 37], @baz + program2.OP_SEND, // [EUR/2], [EUR/2] + program2.OP_APUSH, 04, 00, // [EUR/2], [EUR/2], 1 + program2.OP_BUMP, // [EUR/2], [EUR/2] + program2.OP_APUSH, 07, 00, // [EUR/2], [EUR/2], 2 + program2.OP_FUNDING_ASSEMBLE, // [EUR/2] + program2.OP_REPAY, // + }, + Resources: []program2.Resource{ + 
program2.Constant{Inner: internal2.Asset("EUR/2")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(43), + }, + program2.Constant{Inner: internal2.AccountAddress("foo")}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.Portion{Specific: big.NewRat(7, 8)}}, + program2.Constant{Inner: internal2.Portion{Specific: big.NewRat(1, 8)}}, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + program2.Constant{Inner: internal2.AccountAddress("bar")}, + program2.Constant{Inner: internal2.AccountAddress("baz")}, + }, + }, + }) +} + +func TestDestinationInOrder(t *testing.T) { + test(t, TestCase{ + Case: `send [COIN 50] ( + source = @a + destination = { + max [COIN 10] to @b + remaining to @c + } + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 02, 00, // @a + program2.OP_APUSH, 01, 00, // @a, [COIN 50] + program2.OP_ASSET, // @a, COIN + program2.OP_APUSH, 03, 00, // @a, COIN, 0 + program2.OP_MONETARY_NEW, // @a, [COIN 0] + program2.OP_TAKE_ALL, // [COIN @a ] + program2.OP_APUSH, 01, 00, // [COIN @a ], [COIN 50] + program2.OP_TAKE, // [COIN @a ], [COIN @a 50] + program2.OP_APUSH, 04, 00, // [COIN @a ], [COIN @a 50], 1 + program2.OP_BUMP, // [COIN @a 50], [COIN @a ] + program2.OP_REPAY, // [COIN @a 50] + program2.OP_FUNDING_SUM, // [COIN @a 50], [COIN 50] <- start of DestinationInOrder + program2.OP_ASSET, // [COIN @a 50], COIN + program2.OP_APUSH, 03, 00, // [COIN @a 50], COIN, 0 + program2.OP_MONETARY_NEW, // [COIN @a 50], [COIN 0] + program2.OP_APUSH, 04, 00, // [COIN @a 50], [COIN 0], 1 + program2.OP_BUMP, // [COIN 0], [COIN @a 50] + program2.OP_APUSH, 05, 00, // [COIN 0], [COIN @a 50], [COIN 10] <- start processing max subdestinations + program2.OP_TAKE_MAX, // [COIN 0], [COIN 0], [COIN @a 40], [COIN @a 10] + program2.OP_APUSH, 06, 00, // [COIN 0], [COIN 0], [COIN @a 40], [COIN @a 10], 2 + program2.OP_BUMP, // [COIN 0], 
[COIN @a 40], [COIN @a 10], [COIN 0] + program2.OP_DELETE, // [COIN 0], [COIN @a 40], [COIN @a 10] + program2.OP_FUNDING_SUM, // [COIN 0], [COIN @a 40], [COIN @a 10], [COIN 10] + program2.OP_TAKE, // [COIN 0], [COIN @a 40], [COIN], [COIN @a 10] + program2.OP_APUSH, 07, 00, // [COIN 0], [COIN @a 40], [COIN], [COIN @a 10], @b + program2.OP_SEND, // [COIN 0], [COIN @a 40], [COIN] + program2.OP_FUNDING_SUM, // [COIN 0], [COIN @a 40], [COIN], [COIN 0] + program2.OP_APUSH, 8, 00, // [COIN 0], [COIN @a 40], [COIN], [COIN 0], 3 + program2.OP_BUMP, // [COIN @a 40], [COIN], [COIN 0], [COIN 0] + program2.OP_MONETARY_ADD, // [COIN @a 40], [COIN], [COIN 0] + program2.OP_APUSH, 04, 00, // [COIN @a 40], [COIN], [COIN 0], 1 + program2.OP_BUMP, // [COIN @a 40], [COIN 0], [COIN] + program2.OP_APUSH, 06, 00, // [COIN @a 40], [COIN 0], [COIN] 2 + program2.OP_BUMP, // [COIN 0], [COIN], [COIN @a 40] + program2.OP_APUSH, 06, 00, // [COIN 0], [COIN], [COIN @a 40], 2 + program2.OP_FUNDING_ASSEMBLE, // [COIN 0], [COIN @a 40] + program2.OP_FUNDING_REVERSE, // [COIN 0], [COIN @a 40] <- start processing remaining subdestination + program2.OP_APUSH, 04, 00, // [COIN 0], [COIN @a 40], 1 + program2.OP_BUMP, // [COIN @a 40], [COIN 0] + program2.OP_TAKE, // [COIN @a 40], [COIN] + program2.OP_FUNDING_REVERSE, // [COIN @a 40], [COIN] + program2.OP_APUSH, 04, 00, // [COIN @a 40], [COIN], 1 + program2.OP_BUMP, // [COIN], [COIN @a 40] + program2.OP_FUNDING_REVERSE, // [COIN], [COIN @a 40] + program2.OP_FUNDING_SUM, // [COIN], [COIN @a 40], [COIN 40] + program2.OP_TAKE, // [COIN], [COIN], [COIN @a 40] + program2.OP_APUSH, 9, 00, // [COIN], [COIN], [COIN @a 40], @c + program2.OP_SEND, // [COIN], [COIN] + program2.OP_APUSH, 04, 00, // [COIN], [COIN], 1 + program2.OP_BUMP, // [COIN], [COIN] + program2.OP_APUSH, 06, 00, // [COIN], [COIN], 2 + program2.OP_FUNDING_ASSEMBLE, // [COIN] + program2.OP_REPAY, // + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.Asset("COIN")}, + 
program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(50), + }, + program2.Constant{Inner: internal2.AccountAddress("a")}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(10), + }, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + program2.Constant{Inner: internal2.AccountAddress("b")}, + program2.Constant{Inner: internal2.NewMonetaryInt(3)}, + program2.Constant{Inner: internal2.AccountAddress("c")}, + }, + }, + }) +} + +func TestAllocationPercentages(t *testing.T) { + test(t, TestCase{ + Case: `send [EUR/2 43] ( + source = @foo + destination = { + 12.5% to @bar + 37.5% to @baz + 50% to @qux + } + )`, + Expected: CaseResult{ + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.Asset("EUR/2")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(43), + }, + program2.Constant{Inner: internal2.AccountAddress("foo")}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.Portion{Specific: big.NewRat(1, 2)}}, + program2.Constant{Inner: internal2.Portion{Specific: big.NewRat(3, 8)}}, + program2.Constant{Inner: internal2.Portion{Specific: big.NewRat(1, 8)}}, + program2.Constant{Inner: internal2.NewMonetaryInt(3)}, + program2.Constant{Inner: internal2.AccountAddress("bar")}, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + program2.Constant{Inner: internal2.AccountAddress("baz")}, + program2.Constant{Inner: internal2.AccountAddress("qux")}, + }, + }, + }) +} + +func TestSend(t *testing.T) { + script := ` + send [EUR/2 99] ( + source = @alice + destination = @bob + )` + alice := internal2.AccountAddress("alice") + bob := internal2.AccountAddress("bob") + test(t, TestCase{ + Case: script, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 02, 00, // @alice + 
program2.OP_APUSH, 01, 00, // @alice, [EUR/2 99] + program2.OP_ASSET, // @alice, EUR/2 + program2.OP_APUSH, 03, 00, // @alice, EUR/2, 0 + program2.OP_MONETARY_NEW, // @alice, [EUR/2 0] + program2.OP_TAKE_ALL, // [EUR/2 @alice ] + program2.OP_APUSH, 01, 00, // [EUR/2 @alice ], [EUR/2 99] + program2.OP_TAKE, // [EUR/2 @alice ], [EUR/2 @alice 99] + program2.OP_APUSH, 04, 00, // [EUR/2 @alice ], [EUR/2 @alice 99], 1 + program2.OP_BUMP, // [EUR/2 @alice 99], [EUR/2 @alice ] + program2.OP_REPAY, // [EUR/2 @alice 99] + program2.OP_FUNDING_SUM, // [EUR/2 @alice 99], [EUR/2 99] + program2.OP_TAKE, // [EUR/2], [EUR/2 @alice 99] + program2.OP_APUSH, 05, 00, // [EUR/2], [EUR/2 @alice 99], @bob + program2.OP_SEND, // [EUR/2] + program2.OP_REPAY, // + }, Resources: []program2.Resource{ + program2.Constant{Inner: internal2.Asset("EUR/2")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(99), + }, + program2.Constant{Inner: alice}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: bob}}, + }, + }) +} + +func TestSendAll(t *testing.T) { + test(t, TestCase{ + Case: `send [EUR/2 *] ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 01, 00, // @alice + program2.OP_APUSH, 00, 00, // @alice, EUR/2 + program2.OP_APUSH, 02, 00, // @alice, EUR/2, 0 + program2.OP_MONETARY_NEW, // @alice, [EUR/2 0] + program2.OP_TAKE_ALL, // [EUR/2 @alice ] + program2.OP_FUNDING_SUM, // [EUR/2 @alice ], [EUR/2 ] + program2.OP_TAKE, // [EUR/2], [EUR/2 @alice ] + program2.OP_APUSH, 03, 00, // [EUR/2], [EUR/2 @alice ], @b + program2.OP_SEND, // [EUR/2] + program2.OP_REPAY, // + }, Resources: []program2.Resource{ + program2.Constant{Inner: internal2.Asset("EUR/2")}, + program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: 
internal2.AccountAddress("bob")}}, + }, + }) +} + +func TestMetadata(t *testing.T) { + test(t, TestCase{ + Case: ` + vars { + account $sale + account $seller = meta($sale, "seller") + portion $commission = meta($seller, "commission") + } + send [EUR/2 53] ( + source = $sale + destination = { + $commission to @platform + remaining to $seller + } + )`, + Expected: CaseResult{ + Resources: []program2.Resource{ + program2.Variable{Typ: internal2.TypeAccount, Name: "sale"}, + program2.VariableAccountMetadata{ + Typ: internal2.TypeAccount, + Account: internal2.NewAddress(0), + Key: "seller", + }, + program2.VariableAccountMetadata{ + Typ: internal2.TypePortion, + Account: internal2.NewAddress(1), + Key: "commission", + }, + program2.Constant{Inner: internal2.Asset("EUR/2")}, + program2.Monetary{ + Asset: 3, + Amount: internal2.NewMonetaryInt(53), + }, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.NewPortionRemaining()}, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + program2.Constant{Inner: internal2.AccountAddress("platform")}, + }, + }, + }) +} + +func TestSyntaxError(t *testing.T) { + test(t, TestCase{ + Case: "print fail", + Expected: CaseResult{ + Error: "mismatched input", + }, + }) +} + +func TestLogicError(t *testing.T) { + test(t, TestCase{ + Case: `send [EUR/2 200] ( + source = 200 + destination = @bob + )`, + Expected: CaseResult{ + Error: "expected", + }, + }) +} + +func TestPreventTakeAllFromWorld(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM *] ( + source = @world + destination = @foo + )`, + Expected: CaseResult{ + Error: "cannot", + }, + }) +} + +func TestPreventAddToBottomlessSource(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 1000] ( + source = { + @a + @world + @c + } + destination = @out + )`, + Expected: CaseResult{ + Error: "world", + }, + }) +} + +func TestPreventAddToBottomlessSource2(t *testing.T) { + test(t, 
TestCase{ + Case: `send [GEM 1000] ( + source = { + { + @a + @world + } + { + @b + @world + } + } + destination = @out + )`, + Expected: CaseResult{ + Error: "world", + }, + }) +} + +func TestPreventSourceAlreadyEmptied(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 1000] ( + source = { + { + @a + @b + } + @a + } + destination = @out + )`, + Expected: CaseResult{ + Error: "empt", + }, + }) +} + +func TestPreventTakeAllFromAllocation(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM *] ( + source = { + 50% from @a + 50% from @b + } + destination = @out + )`, + Expected: CaseResult{ + Error: "all", + }, + }) +} + +func TestWrongTypeSourceMax(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = { + max @foo from @bar + @world + } + destination = @baz + )`, + Expected: CaseResult{ + Error: "type", + }, + }) +} + +func TestOverflowingAllocation(t *testing.T) { + t.Run(">100%", func(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @world + destination = { + 2/3 to @a + 2/3 to @b + } + )`, + Expected: CaseResult{ + Error: "100%", + }, + }) + }) + + t.Run("=100% + remaining", func(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @world + destination = { + 1/2 to @a + 1/2 to @b + remaining to @c + } + )`, + Expected: CaseResult{ + Error: "100%", + }, + }) + }) + + t.Run(">100% + remaining", func(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @world + destination = { + 2/3 to @a + 1/2 to @b + remaining to @c + } + )`, + Expected: CaseResult{ + Error: "100%", + }, + }) + }) + + t.Run("const remaining + remaining", func(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @world + destination = { + 2/3 to @a + remaining to @b + remaining to @c + } + )`, + Expected: CaseResult{ + Error: "`remaining` in the same", + }, + }) + }) + + t.Run("dyn remaining + remaining", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + portion $p + } + send [GEM 15] ( + 
source = @world + destination = { + $p to @a + remaining to @b + remaining to @c + } + )`, + Expected: CaseResult{ + Error: "`remaining` in the same", + }, + }) + }) + + t.Run(">100% + remaining + variable", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + portion $prop + } + send [GEM 15] ( + source = @world + destination = { + 1/2 to @a + 2/3 to @b + remaining to @c + $prop to @d + } + )`, + Expected: CaseResult{ + Error: "100%", + }, + }) + }) + + t.Run("variable - remaining", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + portion $prop + } + send [GEM 15] ( + source = @world + destination = { + 2/3 to @a + $prop to @b + } + )`, + Expected: CaseResult{ + Error: "100%", + }, + }) + }) +} + +func TestAllocationWrongDestination(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @world + destination = [GEM 10] + )`, + Expected: CaseResult{ + Error: "account", + }, + }) + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @world + destination = { + 2/3 to @a + 1/3 to [GEM 10] + } + )`, + Expected: CaseResult{ + Error: "account", + }, + }) +} + +func TestAllocationInvalidPortion(t *testing.T) { + test(t, TestCase{ + Case: `vars { + account $p + } + send [GEM 15] ( + source = @world + destination = { + 10% to @a + $p to @b + } + )`, + Expected: CaseResult{ + Error: "type", + }, + }) +} + +func TestOverdraftOnWorld(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @world allowing overdraft up to [GEM 10] + destination = @foo + )`, + Expected: CaseResult{ + Error: "overdraft", + }, + }) +} + +func TestOverdraftWrongType(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @foo allowing overdraft up to @baz + destination = @bar + )`, + Expected: CaseResult{ + Error: "type", + }, + }) +} + +func TestDestinationInOrderWrongType(t *testing.T) { + test(t, TestCase{ + Case: `send [GEM 15] ( + source = @foo + destination = { + max @bar to @baz + remaining to @qux + } + )`, + Expected: 
CaseResult{ + Error: "type", + }, + }) +} + +func TestSetAccountMeta(t *testing.T) { + t.Run("all types", func(t *testing.T) { + test(t, TestCase{ + Case: ` + set_account_meta(@alice, "aaa", @platform) + set_account_meta(@alice, "bbb", GEM) + set_account_meta(@alice, "ccc", 42) + set_account_meta(@alice, "ddd", "test") + set_account_meta(@alice, "eee", [COIN 30]) + set_account_meta(@alice, "fff", 15%) + `, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 01, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ACCOUNT_META, + program2.OP_APUSH, 03, 00, + program2.OP_APUSH, 04, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ACCOUNT_META, + program2.OP_APUSH, 05, 00, + program2.OP_APUSH, 06, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ACCOUNT_META, + program2.OP_APUSH, 7, 00, + program2.OP_APUSH, 8, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ACCOUNT_META, + program2.OP_APUSH, 10, 00, + program2.OP_APUSH, 11, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ACCOUNT_META, + program2.OP_APUSH, 12, 00, + program2.OP_APUSH, 13, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ACCOUNT_META, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.AccountAddress("platform")}, + program2.Constant{Inner: internal2.String("aaa")}, + program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.Constant{Inner: internal2.Asset("GEM")}, + program2.Constant{Inner: internal2.String("bbb")}, + program2.Constant{Inner: internal2.NewNumber(42)}, + program2.Constant{Inner: internal2.String("ccc")}, + program2.Constant{Inner: internal2.String("test")}, + program2.Constant{Inner: internal2.String("ddd")}, + program2.Constant{Inner: internal2.Asset("COIN")}, + program2.Monetary{ + Asset: 9, + Amount: internal2.NewMonetaryInt(30), + }, + program2.Constant{Inner: internal2.String("eee")}, + program2.Constant{Inner: internal2.Portion{ + Remaining: false, + Specific: big.NewRat(15, 100), + }}, + 
program2.Constant{Inner: internal2.String("fff")}, + }, + }, + }) + }) + + t.Run("with vars", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + account $acc + } + send [EUR/2 100] ( + source = @world + destination = $acc + ) + set_account_meta($acc, "fees", 1%)`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 03, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ASSET, + program2.OP_APUSH, 04, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 02, 00, + program2.OP_TAKE_MAX, + program2.OP_APUSH, 05, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_APUSH, 03, 00, + program2.OP_APUSH, 06, 00, + program2.OP_BUMP, + program2.OP_TAKE_ALWAYS, + program2.OP_APUSH, 06, 00, + program2.OP_FUNDING_ASSEMBLE, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 00, 00, + program2.OP_SEND, + program2.OP_REPAY, + program2.OP_APUSH, 07, 00, + program2.OP_APUSH, 8, 00, + program2.OP_APUSH, 00, 00, + program2.OP_ACCOUNT_META, + }, + Resources: []program2.Resource{ + program2.Variable{Typ: internal2.TypeAccount, Name: "acc"}, + program2.Constant{Inner: internal2.Asset("EUR/2")}, + program2.Monetary{ + Asset: 1, + Amount: internal2.NewMonetaryInt(100), + }, + program2.Constant{Inner: internal2.AccountAddress("world")}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + program2.Constant{Inner: internal2.Portion{ + Remaining: false, + Specific: big.NewRat(1, 100), + }}, + program2.Constant{Inner: internal2.String("fees")}, + }, + }, + }) + }) + + t.Run("errors", func(t *testing.T) { + test(t, TestCase{ + Case: `set_account_meta(@alice, "fees")`, + Expected: CaseResult{ + Error: "mismatched input", + }, + }) + test(t, TestCase{ + Case: `set_account_meta("test")`, + Expected: CaseResult{ + Error: "mismatched input", + }, + }) + test(t, TestCase{ + Case: `set_account_meta(@alice, "t1", "t2", 
"t3")`, + Expected: CaseResult{ + Error: "mismatched input", + }, + }) + test(t, TestCase{ + Case: `vars { + portion $p + } + set_account_meta($p, "fees", 1%)`, + Expected: CaseResult{ + Error: "should be of type account", + }, + }) + }) +} + +func TestVariableBalance(t *testing.T) { + t.Run("simplest", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + monetary $bal = balance(@alice, COIN) + } + send $bal ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ASSET, + program2.OP_APUSH, 03, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 02, 00, + program2.OP_TAKE, + program2.OP_APUSH, 04, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 05, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.Constant{Inner: internal2.Asset("COIN")}, + program2.VariableAccountBalance{Account: 0, Asset: 1}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.AccountAddress("bob")}, + }, + }, + }) + }) + + t.Run("with account variable", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + account $acc + monetary $bal = balance($acc, COIN) + } + send $bal ( + source = @world + destination = @alice + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 03, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ASSET, + program2.OP_APUSH, 04, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 02, 00, + program2.OP_TAKE_MAX, + program2.OP_APUSH, 05, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_APUSH, 03, 00, + program2.OP_APUSH, 06, 00, + program2.OP_BUMP, + program2.OP_TAKE_ALWAYS, + program2.OP_APUSH, 06, 00, + 
program2.OP_FUNDING_ASSEMBLE, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 07, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Variable{Typ: internal2.TypeAccount, Name: "acc"}, + program2.Constant{Inner: internal2.Asset("COIN")}, + program2.VariableAccountBalance{Account: 0, Asset: 1}, + program2.Constant{Inner: internal2.AccountAddress("world")}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + program2.Constant{Inner: internal2.AccountAddress("alice")}, + }, + }, + }) + }) + + t.Run("error variable type", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + account $bal = balance(@alice, COIN) + } + send $bal ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Error: "variable $bal: type should be 'monetary' to pull account balance", + }, + }) + }) + + t.Run("error no asset", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + monetary $bal = balance(@alice) + } + send $bal ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Error: "mismatched input", + }, + }) + }) + + t.Run("error too many arguments", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + monetary $bal = balance(@alice, USD, COIN) + } + send $bal ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Error: "mismatched input ',' expecting ')'", + }, + }) + }) + + t.Run("error wrong type for account", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + monetary $bal = balance(USD, COIN) + } + send $bal ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Error: "variable $bal: the first argument to pull account balance should be of type 'account'", + }, + }) + }) + + t.Run("error wrong type for asset", func(t *testing.T) { + test(t, TestCase{ + Case: `vars { + monetary $bal = balance(@alice, @bob) + } 
+ send $bal ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Error: "variable $bal: the second argument to pull account balance should be of type 'asset'", + }, + }) + }) + + t.Run("error not in variables", func(t *testing.T) { + test(t, TestCase{ + Case: `send balance(@alice, COIN) ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Error: "mismatched input 'balance'", + }, + }) + }) +} + +func TestVariableAsset(t *testing.T) { + script := `vars { + asset $ass + monetary $bal = balance(@alice, $ass) + } + + send [$ass *] ( + source = @alice + destination = @bob + ) + + send [$ass 1] ( + source = @bob + destination = @alice + ) + + send $bal ( + source = @alice + destination = @bob + )` + + test(t, TestCase{ + Case: script, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 01, 00, + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 03, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 04, 00, + program2.OP_SEND, + program2.OP_REPAY, + program2.OP_APUSH, 04, 00, + program2.OP_APUSH, 05, 00, + program2.OP_ASSET, + program2.OP_APUSH, 03, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 05, 00, + program2.OP_TAKE, + program2.OP_APUSH, 06, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 01, 00, + program2.OP_SEND, + program2.OP_REPAY, + program2.OP_APUSH, 01, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ASSET, + program2.OP_APUSH, 03, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 02, 00, + program2.OP_TAKE, + program2.OP_APUSH, 06, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 04, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Variable{Typ: internal2.TypeAsset, Name: "ass"}, + 
program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.VariableAccountBalance{ + Name: "bal", + Account: 1, + Asset: 0, + }, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.AccountAddress("bob")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(1), + }, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + }, + }, + }) +} + +func TestPrint(t *testing.T) { + script := `print 1 + 2 + 3` + test(t, TestCase{ + Case: script, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 01, 00, + program2.OP_IADD, + program2.OP_APUSH, 02, 00, + program2.OP_IADD, + program2.OP_PRINT, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.NewMonetaryInt(2)}, + program2.Constant{Inner: internal2.NewMonetaryInt(3)}, + }, + }, + }) +} + +func TestSendWithArithmetic(t *testing.T) { + t.Run("nominal", func(t *testing.T) { + script := ` + vars { + asset $ass + monetary $mon + } + send [EUR 1] + $mon + [$ass 3] - [EUR 4] ( + source = @a + destination = @b + )` + + test(t, TestCase{ + Case: script, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 06, 00, + program2.OP_APUSH, 03, 00, + program2.OP_ASSET, + program2.OP_APUSH, 07, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 03, 00, + program2.OP_APUSH, 01, 00, + program2.OP_MONETARY_ADD, + program2.OP_APUSH, 04, 00, + program2.OP_MONETARY_ADD, + program2.OP_APUSH, 05, 00, + program2.OP_MONETARY_SUB, + program2.OP_TAKE, + program2.OP_APUSH, 8, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 9, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Variable{ + Typ: internal2.TypeAsset, + Name: "ass", + }, + program2.Variable{ + Typ: internal2.TypeMonetary, + Name: "mon", + }, + 
program2.Constant{Inner: internal2.Asset("EUR")}, + program2.Monetary{ + Asset: 2, + Amount: internal2.NewMonetaryInt(1), + }, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(3), + }, + program2.Monetary{ + Asset: 2, + Amount: internal2.NewMonetaryInt(4), + }, + program2.Constant{Inner: internal2.AccountAddress("a")}, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.AccountAddress("b")}, + }, + }, + }) + }) + + t.Run("error incompatible types", func(t *testing.T) { + script := `send [EUR 1] + 2 ( + source = @world + destination = @bob + )` + + test(t, TestCase{ + Case: script, + Expected: CaseResult{ + Instructions: []byte{}, + Resources: []program2.Resource{}, + Error: "tried to do an arithmetic operation with incompatible left and right-hand side operand types: monetary and number", + }, + }) + }) + + t.Run("error incompatible types var", func(t *testing.T) { + script := ` + vars { + number $nb + } + send [EUR 1] - $nb ( + source = @world + destination = @bob + )` + + test(t, TestCase{ + Case: script, + Expected: CaseResult{ + Instructions: []byte{}, + Resources: []program2.Resource{}, + Error: "tried to do an arithmetic operation with incompatible left and right-hand side operand types: monetary and number", + }, + }) + }) +} + +func TestSaveFromAccount(t *testing.T) { + t.Run("simple", func(t *testing.T) { + test(t, TestCase{ + Case: ` + save [EUR 10] from @alice + + send [EUR 20] ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 01, 00, + program2.OP_APUSH, 02, 00, + program2.OP_SAVE, + program2.OP_APUSH, 02, 00, + program2.OP_APUSH, 03, 00, + program2.OP_ASSET, + program2.OP_APUSH, 04, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 03, 00, + program2.OP_TAKE, + program2.OP_APUSH, 05, 00, + program2.OP_BUMP, + program2.OP_REPAY, + 
program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 06, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.Asset("EUR")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(10), + }, + program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(20), + }, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.AccountAddress("bob")}, + }, + }, + }) + }) + + t.Run("save all", func(t *testing.T) { + test(t, TestCase{ + Case: ` + save [EUR *] from @alice + + send [EUR 20] ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 01, 00, + program2.OP_SAVE, + program2.OP_APUSH, 01, 00, + program2.OP_APUSH, 02, 00, + program2.OP_ASSET, + program2.OP_APUSH, 03, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 02, 00, + program2.OP_TAKE, + program2.OP_APUSH, 04, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 05, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Constant{Inner: internal2.Asset("EUR")}, + program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(20), + }, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.AccountAddress("bob")}, + }, + }, + }) + }) + + t.Run("with asset var", func(t *testing.T) { + test(t, TestCase{ + Case: ` + vars { + asset $ass + } + + save [$ass 10] from @alice + + send [$ass 20] ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Instructions: []byte{ + 
program2.OP_APUSH, 01, 00, + program2.OP_APUSH, 02, 00, + program2.OP_SAVE, + program2.OP_APUSH, 02, 00, + program2.OP_APUSH, 03, 00, + program2.OP_ASSET, + program2.OP_APUSH, 04, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 03, 00, + program2.OP_TAKE, + program2.OP_APUSH, 05, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 06, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Variable{Typ: internal2.TypeAsset, Name: "ass"}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(10), + }, + program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.Monetary{ + Asset: 0, + Amount: internal2.NewMonetaryInt(20), + }, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.AccountAddress("bob")}, + }, + }, + }) + }) + + t.Run("with monetary var", func(t *testing.T) { + test(t, TestCase{ + Case: ` + vars { + monetary $mon + } + + save $mon from @alice + + send [EUR 20] ( + source = @alice + destination = @bob + )`, + Expected: CaseResult{ + Instructions: []byte{ + program2.OP_APUSH, 00, 00, + program2.OP_APUSH, 01, 00, + program2.OP_SAVE, + program2.OP_APUSH, 01, 00, + program2.OP_APUSH, 03, 00, + program2.OP_ASSET, + program2.OP_APUSH, 04, 00, + program2.OP_MONETARY_NEW, + program2.OP_TAKE_ALL, + program2.OP_APUSH, 03, 00, + program2.OP_TAKE, + program2.OP_APUSH, 05, 00, + program2.OP_BUMP, + program2.OP_REPAY, + program2.OP_FUNDING_SUM, + program2.OP_TAKE, + program2.OP_APUSH, 06, 00, + program2.OP_SEND, + program2.OP_REPAY, + }, + Resources: []program2.Resource{ + program2.Variable{Typ: internal2.TypeMonetary, Name: "mon"}, + program2.Constant{Inner: internal2.AccountAddress("alice")}, + program2.Constant{Inner: internal2.Asset("EUR")}, + program2.Monetary{ + Asset: 2, + Amount: internal2.NewMonetaryInt(20), 
+ }, + program2.Constant{Inner: internal2.NewMonetaryInt(0)}, + program2.Constant{Inner: internal2.NewMonetaryInt(1)}, + program2.Constant{Inner: internal2.AccountAddress("bob")}, + }, + }, + }) + }) + + t.Run("error wrong type monetary", func(t *testing.T) { + test(t, TestCase{ + Case: ` + save 30 from @alice + `, + Expected: CaseResult{ + Instructions: []byte{}, + Resources: []program2.Resource{}, + Error: "save monetary from account: the first expression should be of type 'monetary' instead of 'number'", + }, + }) + }) + + t.Run("error wrong type account", func(t *testing.T) { + test(t, TestCase{ + Case: ` + save [EUR 30] from ALICE + `, + Expected: CaseResult{ + Instructions: []byte{}, + Resources: []program2.Resource{}, + Error: "save monetary from account: the second expression should be of type 'account' instead of 'asset'", + }, + }) + }) +} diff --git a/internal/machine/script/compiler/destination.go b/internal/machine/script/compiler/destination.go new file mode 100644 index 000000000..255f3b8f1 --- /dev/null +++ b/internal/machine/script/compiler/destination.go @@ -0,0 +1,179 @@ +package compiler + +import ( + "errors" + + internal2 "github.com/formancehq/ledger/internal/machine/internal" + "github.com/formancehq/ledger/internal/machine/script/parser" + "github.com/formancehq/ledger/internal/machine/vm/program" +) + +func (p *parseVisitor) VisitDestination(c parser.IDestinationContext) *CompileError { + err := p.VisitDestinationRecursive(c) + if err != nil { + return err + } + p.AppendInstruction(program.OP_REPAY) + return nil +} + +func (p *parseVisitor) VisitDestinationRecursive(c parser.IDestinationContext) *CompileError { + switch c := c.(type) { + case *parser.DestAccountContext: + p.AppendInstruction(program.OP_FUNDING_SUM) + p.AppendInstruction(program.OP_TAKE) + ty, _, err := p.VisitExpr(c.Expression(), true) + if err != nil { + return err + } + if ty != internal2.TypeAccount { + return LogicError(c, + errors.New("wrong type: expected account as 
destination"), + ) + } + p.AppendInstruction(program.OP_SEND) + return nil + case *parser.DestInOrderContext: + dests := c.DestinationInOrder().GetDests() + amounts := c.DestinationInOrder().GetAmounts() + n := len(dests) + + // initialize the `kept` accumulator + p.AppendInstruction(program.OP_FUNDING_SUM) + p.AppendInstruction(program.OP_ASSET) + err := p.PushInteger(internal2.NewNumber(0)) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program.OP_MONETARY_NEW) + + err = p.Bump(1) + if err != nil { + return LogicError(c, err) + } + + for i := 0; i < n; i++ { + ty, _, compErr := p.VisitExpr(amounts[i], true) + if compErr != nil { + return compErr + } + if ty != internal2.TypeMonetary { + return LogicError(c, errors.New("wrong type: expected monetary as max")) + } + p.AppendInstruction(program.OP_TAKE_MAX) + err := p.Bump(2) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program.OP_DELETE) + compErr = p.VisitKeptOrDestination(dests[i]) + if compErr != nil { + return compErr + } + p.AppendInstruction(program.OP_FUNDING_SUM) + err = p.Bump(3) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program.OP_MONETARY_ADD) + err = p.Bump(1) + if err != nil { + return LogicError(c, err) + } + err = p.Bump(2) + if err != nil { + return LogicError(c, err) + } + err = p.PushInteger(internal2.NewNumber(2)) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program.OP_FUNDING_ASSEMBLE) + } + p.AppendInstruction(program.OP_FUNDING_REVERSE) + err = p.Bump(1) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program.OP_TAKE) + p.AppendInstruction(program.OP_FUNDING_REVERSE) + err = p.Bump(1) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program.OP_FUNDING_REVERSE) + cerr := p.VisitKeptOrDestination(c.DestinationInOrder().GetRemainingDest()) + if cerr != nil { + return cerr + } + err = p.Bump(1) + if err != nil { + return LogicError(c, err) + } 
+ err = p.PushInteger(internal2.NewNumber(2)) + if err != nil { + return LogicError(c, err) + } + p.AppendInstruction(program.OP_FUNDING_ASSEMBLE) + return nil + case *parser.DestAllotmentContext: + err := p.VisitDestinationAllotment(c.DestinationAllotment()) + return err + default: + return InternalError(c) + } +} + +func (p *parseVisitor) VisitKeptOrDestination(c parser.IKeptOrDestinationContext) *CompileError { + switch c := c.(type) { + case *parser.IsKeptContext: + return nil + case *parser.IsDestinationContext: + err := p.VisitDestinationRecursive(c.Destination()) + return err + default: + return InternalError(c) + } +} + +func (p *parseVisitor) VisitDestinationAllotment(c parser.IDestinationAllotmentContext) *CompileError { + p.AppendInstruction(program.OP_FUNDING_SUM) + err := p.VisitAllotment(c, c.GetPortions()) + if err != nil { + return err + } + p.AppendInstruction(program.OP_ALLOC) + err = p.VisitAllocDestination(c.GetDests()) + if err != nil { + return err + } + return nil +} + +func (p *parseVisitor) VisitAllocDestination(dests []parser.IKeptOrDestinationContext) *CompileError { + err := p.Bump(int64(len(dests))) + if err != nil { + return LogicError(dests[0], err) + } + for _, dest := range dests { + err = p.Bump(1) + if err != nil { + return LogicError(dest, err) + } + p.AppendInstruction(program.OP_TAKE) + compErr := p.VisitKeptOrDestination(dest) + if compErr != nil { + return compErr + } + err = p.Bump(1) + if err != nil { + return LogicError(dest, err) + } + err = p.PushInteger(internal2.NewNumber(2)) + if err != nil { + return LogicError(dest, err) + } + p.AppendInstruction(program.OP_FUNDING_ASSEMBLE) + } + return nil +} diff --git a/internal/machine/script/compiler/error.go b/internal/machine/script/compiler/error.go new file mode 100644 index 000000000..43ec901dd --- /dev/null +++ b/internal/machine/script/compiler/error.go @@ -0,0 +1,124 @@ +package compiler + +import ( + "fmt" + "math" + "strings" + + 
"github.com/antlr/antlr4/runtime/Go/antlr" + "github.com/logrusorgru/aurora" +) + +type CompileError struct { + StartL, StartC int + EndL, EndC int + Msg string +} + +type CompileErrorList struct { + Errors []CompileError + Source string +} + +func (c *CompileErrorList) Error() string { + source := strings.ReplaceAll(c.Source, "\t", " ") + lines := strings.SplitAfter(strings.ReplaceAll(source, "\r\n", "\n"), "\n") + lines[len(lines)-1] += "\n" + + txtBarGood := aurora.Blue("|") + + s := "" + for _, e := range c.Errors { + lnPad := int(math.Log10(float64(e.EndL))) + 1 // line number padding + // error indicator + s += fmt.Sprintf("%v error:%v:%v\n", aurora.Red("-->"), e.StartL, e.StartC) + // initial empty line + s += fmt.Sprintf("%v %v\n", strings.Repeat(" ", lnPad), txtBarGood) + // offending lines + for l := e.StartL; l <= e.EndL; l++ { // "print fail" + line := lines[l-1] + before := "" + after := "" + start := 0 + if l == e.StartL { + before = line[:e.StartC] + line = line[e.StartC:] + start = e.StartC + } + if l == e.EndL { + idx := e.EndC - start + 1 + if idx >= len(line) { // because newline was erased + idx = len(line) - 1 + } + after = line[idx:] + line = line[:idx] + } + s += aurora.Red(fmt.Sprintf("%0*d | ", lnPad, l)).String() + s += fmt.Sprintf("%v%v%v", + aurora.BrightBlack(before), line, aurora.BrightBlack(after)) + } + // message + start := strings.IndexFunc(lines[e.EndL-1], func(r rune) bool { + return r != ' ' + }) + span := e.EndC - start + 1 + if e.StartL == e.EndL { + start = e.StartC + span = e.EndC - e.StartC + } + if span == 0 { + span = 1 + } + s += fmt.Sprintf("%v %v %v%v %v\n", + strings.Repeat(" ", lnPad), + txtBarGood, + strings.Repeat(" ", start), + aurora.Red(strings.Repeat("^", span)), + e.Msg) + } + return s +} + +type ErrorListener struct { + *antlr.DefaultErrorListener + Errors []CompileError +} + +func (l *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol interface{}, startL, startC int, msg string, e 
antlr.RecognitionException) { + length := 1 + if token, ok := offendingSymbol.(antlr.Token); ok { + length = len(token.GetText()) + } + endL := startL + endC := startC + length - 1 // -1 so that end character is inside the offending token + l.Errors = append(l.Errors, CompileError{ + StartL: startL, + StartC: startC, + EndL: endL, + EndC: endC, + Msg: msg, + }) +} + +func LogicError(c antlr.ParserRuleContext, err error) *CompileError { + endC := c.GetStop().GetColumn() + len(c.GetStop().GetText()) + return &CompileError{ + StartL: c.GetStart().GetLine(), + StartC: c.GetStart().GetColumn(), + EndL: c.GetStop().GetLine(), + EndC: endC, + Msg: err.Error(), + } +} + +const InternalErrorMsg = "internal compiler error, please report to the issue tracker" + +func InternalError(c antlr.ParserRuleContext) *CompileError { + return &CompileError{ + StartL: c.GetStart().GetLine(), + StartC: c.GetStart().GetColumn(), + EndL: c.GetStop().GetLine(), + EndC: c.GetStop().GetColumn(), + Msg: InternalErrorMsg, + } +} diff --git a/internal/machine/script/compiler/error_test.go b/internal/machine/script/compiler/error_test.go new file mode 100644 index 000000000..c9a5241dc --- /dev/null +++ b/internal/machine/script/compiler/error_test.go @@ -0,0 +1,41 @@ +package compiler + +import ( + "testing" +) + +func TestEndCharacter(t *testing.T) { + src := ` + send [CREDIT 200] ( + source = @a + destination = { + 500% to @b + 50% to @c + } + ) + ` + + _, err := Compile(src) + if err == nil { + t.Fatal("expected error and got none") + } + + if _, ok := err.(*CompileErrorList); !ok { + t.Fatal("error had wrong type") + } + + compErr := err.(*CompileErrorList).Errors[0] + + if compErr.StartL != 5 { + t.Fatalf("start line was %v", compErr.StartL) + } + if compErr.StartC != 3 { + t.Fatalf("start character was %v", compErr.StartC) + } + if compErr.EndL != 5 { + t.Fatalf("end line was %v", compErr.EndL) + } + if compErr.EndC != 7 { + t.Fatalf("end character was %v", compErr.EndC) + } +} diff --git 
a/internal/machine/script/compiler/program.go b/internal/machine/script/compiler/program.go new file mode 100644 index 000000000..460f22c32 --- /dev/null +++ b/internal/machine/script/compiler/program.go @@ -0,0 +1,36 @@ +package compiler + +import ( + internal2 "github.com/formancehq/ledger/internal/machine/internal" + program2 "github.com/formancehq/ledger/internal/machine/vm/program" +) + +func (p *parseVisitor) AppendInstruction(instruction byte) { + p.instructions = append(p.instructions, instruction) +} + +func (p *parseVisitor) PushAddress(addr internal2.Address) { + p.instructions = append(p.instructions, program2.OP_APUSH) + bytes := addr.ToBytes() + p.instructions = append(p.instructions, bytes...) +} + +func (p *parseVisitor) PushInteger(val internal2.Number) error { + addr, err := p.AllocateResource(program2.Constant{Inner: val}) + if err != nil { + return err + } + p.instructions = append(p.instructions, program2.OP_APUSH) + bytes := addr.ToBytes() + p.instructions = append(p.instructions, bytes...) 
+ return nil +} + +func (p *parseVisitor) Bump(n int64) error { + err := p.PushInteger(internal2.NewNumber(n)) + if err != nil { + return err + } + p.instructions = append(p.instructions, program2.OP_BUMP) + return nil +} diff --git a/internal/machine/script/compiler/source.go b/internal/machine/script/compiler/source.go new file mode 100644 index 000000000..e6af5c67f --- /dev/null +++ b/internal/machine/script/compiler/source.go @@ -0,0 +1,237 @@ +package compiler + +import ( + "errors" + "fmt" + + internal2 "github.com/formancehq/ledger/internal/machine/internal" + "github.com/formancehq/ledger/internal/machine/script/parser" + "github.com/formancehq/ledger/internal/machine/vm/program" +) + +type FallbackAccount internal2.Address + +// VisitValueAwareSource returns the resource addresses of all the accounts +func (p *parseVisitor) VisitValueAwareSource(c parser.IValueAwareSourceContext, pushAsset func(), monAddr *internal2.Address) (map[internal2.Address]struct{}, *CompileError) { + neededAccounts := map[internal2.Address]struct{}{} + isAll := monAddr == nil + switch c := c.(type) { + case *parser.SrcContext: + accounts, _, unbounded, compErr := p.VisitSource(c.Source(), pushAsset, isAll) + if compErr != nil { + return nil, compErr + } + for k, v := range accounts { + neededAccounts[k] = v + } + if !isAll { + p.PushAddress(*monAddr) + err := p.TakeFromSource(unbounded) + if err != nil { + return nil, LogicError(c, err) + } + } + case *parser.SrcAllotmentContext: + if isAll { + return nil, LogicError(c, errors.New("cannot take all balance of an allotment source")) + } + p.PushAddress(*monAddr) + p.VisitAllotment(c.SourceAllotment(), c.SourceAllotment().GetPortions()) + p.AppendInstruction(program.OP_ALLOC) + + sources := c.SourceAllotment().GetSources() + n := len(sources) + for i := 0; i < n; i++ { + accounts, _, fallback, compErr := p.VisitSource(sources[i], pushAsset, isAll) + if compErr != nil { + return nil, compErr + } + for k, v := range accounts { + 
neededAccounts[k] = v + } + err := p.Bump(int64(i + 1)) + if err != nil { + return nil, LogicError(c, err) + } + err = p.TakeFromSource(fallback) + if err != nil { + return nil, LogicError(c, err) + } + } + err := p.PushInteger(internal2.NewNumber(int64(n))) + if err != nil { + return nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_FUNDING_ASSEMBLE) + } + return neededAccounts, nil +} + +func (p *parseVisitor) TakeFromSource(fallback *FallbackAccount) error { + if fallback == nil { + p.AppendInstruction(program.OP_TAKE) + err := p.Bump(1) + if err != nil { + return err + } + p.AppendInstruction(program.OP_REPAY) + return nil + } + + p.AppendInstruction(program.OP_TAKE_MAX) + err := p.Bump(1) + if err != nil { + return err + } + p.AppendInstruction(program.OP_REPAY) + p.PushAddress(internal2.Address(*fallback)) + err = p.Bump(2) + if err != nil { + return err + } + p.AppendInstruction(program.OP_TAKE_ALWAYS) + err = p.PushInteger(internal2.NewNumber(2)) + if err != nil { + return err + } + p.AppendInstruction(program.OP_FUNDING_ASSEMBLE) + return nil +} + +// VisitSource returns the resource addresses of all the accounts, +// the addresses of accounts already emptied, +// and possibly a fallback account if the source has an unbounded overdraft allowance or contains @world +func (p *parseVisitor) VisitSource(c parser.ISourceContext, pushAsset func(), isAll bool) (map[internal2.Address]struct{}, map[internal2.Address]struct{}, *FallbackAccount, *CompileError) { + neededAccounts := map[internal2.Address]struct{}{} + emptiedAccounts := map[internal2.Address]struct{}{} + var fallback *FallbackAccount + switch c := c.(type) { + case *parser.SrcAccountContext: + ty, accAddr, compErr := p.VisitExpr(c.SourceAccount().GetAccount(), true) + if compErr != nil { + return nil, nil, nil, compErr + } + if ty != internal2.TypeAccount { + return nil, nil, nil, LogicError(c, errors.New("wrong type: expected account or allocation as destination")) + } + if 
p.isWorld(*accAddr) { + f := FallbackAccount(*accAddr) + fallback = &f + } + + overdraft := c.SourceAccount().GetOverdraft() + if overdraft == nil { + // no overdraft: use zero monetary + pushAsset() + err := p.PushInteger(internal2.NewNumber(0)) + if err != nil { + return nil, nil, nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_MONETARY_NEW) + p.AppendInstruction(program.OP_TAKE_ALL) + } else { + if p.isWorld(*accAddr) { + return nil, nil, nil, LogicError(c, errors.New("@world is already set to an unbounded overdraft")) + } + switch c := overdraft.(type) { + case *parser.SrcAccountOverdraftSpecificContext: + ty, _, compErr := p.VisitExpr(c.GetSpecific(), true) + if compErr != nil { + return nil, nil, nil, compErr + } + if ty != internal2.TypeMonetary { + return nil, nil, nil, LogicError(c, errors.New("wrong type: expected monetary")) + } + p.AppendInstruction(program.OP_TAKE_ALL) + case *parser.SrcAccountOverdraftUnboundedContext: + pushAsset() + err := p.PushInteger(internal2.NewNumber(0)) + if err != nil { + return nil, nil, nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_MONETARY_NEW) + p.AppendInstruction(program.OP_TAKE_ALL) + f := FallbackAccount(*accAddr) + fallback = &f + } + } + neededAccounts[*accAddr] = struct{}{} + emptiedAccounts[*accAddr] = struct{}{} + + if fallback != nil && isAll { + return nil, nil, nil, LogicError(c, errors.New("cannot take all balance of an unbounded source")) + } + + case *parser.SrcMaxedContext: + accounts, _, subsourceFallback, compErr := p.VisitSource(c.SourceMaxed().GetSrc(), pushAsset, false) + if compErr != nil { + return nil, nil, nil, compErr + } + ty, _, compErr := p.VisitExpr(c.SourceMaxed().GetMax(), true) + if compErr != nil { + return nil, nil, nil, compErr + } + if ty != internal2.TypeMonetary { + return nil, nil, nil, LogicError(c, errors.New("wrong type: expected monetary as max")) + } + for k, v := range accounts { + neededAccounts[k] = v + } + 
p.AppendInstruction(program.OP_TAKE_MAX) + err := p.Bump(1) + if err != nil { + return nil, nil, nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_REPAY) + if subsourceFallback != nil { + p.PushAddress(internal2.Address(*subsourceFallback)) + err := p.Bump(2) + if err != nil { + return nil, nil, nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_TAKE_ALL) + err = p.PushInteger(internal2.NewNumber(2)) + if err != nil { + return nil, nil, nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_FUNDING_ASSEMBLE) + } else { + err := p.Bump(1) + if err != nil { + return nil, nil, nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_DELETE) + } + case *parser.SrcInOrderContext: + sources := c.SourceInOrder().GetSources() + n := len(sources) + for i := 0; i < n; i++ { + accounts, emptied, subsourceFallback, compErr := p.VisitSource(sources[i], pushAsset, isAll) + if compErr != nil { + return nil, nil, nil, compErr + } + fallback = subsourceFallback + if subsourceFallback != nil && i != n-1 { + return nil, nil, nil, LogicError(c, errors.New("an unbounded subsource can only be in last position")) + } + for k, v := range accounts { + neededAccounts[k] = v + } + for k, v := range emptied { + if _, ok := emptiedAccounts[k]; ok { + return nil, nil, nil, LogicError(sources[i], fmt.Errorf("%v is already empty at this stage", p.resources[k])) + } + emptiedAccounts[k] = v + } + } + err := p.PushInteger(internal2.NewNumber(int64(n))) + if err != nil { + return nil, nil, nil, LogicError(c, err) + } + p.AppendInstruction(program.OP_FUNDING_ASSEMBLE) + } + for address := range neededAccounts { + p.sources[address] = struct{}{} + } + return neededAccounts, emptiedAccounts, fallback, nil +} diff --git a/internal/machine/script/generate.go b/internal/machine/script/generate.go new file mode 100644 index 000000000..9da7fbc6a --- /dev/null +++ b/internal/machine/script/generate.go @@ -0,0 +1,3 @@ +package parser + +//go:generate ./generate.sh diff --git 
a/internal/machine/script/generate.sh b/internal/machine/script/generate.sh new file mode 100755 index 000000000..8378080da --- /dev/null +++ b/internal/machine/script/generate.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -o errexit +set -o nounset +set -o pipefail +if [[ "${TRACE-0}" == "1" ]]; then + set -o xtrace +fi + +cd "$(dirname "$0")" + +ANTLR_VERSION='4.10.1' + +main() { + curl --continue-at - https://www.antlr.org/download/antlr-$ANTLR_VERSION-complete.jar -O + java -Xmx500M -cp "./antlr-$ANTLR_VERSION-complete.jar" org.antlr.v4.Tool -Dlanguage=Go -o parser NumScript.g4 +} + +main "$@" diff --git a/internal/machine/script/parser/NumScript.interp b/internal/machine/script/parser/NumScript.interp new file mode 100644 index 000000000..12a17ac08 --- /dev/null +++ b/internal/machine/script/parser/NumScript.interp @@ -0,0 +1,128 @@ +token literal names: +null +'*' +'allowing overdraft up to' +'allowing unbounded overdraft' +',' +null +null +null +null +'vars' +'meta' +'set_tx_meta' +'set_account_meta' +'print' +'fail' +'send' +'source' +'from' +'max' +'destination' +'to' +'allocate' +'+' +'-' +'(' +')' +'[' +']' +'{' +'}' +'=' +'account' +'asset' +'number' +'monetary' +'portion' +'string' +null +null +'remaining' +'kept' +'balance' +'save' +null +'%' +null +null +null + +token symbolic names: +null +null +null +null +null +NEWLINE +WHITESPACE +MULTILINE_COMMENT +LINE_COMMENT +VARS +META +SET_TX_META +SET_ACCOUNT_META +PRINT +FAIL +SEND +SOURCE +FROM +MAX +DESTINATION +TO +ALLOCATE +OP_ADD +OP_SUB +LPAREN +RPAREN +LBRACK +RBRACK +LBRACE +RBRACE +EQ +TY_ACCOUNT +TY_ASSET +TY_NUMBER +TY_MONETARY +TY_PORTION +TY_STRING +STRING +PORTION +REMAINING +KEPT +BALANCE +SAVE +NUMBER +PERCENT +VARIABLE_NAME +ACCOUNT +ASSET + +rule names: +monetary +monetaryAll +literal +variable +expression +allotmentPortion +destinationInOrder +destinationAllotment +keptOrDestination +destination +sourceAccountOverdraft +sourceAccount +sourceInOrder +sourceMaxed +source 
+sourceAllotment +valueAwareSource +statement +type_ +origin +varDecl +varListDecl +script + + +atn: +[4, 1, 47, 292, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 63, 8, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 3, 4, 70, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 75, 8, 4, 10, 4, 12, 4, 78, 9, 4, 1, 5, 1, 5, 1, 5, 3, 5, 83, 8, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 4, 6, 92, 8, 6, 11, 6, 12, 6, 93, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 4, 7, 107, 8, 7, 11, 7, 12, 7, 108, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 3, 8, 116, 8, 8, 1, 9, 1, 9, 1, 9, 3, 9, 121, 8, 9, 1, 10, 1, 10, 1, 10, 3, 10, 126, 8, 10, 1, 11, 1, 11, 3, 11, 130, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 4, 12, 137, 8, 12, 11, 12, 12, 12, 138, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 3, 14, 151, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 160, 8, 15, 11, 15, 12, 15, 161, 1, 15, 1, 15, 1, 16, 1, 16, 3, 16, 168, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 175, 8, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 200, 8, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 220, 8, 17, 1, 17, 1, 17, 1, 17, 3, 17, 225, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 243, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 249, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 4, 21, 256, 8, 21, 11, 21, 12, 21, 257, 4, 21, 260, 8, 21, 11, 
21, 12, 21, 261, 1, 21, 1, 21, 1, 21, 1, 22, 5, 22, 268, 8, 22, 10, 22, 12, 22, 271, 9, 22, 1, 22, 3, 22, 274, 8, 22, 1, 22, 1, 22, 1, 22, 5, 22, 279, 8, 22, 10, 22, 12, 22, 282, 9, 22, 1, 22, 5, 22, 285, 8, 22, 10, 22, 12, 22, 288, 9, 22, 1, 22, 1, 22, 1, 22, 0, 1, 8, 23, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 0, 2, 1, 0, 22, 23, 1, 0, 31, 36, 305, 0, 46, 1, 0, 0, 0, 2, 51, 1, 0, 0, 0, 4, 62, 1, 0, 0, 0, 6, 64, 1, 0, 0, 0, 8, 69, 1, 0, 0, 0, 10, 82, 1, 0, 0, 0, 12, 84, 1, 0, 0, 0, 14, 100, 1, 0, 0, 0, 16, 115, 1, 0, 0, 0, 18, 120, 1, 0, 0, 0, 20, 125, 1, 0, 0, 0, 22, 127, 1, 0, 0, 0, 24, 131, 1, 0, 0, 0, 26, 142, 1, 0, 0, 0, 28, 150, 1, 0, 0, 0, 30, 152, 1, 0, 0, 0, 32, 167, 1, 0, 0, 0, 34, 224, 1, 0, 0, 0, 36, 226, 1, 0, 0, 0, 38, 242, 1, 0, 0, 0, 40, 244, 1, 0, 0, 0, 42, 250, 1, 0, 0, 0, 44, 269, 1, 0, 0, 0, 46, 47, 5, 26, 0, 0, 47, 48, 3, 8, 4, 0, 48, 49, 5, 43, 0, 0, 49, 50, 5, 27, 0, 0, 50, 1, 1, 0, 0, 0, 51, 52, 5, 26, 0, 0, 52, 53, 3, 8, 4, 0, 53, 54, 5, 1, 0, 0, 54, 55, 5, 27, 0, 0, 55, 3, 1, 0, 0, 0, 56, 63, 5, 46, 0, 0, 57, 63, 5, 47, 0, 0, 58, 63, 5, 43, 0, 0, 59, 63, 5, 37, 0, 0, 60, 63, 5, 38, 0, 0, 61, 63, 3, 0, 0, 0, 62, 56, 1, 0, 0, 0, 62, 57, 1, 0, 0, 0, 62, 58, 1, 0, 0, 0, 62, 59, 1, 0, 0, 0, 62, 60, 1, 0, 0, 0, 62, 61, 1, 0, 0, 0, 63, 5, 1, 0, 0, 0, 64, 65, 5, 45, 0, 0, 65, 7, 1, 0, 0, 0, 66, 67, 6, 4, -1, 0, 67, 70, 3, 4, 2, 0, 68, 70, 3, 6, 3, 0, 69, 66, 1, 0, 0, 0, 69, 68, 1, 0, 0, 0, 70, 76, 1, 0, 0, 0, 71, 72, 10, 3, 0, 0, 72, 73, 7, 0, 0, 0, 73, 75, 3, 8, 4, 4, 74, 71, 1, 0, 0, 0, 75, 78, 1, 0, 0, 0, 76, 74, 1, 0, 0, 0, 76, 77, 1, 0, 0, 0, 77, 9, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 79, 83, 5, 38, 0, 0, 80, 83, 3, 6, 3, 0, 81, 83, 5, 39, 0, 0, 82, 79, 1, 0, 0, 0, 82, 80, 1, 0, 0, 0, 82, 81, 1, 0, 0, 0, 83, 11, 1, 0, 0, 0, 84, 85, 5, 28, 0, 0, 85, 91, 5, 5, 0, 0, 86, 87, 5, 18, 0, 0, 87, 88, 3, 8, 4, 0, 88, 89, 3, 16, 8, 0, 89, 90, 5, 5, 0, 0, 90, 92, 1, 0, 0, 0, 91, 86, 1, 0, 0, 0, 92, 93, 1, 0, 
0, 0, 93, 91, 1, 0, 0, 0, 93, 94, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 96, 5, 39, 0, 0, 96, 97, 3, 16, 8, 0, 97, 98, 5, 5, 0, 0, 98, 99, 5, 29, 0, 0, 99, 13, 1, 0, 0, 0, 100, 101, 5, 28, 0, 0, 101, 106, 5, 5, 0, 0, 102, 103, 3, 10, 5, 0, 103, 104, 3, 16, 8, 0, 104, 105, 5, 5, 0, 0, 105, 107, 1, 0, 0, 0, 106, 102, 1, 0, 0, 0, 107, 108, 1, 0, 0, 0, 108, 106, 1, 0, 0, 0, 108, 109, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 111, 5, 29, 0, 0, 111, 15, 1, 0, 0, 0, 112, 113, 5, 20, 0, 0, 113, 116, 3, 18, 9, 0, 114, 116, 5, 40, 0, 0, 115, 112, 1, 0, 0, 0, 115, 114, 1, 0, 0, 0, 116, 17, 1, 0, 0, 0, 117, 121, 3, 8, 4, 0, 118, 121, 3, 12, 6, 0, 119, 121, 3, 14, 7, 0, 120, 117, 1, 0, 0, 0, 120, 118, 1, 0, 0, 0, 120, 119, 1, 0, 0, 0, 121, 19, 1, 0, 0, 0, 122, 123, 5, 2, 0, 0, 123, 126, 3, 8, 4, 0, 124, 126, 5, 3, 0, 0, 125, 122, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 21, 1, 0, 0, 0, 127, 129, 3, 8, 4, 0, 128, 130, 3, 20, 10, 0, 129, 128, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 23, 1, 0, 0, 0, 131, 132, 5, 28, 0, 0, 132, 136, 5, 5, 0, 0, 133, 134, 3, 28, 14, 0, 134, 135, 5, 5, 0, 0, 135, 137, 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 5, 29, 0, 0, 141, 25, 1, 0, 0, 0, 142, 143, 5, 18, 0, 0, 143, 144, 3, 8, 4, 0, 144, 145, 5, 17, 0, 0, 145, 146, 3, 28, 14, 0, 146, 27, 1, 0, 0, 0, 147, 151, 3, 22, 11, 0, 148, 151, 3, 26, 13, 0, 149, 151, 3, 24, 12, 0, 150, 147, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 150, 149, 1, 0, 0, 0, 151, 29, 1, 0, 0, 0, 152, 153, 5, 28, 0, 0, 153, 159, 5, 5, 0, 0, 154, 155, 3, 10, 5, 0, 155, 156, 5, 17, 0, 0, 156, 157, 3, 28, 14, 0, 157, 158, 5, 5, 0, 0, 158, 160, 1, 0, 0, 0, 159, 154, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 159, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 5, 29, 0, 0, 164, 31, 1, 0, 0, 0, 165, 168, 3, 28, 14, 0, 166, 168, 3, 30, 15, 0, 167, 165, 1, 0, 0, 0, 167, 166, 1, 0, 0, 0, 168, 33, 1, 0, 0, 0, 169, 170, 5, 13, 0, 0, 170, 225, 3, 8, 4, 0, 
171, 174, 5, 42, 0, 0, 172, 175, 3, 8, 4, 0, 173, 175, 3, 2, 1, 0, 174, 172, 1, 0, 0, 0, 174, 173, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 17, 0, 0, 177, 178, 3, 8, 4, 0, 178, 225, 1, 0, 0, 0, 179, 180, 5, 11, 0, 0, 180, 181, 5, 24, 0, 0, 181, 182, 5, 37, 0, 0, 182, 183, 5, 4, 0, 0, 183, 184, 3, 8, 4, 0, 184, 185, 5, 25, 0, 0, 185, 225, 1, 0, 0, 0, 186, 187, 5, 12, 0, 0, 187, 188, 5, 24, 0, 0, 188, 189, 3, 8, 4, 0, 189, 190, 5, 4, 0, 0, 190, 191, 5, 37, 0, 0, 191, 192, 5, 4, 0, 0, 192, 193, 3, 8, 4, 0, 193, 194, 5, 25, 0, 0, 194, 225, 1, 0, 0, 0, 195, 225, 5, 14, 0, 0, 196, 199, 5, 15, 0, 0, 197, 200, 3, 8, 4, 0, 198, 200, 3, 2, 1, 0, 199, 197, 1, 0, 0, 0, 199, 198, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 5, 24, 0, 0, 202, 219, 5, 5, 0, 0, 203, 204, 5, 16, 0, 0, 204, 205, 5, 30, 0, 0, 205, 206, 3, 32, 16, 0, 206, 207, 5, 5, 0, 0, 207, 208, 5, 19, 0, 0, 208, 209, 5, 30, 0, 0, 209, 210, 3, 18, 9, 0, 210, 220, 1, 0, 0, 0, 211, 212, 5, 19, 0, 0, 212, 213, 5, 30, 0, 0, 213, 214, 3, 18, 9, 0, 214, 215, 5, 5, 0, 0, 215, 216, 5, 16, 0, 0, 216, 217, 5, 30, 0, 0, 217, 218, 3, 32, 16, 0, 218, 220, 1, 0, 0, 0, 219, 203, 1, 0, 0, 0, 219, 211, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 5, 0, 0, 222, 223, 5, 25, 0, 0, 223, 225, 1, 0, 0, 0, 224, 169, 1, 0, 0, 0, 224, 171, 1, 0, 0, 0, 224, 179, 1, 0, 0, 0, 224, 186, 1, 0, 0, 0, 224, 195, 1, 0, 0, 0, 224, 196, 1, 0, 0, 0, 225, 35, 1, 0, 0, 0, 226, 227, 7, 1, 0, 0, 227, 37, 1, 0, 0, 0, 228, 229, 5, 10, 0, 0, 229, 230, 5, 24, 0, 0, 230, 231, 3, 8, 4, 0, 231, 232, 5, 4, 0, 0, 232, 233, 5, 37, 0, 0, 233, 234, 5, 25, 0, 0, 234, 243, 1, 0, 0, 0, 235, 236, 5, 41, 0, 0, 236, 237, 5, 24, 0, 0, 237, 238, 3, 8, 4, 0, 238, 239, 5, 4, 0, 0, 239, 240, 3, 8, 4, 0, 240, 241, 5, 25, 0, 0, 241, 243, 1, 0, 0, 0, 242, 228, 1, 0, 0, 0, 242, 235, 1, 0, 0, 0, 243, 39, 1, 0, 0, 0, 244, 245, 3, 36, 18, 0, 245, 248, 3, 6, 3, 0, 246, 247, 5, 30, 0, 0, 247, 249, 3, 38, 19, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 41, 1, 0, 0, 0, 250, 
251, 5, 9, 0, 0, 251, 252, 5, 28, 0, 0, 252, 259, 5, 5, 0, 0, 253, 255, 3, 40, 20, 0, 254, 256, 5, 5, 0, 0, 255, 254, 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 260, 1, 0, 0, 0, 259, 253, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 1, 0, 0, 0, 263, 264, 5, 29, 0, 0, 264, 265, 5, 5, 0, 0, 265, 43, 1, 0, 0, 0, 266, 268, 5, 5, 0, 0, 267, 266, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 274, 3, 42, 21, 0, 273, 272, 1, 0, 0, 0, 273, 274, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 280, 3, 34, 17, 0, 276, 277, 5, 5, 0, 0, 277, 279, 3, 34, 17, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 286, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 285, 5, 5, 0, 0, 284, 283, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 289, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 0, 0, 1, 290, 45, 1, 0, 0, 0, 26, 62, 69, 76, 82, 93, 108, 115, 120, 125, 129, 138, 150, 161, 167, 174, 199, 219, 224, 242, 248, 257, 261, 269, 273, 280, 286] diff --git a/internal/machine/script/parser/NumScript.tokens b/internal/machine/script/parser/NumScript.tokens new file mode 100644 index 000000000..cb7dd35a2 --- /dev/null +++ b/internal/machine/script/parser/NumScript.tokens @@ -0,0 +1,84 @@ +T__0=1 +T__1=2 +T__2=3 +T__3=4 +NEWLINE=5 +WHITESPACE=6 +MULTILINE_COMMENT=7 +LINE_COMMENT=8 +VARS=9 +META=10 +SET_TX_META=11 +SET_ACCOUNT_META=12 +PRINT=13 +FAIL=14 +SEND=15 +SOURCE=16 +FROM=17 +MAX=18 +DESTINATION=19 +TO=20 +ALLOCATE=21 +OP_ADD=22 +OP_SUB=23 +LPAREN=24 +RPAREN=25 +LBRACK=26 +RBRACK=27 +LBRACE=28 +RBRACE=29 +EQ=30 +TY_ACCOUNT=31 +TY_ASSET=32 +TY_NUMBER=33 +TY_MONETARY=34 +TY_PORTION=35 +TY_STRING=36 +STRING=37 +PORTION=38 +REMAINING=39 +KEPT=40 +BALANCE=41 +SAVE=42 +NUMBER=43 +PERCENT=44 +VARIABLE_NAME=45 +ACCOUNT=46 +ASSET=47 +'*'=1 +'allowing overdraft up to'=2 
+'allowing unbounded overdraft'=3 +','=4 +'vars'=9 +'meta'=10 +'set_tx_meta'=11 +'set_account_meta'=12 +'print'=13 +'fail'=14 +'send'=15 +'source'=16 +'from'=17 +'max'=18 +'destination'=19 +'to'=20 +'allocate'=21 +'+'=22 +'-'=23 +'('=24 +')'=25 +'['=26 +']'=27 +'{'=28 +'}'=29 +'='=30 +'account'=31 +'asset'=32 +'number'=33 +'monetary'=34 +'portion'=35 +'string'=36 +'remaining'=39 +'kept'=40 +'balance'=41 +'save'=42 +'%'=44 diff --git a/internal/machine/script/parser/NumScriptLexer.interp b/internal/machine/script/parser/NumScriptLexer.interp new file mode 100644 index 000000000..531a9b456 --- /dev/null +++ b/internal/machine/script/parser/NumScriptLexer.interp @@ -0,0 +1,158 @@ +token literal names: +null +'*' +'allowing overdraft up to' +'allowing unbounded overdraft' +',' +null +null +null +null +'vars' +'meta' +'set_tx_meta' +'set_account_meta' +'print' +'fail' +'send' +'source' +'from' +'max' +'destination' +'to' +'allocate' +'+' +'-' +'(' +')' +'[' +']' +'{' +'}' +'=' +'account' +'asset' +'number' +'monetary' +'portion' +'string' +null +null +'remaining' +'kept' +'balance' +'save' +null +'%' +null +null +null + +token symbolic names: +null +null +null +null +null +NEWLINE +WHITESPACE +MULTILINE_COMMENT +LINE_COMMENT +VARS +META +SET_TX_META +SET_ACCOUNT_META +PRINT +FAIL +SEND +SOURCE +FROM +MAX +DESTINATION +TO +ALLOCATE +OP_ADD +OP_SUB +LPAREN +RPAREN +LBRACK +RBRACK +LBRACE +RBRACE +EQ +TY_ACCOUNT +TY_ASSET +TY_NUMBER +TY_MONETARY +TY_PORTION +TY_STRING +STRING +PORTION +REMAINING +KEPT +BALANCE +SAVE +NUMBER +PERCENT +VARIABLE_NAME +ACCOUNT +ASSET + +rule names: +T__0 +T__1 +T__2 +T__3 +NEWLINE +WHITESPACE +MULTILINE_COMMENT +LINE_COMMENT +VARS +META +SET_TX_META +SET_ACCOUNT_META +PRINT +FAIL +SEND +SOURCE +FROM +MAX +DESTINATION +TO +ALLOCATE +OP_ADD +OP_SUB +LPAREN +RPAREN +LBRACK +RBRACK +LBRACE +RBRACE +EQ +TY_ACCOUNT +TY_ASSET +TY_NUMBER +TY_MONETARY +TY_PORTION +TY_STRING +STRING +PORTION +REMAINING +KEPT +BALANCE +SAVE +NUMBER +PERCENT 
+VARIABLE_NAME +ACCOUNT +ASSET + +channel names: +DEFAULT_TOKEN_CHANNEL +HIDDEN + +mode names: +DEFAULT_MODE + +atn: +[4, 0, 47, 457, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 4, 4, 155, 8, 4, 11, 4, 12, 4, 156, 1, 5, 4, 5, 160, 8, 5, 11, 5, 12, 5, 161, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 5, 6, 171, 8, 6, 10, 6, 12, 6, 174, 9, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 5, 7, 185, 8, 7, 10, 7, 12, 7, 188, 9, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 
1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 5, 36, 354, 8, 36, 10, 36, 12, 36, 357, 9, 36, 1, 36, 1, 36, 1, 37, 4, 37, 362, 8, 37, 11, 37, 12, 37, 363, 1, 37, 3, 37, 367, 8, 37, 1, 37, 1, 37, 3, 37, 371, 8, 37, 1, 37, 4, 37, 374, 8, 37, 11, 37, 12, 37, 375, 1, 37, 4, 37, 379, 8, 37, 11, 37, 12, 37, 380, 1, 37, 1, 37, 4, 37, 385, 8, 37, 11, 37, 12, 37, 386, 3, 37, 389, 8, 37, 1, 37, 3, 37, 392, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 4, 42, 423, 8, 42, 11, 42, 12, 42, 424, 1, 43, 1, 43, 1, 44, 1, 44, 4, 44, 431, 8, 44, 11, 44, 12, 44, 432, 1, 44, 5, 44, 436, 8, 44, 10, 44, 12, 44, 439, 9, 44, 1, 45, 1, 45, 4, 45, 443, 8, 45, 11, 45, 12, 45, 444, 1, 45, 5, 45, 448, 8, 45, 10, 45, 12, 45, 451, 9, 45, 1, 46, 4, 46, 454, 8, 46, 11, 46, 12, 46, 455, 2, 172, 186, 0, 47, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67, 34, 69, 35, 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85, 43, 87, 44, 89, 45, 91, 46, 93, 47, 1, 0, 10, 2, 0, 10, 10, 13, 13, 2, 0, 9, 9, 32, 32, 6, 0, 32, 32, 45, 45, 48, 57, 65, 90, 95, 95, 97, 122, 1, 0, 48, 57, 1, 0, 32, 32, 2, 0, 95, 95, 97, 122, 3, 0, 48, 57, 95, 95, 97, 122, 3, 0, 65, 90, 95, 95, 
97, 122, 4, 0, 48, 58, 65, 90, 95, 95, 97, 122, 2, 0, 47, 57, 65, 90, 476, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 3, 97, 1, 0, 0, 0, 5, 122, 1, 0, 0, 0, 7, 151, 1, 0, 0, 0, 9, 154, 1, 0, 0, 0, 11, 159, 1, 0, 0, 0, 13, 165, 1, 0, 0, 0, 15, 180, 1, 0, 0, 0, 17, 193, 1, 0, 0, 0, 19, 198, 1, 0, 0, 0, 21, 203, 1, 0, 0, 0, 23, 215, 1, 0, 0, 0, 25, 232, 1, 0, 0, 0, 27, 238, 1, 0, 0, 0, 29, 243, 1, 0, 0, 0, 31, 248, 1, 0, 0, 0, 33, 255, 1, 0, 0, 0, 35, 260, 1, 0, 0, 0, 37, 264, 1, 0, 0, 0, 39, 276, 1, 0, 0, 0, 41, 279, 1, 0, 0, 0, 43, 288, 1, 0, 0, 0, 45, 290, 1, 0, 0, 0, 47, 292, 1, 0, 0, 0, 49, 294, 1, 0, 0, 0, 51, 296, 1, 0, 0, 0, 53, 298, 1, 0, 0, 0, 55, 300, 1, 0, 0, 0, 57, 302, 1, 0, 0, 0, 59, 304, 1, 0, 0, 0, 61, 306, 1, 0, 0, 0, 63, 314, 1, 0, 0, 0, 65, 320, 1, 0, 0, 0, 67, 327, 1, 0, 0, 0, 69, 336, 1, 0, 0, 0, 71, 344, 1, 0, 0, 0, 73, 351, 1, 0, 0, 0, 75, 391, 1, 0, 0, 0, 77, 393, 1, 0, 0, 0, 79, 403, 1, 0, 0, 0, 81, 408, 1, 0, 0, 0, 83, 416, 1, 0, 0, 0, 85, 422, 1, 0, 0, 0, 87, 426, 1, 0, 0, 0, 89, 428, 1, 0, 0, 0, 91, 440, 1, 0, 0, 0, 93, 453, 1, 0, 0, 0, 95, 96, 5, 42, 0, 0, 96, 2, 1, 0, 0, 0, 97, 98, 5, 97, 
0, 0, 98, 99, 5, 108, 0, 0, 99, 100, 5, 108, 0, 0, 100, 101, 5, 111, 0, 0, 101, 102, 5, 119, 0, 0, 102, 103, 5, 105, 0, 0, 103, 104, 5, 110, 0, 0, 104, 105, 5, 103, 0, 0, 105, 106, 5, 32, 0, 0, 106, 107, 5, 111, 0, 0, 107, 108, 5, 118, 0, 0, 108, 109, 5, 101, 0, 0, 109, 110, 5, 114, 0, 0, 110, 111, 5, 100, 0, 0, 111, 112, 5, 114, 0, 0, 112, 113, 5, 97, 0, 0, 113, 114, 5, 102, 0, 0, 114, 115, 5, 116, 0, 0, 115, 116, 5, 32, 0, 0, 116, 117, 5, 117, 0, 0, 117, 118, 5, 112, 0, 0, 118, 119, 5, 32, 0, 0, 119, 120, 5, 116, 0, 0, 120, 121, 5, 111, 0, 0, 121, 4, 1, 0, 0, 0, 122, 123, 5, 97, 0, 0, 123, 124, 5, 108, 0, 0, 124, 125, 5, 108, 0, 0, 125, 126, 5, 111, 0, 0, 126, 127, 5, 119, 0, 0, 127, 128, 5, 105, 0, 0, 128, 129, 5, 110, 0, 0, 129, 130, 5, 103, 0, 0, 130, 131, 5, 32, 0, 0, 131, 132, 5, 117, 0, 0, 132, 133, 5, 110, 0, 0, 133, 134, 5, 98, 0, 0, 134, 135, 5, 111, 0, 0, 135, 136, 5, 117, 0, 0, 136, 137, 5, 110, 0, 0, 137, 138, 5, 100, 0, 0, 138, 139, 5, 101, 0, 0, 139, 140, 5, 100, 0, 0, 140, 141, 5, 32, 0, 0, 141, 142, 5, 111, 0, 0, 142, 143, 5, 118, 0, 0, 143, 144, 5, 101, 0, 0, 144, 145, 5, 114, 0, 0, 145, 146, 5, 100, 0, 0, 146, 147, 5, 114, 0, 0, 147, 148, 5, 97, 0, 0, 148, 149, 5, 102, 0, 0, 149, 150, 5, 116, 0, 0, 150, 6, 1, 0, 0, 0, 151, 152, 5, 44, 0, 0, 152, 8, 1, 0, 0, 0, 153, 155, 7, 0, 0, 0, 154, 153, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 154, 1, 0, 0, 0, 156, 157, 1, 0, 0, 0, 157, 10, 1, 0, 0, 0, 158, 160, 7, 1, 0, 0, 159, 158, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 159, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 6, 5, 0, 0, 164, 12, 1, 0, 0, 0, 165, 166, 5, 47, 0, 0, 166, 167, 5, 42, 0, 0, 167, 172, 1, 0, 0, 0, 168, 171, 3, 13, 6, 0, 169, 171, 9, 0, 0, 0, 170, 168, 1, 0, 0, 0, 170, 169, 1, 0, 0, 0, 171, 174, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 175, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 175, 176, 5, 42, 0, 0, 176, 177, 5, 47, 0, 0, 177, 178, 1, 0, 0, 0, 178, 179, 6, 6, 0, 0, 179, 14, 1, 0, 0, 0, 180, 181, 5, 
47, 0, 0, 181, 182, 5, 47, 0, 0, 182, 186, 1, 0, 0, 0, 183, 185, 9, 0, 0, 0, 184, 183, 1, 0, 0, 0, 185, 188, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 186, 184, 1, 0, 0, 0, 187, 189, 1, 0, 0, 0, 188, 186, 1, 0, 0, 0, 189, 190, 3, 9, 4, 0, 190, 191, 1, 0, 0, 0, 191, 192, 6, 7, 0, 0, 192, 16, 1, 0, 0, 0, 193, 194, 5, 118, 0, 0, 194, 195, 5, 97, 0, 0, 195, 196, 5, 114, 0, 0, 196, 197, 5, 115, 0, 0, 197, 18, 1, 0, 0, 0, 198, 199, 5, 109, 0, 0, 199, 200, 5, 101, 0, 0, 200, 201, 5, 116, 0, 0, 201, 202, 5, 97, 0, 0, 202, 20, 1, 0, 0, 0, 203, 204, 5, 115, 0, 0, 204, 205, 5, 101, 0, 0, 205, 206, 5, 116, 0, 0, 206, 207, 5, 95, 0, 0, 207, 208, 5, 116, 0, 0, 208, 209, 5, 120, 0, 0, 209, 210, 5, 95, 0, 0, 210, 211, 5, 109, 0, 0, 211, 212, 5, 101, 0, 0, 212, 213, 5, 116, 0, 0, 213, 214, 5, 97, 0, 0, 214, 22, 1, 0, 0, 0, 215, 216, 5, 115, 0, 0, 216, 217, 5, 101, 0, 0, 217, 218, 5, 116, 0, 0, 218, 219, 5, 95, 0, 0, 219, 220, 5, 97, 0, 0, 220, 221, 5, 99, 0, 0, 221, 222, 5, 99, 0, 0, 222, 223, 5, 111, 0, 0, 223, 224, 5, 117, 0, 0, 224, 225, 5, 110, 0, 0, 225, 226, 5, 116, 0, 0, 226, 227, 5, 95, 0, 0, 227, 228, 5, 109, 0, 0, 228, 229, 5, 101, 0, 0, 229, 230, 5, 116, 0, 0, 230, 231, 5, 97, 0, 0, 231, 24, 1, 0, 0, 0, 232, 233, 5, 112, 0, 0, 233, 234, 5, 114, 0, 0, 234, 235, 5, 105, 0, 0, 235, 236, 5, 110, 0, 0, 236, 237, 5, 116, 0, 0, 237, 26, 1, 0, 0, 0, 238, 239, 5, 102, 0, 0, 239, 240, 5, 97, 0, 0, 240, 241, 5, 105, 0, 0, 241, 242, 5, 108, 0, 0, 242, 28, 1, 0, 0, 0, 243, 244, 5, 115, 0, 0, 244, 245, 5, 101, 0, 0, 245, 246, 5, 110, 0, 0, 246, 247, 5, 100, 0, 0, 247, 30, 1, 0, 0, 0, 248, 249, 5, 115, 0, 0, 249, 250, 5, 111, 0, 0, 250, 251, 5, 117, 0, 0, 251, 252, 5, 114, 0, 0, 252, 253, 5, 99, 0, 0, 253, 254, 5, 101, 0, 0, 254, 32, 1, 0, 0, 0, 255, 256, 5, 102, 0, 0, 256, 257, 5, 114, 0, 0, 257, 258, 5, 111, 0, 0, 258, 259, 5, 109, 0, 0, 259, 34, 1, 0, 0, 0, 260, 261, 5, 109, 0, 0, 261, 262, 5, 97, 0, 0, 262, 263, 5, 120, 0, 0, 263, 36, 1, 0, 0, 0, 264, 265, 5, 100, 0, 0, 265, 266, 5, 101, 
0, 0, 266, 267, 5, 115, 0, 0, 267, 268, 5, 116, 0, 0, 268, 269, 5, 105, 0, 0, 269, 270, 5, 110, 0, 0, 270, 271, 5, 97, 0, 0, 271, 272, 5, 116, 0, 0, 272, 273, 5, 105, 0, 0, 273, 274, 5, 111, 0, 0, 274, 275, 5, 110, 0, 0, 275, 38, 1, 0, 0, 0, 276, 277, 5, 116, 0, 0, 277, 278, 5, 111, 0, 0, 278, 40, 1, 0, 0, 0, 279, 280, 5, 97, 0, 0, 280, 281, 5, 108, 0, 0, 281, 282, 5, 108, 0, 0, 282, 283, 5, 111, 0, 0, 283, 284, 5, 99, 0, 0, 284, 285, 5, 97, 0, 0, 285, 286, 5, 116, 0, 0, 286, 287, 5, 101, 0, 0, 287, 42, 1, 0, 0, 0, 288, 289, 5, 43, 0, 0, 289, 44, 1, 0, 0, 0, 290, 291, 5, 45, 0, 0, 291, 46, 1, 0, 0, 0, 292, 293, 5, 40, 0, 0, 293, 48, 1, 0, 0, 0, 294, 295, 5, 41, 0, 0, 295, 50, 1, 0, 0, 0, 296, 297, 5, 91, 0, 0, 297, 52, 1, 0, 0, 0, 298, 299, 5, 93, 0, 0, 299, 54, 1, 0, 0, 0, 300, 301, 5, 123, 0, 0, 301, 56, 1, 0, 0, 0, 302, 303, 5, 125, 0, 0, 303, 58, 1, 0, 0, 0, 304, 305, 5, 61, 0, 0, 305, 60, 1, 0, 0, 0, 306, 307, 5, 97, 0, 0, 307, 308, 5, 99, 0, 0, 308, 309, 5, 99, 0, 0, 309, 310, 5, 111, 0, 0, 310, 311, 5, 117, 0, 0, 311, 312, 5, 110, 0, 0, 312, 313, 5, 116, 0, 0, 313, 62, 1, 0, 0, 0, 314, 315, 5, 97, 0, 0, 315, 316, 5, 115, 0, 0, 316, 317, 5, 115, 0, 0, 317, 318, 5, 101, 0, 0, 318, 319, 5, 116, 0, 0, 319, 64, 1, 0, 0, 0, 320, 321, 5, 110, 0, 0, 321, 322, 5, 117, 0, 0, 322, 323, 5, 109, 0, 0, 323, 324, 5, 98, 0, 0, 324, 325, 5, 101, 0, 0, 325, 326, 5, 114, 0, 0, 326, 66, 1, 0, 0, 0, 327, 328, 5, 109, 0, 0, 328, 329, 5, 111, 0, 0, 329, 330, 5, 110, 0, 0, 330, 331, 5, 101, 0, 0, 331, 332, 5, 116, 0, 0, 332, 333, 5, 97, 0, 0, 333, 334, 5, 114, 0, 0, 334, 335, 5, 121, 0, 0, 335, 68, 1, 0, 0, 0, 336, 337, 5, 112, 0, 0, 337, 338, 5, 111, 0, 0, 338, 339, 5, 114, 0, 0, 339, 340, 5, 116, 0, 0, 340, 341, 5, 105, 0, 0, 341, 342, 5, 111, 0, 0, 342, 343, 5, 110, 0, 0, 343, 70, 1, 0, 0, 0, 344, 345, 5, 115, 0, 0, 345, 346, 5, 116, 0, 0, 346, 347, 5, 114, 0, 0, 347, 348, 5, 105, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, 5, 103, 0, 0, 350, 72, 1, 0, 0, 0, 351, 355, 5, 34, 0, 0, 
352, 354, 7, 2, 0, 0, 353, 352, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 358, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 359, 5, 34, 0, 0, 359, 74, 1, 0, 0, 0, 360, 362, 7, 3, 0, 0, 361, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 361, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 366, 1, 0, 0, 0, 365, 367, 7, 4, 0, 0, 366, 365, 1, 0, 0, 0, 366, 367, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 5, 47, 0, 0, 369, 371, 7, 4, 0, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 373, 1, 0, 0, 0, 372, 374, 7, 3, 0, 0, 373, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 373, 1, 0, 0, 0, 375, 376, 1, 0, 0, 0, 376, 392, 1, 0, 0, 0, 377, 379, 7, 3, 0, 0, 378, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 388, 1, 0, 0, 0, 382, 384, 5, 46, 0, 0, 383, 385, 7, 3, 0, 0, 384, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 389, 1, 0, 0, 0, 388, 382, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 392, 5, 37, 0, 0, 391, 361, 1, 0, 0, 0, 391, 378, 1, 0, 0, 0, 392, 76, 1, 0, 0, 0, 393, 394, 5, 114, 0, 0, 394, 395, 5, 101, 0, 0, 395, 396, 5, 109, 0, 0, 396, 397, 5, 97, 0, 0, 397, 398, 5, 105, 0, 0, 398, 399, 5, 110, 0, 0, 399, 400, 5, 105, 0, 0, 400, 401, 5, 110, 0, 0, 401, 402, 5, 103, 0, 0, 402, 78, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 112, 0, 0, 406, 407, 5, 116, 0, 0, 407, 80, 1, 0, 0, 0, 408, 409, 5, 98, 0, 0, 409, 410, 5, 97, 0, 0, 410, 411, 5, 108, 0, 0, 411, 412, 5, 97, 0, 0, 412, 413, 5, 110, 0, 0, 413, 414, 5, 99, 0, 0, 414, 415, 5, 101, 0, 0, 415, 82, 1, 0, 0, 0, 416, 417, 5, 115, 0, 0, 417, 418, 5, 97, 0, 0, 418, 419, 5, 118, 0, 0, 419, 420, 5, 101, 0, 0, 420, 84, 1, 0, 0, 0, 421, 423, 7, 3, 0, 0, 422, 421, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 422, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 86, 1, 0, 0, 0, 426, 427, 5, 37, 0, 0, 427, 88, 1, 0, 0, 0, 428, 430, 5, 36, 0, 0, 429, 431, 7, 5, 0, 0, 430, 429, 1, 0, 0, 0, 
431, 432, 1, 0, 0, 0, 432, 430, 1, 0, 0, 0, 432, 433, 1, 0, 0, 0, 433, 437, 1, 0, 0, 0, 434, 436, 7, 6, 0, 0, 435, 434, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 90, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 442, 5, 64, 0, 0, 441, 443, 7, 7, 0, 0, 442, 441, 1, 0, 0, 0, 443, 444, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 449, 1, 0, 0, 0, 446, 448, 7, 8, 0, 0, 447, 446, 1, 0, 0, 0, 448, 451, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 449, 450, 1, 0, 0, 0, 450, 92, 1, 0, 0, 0, 451, 449, 1, 0, 0, 0, 452, 454, 7, 9, 0, 0, 453, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 94, 1, 0, 0, 0, 21, 0, 156, 161, 170, 172, 186, 355, 363, 366, 370, 375, 380, 386, 388, 391, 424, 432, 437, 444, 449, 455, 1, 6, 0, 0] diff --git a/internal/machine/script/parser/NumScriptLexer.tokens b/internal/machine/script/parser/NumScriptLexer.tokens new file mode 100644 index 000000000..cb7dd35a2 --- /dev/null +++ b/internal/machine/script/parser/NumScriptLexer.tokens @@ -0,0 +1,84 @@ +T__0=1 +T__1=2 +T__2=3 +T__3=4 +NEWLINE=5 +WHITESPACE=6 +MULTILINE_COMMENT=7 +LINE_COMMENT=8 +VARS=9 +META=10 +SET_TX_META=11 +SET_ACCOUNT_META=12 +PRINT=13 +FAIL=14 +SEND=15 +SOURCE=16 +FROM=17 +MAX=18 +DESTINATION=19 +TO=20 +ALLOCATE=21 +OP_ADD=22 +OP_SUB=23 +LPAREN=24 +RPAREN=25 +LBRACK=26 +RBRACK=27 +LBRACE=28 +RBRACE=29 +EQ=30 +TY_ACCOUNT=31 +TY_ASSET=32 +TY_NUMBER=33 +TY_MONETARY=34 +TY_PORTION=35 +TY_STRING=36 +STRING=37 +PORTION=38 +REMAINING=39 +KEPT=40 +BALANCE=41 +SAVE=42 +NUMBER=43 +PERCENT=44 +VARIABLE_NAME=45 +ACCOUNT=46 +ASSET=47 +'*'=1 +'allowing overdraft up to'=2 +'allowing unbounded overdraft'=3 +','=4 +'vars'=9 +'meta'=10 +'set_tx_meta'=11 +'set_account_meta'=12 +'print'=13 +'fail'=14 +'send'=15 +'source'=16 +'from'=17 +'max'=18 +'destination'=19 +'to'=20 +'allocate'=21 +'+'=22 +'-'=23 +'('=24 +')'=25 +'['=26 +']'=27 +'{'=28 +'}'=29 +'='=30 +'account'=31 +'asset'=32 +'number'=33 +'monetary'=34 +'portion'=35 
+'string'=36 +'remaining'=39 +'kept'=40 +'balance'=41 +'save'=42 +'%'=44 diff --git a/internal/machine/script/parser/numscript_base_listener.go b/internal/machine/script/parser/numscript_base_listener.go new file mode 100644 index 000000000..5e4904df7 --- /dev/null +++ b/internal/machine/script/parser/numscript_base_listener.go @@ -0,0 +1,298 @@ +// Code generated from NumScript.g4 by ANTLR 4.10.1. DO NOT EDIT. + +package parser // NumScript + +import "github.com/antlr/antlr4/runtime/Go/antlr" + +// BaseNumScriptListener is a complete listener for a parse tree produced by NumScriptParser. +type BaseNumScriptListener struct{} + +var _ NumScriptListener = &BaseNumScriptListener{} + +// VisitTerminal is called when a terminal node is visited. +func (s *BaseNumScriptListener) VisitTerminal(node antlr.TerminalNode) {} + +// VisitErrorNode is called when an error node is visited. +func (s *BaseNumScriptListener) VisitErrorNode(node antlr.ErrorNode) {} + +// EnterEveryRule is called when any rule is entered. +func (s *BaseNumScriptListener) EnterEveryRule(ctx antlr.ParserRuleContext) {} + +// ExitEveryRule is called when any rule is exited. +func (s *BaseNumScriptListener) ExitEveryRule(ctx antlr.ParserRuleContext) {} + +// EnterMonetary is called when production monetary is entered. +func (s *BaseNumScriptListener) EnterMonetary(ctx *MonetaryContext) {} + +// ExitMonetary is called when production monetary is exited. +func (s *BaseNumScriptListener) ExitMonetary(ctx *MonetaryContext) {} + +// EnterMonetaryAll is called when production monetaryAll is entered. +func (s *BaseNumScriptListener) EnterMonetaryAll(ctx *MonetaryAllContext) {} + +// ExitMonetaryAll is called when production monetaryAll is exited. +func (s *BaseNumScriptListener) ExitMonetaryAll(ctx *MonetaryAllContext) {} + +// EnterLitAccount is called when production LitAccount is entered. 
+func (s *BaseNumScriptListener) EnterLitAccount(ctx *LitAccountContext) {} + +// ExitLitAccount is called when production LitAccount is exited. +func (s *BaseNumScriptListener) ExitLitAccount(ctx *LitAccountContext) {} + +// EnterLitAsset is called when production LitAsset is entered. +func (s *BaseNumScriptListener) EnterLitAsset(ctx *LitAssetContext) {} + +// ExitLitAsset is called when production LitAsset is exited. +func (s *BaseNumScriptListener) ExitLitAsset(ctx *LitAssetContext) {} + +// EnterLitNumber is called when production LitNumber is entered. +func (s *BaseNumScriptListener) EnterLitNumber(ctx *LitNumberContext) {} + +// ExitLitNumber is called when production LitNumber is exited. +func (s *BaseNumScriptListener) ExitLitNumber(ctx *LitNumberContext) {} + +// EnterLitString is called when production LitString is entered. +func (s *BaseNumScriptListener) EnterLitString(ctx *LitStringContext) {} + +// ExitLitString is called when production LitString is exited. +func (s *BaseNumScriptListener) ExitLitString(ctx *LitStringContext) {} + +// EnterLitPortion is called when production LitPortion is entered. +func (s *BaseNumScriptListener) EnterLitPortion(ctx *LitPortionContext) {} + +// ExitLitPortion is called when production LitPortion is exited. +func (s *BaseNumScriptListener) ExitLitPortion(ctx *LitPortionContext) {} + +// EnterLitMonetary is called when production LitMonetary is entered. +func (s *BaseNumScriptListener) EnterLitMonetary(ctx *LitMonetaryContext) {} + +// ExitLitMonetary is called when production LitMonetary is exited. +func (s *BaseNumScriptListener) ExitLitMonetary(ctx *LitMonetaryContext) {} + +// EnterVariable is called when production variable is entered. +func (s *BaseNumScriptListener) EnterVariable(ctx *VariableContext) {} + +// ExitVariable is called when production variable is exited. +func (s *BaseNumScriptListener) ExitVariable(ctx *VariableContext) {} + +// EnterExprAddSub is called when production ExprAddSub is entered. 
+func (s *BaseNumScriptListener) EnterExprAddSub(ctx *ExprAddSubContext) {} + +// ExitExprAddSub is called when production ExprAddSub is exited. +func (s *BaseNumScriptListener) ExitExprAddSub(ctx *ExprAddSubContext) {} + +// EnterExprLiteral is called when production ExprLiteral is entered. +func (s *BaseNumScriptListener) EnterExprLiteral(ctx *ExprLiteralContext) {} + +// ExitExprLiteral is called when production ExprLiteral is exited. +func (s *BaseNumScriptListener) ExitExprLiteral(ctx *ExprLiteralContext) {} + +// EnterExprVariable is called when production ExprVariable is entered. +func (s *BaseNumScriptListener) EnterExprVariable(ctx *ExprVariableContext) {} + +// ExitExprVariable is called when production ExprVariable is exited. +func (s *BaseNumScriptListener) ExitExprVariable(ctx *ExprVariableContext) {} + +// EnterAllotmentPortionConst is called when production AllotmentPortionConst is entered. +func (s *BaseNumScriptListener) EnterAllotmentPortionConst(ctx *AllotmentPortionConstContext) {} + +// ExitAllotmentPortionConst is called when production AllotmentPortionConst is exited. +func (s *BaseNumScriptListener) ExitAllotmentPortionConst(ctx *AllotmentPortionConstContext) {} + +// EnterAllotmentPortionVar is called when production AllotmentPortionVar is entered. +func (s *BaseNumScriptListener) EnterAllotmentPortionVar(ctx *AllotmentPortionVarContext) {} + +// ExitAllotmentPortionVar is called when production AllotmentPortionVar is exited. +func (s *BaseNumScriptListener) ExitAllotmentPortionVar(ctx *AllotmentPortionVarContext) {} + +// EnterAllotmentPortionRemaining is called when production AllotmentPortionRemaining is entered. +func (s *BaseNumScriptListener) EnterAllotmentPortionRemaining(ctx *AllotmentPortionRemainingContext) { +} + +// ExitAllotmentPortionRemaining is called when production AllotmentPortionRemaining is exited. 
+func (s *BaseNumScriptListener) ExitAllotmentPortionRemaining(ctx *AllotmentPortionRemainingContext) { +} + +// EnterDestinationInOrder is called when production destinationInOrder is entered. +func (s *BaseNumScriptListener) EnterDestinationInOrder(ctx *DestinationInOrderContext) {} + +// ExitDestinationInOrder is called when production destinationInOrder is exited. +func (s *BaseNumScriptListener) ExitDestinationInOrder(ctx *DestinationInOrderContext) {} + +// EnterDestinationAllotment is called when production destinationAllotment is entered. +func (s *BaseNumScriptListener) EnterDestinationAllotment(ctx *DestinationAllotmentContext) {} + +// ExitDestinationAllotment is called when production destinationAllotment is exited. +func (s *BaseNumScriptListener) ExitDestinationAllotment(ctx *DestinationAllotmentContext) {} + +// EnterIsDestination is called when production IsDestination is entered. +func (s *BaseNumScriptListener) EnterIsDestination(ctx *IsDestinationContext) {} + +// ExitIsDestination is called when production IsDestination is exited. +func (s *BaseNumScriptListener) ExitIsDestination(ctx *IsDestinationContext) {} + +// EnterIsKept is called when production IsKept is entered. +func (s *BaseNumScriptListener) EnterIsKept(ctx *IsKeptContext) {} + +// ExitIsKept is called when production IsKept is exited. +func (s *BaseNumScriptListener) ExitIsKept(ctx *IsKeptContext) {} + +// EnterDestAccount is called when production DestAccount is entered. +func (s *BaseNumScriptListener) EnterDestAccount(ctx *DestAccountContext) {} + +// ExitDestAccount is called when production DestAccount is exited. +func (s *BaseNumScriptListener) ExitDestAccount(ctx *DestAccountContext) {} + +// EnterDestInOrder is called when production DestInOrder is entered. +func (s *BaseNumScriptListener) EnterDestInOrder(ctx *DestInOrderContext) {} + +// ExitDestInOrder is called when production DestInOrder is exited. 
+func (s *BaseNumScriptListener) ExitDestInOrder(ctx *DestInOrderContext) {} + +// EnterDestAllotment is called when production DestAllotment is entered. +func (s *BaseNumScriptListener) EnterDestAllotment(ctx *DestAllotmentContext) {} + +// ExitDestAllotment is called when production DestAllotment is exited. +func (s *BaseNumScriptListener) ExitDestAllotment(ctx *DestAllotmentContext) {} + +// EnterSrcAccountOverdraftSpecific is called when production SrcAccountOverdraftSpecific is entered. +func (s *BaseNumScriptListener) EnterSrcAccountOverdraftSpecific(ctx *SrcAccountOverdraftSpecificContext) { +} + +// ExitSrcAccountOverdraftSpecific is called when production SrcAccountOverdraftSpecific is exited. +func (s *BaseNumScriptListener) ExitSrcAccountOverdraftSpecific(ctx *SrcAccountOverdraftSpecificContext) { +} + +// EnterSrcAccountOverdraftUnbounded is called when production SrcAccountOverdraftUnbounded is entered. +func (s *BaseNumScriptListener) EnterSrcAccountOverdraftUnbounded(ctx *SrcAccountOverdraftUnboundedContext) { +} + +// ExitSrcAccountOverdraftUnbounded is called when production SrcAccountOverdraftUnbounded is exited. +func (s *BaseNumScriptListener) ExitSrcAccountOverdraftUnbounded(ctx *SrcAccountOverdraftUnboundedContext) { +} + +// EnterSourceAccount is called when production sourceAccount is entered. +func (s *BaseNumScriptListener) EnterSourceAccount(ctx *SourceAccountContext) {} + +// ExitSourceAccount is called when production sourceAccount is exited. +func (s *BaseNumScriptListener) ExitSourceAccount(ctx *SourceAccountContext) {} + +// EnterSourceInOrder is called when production sourceInOrder is entered. +func (s *BaseNumScriptListener) EnterSourceInOrder(ctx *SourceInOrderContext) {} + +// ExitSourceInOrder is called when production sourceInOrder is exited. +func (s *BaseNumScriptListener) ExitSourceInOrder(ctx *SourceInOrderContext) {} + +// EnterSourceMaxed is called when production sourceMaxed is entered. 
+func (s *BaseNumScriptListener) EnterSourceMaxed(ctx *SourceMaxedContext) {} + +// ExitSourceMaxed is called when production sourceMaxed is exited. +func (s *BaseNumScriptListener) ExitSourceMaxed(ctx *SourceMaxedContext) {} + +// EnterSrcAccount is called when production SrcAccount is entered. +func (s *BaseNumScriptListener) EnterSrcAccount(ctx *SrcAccountContext) {} + +// ExitSrcAccount is called when production SrcAccount is exited. +func (s *BaseNumScriptListener) ExitSrcAccount(ctx *SrcAccountContext) {} + +// EnterSrcMaxed is called when production SrcMaxed is entered. +func (s *BaseNumScriptListener) EnterSrcMaxed(ctx *SrcMaxedContext) {} + +// ExitSrcMaxed is called when production SrcMaxed is exited. +func (s *BaseNumScriptListener) ExitSrcMaxed(ctx *SrcMaxedContext) {} + +// EnterSrcInOrder is called when production SrcInOrder is entered. +func (s *BaseNumScriptListener) EnterSrcInOrder(ctx *SrcInOrderContext) {} + +// ExitSrcInOrder is called when production SrcInOrder is exited. +func (s *BaseNumScriptListener) ExitSrcInOrder(ctx *SrcInOrderContext) {} + +// EnterSourceAllotment is called when production sourceAllotment is entered. +func (s *BaseNumScriptListener) EnterSourceAllotment(ctx *SourceAllotmentContext) {} + +// ExitSourceAllotment is called when production sourceAllotment is exited. +func (s *BaseNumScriptListener) ExitSourceAllotment(ctx *SourceAllotmentContext) {} + +// EnterSrc is called when production Src is entered. +func (s *BaseNumScriptListener) EnterSrc(ctx *SrcContext) {} + +// ExitSrc is called when production Src is exited. +func (s *BaseNumScriptListener) ExitSrc(ctx *SrcContext) {} + +// EnterSrcAllotment is called when production SrcAllotment is entered. +func (s *BaseNumScriptListener) EnterSrcAllotment(ctx *SrcAllotmentContext) {} + +// ExitSrcAllotment is called when production SrcAllotment is exited. 
+func (s *BaseNumScriptListener) ExitSrcAllotment(ctx *SrcAllotmentContext) {} + +// EnterPrint is called when production Print is entered. +func (s *BaseNumScriptListener) EnterPrint(ctx *PrintContext) {} + +// ExitPrint is called when production Print is exited. +func (s *BaseNumScriptListener) ExitPrint(ctx *PrintContext) {} + +// EnterSaveFromAccount is called when production SaveFromAccount is entered. +func (s *BaseNumScriptListener) EnterSaveFromAccount(ctx *SaveFromAccountContext) {} + +// ExitSaveFromAccount is called when production SaveFromAccount is exited. +func (s *BaseNumScriptListener) ExitSaveFromAccount(ctx *SaveFromAccountContext) {} + +// EnterSetTxMeta is called when production SetTxMeta is entered. +func (s *BaseNumScriptListener) EnterSetTxMeta(ctx *SetTxMetaContext) {} + +// ExitSetTxMeta is called when production SetTxMeta is exited. +func (s *BaseNumScriptListener) ExitSetTxMeta(ctx *SetTxMetaContext) {} + +// EnterSetAccountMeta is called when production SetAccountMeta is entered. +func (s *BaseNumScriptListener) EnterSetAccountMeta(ctx *SetAccountMetaContext) {} + +// ExitSetAccountMeta is called when production SetAccountMeta is exited. +func (s *BaseNumScriptListener) ExitSetAccountMeta(ctx *SetAccountMetaContext) {} + +// EnterFail is called when production Fail is entered. +func (s *BaseNumScriptListener) EnterFail(ctx *FailContext) {} + +// ExitFail is called when production Fail is exited. +func (s *BaseNumScriptListener) ExitFail(ctx *FailContext) {} + +// EnterSend is called when production Send is entered. +func (s *BaseNumScriptListener) EnterSend(ctx *SendContext) {} + +// ExitSend is called when production Send is exited. +func (s *BaseNumScriptListener) ExitSend(ctx *SendContext) {} + +// EnterType_ is called when production type_ is entered. +func (s *BaseNumScriptListener) EnterType_(ctx *Type_Context) {} + +// ExitType_ is called when production type_ is exited. 
+func (s *BaseNumScriptListener) ExitType_(ctx *Type_Context) {} + +// EnterOriginAccountMeta is called when production OriginAccountMeta is entered. +func (s *BaseNumScriptListener) EnterOriginAccountMeta(ctx *OriginAccountMetaContext) {} + +// ExitOriginAccountMeta is called when production OriginAccountMeta is exited. +func (s *BaseNumScriptListener) ExitOriginAccountMeta(ctx *OriginAccountMetaContext) {} + +// EnterOriginAccountBalance is called when production OriginAccountBalance is entered. +func (s *BaseNumScriptListener) EnterOriginAccountBalance(ctx *OriginAccountBalanceContext) {} + +// ExitOriginAccountBalance is called when production OriginAccountBalance is exited. +func (s *BaseNumScriptListener) ExitOriginAccountBalance(ctx *OriginAccountBalanceContext) {} + +// EnterVarDecl is called when production varDecl is entered. +func (s *BaseNumScriptListener) EnterVarDecl(ctx *VarDeclContext) {} + +// ExitVarDecl is called when production varDecl is exited. +func (s *BaseNumScriptListener) ExitVarDecl(ctx *VarDeclContext) {} + +// EnterVarListDecl is called when production varListDecl is entered. +func (s *BaseNumScriptListener) EnterVarListDecl(ctx *VarListDeclContext) {} + +// ExitVarListDecl is called when production varListDecl is exited. +func (s *BaseNumScriptListener) ExitVarListDecl(ctx *VarListDeclContext) {} + +// EnterScript is called when production script is entered. +func (s *BaseNumScriptListener) EnterScript(ctx *ScriptContext) {} + +// ExitScript is called when production script is exited. +func (s *BaseNumScriptListener) ExitScript(ctx *ScriptContext) {} diff --git a/internal/machine/script/parser/numscript_lexer.go b/internal/machine/script/parser/numscript_lexer.go new file mode 100644 index 000000000..eb341644d --- /dev/null +++ b/internal/machine/script/parser/numscript_lexer.go @@ -0,0 +1,366 @@ +// Code generated from NumScript.g4 by ANTLR 4.10.1. DO NOT EDIT. 
+ +package parser + +import ( + "fmt" + "sync" + "unicode" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import error +var _ = fmt.Printf +var _ = sync.Once{} +var _ = unicode.IsLetter + +type NumScriptLexer struct { + *antlr.BaseLexer + channelNames []string + modeNames []string + // TODO: EOF string +} + +var numscriptlexerLexerStaticData struct { + once sync.Once + serializedATN []int32 + channelNames []string + modeNames []string + literalNames []string + symbolicNames []string + ruleNames []string + predictionContextCache *antlr.PredictionContextCache + atn *antlr.ATN + decisionToDFA []*antlr.DFA +} + +func numscriptlexerLexerInit() { + staticData := &numscriptlexerLexerStaticData + staticData.channelNames = []string{ + "DEFAULT_TOKEN_CHANNEL", "HIDDEN", + } + staticData.modeNames = []string{ + "DEFAULT_MODE", + } + staticData.literalNames = []string{ + "", "'*'", "'allowing overdraft up to'", "'allowing unbounded overdraft'", + "','", "", "", "", "", "'vars'", "'meta'", "'set_tx_meta'", "'set_account_meta'", + "'print'", "'fail'", "'send'", "'source'", "'from'", "'max'", "'destination'", + "'to'", "'allocate'", "'+'", "'-'", "'('", "')'", "'['", "']'", "'{'", + "'}'", "'='", "'account'", "'asset'", "'number'", "'monetary'", "'portion'", + "'string'", "", "", "'remaining'", "'kept'", "'balance'", "'save'", + "", "'%'", + } + staticData.symbolicNames = []string{ + "", "", "", "", "", "NEWLINE", "WHITESPACE", "MULTILINE_COMMENT", "LINE_COMMENT", + "VARS", "META", "SET_TX_META", "SET_ACCOUNT_META", "PRINT", "FAIL", + "SEND", "SOURCE", "FROM", "MAX", "DESTINATION", "TO", "ALLOCATE", "OP_ADD", + "OP_SUB", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "LBRACE", "RBRACE", + "EQ", "TY_ACCOUNT", "TY_ASSET", "TY_NUMBER", "TY_MONETARY", "TY_PORTION", + "TY_STRING", "STRING", "PORTION", "REMAINING", "KEPT", "BALANCE", "SAVE", + "NUMBER", "PERCENT", "VARIABLE_NAME", "ACCOUNT", "ASSET", + } + staticData.ruleNames = []string{ + "T__0", "T__1", "T__2", 
"T__3", "NEWLINE", "WHITESPACE", "MULTILINE_COMMENT", + "LINE_COMMENT", "VARS", "META", "SET_TX_META", "SET_ACCOUNT_META", "PRINT", + "FAIL", "SEND", "SOURCE", "FROM", "MAX", "DESTINATION", "TO", "ALLOCATE", + "OP_ADD", "OP_SUB", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "LBRACE", + "RBRACE", "EQ", "TY_ACCOUNT", "TY_ASSET", "TY_NUMBER", "TY_MONETARY", + "TY_PORTION", "TY_STRING", "STRING", "PORTION", "REMAINING", "KEPT", + "BALANCE", "SAVE", "NUMBER", "PERCENT", "VARIABLE_NAME", "ACCOUNT", + "ASSET", + } + staticData.predictionContextCache = antlr.NewPredictionContextCache() + staticData.serializedATN = []int32{ + 4, 0, 47, 457, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, + 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, + 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, + 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, + 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, + 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, + 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, + 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, + 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, + 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, + 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, + 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 4, 4, + 155, 8, 4, 11, 4, 12, 4, 156, 1, 5, 4, 5, 160, 8, 5, 11, 5, 12, 5, 161, + 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 5, 6, 171, 8, 6, 10, 6, 12, 6, + 174, 9, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 5, 7, + 185, 8, 7, 10, 7, 12, 7, 188, 9, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, + 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 
1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, + 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, + 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, + 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, + 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, + 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, + 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, + 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 20, + 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, + 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, + 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, + 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, + 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, + 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, + 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 5, + 36, 354, 8, 36, 10, 36, 12, 36, 357, 9, 36, 1, 36, 1, 36, 1, 37, 4, 37, + 362, 8, 37, 11, 37, 12, 37, 363, 1, 37, 3, 37, 367, 8, 37, 1, 37, 1, 37, + 3, 37, 371, 8, 37, 1, 37, 4, 37, 374, 8, 37, 11, 37, 12, 37, 375, 1, 37, + 4, 37, 379, 8, 37, 11, 37, 12, 37, 380, 1, 37, 1, 37, 4, 37, 385, 8, 37, + 11, 37, 12, 37, 386, 3, 37, 389, 8, 37, 1, 37, 3, 37, 392, 8, 37, 1, 38, + 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, + 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, + 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 4, 42, 423, 8, 42, 11, + 42, 12, 42, 424, 1, 43, 1, 43, 1, 44, 1, 44, 4, 44, 431, 8, 44, 11, 44, + 12, 44, 432, 1, 44, 5, 44, 436, 8, 44, 10, 44, 12, 44, 439, 9, 44, 1, 45, + 1, 45, 4, 45, 443, 8, 45, 11, 45, 12, 45, 444, 1, 45, 5, 45, 448, 8, 45, + 10, 45, 12, 45, 451, 9, 45, 1, 46, 4, 46, 454, 8, 46, 11, 46, 12, 46, 455, + 2, 
172, 186, 0, 47, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, + 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, + 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, + 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67, 34, 69, 35, + 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85, 43, 87, 44, + 89, 45, 91, 46, 93, 47, 1, 0, 10, 2, 0, 10, 10, 13, 13, 2, 0, 9, 9, 32, + 32, 6, 0, 32, 32, 45, 45, 48, 57, 65, 90, 95, 95, 97, 122, 1, 0, 48, 57, + 1, 0, 32, 32, 2, 0, 95, 95, 97, 122, 3, 0, 48, 57, 95, 95, 97, 122, 3, + 0, 65, 90, 95, 95, 97, 122, 4, 0, 48, 58, 65, 90, 95, 95, 97, 122, 2, 0, + 47, 57, 65, 90, 476, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, + 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, + 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, + 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, + 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, + 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, + 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, + 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, + 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, + 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, 75, + 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, + 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, + 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 3, 97, 1, 0, 0, + 0, 5, 122, 1, 0, 0, 0, 7, 151, 1, 0, 0, 0, 9, 154, 1, 0, 0, 0, 11, 159, + 1, 0, 0, 0, 13, 165, 1, 0, 0, 0, 15, 180, 1, 0, 0, 0, 17, 193, 1, 0, 0, + 0, 19, 198, 1, 0, 0, 0, 21, 203, 1, 0, 0, 0, 23, 215, 1, 0, 0, 0, 25, 232, + 1, 0, 0, 0, 27, 238, 1, 0, 0, 0, 29, 243, 1, 0, 0, 0, 31, 248, 1, 0, 0, + 0, 33, 255, 1, 0, 0, 0, 35, 260, 1, 0, 0, 0, 37, 264, 1, 0, 0, 0, 
39, 276, + 1, 0, 0, 0, 41, 279, 1, 0, 0, 0, 43, 288, 1, 0, 0, 0, 45, 290, 1, 0, 0, + 0, 47, 292, 1, 0, 0, 0, 49, 294, 1, 0, 0, 0, 51, 296, 1, 0, 0, 0, 53, 298, + 1, 0, 0, 0, 55, 300, 1, 0, 0, 0, 57, 302, 1, 0, 0, 0, 59, 304, 1, 0, 0, + 0, 61, 306, 1, 0, 0, 0, 63, 314, 1, 0, 0, 0, 65, 320, 1, 0, 0, 0, 67, 327, + 1, 0, 0, 0, 69, 336, 1, 0, 0, 0, 71, 344, 1, 0, 0, 0, 73, 351, 1, 0, 0, + 0, 75, 391, 1, 0, 0, 0, 77, 393, 1, 0, 0, 0, 79, 403, 1, 0, 0, 0, 81, 408, + 1, 0, 0, 0, 83, 416, 1, 0, 0, 0, 85, 422, 1, 0, 0, 0, 87, 426, 1, 0, 0, + 0, 89, 428, 1, 0, 0, 0, 91, 440, 1, 0, 0, 0, 93, 453, 1, 0, 0, 0, 95, 96, + 5, 42, 0, 0, 96, 2, 1, 0, 0, 0, 97, 98, 5, 97, 0, 0, 98, 99, 5, 108, 0, + 0, 99, 100, 5, 108, 0, 0, 100, 101, 5, 111, 0, 0, 101, 102, 5, 119, 0, + 0, 102, 103, 5, 105, 0, 0, 103, 104, 5, 110, 0, 0, 104, 105, 5, 103, 0, + 0, 105, 106, 5, 32, 0, 0, 106, 107, 5, 111, 0, 0, 107, 108, 5, 118, 0, + 0, 108, 109, 5, 101, 0, 0, 109, 110, 5, 114, 0, 0, 110, 111, 5, 100, 0, + 0, 111, 112, 5, 114, 0, 0, 112, 113, 5, 97, 0, 0, 113, 114, 5, 102, 0, + 0, 114, 115, 5, 116, 0, 0, 115, 116, 5, 32, 0, 0, 116, 117, 5, 117, 0, + 0, 117, 118, 5, 112, 0, 0, 118, 119, 5, 32, 0, 0, 119, 120, 5, 116, 0, + 0, 120, 121, 5, 111, 0, 0, 121, 4, 1, 0, 0, 0, 122, 123, 5, 97, 0, 0, 123, + 124, 5, 108, 0, 0, 124, 125, 5, 108, 0, 0, 125, 126, 5, 111, 0, 0, 126, + 127, 5, 119, 0, 0, 127, 128, 5, 105, 0, 0, 128, 129, 5, 110, 0, 0, 129, + 130, 5, 103, 0, 0, 130, 131, 5, 32, 0, 0, 131, 132, 5, 117, 0, 0, 132, + 133, 5, 110, 0, 0, 133, 134, 5, 98, 0, 0, 134, 135, 5, 111, 0, 0, 135, + 136, 5, 117, 0, 0, 136, 137, 5, 110, 0, 0, 137, 138, 5, 100, 0, 0, 138, + 139, 5, 101, 0, 0, 139, 140, 5, 100, 0, 0, 140, 141, 5, 32, 0, 0, 141, + 142, 5, 111, 0, 0, 142, 143, 5, 118, 0, 0, 143, 144, 5, 101, 0, 0, 144, + 145, 5, 114, 0, 0, 145, 146, 5, 100, 0, 0, 146, 147, 5, 114, 0, 0, 147, + 148, 5, 97, 0, 0, 148, 149, 5, 102, 0, 0, 149, 150, 5, 116, 0, 0, 150, + 6, 1, 0, 0, 0, 151, 152, 5, 44, 0, 0, 152, 8, 1, 0, 0, 0, 
153, 155, 7, + 0, 0, 0, 154, 153, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 154, 1, 0, 0, + 0, 156, 157, 1, 0, 0, 0, 157, 10, 1, 0, 0, 0, 158, 160, 7, 1, 0, 0, 159, + 158, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 159, 1, 0, 0, 0, 161, 162, + 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 6, 5, 0, 0, 164, 12, 1, 0, + 0, 0, 165, 166, 5, 47, 0, 0, 166, 167, 5, 42, 0, 0, 167, 172, 1, 0, 0, + 0, 168, 171, 3, 13, 6, 0, 169, 171, 9, 0, 0, 0, 170, 168, 1, 0, 0, 0, 170, + 169, 1, 0, 0, 0, 171, 174, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 172, 170, + 1, 0, 0, 0, 173, 175, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 175, 176, 5, 42, + 0, 0, 176, 177, 5, 47, 0, 0, 177, 178, 1, 0, 0, 0, 178, 179, 6, 6, 0, 0, + 179, 14, 1, 0, 0, 0, 180, 181, 5, 47, 0, 0, 181, 182, 5, 47, 0, 0, 182, + 186, 1, 0, 0, 0, 183, 185, 9, 0, 0, 0, 184, 183, 1, 0, 0, 0, 185, 188, + 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 186, 184, 1, 0, 0, 0, 187, 189, 1, 0, + 0, 0, 188, 186, 1, 0, 0, 0, 189, 190, 3, 9, 4, 0, 190, 191, 1, 0, 0, 0, + 191, 192, 6, 7, 0, 0, 192, 16, 1, 0, 0, 0, 193, 194, 5, 118, 0, 0, 194, + 195, 5, 97, 0, 0, 195, 196, 5, 114, 0, 0, 196, 197, 5, 115, 0, 0, 197, + 18, 1, 0, 0, 0, 198, 199, 5, 109, 0, 0, 199, 200, 5, 101, 0, 0, 200, 201, + 5, 116, 0, 0, 201, 202, 5, 97, 0, 0, 202, 20, 1, 0, 0, 0, 203, 204, 5, + 115, 0, 0, 204, 205, 5, 101, 0, 0, 205, 206, 5, 116, 0, 0, 206, 207, 5, + 95, 0, 0, 207, 208, 5, 116, 0, 0, 208, 209, 5, 120, 0, 0, 209, 210, 5, + 95, 0, 0, 210, 211, 5, 109, 0, 0, 211, 212, 5, 101, 0, 0, 212, 213, 5, + 116, 0, 0, 213, 214, 5, 97, 0, 0, 214, 22, 1, 0, 0, 0, 215, 216, 5, 115, + 0, 0, 216, 217, 5, 101, 0, 0, 217, 218, 5, 116, 0, 0, 218, 219, 5, 95, + 0, 0, 219, 220, 5, 97, 0, 0, 220, 221, 5, 99, 0, 0, 221, 222, 5, 99, 0, + 0, 222, 223, 5, 111, 0, 0, 223, 224, 5, 117, 0, 0, 224, 225, 5, 110, 0, + 0, 225, 226, 5, 116, 0, 0, 226, 227, 5, 95, 0, 0, 227, 228, 5, 109, 0, + 0, 228, 229, 5, 101, 0, 0, 229, 230, 5, 116, 0, 0, 230, 231, 5, 97, 0, + 0, 231, 24, 1, 0, 0, 0, 232, 233, 5, 112, 0, 0, 233, 234, 5, 
114, 0, 0, + 234, 235, 5, 105, 0, 0, 235, 236, 5, 110, 0, 0, 236, 237, 5, 116, 0, 0, + 237, 26, 1, 0, 0, 0, 238, 239, 5, 102, 0, 0, 239, 240, 5, 97, 0, 0, 240, + 241, 5, 105, 0, 0, 241, 242, 5, 108, 0, 0, 242, 28, 1, 0, 0, 0, 243, 244, + 5, 115, 0, 0, 244, 245, 5, 101, 0, 0, 245, 246, 5, 110, 0, 0, 246, 247, + 5, 100, 0, 0, 247, 30, 1, 0, 0, 0, 248, 249, 5, 115, 0, 0, 249, 250, 5, + 111, 0, 0, 250, 251, 5, 117, 0, 0, 251, 252, 5, 114, 0, 0, 252, 253, 5, + 99, 0, 0, 253, 254, 5, 101, 0, 0, 254, 32, 1, 0, 0, 0, 255, 256, 5, 102, + 0, 0, 256, 257, 5, 114, 0, 0, 257, 258, 5, 111, 0, 0, 258, 259, 5, 109, + 0, 0, 259, 34, 1, 0, 0, 0, 260, 261, 5, 109, 0, 0, 261, 262, 5, 97, 0, + 0, 262, 263, 5, 120, 0, 0, 263, 36, 1, 0, 0, 0, 264, 265, 5, 100, 0, 0, + 265, 266, 5, 101, 0, 0, 266, 267, 5, 115, 0, 0, 267, 268, 5, 116, 0, 0, + 268, 269, 5, 105, 0, 0, 269, 270, 5, 110, 0, 0, 270, 271, 5, 97, 0, 0, + 271, 272, 5, 116, 0, 0, 272, 273, 5, 105, 0, 0, 273, 274, 5, 111, 0, 0, + 274, 275, 5, 110, 0, 0, 275, 38, 1, 0, 0, 0, 276, 277, 5, 116, 0, 0, 277, + 278, 5, 111, 0, 0, 278, 40, 1, 0, 0, 0, 279, 280, 5, 97, 0, 0, 280, 281, + 5, 108, 0, 0, 281, 282, 5, 108, 0, 0, 282, 283, 5, 111, 0, 0, 283, 284, + 5, 99, 0, 0, 284, 285, 5, 97, 0, 0, 285, 286, 5, 116, 0, 0, 286, 287, 5, + 101, 0, 0, 287, 42, 1, 0, 0, 0, 288, 289, 5, 43, 0, 0, 289, 44, 1, 0, 0, + 0, 290, 291, 5, 45, 0, 0, 291, 46, 1, 0, 0, 0, 292, 293, 5, 40, 0, 0, 293, + 48, 1, 0, 0, 0, 294, 295, 5, 41, 0, 0, 295, 50, 1, 0, 0, 0, 296, 297, 5, + 91, 0, 0, 297, 52, 1, 0, 0, 0, 298, 299, 5, 93, 0, 0, 299, 54, 1, 0, 0, + 0, 300, 301, 5, 123, 0, 0, 301, 56, 1, 0, 0, 0, 302, 303, 5, 125, 0, 0, + 303, 58, 1, 0, 0, 0, 304, 305, 5, 61, 0, 0, 305, 60, 1, 0, 0, 0, 306, 307, + 5, 97, 0, 0, 307, 308, 5, 99, 0, 0, 308, 309, 5, 99, 0, 0, 309, 310, 5, + 111, 0, 0, 310, 311, 5, 117, 0, 0, 311, 312, 5, 110, 0, 0, 312, 313, 5, + 116, 0, 0, 313, 62, 1, 0, 0, 0, 314, 315, 5, 97, 0, 0, 315, 316, 5, 115, + 0, 0, 316, 317, 5, 115, 0, 0, 317, 318, 5, 101, 
0, 0, 318, 319, 5, 116, + 0, 0, 319, 64, 1, 0, 0, 0, 320, 321, 5, 110, 0, 0, 321, 322, 5, 117, 0, + 0, 322, 323, 5, 109, 0, 0, 323, 324, 5, 98, 0, 0, 324, 325, 5, 101, 0, + 0, 325, 326, 5, 114, 0, 0, 326, 66, 1, 0, 0, 0, 327, 328, 5, 109, 0, 0, + 328, 329, 5, 111, 0, 0, 329, 330, 5, 110, 0, 0, 330, 331, 5, 101, 0, 0, + 331, 332, 5, 116, 0, 0, 332, 333, 5, 97, 0, 0, 333, 334, 5, 114, 0, 0, + 334, 335, 5, 121, 0, 0, 335, 68, 1, 0, 0, 0, 336, 337, 5, 112, 0, 0, 337, + 338, 5, 111, 0, 0, 338, 339, 5, 114, 0, 0, 339, 340, 5, 116, 0, 0, 340, + 341, 5, 105, 0, 0, 341, 342, 5, 111, 0, 0, 342, 343, 5, 110, 0, 0, 343, + 70, 1, 0, 0, 0, 344, 345, 5, 115, 0, 0, 345, 346, 5, 116, 0, 0, 346, 347, + 5, 114, 0, 0, 347, 348, 5, 105, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, + 5, 103, 0, 0, 350, 72, 1, 0, 0, 0, 351, 355, 5, 34, 0, 0, 352, 354, 7, + 2, 0, 0, 353, 352, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, + 0, 355, 356, 1, 0, 0, 0, 356, 358, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, + 359, 5, 34, 0, 0, 359, 74, 1, 0, 0, 0, 360, 362, 7, 3, 0, 0, 361, 360, + 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 361, 1, 0, 0, 0, 363, 364, 1, 0, + 0, 0, 364, 366, 1, 0, 0, 0, 365, 367, 7, 4, 0, 0, 366, 365, 1, 0, 0, 0, + 366, 367, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 5, 47, 0, 0, 369, + 371, 7, 4, 0, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 373, + 1, 0, 0, 0, 372, 374, 7, 3, 0, 0, 373, 372, 1, 0, 0, 0, 374, 375, 1, 0, + 0, 0, 375, 373, 1, 0, 0, 0, 375, 376, 1, 0, 0, 0, 376, 392, 1, 0, 0, 0, + 377, 379, 7, 3, 0, 0, 378, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, + 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 388, 1, 0, 0, 0, 382, 384, + 5, 46, 0, 0, 383, 385, 7, 3, 0, 0, 384, 383, 1, 0, 0, 0, 385, 386, 1, 0, + 0, 0, 386, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 389, 1, 0, 0, 0, + 388, 382, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, + 392, 5, 37, 0, 0, 391, 361, 1, 0, 0, 0, 391, 378, 1, 0, 0, 0, 392, 76, + 1, 0, 0, 0, 393, 394, 5, 114, 0, 0, 394, 395, 5, 
101, 0, 0, 395, 396, 5, + 109, 0, 0, 396, 397, 5, 97, 0, 0, 397, 398, 5, 105, 0, 0, 398, 399, 5, + 110, 0, 0, 399, 400, 5, 105, 0, 0, 400, 401, 5, 110, 0, 0, 401, 402, 5, + 103, 0, 0, 402, 78, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, + 0, 0, 405, 406, 5, 112, 0, 0, 406, 407, 5, 116, 0, 0, 407, 80, 1, 0, 0, + 0, 408, 409, 5, 98, 0, 0, 409, 410, 5, 97, 0, 0, 410, 411, 5, 108, 0, 0, + 411, 412, 5, 97, 0, 0, 412, 413, 5, 110, 0, 0, 413, 414, 5, 99, 0, 0, 414, + 415, 5, 101, 0, 0, 415, 82, 1, 0, 0, 0, 416, 417, 5, 115, 0, 0, 417, 418, + 5, 97, 0, 0, 418, 419, 5, 118, 0, 0, 419, 420, 5, 101, 0, 0, 420, 84, 1, + 0, 0, 0, 421, 423, 7, 3, 0, 0, 422, 421, 1, 0, 0, 0, 423, 424, 1, 0, 0, + 0, 424, 422, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 86, 1, 0, 0, 0, 426, + 427, 5, 37, 0, 0, 427, 88, 1, 0, 0, 0, 428, 430, 5, 36, 0, 0, 429, 431, + 7, 5, 0, 0, 430, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 430, 1, 0, + 0, 0, 432, 433, 1, 0, 0, 0, 433, 437, 1, 0, 0, 0, 434, 436, 7, 6, 0, 0, + 435, 434, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, + 438, 1, 0, 0, 0, 438, 90, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 442, 5, + 64, 0, 0, 441, 443, 7, 7, 0, 0, 442, 441, 1, 0, 0, 0, 443, 444, 1, 0, 0, + 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 449, 1, 0, 0, 0, 446, + 448, 7, 8, 0, 0, 447, 446, 1, 0, 0, 0, 448, 451, 1, 0, 0, 0, 449, 447, + 1, 0, 0, 0, 449, 450, 1, 0, 0, 0, 450, 92, 1, 0, 0, 0, 451, 449, 1, 0, + 0, 0, 452, 454, 7, 9, 0, 0, 453, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, + 455, 453, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 94, 1, 0, 0, 0, 21, 0, + 156, 161, 170, 172, 186, 355, 363, 366, 370, 375, 380, 386, 388, 391, 424, + 432, 437, 444, 449, 455, 1, 6, 0, 0, + } + deserializer := antlr.NewATNDeserializer(nil) + staticData.atn = deserializer.Deserialize(staticData.serializedATN) + atn := staticData.atn + staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) + decisionToDFA := staticData.decisionToDFA + for index, state := range 
atn.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(state, index) + } +} + +// NumScriptLexerInit initializes any static state used to implement NumScriptLexer. By default the +// static state used to implement the lexer is lazily initialized during the first call to +// NewNumScriptLexer(). You can call this function if you wish to initialize the static state ahead +// of time. +func NumScriptLexerInit() { + staticData := &numscriptlexerLexerStaticData + staticData.once.Do(numscriptlexerLexerInit) +} + +// NewNumScriptLexer produces a new lexer instance for the optional input antlr.CharStream. +func NewNumScriptLexer(input antlr.CharStream) *NumScriptLexer { + NumScriptLexerInit() + l := new(NumScriptLexer) + l.BaseLexer = antlr.NewBaseLexer(input) + staticData := &numscriptlexerLexerStaticData + l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache) + l.channelNames = staticData.channelNames + l.modeNames = staticData.modeNames + l.RuleNames = staticData.ruleNames + l.LiteralNames = staticData.literalNames + l.SymbolicNames = staticData.symbolicNames + l.GrammarFileName = "NumScript.g4" + // TODO: l.EOF = antlr.TokenEOF + + return l +} + +// NumScriptLexer tokens. 
+const ( + NumScriptLexerT__0 = 1 + NumScriptLexerT__1 = 2 + NumScriptLexerT__2 = 3 + NumScriptLexerT__3 = 4 + NumScriptLexerNEWLINE = 5 + NumScriptLexerWHITESPACE = 6 + NumScriptLexerMULTILINE_COMMENT = 7 + NumScriptLexerLINE_COMMENT = 8 + NumScriptLexerVARS = 9 + NumScriptLexerMETA = 10 + NumScriptLexerSET_TX_META = 11 + NumScriptLexerSET_ACCOUNT_META = 12 + NumScriptLexerPRINT = 13 + NumScriptLexerFAIL = 14 + NumScriptLexerSEND = 15 + NumScriptLexerSOURCE = 16 + NumScriptLexerFROM = 17 + NumScriptLexerMAX = 18 + NumScriptLexerDESTINATION = 19 + NumScriptLexerTO = 20 + NumScriptLexerALLOCATE = 21 + NumScriptLexerOP_ADD = 22 + NumScriptLexerOP_SUB = 23 + NumScriptLexerLPAREN = 24 + NumScriptLexerRPAREN = 25 + NumScriptLexerLBRACK = 26 + NumScriptLexerRBRACK = 27 + NumScriptLexerLBRACE = 28 + NumScriptLexerRBRACE = 29 + NumScriptLexerEQ = 30 + NumScriptLexerTY_ACCOUNT = 31 + NumScriptLexerTY_ASSET = 32 + NumScriptLexerTY_NUMBER = 33 + NumScriptLexerTY_MONETARY = 34 + NumScriptLexerTY_PORTION = 35 + NumScriptLexerTY_STRING = 36 + NumScriptLexerSTRING = 37 + NumScriptLexerPORTION = 38 + NumScriptLexerREMAINING = 39 + NumScriptLexerKEPT = 40 + NumScriptLexerBALANCE = 41 + NumScriptLexerSAVE = 42 + NumScriptLexerNUMBER = 43 + NumScriptLexerPERCENT = 44 + NumScriptLexerVARIABLE_NAME = 45 + NumScriptLexerACCOUNT = 46 + NumScriptLexerASSET = 47 +) diff --git a/internal/machine/script/parser/numscript_listener.go b/internal/machine/script/parser/numscript_listener.go new file mode 100644 index 000000000..bf9a31377 --- /dev/null +++ b/internal/machine/script/parser/numscript_listener.go @@ -0,0 +1,280 @@ +// Code generated from NumScript.g4 by ANTLR 4.10.1. DO NOT EDIT. + +package parser // NumScript + +import "github.com/antlr/antlr4/runtime/Go/antlr" + +// NumScriptListener is a complete listener for a parse tree produced by NumScriptParser. +type NumScriptListener interface { + antlr.ParseTreeListener + + // EnterMonetary is called when entering the monetary production. 
+ EnterMonetary(c *MonetaryContext) + + // EnterMonetaryAll is called when entering the monetaryAll production. + EnterMonetaryAll(c *MonetaryAllContext) + + // EnterLitAccount is called when entering the LitAccount production. + EnterLitAccount(c *LitAccountContext) + + // EnterLitAsset is called when entering the LitAsset production. + EnterLitAsset(c *LitAssetContext) + + // EnterLitNumber is called when entering the LitNumber production. + EnterLitNumber(c *LitNumberContext) + + // EnterLitString is called when entering the LitString production. + EnterLitString(c *LitStringContext) + + // EnterLitPortion is called when entering the LitPortion production. + EnterLitPortion(c *LitPortionContext) + + // EnterLitMonetary is called when entering the LitMonetary production. + EnterLitMonetary(c *LitMonetaryContext) + + // EnterVariable is called when entering the variable production. + EnterVariable(c *VariableContext) + + // EnterExprAddSub is called when entering the ExprAddSub production. + EnterExprAddSub(c *ExprAddSubContext) + + // EnterExprLiteral is called when entering the ExprLiteral production. + EnterExprLiteral(c *ExprLiteralContext) + + // EnterExprVariable is called when entering the ExprVariable production. + EnterExprVariable(c *ExprVariableContext) + + // EnterAllotmentPortionConst is called when entering the AllotmentPortionConst production. + EnterAllotmentPortionConst(c *AllotmentPortionConstContext) + + // EnterAllotmentPortionVar is called when entering the AllotmentPortionVar production. + EnterAllotmentPortionVar(c *AllotmentPortionVarContext) + + // EnterAllotmentPortionRemaining is called when entering the AllotmentPortionRemaining production. + EnterAllotmentPortionRemaining(c *AllotmentPortionRemainingContext) + + // EnterDestinationInOrder is called when entering the destinationInOrder production. 
+ EnterDestinationInOrder(c *DestinationInOrderContext) + + // EnterDestinationAllotment is called when entering the destinationAllotment production. + EnterDestinationAllotment(c *DestinationAllotmentContext) + + // EnterIsDestination is called when entering the IsDestination production. + EnterIsDestination(c *IsDestinationContext) + + // EnterIsKept is called when entering the IsKept production. + EnterIsKept(c *IsKeptContext) + + // EnterDestAccount is called when entering the DestAccount production. + EnterDestAccount(c *DestAccountContext) + + // EnterDestInOrder is called when entering the DestInOrder production. + EnterDestInOrder(c *DestInOrderContext) + + // EnterDestAllotment is called when entering the DestAllotment production. + EnterDestAllotment(c *DestAllotmentContext) + + // EnterSrcAccountOverdraftSpecific is called when entering the SrcAccountOverdraftSpecific production. + EnterSrcAccountOverdraftSpecific(c *SrcAccountOverdraftSpecificContext) + + // EnterSrcAccountOverdraftUnbounded is called when entering the SrcAccountOverdraftUnbounded production. + EnterSrcAccountOverdraftUnbounded(c *SrcAccountOverdraftUnboundedContext) + + // EnterSourceAccount is called when entering the sourceAccount production. + EnterSourceAccount(c *SourceAccountContext) + + // EnterSourceInOrder is called when entering the sourceInOrder production. + EnterSourceInOrder(c *SourceInOrderContext) + + // EnterSourceMaxed is called when entering the sourceMaxed production. + EnterSourceMaxed(c *SourceMaxedContext) + + // EnterSrcAccount is called when entering the SrcAccount production. + EnterSrcAccount(c *SrcAccountContext) + + // EnterSrcMaxed is called when entering the SrcMaxed production. + EnterSrcMaxed(c *SrcMaxedContext) + + // EnterSrcInOrder is called when entering the SrcInOrder production. + EnterSrcInOrder(c *SrcInOrderContext) + + // EnterSourceAllotment is called when entering the sourceAllotment production. 
+ EnterSourceAllotment(c *SourceAllotmentContext) + + // EnterSrc is called when entering the Src production. + EnterSrc(c *SrcContext) + + // EnterSrcAllotment is called when entering the SrcAllotment production. + EnterSrcAllotment(c *SrcAllotmentContext) + + // EnterPrint is called when entering the Print production. + EnterPrint(c *PrintContext) + + // EnterSaveFromAccount is called when entering the SaveFromAccount production. + EnterSaveFromAccount(c *SaveFromAccountContext) + + // EnterSetTxMeta is called when entering the SetTxMeta production. + EnterSetTxMeta(c *SetTxMetaContext) + + // EnterSetAccountMeta is called when entering the SetAccountMeta production. + EnterSetAccountMeta(c *SetAccountMetaContext) + + // EnterFail is called when entering the Fail production. + EnterFail(c *FailContext) + + // EnterSend is called when entering the Send production. + EnterSend(c *SendContext) + + // EnterType_ is called when entering the type_ production. + EnterType_(c *Type_Context) + + // EnterOriginAccountMeta is called when entering the OriginAccountMeta production. + EnterOriginAccountMeta(c *OriginAccountMetaContext) + + // EnterOriginAccountBalance is called when entering the OriginAccountBalance production. + EnterOriginAccountBalance(c *OriginAccountBalanceContext) + + // EnterVarDecl is called when entering the varDecl production. + EnterVarDecl(c *VarDeclContext) + + // EnterVarListDecl is called when entering the varListDecl production. + EnterVarListDecl(c *VarListDeclContext) + + // EnterScript is called when entering the script production. + EnterScript(c *ScriptContext) + + // ExitMonetary is called when exiting the monetary production. + ExitMonetary(c *MonetaryContext) + + // ExitMonetaryAll is called when exiting the monetaryAll production. + ExitMonetaryAll(c *MonetaryAllContext) + + // ExitLitAccount is called when exiting the LitAccount production. 
+ ExitLitAccount(c *LitAccountContext) + + // ExitLitAsset is called when exiting the LitAsset production. + ExitLitAsset(c *LitAssetContext) + + // ExitLitNumber is called when exiting the LitNumber production. + ExitLitNumber(c *LitNumberContext) + + // ExitLitString is called when exiting the LitString production. + ExitLitString(c *LitStringContext) + + // ExitLitPortion is called when exiting the LitPortion production. + ExitLitPortion(c *LitPortionContext) + + // ExitLitMonetary is called when exiting the LitMonetary production. + ExitLitMonetary(c *LitMonetaryContext) + + // ExitVariable is called when exiting the variable production. + ExitVariable(c *VariableContext) + + // ExitExprAddSub is called when exiting the ExprAddSub production. + ExitExprAddSub(c *ExprAddSubContext) + + // ExitExprLiteral is called when exiting the ExprLiteral production. + ExitExprLiteral(c *ExprLiteralContext) + + // ExitExprVariable is called when exiting the ExprVariable production. + ExitExprVariable(c *ExprVariableContext) + + // ExitAllotmentPortionConst is called when exiting the AllotmentPortionConst production. + ExitAllotmentPortionConst(c *AllotmentPortionConstContext) + + // ExitAllotmentPortionVar is called when exiting the AllotmentPortionVar production. + ExitAllotmentPortionVar(c *AllotmentPortionVarContext) + + // ExitAllotmentPortionRemaining is called when exiting the AllotmentPortionRemaining production. + ExitAllotmentPortionRemaining(c *AllotmentPortionRemainingContext) + + // ExitDestinationInOrder is called when exiting the destinationInOrder production. + ExitDestinationInOrder(c *DestinationInOrderContext) + + // ExitDestinationAllotment is called when exiting the destinationAllotment production. + ExitDestinationAllotment(c *DestinationAllotmentContext) + + // ExitIsDestination is called when exiting the IsDestination production. + ExitIsDestination(c *IsDestinationContext) + + // ExitIsKept is called when exiting the IsKept production. 
+ ExitIsKept(c *IsKeptContext) + + // ExitDestAccount is called when exiting the DestAccount production. + ExitDestAccount(c *DestAccountContext) + + // ExitDestInOrder is called when exiting the DestInOrder production. + ExitDestInOrder(c *DestInOrderContext) + + // ExitDestAllotment is called when exiting the DestAllotment production. + ExitDestAllotment(c *DestAllotmentContext) + + // ExitSrcAccountOverdraftSpecific is called when exiting the SrcAccountOverdraftSpecific production. + ExitSrcAccountOverdraftSpecific(c *SrcAccountOverdraftSpecificContext) + + // ExitSrcAccountOverdraftUnbounded is called when exiting the SrcAccountOverdraftUnbounded production. + ExitSrcAccountOverdraftUnbounded(c *SrcAccountOverdraftUnboundedContext) + + // ExitSourceAccount is called when exiting the sourceAccount production. + ExitSourceAccount(c *SourceAccountContext) + + // ExitSourceInOrder is called when exiting the sourceInOrder production. + ExitSourceInOrder(c *SourceInOrderContext) + + // ExitSourceMaxed is called when exiting the sourceMaxed production. + ExitSourceMaxed(c *SourceMaxedContext) + + // ExitSrcAccount is called when exiting the SrcAccount production. + ExitSrcAccount(c *SrcAccountContext) + + // ExitSrcMaxed is called when exiting the SrcMaxed production. + ExitSrcMaxed(c *SrcMaxedContext) + + // ExitSrcInOrder is called when exiting the SrcInOrder production. + ExitSrcInOrder(c *SrcInOrderContext) + + // ExitSourceAllotment is called when exiting the sourceAllotment production. + ExitSourceAllotment(c *SourceAllotmentContext) + + // ExitSrc is called when exiting the Src production. + ExitSrc(c *SrcContext) + + // ExitSrcAllotment is called when exiting the SrcAllotment production. + ExitSrcAllotment(c *SrcAllotmentContext) + + // ExitPrint is called when exiting the Print production. + ExitPrint(c *PrintContext) + + // ExitSaveFromAccount is called when exiting the SaveFromAccount production. 
+ ExitSaveFromAccount(c *SaveFromAccountContext) + + // ExitSetTxMeta is called when exiting the SetTxMeta production. + ExitSetTxMeta(c *SetTxMetaContext) + + // ExitSetAccountMeta is called when exiting the SetAccountMeta production. + ExitSetAccountMeta(c *SetAccountMetaContext) + + // ExitFail is called when exiting the Fail production. + ExitFail(c *FailContext) + + // ExitSend is called when exiting the Send production. + ExitSend(c *SendContext) + + // ExitType_ is called when exiting the type_ production. + ExitType_(c *Type_Context) + + // ExitOriginAccountMeta is called when exiting the OriginAccountMeta production. + ExitOriginAccountMeta(c *OriginAccountMetaContext) + + // ExitOriginAccountBalance is called when exiting the OriginAccountBalance production. + ExitOriginAccountBalance(c *OriginAccountBalanceContext) + + // ExitVarDecl is called when exiting the varDecl production. + ExitVarDecl(c *VarDeclContext) + + // ExitVarListDecl is called when exiting the varListDecl production. + ExitVarListDecl(c *VarListDeclContext) + + // ExitScript is called when exiting the script production. + ExitScript(c *ScriptContext) +} diff --git a/internal/machine/script/parser/numscript_parser.go b/internal/machine/script/parser/numscript_parser.go new file mode 100644 index 000000000..bc1b1a0fd --- /dev/null +++ b/internal/machine/script/parser/numscript_parser.go @@ -0,0 +1,6148 @@ +// Code generated from NumScript.g4 by ANTLR 4.10.1. DO NOT EDIT. 
+ +package parser // NumScript + +import ( + "fmt" + "strconv" + "sync" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import errors +var _ = fmt.Printf +var _ = strconv.Itoa +var _ = sync.Once{} + +type NumScriptParser struct { + *antlr.BaseParser +} + +var numscriptParserStaticData struct { + once sync.Once + serializedATN []int32 + literalNames []string + symbolicNames []string + ruleNames []string + predictionContextCache *antlr.PredictionContextCache + atn *antlr.ATN + decisionToDFA []*antlr.DFA +} + +func numscriptParserInit() { + staticData := &numscriptParserStaticData + staticData.literalNames = []string{ + "", "'*'", "'allowing overdraft up to'", "'allowing unbounded overdraft'", + "','", "", "", "", "", "'vars'", "'meta'", "'set_tx_meta'", "'set_account_meta'", + "'print'", "'fail'", "'send'", "'source'", "'from'", "'max'", "'destination'", + "'to'", "'allocate'", "'+'", "'-'", "'('", "')'", "'['", "']'", "'{'", + "'}'", "'='", "'account'", "'asset'", "'number'", "'monetary'", "'portion'", + "'string'", "", "", "'remaining'", "'kept'", "'balance'", "'save'", + "", "'%'", + } + staticData.symbolicNames = []string{ + "", "", "", "", "", "NEWLINE", "WHITESPACE", "MULTILINE_COMMENT", "LINE_COMMENT", + "VARS", "META", "SET_TX_META", "SET_ACCOUNT_META", "PRINT", "FAIL", + "SEND", "SOURCE", "FROM", "MAX", "DESTINATION", "TO", "ALLOCATE", "OP_ADD", + "OP_SUB", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "LBRACE", "RBRACE", + "EQ", "TY_ACCOUNT", "TY_ASSET", "TY_NUMBER", "TY_MONETARY", "TY_PORTION", + "TY_STRING", "STRING", "PORTION", "REMAINING", "KEPT", "BALANCE", "SAVE", + "NUMBER", "PERCENT", "VARIABLE_NAME", "ACCOUNT", "ASSET", + } + staticData.ruleNames = []string{ + "monetary", "monetaryAll", "literal", "variable", "expression", "allotmentPortion", + "destinationInOrder", "destinationAllotment", "keptOrDestination", "destination", + "sourceAccountOverdraft", "sourceAccount", "sourceInOrder", "sourceMaxed", + "source", "sourceAllotment", 
"valueAwareSource", "statement", "type_", + "origin", "varDecl", "varListDecl", "script", + } + staticData.predictionContextCache = antlr.NewPredictionContextCache() + staticData.serializedATN = []int32{ + 4, 1, 47, 292, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, + 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, + 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, + 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, + 21, 7, 21, 2, 22, 7, 22, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 63, 8, 2, 1, 3, 1, + 3, 1, 4, 1, 4, 1, 4, 3, 4, 70, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 75, 8, 4, + 10, 4, 12, 4, 78, 9, 4, 1, 5, 1, 5, 1, 5, 3, 5, 83, 8, 5, 1, 6, 1, 6, 1, + 6, 1, 6, 1, 6, 1, 6, 1, 6, 4, 6, 92, 8, 6, 11, 6, 12, 6, 93, 1, 6, 1, 6, + 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 4, 7, 107, 8, 7, + 11, 7, 12, 7, 108, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 3, 8, 116, 8, 8, 1, 9, + 1, 9, 1, 9, 3, 9, 121, 8, 9, 1, 10, 1, 10, 1, 10, 3, 10, 126, 8, 10, 1, + 11, 1, 11, 3, 11, 130, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 4, 12, + 137, 8, 12, 11, 12, 12, 12, 138, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, + 13, 1, 13, 1, 14, 1, 14, 1, 14, 3, 14, 151, 8, 14, 1, 15, 1, 15, 1, 15, + 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 160, 8, 15, 11, 15, 12, 15, 161, 1, + 15, 1, 15, 1, 16, 1, 16, 3, 16, 168, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, + 1, 17, 3, 17, 175, 8, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, + 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, + 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 200, 8, 17, 1, 17, 1, + 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, + 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 220, 8, 17, 1, 17, 1, + 17, 1, 17, 3, 17, 225, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, + 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, + 19, 243, 8, 
19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 249, 8, 20, 1, 21, 1, + 21, 1, 21, 1, 21, 1, 21, 4, 21, 256, 8, 21, 11, 21, 12, 21, 257, 4, 21, + 260, 8, 21, 11, 21, 12, 21, 261, 1, 21, 1, 21, 1, 21, 1, 22, 5, 22, 268, + 8, 22, 10, 22, 12, 22, 271, 9, 22, 1, 22, 3, 22, 274, 8, 22, 1, 22, 1, + 22, 1, 22, 5, 22, 279, 8, 22, 10, 22, 12, 22, 282, 9, 22, 1, 22, 5, 22, + 285, 8, 22, 10, 22, 12, 22, 288, 9, 22, 1, 22, 1, 22, 1, 22, 0, 1, 8, 23, + 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, + 38, 40, 42, 44, 0, 2, 1, 0, 22, 23, 1, 0, 31, 36, 305, 0, 46, 1, 0, 0, + 0, 2, 51, 1, 0, 0, 0, 4, 62, 1, 0, 0, 0, 6, 64, 1, 0, 0, 0, 8, 69, 1, 0, + 0, 0, 10, 82, 1, 0, 0, 0, 12, 84, 1, 0, 0, 0, 14, 100, 1, 0, 0, 0, 16, + 115, 1, 0, 0, 0, 18, 120, 1, 0, 0, 0, 20, 125, 1, 0, 0, 0, 22, 127, 1, + 0, 0, 0, 24, 131, 1, 0, 0, 0, 26, 142, 1, 0, 0, 0, 28, 150, 1, 0, 0, 0, + 30, 152, 1, 0, 0, 0, 32, 167, 1, 0, 0, 0, 34, 224, 1, 0, 0, 0, 36, 226, + 1, 0, 0, 0, 38, 242, 1, 0, 0, 0, 40, 244, 1, 0, 0, 0, 42, 250, 1, 0, 0, + 0, 44, 269, 1, 0, 0, 0, 46, 47, 5, 26, 0, 0, 47, 48, 3, 8, 4, 0, 48, 49, + 5, 43, 0, 0, 49, 50, 5, 27, 0, 0, 50, 1, 1, 0, 0, 0, 51, 52, 5, 26, 0, + 0, 52, 53, 3, 8, 4, 0, 53, 54, 5, 1, 0, 0, 54, 55, 5, 27, 0, 0, 55, 3, + 1, 0, 0, 0, 56, 63, 5, 46, 0, 0, 57, 63, 5, 47, 0, 0, 58, 63, 5, 43, 0, + 0, 59, 63, 5, 37, 0, 0, 60, 63, 5, 38, 0, 0, 61, 63, 3, 0, 0, 0, 62, 56, + 1, 0, 0, 0, 62, 57, 1, 0, 0, 0, 62, 58, 1, 0, 0, 0, 62, 59, 1, 0, 0, 0, + 62, 60, 1, 0, 0, 0, 62, 61, 1, 0, 0, 0, 63, 5, 1, 0, 0, 0, 64, 65, 5, 45, + 0, 0, 65, 7, 1, 0, 0, 0, 66, 67, 6, 4, -1, 0, 67, 70, 3, 4, 2, 0, 68, 70, + 3, 6, 3, 0, 69, 66, 1, 0, 0, 0, 69, 68, 1, 0, 0, 0, 70, 76, 1, 0, 0, 0, + 71, 72, 10, 3, 0, 0, 72, 73, 7, 0, 0, 0, 73, 75, 3, 8, 4, 4, 74, 71, 1, + 0, 0, 0, 75, 78, 1, 0, 0, 0, 76, 74, 1, 0, 0, 0, 76, 77, 1, 0, 0, 0, 77, + 9, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 79, 83, 5, 38, 0, 0, 80, 83, 3, 6, 3, + 0, 81, 83, 5, 39, 0, 0, 82, 79, 1, 0, 0, 0, 82, 80, 1, 0, 0, 0, 82, 81, + 1, 0, 
0, 0, 83, 11, 1, 0, 0, 0, 84, 85, 5, 28, 0, 0, 85, 91, 5, 5, 0, 0, + 86, 87, 5, 18, 0, 0, 87, 88, 3, 8, 4, 0, 88, 89, 3, 16, 8, 0, 89, 90, 5, + 5, 0, 0, 90, 92, 1, 0, 0, 0, 91, 86, 1, 0, 0, 0, 92, 93, 1, 0, 0, 0, 93, + 91, 1, 0, 0, 0, 93, 94, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 96, 5, 39, + 0, 0, 96, 97, 3, 16, 8, 0, 97, 98, 5, 5, 0, 0, 98, 99, 5, 29, 0, 0, 99, + 13, 1, 0, 0, 0, 100, 101, 5, 28, 0, 0, 101, 106, 5, 5, 0, 0, 102, 103, + 3, 10, 5, 0, 103, 104, 3, 16, 8, 0, 104, 105, 5, 5, 0, 0, 105, 107, 1, + 0, 0, 0, 106, 102, 1, 0, 0, 0, 107, 108, 1, 0, 0, 0, 108, 106, 1, 0, 0, + 0, 108, 109, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 111, 5, 29, 0, 0, 111, + 15, 1, 0, 0, 0, 112, 113, 5, 20, 0, 0, 113, 116, 3, 18, 9, 0, 114, 116, + 5, 40, 0, 0, 115, 112, 1, 0, 0, 0, 115, 114, 1, 0, 0, 0, 116, 17, 1, 0, + 0, 0, 117, 121, 3, 8, 4, 0, 118, 121, 3, 12, 6, 0, 119, 121, 3, 14, 7, + 0, 120, 117, 1, 0, 0, 0, 120, 118, 1, 0, 0, 0, 120, 119, 1, 0, 0, 0, 121, + 19, 1, 0, 0, 0, 122, 123, 5, 2, 0, 0, 123, 126, 3, 8, 4, 0, 124, 126, 5, + 3, 0, 0, 125, 122, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 21, 1, 0, 0, + 0, 127, 129, 3, 8, 4, 0, 128, 130, 3, 20, 10, 0, 129, 128, 1, 0, 0, 0, + 129, 130, 1, 0, 0, 0, 130, 23, 1, 0, 0, 0, 131, 132, 5, 28, 0, 0, 132, + 136, 5, 5, 0, 0, 133, 134, 3, 28, 14, 0, 134, 135, 5, 5, 0, 0, 135, 137, + 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, + 0, 0, 138, 139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 5, 29, 0, 0, + 141, 25, 1, 0, 0, 0, 142, 143, 5, 18, 0, 0, 143, 144, 3, 8, 4, 0, 144, + 145, 5, 17, 0, 0, 145, 146, 3, 28, 14, 0, 146, 27, 1, 0, 0, 0, 147, 151, + 3, 22, 11, 0, 148, 151, 3, 26, 13, 0, 149, 151, 3, 24, 12, 0, 150, 147, + 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 150, 149, 1, 0, 0, 0, 151, 29, 1, 0, + 0, 0, 152, 153, 5, 28, 0, 0, 153, 159, 5, 5, 0, 0, 154, 155, 3, 10, 5, + 0, 155, 156, 5, 17, 0, 0, 156, 157, 3, 28, 14, 0, 157, 158, 5, 5, 0, 0, + 158, 160, 1, 0, 0, 0, 159, 154, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, + 159, 
1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, + 5, 29, 0, 0, 164, 31, 1, 0, 0, 0, 165, 168, 3, 28, 14, 0, 166, 168, 3, + 30, 15, 0, 167, 165, 1, 0, 0, 0, 167, 166, 1, 0, 0, 0, 168, 33, 1, 0, 0, + 0, 169, 170, 5, 13, 0, 0, 170, 225, 3, 8, 4, 0, 171, 174, 5, 42, 0, 0, + 172, 175, 3, 8, 4, 0, 173, 175, 3, 2, 1, 0, 174, 172, 1, 0, 0, 0, 174, + 173, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 17, 0, 0, 177, 178, + 3, 8, 4, 0, 178, 225, 1, 0, 0, 0, 179, 180, 5, 11, 0, 0, 180, 181, 5, 24, + 0, 0, 181, 182, 5, 37, 0, 0, 182, 183, 5, 4, 0, 0, 183, 184, 3, 8, 4, 0, + 184, 185, 5, 25, 0, 0, 185, 225, 1, 0, 0, 0, 186, 187, 5, 12, 0, 0, 187, + 188, 5, 24, 0, 0, 188, 189, 3, 8, 4, 0, 189, 190, 5, 4, 0, 0, 190, 191, + 5, 37, 0, 0, 191, 192, 5, 4, 0, 0, 192, 193, 3, 8, 4, 0, 193, 194, 5, 25, + 0, 0, 194, 225, 1, 0, 0, 0, 195, 225, 5, 14, 0, 0, 196, 199, 5, 15, 0, + 0, 197, 200, 3, 8, 4, 0, 198, 200, 3, 2, 1, 0, 199, 197, 1, 0, 0, 0, 199, + 198, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 5, 24, 0, 0, 202, 219, + 5, 5, 0, 0, 203, 204, 5, 16, 0, 0, 204, 205, 5, 30, 0, 0, 205, 206, 3, + 32, 16, 0, 206, 207, 5, 5, 0, 0, 207, 208, 5, 19, 0, 0, 208, 209, 5, 30, + 0, 0, 209, 210, 3, 18, 9, 0, 210, 220, 1, 0, 0, 0, 211, 212, 5, 19, 0, + 0, 212, 213, 5, 30, 0, 0, 213, 214, 3, 18, 9, 0, 214, 215, 5, 5, 0, 0, + 215, 216, 5, 16, 0, 0, 216, 217, 5, 30, 0, 0, 217, 218, 3, 32, 16, 0, 218, + 220, 1, 0, 0, 0, 219, 203, 1, 0, 0, 0, 219, 211, 1, 0, 0, 0, 220, 221, + 1, 0, 0, 0, 221, 222, 5, 5, 0, 0, 222, 223, 5, 25, 0, 0, 223, 225, 1, 0, + 0, 0, 224, 169, 1, 0, 0, 0, 224, 171, 1, 0, 0, 0, 224, 179, 1, 0, 0, 0, + 224, 186, 1, 0, 0, 0, 224, 195, 1, 0, 0, 0, 224, 196, 1, 0, 0, 0, 225, + 35, 1, 0, 0, 0, 226, 227, 7, 1, 0, 0, 227, 37, 1, 0, 0, 0, 228, 229, 5, + 10, 0, 0, 229, 230, 5, 24, 0, 0, 230, 231, 3, 8, 4, 0, 231, 232, 5, 4, + 0, 0, 232, 233, 5, 37, 0, 0, 233, 234, 5, 25, 0, 0, 234, 243, 1, 0, 0, + 0, 235, 236, 5, 41, 0, 0, 236, 237, 5, 24, 0, 0, 237, 238, 3, 8, 4, 0, + 238, 
239, 5, 4, 0, 0, 239, 240, 3, 8, 4, 0, 240, 241, 5, 25, 0, 0, 241, + 243, 1, 0, 0, 0, 242, 228, 1, 0, 0, 0, 242, 235, 1, 0, 0, 0, 243, 39, 1, + 0, 0, 0, 244, 245, 3, 36, 18, 0, 245, 248, 3, 6, 3, 0, 246, 247, 5, 30, + 0, 0, 247, 249, 3, 38, 19, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, + 0, 249, 41, 1, 0, 0, 0, 250, 251, 5, 9, 0, 0, 251, 252, 5, 28, 0, 0, 252, + 259, 5, 5, 0, 0, 253, 255, 3, 40, 20, 0, 254, 256, 5, 5, 0, 0, 255, 254, + 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, + 0, 0, 258, 260, 1, 0, 0, 0, 259, 253, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, + 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 1, 0, 0, 0, 263, + 264, 5, 29, 0, 0, 264, 265, 5, 5, 0, 0, 265, 43, 1, 0, 0, 0, 266, 268, + 5, 5, 0, 0, 267, 266, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, + 0, 0, 269, 270, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, + 272, 274, 3, 42, 21, 0, 273, 272, 1, 0, 0, 0, 273, 274, 1, 0, 0, 0, 274, + 275, 1, 0, 0, 0, 275, 280, 3, 34, 17, 0, 276, 277, 5, 5, 0, 0, 277, 279, + 3, 34, 17, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, + 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 286, 1, 0, 0, 0, 282, 280, 1, 0, 0, + 0, 283, 285, 5, 5, 0, 0, 284, 283, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, + 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 289, 1, 0, 0, 0, 288, 286, + 1, 0, 0, 0, 289, 290, 5, 0, 0, 1, 290, 45, 1, 0, 0, 0, 26, 62, 69, 76, + 82, 93, 108, 115, 120, 125, 129, 138, 150, 161, 167, 174, 199, 219, 224, + 242, 248, 257, 261, 269, 273, 280, 286, + } + deserializer := antlr.NewATNDeserializer(nil) + staticData.atn = deserializer.Deserialize(staticData.serializedATN) + atn := staticData.atn + staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) + decisionToDFA := staticData.decisionToDFA + for index, state := range atn.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(state, index) + } +} + +// NumScriptParserInit initializes any static state used to implement NumScriptParser. 
By default the +// static state used to implement the parser is lazily initialized during the first call to +// NewNumScriptParser(). You can call this function if you wish to initialize the static state ahead +// of time. +func NumScriptParserInit() { + staticData := &numscriptParserStaticData + staticData.once.Do(numscriptParserInit) +} + +// NewNumScriptParser produces a new parser instance for the optional input antlr.TokenStream. +func NewNumScriptParser(input antlr.TokenStream) *NumScriptParser { + NumScriptParserInit() + this := new(NumScriptParser) + this.BaseParser = antlr.NewBaseParser(input) + staticData := &numscriptParserStaticData + this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache) + this.RuleNames = staticData.ruleNames + this.LiteralNames = staticData.literalNames + this.SymbolicNames = staticData.symbolicNames + this.GrammarFileName = "NumScript.g4" + + return this +} + +// NumScriptParser tokens. 
+const ( + NumScriptParserEOF = antlr.TokenEOF + NumScriptParserT__0 = 1 + NumScriptParserT__1 = 2 + NumScriptParserT__2 = 3 + NumScriptParserT__3 = 4 + NumScriptParserNEWLINE = 5 + NumScriptParserWHITESPACE = 6 + NumScriptParserMULTILINE_COMMENT = 7 + NumScriptParserLINE_COMMENT = 8 + NumScriptParserVARS = 9 + NumScriptParserMETA = 10 + NumScriptParserSET_TX_META = 11 + NumScriptParserSET_ACCOUNT_META = 12 + NumScriptParserPRINT = 13 + NumScriptParserFAIL = 14 + NumScriptParserSEND = 15 + NumScriptParserSOURCE = 16 + NumScriptParserFROM = 17 + NumScriptParserMAX = 18 + NumScriptParserDESTINATION = 19 + NumScriptParserTO = 20 + NumScriptParserALLOCATE = 21 + NumScriptParserOP_ADD = 22 + NumScriptParserOP_SUB = 23 + NumScriptParserLPAREN = 24 + NumScriptParserRPAREN = 25 + NumScriptParserLBRACK = 26 + NumScriptParserRBRACK = 27 + NumScriptParserLBRACE = 28 + NumScriptParserRBRACE = 29 + NumScriptParserEQ = 30 + NumScriptParserTY_ACCOUNT = 31 + NumScriptParserTY_ASSET = 32 + NumScriptParserTY_NUMBER = 33 + NumScriptParserTY_MONETARY = 34 + NumScriptParserTY_PORTION = 35 + NumScriptParserTY_STRING = 36 + NumScriptParserSTRING = 37 + NumScriptParserPORTION = 38 + NumScriptParserREMAINING = 39 + NumScriptParserKEPT = 40 + NumScriptParserBALANCE = 41 + NumScriptParserSAVE = 42 + NumScriptParserNUMBER = 43 + NumScriptParserPERCENT = 44 + NumScriptParserVARIABLE_NAME = 45 + NumScriptParserACCOUNT = 46 + NumScriptParserASSET = 47 +) + +// NumScriptParser rules. 
+const ( + NumScriptParserRULE_monetary = 0 + NumScriptParserRULE_monetaryAll = 1 + NumScriptParserRULE_literal = 2 + NumScriptParserRULE_variable = 3 + NumScriptParserRULE_expression = 4 + NumScriptParserRULE_allotmentPortion = 5 + NumScriptParserRULE_destinationInOrder = 6 + NumScriptParserRULE_destinationAllotment = 7 + NumScriptParserRULE_keptOrDestination = 8 + NumScriptParserRULE_destination = 9 + NumScriptParserRULE_sourceAccountOverdraft = 10 + NumScriptParserRULE_sourceAccount = 11 + NumScriptParserRULE_sourceInOrder = 12 + NumScriptParserRULE_sourceMaxed = 13 + NumScriptParserRULE_source = 14 + NumScriptParserRULE_sourceAllotment = 15 + NumScriptParserRULE_valueAwareSource = 16 + NumScriptParserRULE_statement = 17 + NumScriptParserRULE_type_ = 18 + NumScriptParserRULE_origin = 19 + NumScriptParserRULE_varDecl = 20 + NumScriptParserRULE_varListDecl = 21 + NumScriptParserRULE_script = 22 +) + +// IMonetaryContext is an interface to support dynamic dispatch. +type IMonetaryContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetAmt returns the amt token. + GetAmt() antlr.Token + + // SetAmt sets the amt token. + SetAmt(antlr.Token) + + // GetAsset returns the asset rule contexts. + GetAsset() IExpressionContext + + // SetAsset sets the asset rule contexts. + SetAsset(IExpressionContext) + + // IsMonetaryContext differentiates from other interfaces. 
+ IsMonetaryContext() +} + +type MonetaryContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + asset IExpressionContext + amt antlr.Token +} + +func NewEmptyMonetaryContext() *MonetaryContext { + var p = new(MonetaryContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_monetary + return p +} + +func (*MonetaryContext) IsMonetaryContext() {} + +func NewMonetaryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MonetaryContext { + var p = new(MonetaryContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_monetary + + return p +} + +func (s *MonetaryContext) GetParser() antlr.Parser { return s.parser } + +func (s *MonetaryContext) GetAmt() antlr.Token { return s.amt } + +func (s *MonetaryContext) SetAmt(v antlr.Token) { s.amt = v } + +func (s *MonetaryContext) GetAsset() IExpressionContext { return s.asset } + +func (s *MonetaryContext) SetAsset(v IExpressionContext) { s.asset = v } + +func (s *MonetaryContext) LBRACK() antlr.TerminalNode { + return s.GetToken(NumScriptParserLBRACK, 0) +} + +func (s *MonetaryContext) RBRACK() antlr.TerminalNode { + return s.GetToken(NumScriptParserRBRACK, 0) +} + +func (s *MonetaryContext) Expression() IExpressionContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IExpressionContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IExpressionContext) +} + +func (s *MonetaryContext) NUMBER() antlr.TerminalNode { + return s.GetToken(NumScriptParserNUMBER, 0) +} + +func (s *MonetaryContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *MonetaryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *MonetaryContext) EnterRule(listener 
antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterMonetary(s) + } +} + +func (s *MonetaryContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitMonetary(s) + } +} + +func (p *NumScriptParser) Monetary() (localctx IMonetaryContext) { + this := p + _ = this + + localctx = NewMonetaryContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 0, NumScriptParserRULE_monetary) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(46) + p.Match(NumScriptParserLBRACK) + } + { + p.SetState(47) + + var _x = p.expression(0) + + localctx.(*MonetaryContext).asset = _x + } + { + p.SetState(48) + + var _m = p.Match(NumScriptParserNUMBER) + + localctx.(*MonetaryContext).amt = _m + } + { + p.SetState(49) + p.Match(NumScriptParserRBRACK) + } + + return localctx +} + +// IMonetaryAllContext is an interface to support dynamic dispatch. +type IMonetaryAllContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetAsset returns the asset rule contexts. + GetAsset() IExpressionContext + + // SetAsset sets the asset rule contexts. + SetAsset(IExpressionContext) + + // IsMonetaryAllContext differentiates from other interfaces. 
+ IsMonetaryAllContext() +} + +type MonetaryAllContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + asset IExpressionContext +} + +func NewEmptyMonetaryAllContext() *MonetaryAllContext { + var p = new(MonetaryAllContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_monetaryAll + return p +} + +func (*MonetaryAllContext) IsMonetaryAllContext() {} + +func NewMonetaryAllContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MonetaryAllContext { + var p = new(MonetaryAllContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_monetaryAll + + return p +} + +func (s *MonetaryAllContext) GetParser() antlr.Parser { return s.parser } + +func (s *MonetaryAllContext) GetAsset() IExpressionContext { return s.asset } + +func (s *MonetaryAllContext) SetAsset(v IExpressionContext) { s.asset = v } + +func (s *MonetaryAllContext) LBRACK() antlr.TerminalNode { + return s.GetToken(NumScriptParserLBRACK, 0) +} + +func (s *MonetaryAllContext) RBRACK() antlr.TerminalNode { + return s.GetToken(NumScriptParserRBRACK, 0) +} + +func (s *MonetaryAllContext) Expression() IExpressionContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IExpressionContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IExpressionContext) +} + +func (s *MonetaryAllContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *MonetaryAllContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *MonetaryAllContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterMonetaryAll(s) + } +} + +func (s *MonetaryAllContext) ExitRule(listener antlr.ParseTreeListener) { + if 
listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitMonetaryAll(s) + } +} + +func (p *NumScriptParser) MonetaryAll() (localctx IMonetaryAllContext) { + this := p + _ = this + + localctx = NewMonetaryAllContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 2, NumScriptParserRULE_monetaryAll) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(51) + p.Match(NumScriptParserLBRACK) + } + { + p.SetState(52) + + var _x = p.expression(0) + + localctx.(*MonetaryAllContext).asset = _x + } + { + p.SetState(53) + p.Match(NumScriptParserT__0) + } + { + p.SetState(54) + p.Match(NumScriptParserRBRACK) + } + + return localctx +} + +// ILiteralContext is an interface to support dynamic dispatch. +type ILiteralContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsLiteralContext differentiates from other interfaces. 
+ IsLiteralContext() +} + +type LiteralContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyLiteralContext() *LiteralContext { + var p = new(LiteralContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_literal + return p +} + +func (*LiteralContext) IsLiteralContext() {} + +func NewLiteralContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *LiteralContext { + var p = new(LiteralContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_literal + + return p +} + +func (s *LiteralContext) GetParser() antlr.Parser { return s.parser } + +func (s *LiteralContext) CopyFrom(ctx *LiteralContext) { + s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +} + +func (s *LiteralContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *LiteralContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +type LitPortionContext struct { + *LiteralContext +} + +func NewLitPortionContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LitPortionContext { + var p = new(LitPortionContext) + + p.LiteralContext = NewEmptyLiteralContext() + p.parser = parser + p.CopyFrom(ctx.(*LiteralContext)) + + return p +} + +func (s *LitPortionContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *LitPortionContext) PORTION() antlr.TerminalNode { + return s.GetToken(NumScriptParserPORTION, 0) +} + +func (s *LitPortionContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterLitPortion(s) + } +} + +func (s *LitPortionContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitLitPortion(s) + } +} + +type LitStringContext struct { + 
*LiteralContext +} + +func NewLitStringContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LitStringContext { + var p = new(LitStringContext) + + p.LiteralContext = NewEmptyLiteralContext() + p.parser = parser + p.CopyFrom(ctx.(*LiteralContext)) + + return p +} + +func (s *LitStringContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *LitStringContext) STRING() antlr.TerminalNode { + return s.GetToken(NumScriptParserSTRING, 0) +} + +func (s *LitStringContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterLitString(s) + } +} + +func (s *LitStringContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitLitString(s) + } +} + +type LitAccountContext struct { + *LiteralContext +} + +func NewLitAccountContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LitAccountContext { + var p = new(LitAccountContext) + + p.LiteralContext = NewEmptyLiteralContext() + p.parser = parser + p.CopyFrom(ctx.(*LiteralContext)) + + return p +} + +func (s *LitAccountContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *LitAccountContext) ACCOUNT() antlr.TerminalNode { + return s.GetToken(NumScriptParserACCOUNT, 0) +} + +func (s *LitAccountContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterLitAccount(s) + } +} + +func (s *LitAccountContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitLitAccount(s) + } +} + +type LitAssetContext struct { + *LiteralContext +} + +func NewLitAssetContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LitAssetContext { + var p = new(LitAssetContext) + + p.LiteralContext = NewEmptyLiteralContext() + p.parser = parser + p.CopyFrom(ctx.(*LiteralContext)) + + return p +} + +func (s *LitAssetContext) GetRuleContext() 
antlr.RuleContext { + return s +} + +func (s *LitAssetContext) ASSET() antlr.TerminalNode { + return s.GetToken(NumScriptParserASSET, 0) +} + +func (s *LitAssetContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterLitAsset(s) + } +} + +func (s *LitAssetContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitLitAsset(s) + } +} + +type LitMonetaryContext struct { + *LiteralContext +} + +func NewLitMonetaryContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LitMonetaryContext { + var p = new(LitMonetaryContext) + + p.LiteralContext = NewEmptyLiteralContext() + p.parser = parser + p.CopyFrom(ctx.(*LiteralContext)) + + return p +} + +func (s *LitMonetaryContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *LitMonetaryContext) Monetary() IMonetaryContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IMonetaryContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IMonetaryContext) +} + +func (s *LitMonetaryContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterLitMonetary(s) + } +} + +func (s *LitMonetaryContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitLitMonetary(s) + } +} + +type LitNumberContext struct { + *LiteralContext +} + +func NewLitNumberContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LitNumberContext { + var p = new(LitNumberContext) + + p.LiteralContext = NewEmptyLiteralContext() + p.parser = parser + p.CopyFrom(ctx.(*LiteralContext)) + + return p +} + +func (s *LitNumberContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *LitNumberContext) NUMBER() antlr.TerminalNode { + return s.GetToken(NumScriptParserNUMBER, 0) +} + 
+func (s *LitNumberContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterLitNumber(s) + } +} + +func (s *LitNumberContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitLitNumber(s) + } +} + +func (p *NumScriptParser) Literal() (localctx ILiteralContext) { + this := p + _ = this + + localctx = NewLiteralContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 4, NumScriptParserRULE_literal) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.SetState(62) + p.GetErrorHandler().Sync(p) + + switch p.GetTokenStream().LA(1) { + case NumScriptParserACCOUNT: + localctx = NewLitAccountContext(p, localctx) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(56) + p.Match(NumScriptParserACCOUNT) + } + + case NumScriptParserASSET: + localctx = NewLitAssetContext(p, localctx) + p.EnterOuterAlt(localctx, 2) + { + p.SetState(57) + p.Match(NumScriptParserASSET) + } + + case NumScriptParserNUMBER: + localctx = NewLitNumberContext(p, localctx) + p.EnterOuterAlt(localctx, 3) + { + p.SetState(58) + p.Match(NumScriptParserNUMBER) + } + + case NumScriptParserSTRING: + localctx = NewLitStringContext(p, localctx) + p.EnterOuterAlt(localctx, 4) + { + p.SetState(59) + p.Match(NumScriptParserSTRING) + } + + case NumScriptParserPORTION: + localctx = NewLitPortionContext(p, localctx) + p.EnterOuterAlt(localctx, 5) + { + p.SetState(60) + p.Match(NumScriptParserPORTION) + } + + case NumScriptParserLBRACK: + localctx = NewLitMonetaryContext(p, localctx) + p.EnterOuterAlt(localctx, 6) + { + p.SetState(61) + p.Monetary() + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, 
nil)) + } + + return localctx +} + +// IVariableContext is an interface to support dynamic dispatch. +type IVariableContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsVariableContext differentiates from other interfaces. + IsVariableContext() +} + +type VariableContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyVariableContext() *VariableContext { + var p = new(VariableContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_variable + return p +} + +func (*VariableContext) IsVariableContext() {} + +func NewVariableContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *VariableContext { + var p = new(VariableContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_variable + + return p +} + +func (s *VariableContext) GetParser() antlr.Parser { return s.parser } + +func (s *VariableContext) VARIABLE_NAME() antlr.TerminalNode { + return s.GetToken(NumScriptParserVARIABLE_NAME, 0) +} + +func (s *VariableContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *VariableContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *VariableContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterVariable(s) + } +} + +func (s *VariableContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitVariable(s) + } +} + +func (p *NumScriptParser) Variable() (localctx IVariableContext) { + this := p + _ = this + + localctx = NewVariableContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 6, NumScriptParserRULE_variable) + + defer func() { + 
p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(64) + p.Match(NumScriptParserVARIABLE_NAME) + } + + return localctx +} + +// IExpressionContext is an interface to support dynamic dispatch. +type IExpressionContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsExpressionContext differentiates from other interfaces. + IsExpressionContext() +} + +type ExpressionContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyExpressionContext() *ExpressionContext { + var p = new(ExpressionContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_expression + return p +} + +func (*ExpressionContext) IsExpressionContext() {} + +func NewExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExpressionContext { + var p = new(ExpressionContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_expression + + return p +} + +func (s *ExpressionContext) GetParser() antlr.Parser { return s.parser } + +func (s *ExpressionContext) CopyFrom(ctx *ExpressionContext) { + s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +} + +func (s *ExpressionContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +type ExprAddSubContext struct { + *ExpressionContext + lhs IExpressionContext + op antlr.Token + rhs IExpressionContext +} + +func NewExprAddSubContext(parser antlr.Parser, ctx 
antlr.ParserRuleContext) *ExprAddSubContext { + var p = new(ExprAddSubContext) + + p.ExpressionContext = NewEmptyExpressionContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpressionContext)) + + return p +} + +func (s *ExprAddSubContext) GetOp() antlr.Token { return s.op } + +func (s *ExprAddSubContext) SetOp(v antlr.Token) { s.op = v } + +func (s *ExprAddSubContext) GetLhs() IExpressionContext { return s.lhs } + +func (s *ExprAddSubContext) GetRhs() IExpressionContext { return s.rhs } + +func (s *ExprAddSubContext) SetLhs(v IExpressionContext) { s.lhs = v } + +func (s *ExprAddSubContext) SetRhs(v IExpressionContext) { s.rhs = v } + +func (s *ExprAddSubContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExprAddSubContext) AllExpression() []IExpressionContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IExpressionContext); ok { + len++ + } + } + + tst := make([]IExpressionContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IExpressionContext); ok { + tst[i] = t.(IExpressionContext) + i++ + } + } + + return tst +} + +func (s *ExprAddSubContext) Expression(i int) IExpressionContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IExpressionContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IExpressionContext) +} + +func (s *ExprAddSubContext) OP_ADD() antlr.TerminalNode { + return s.GetToken(NumScriptParserOP_ADD, 0) +} + +func (s *ExprAddSubContext) OP_SUB() antlr.TerminalNode { + return s.GetToken(NumScriptParserOP_SUB, 0) +} + +func (s *ExprAddSubContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterExprAddSub(s) + } +} + +func (s *ExprAddSubContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + 
listenerT.ExitExprAddSub(s) + } +} + +type ExprLiteralContext struct { + *ExpressionContext + lit ILiteralContext +} + +func NewExprLiteralContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExprLiteralContext { + var p = new(ExprLiteralContext) + + p.ExpressionContext = NewEmptyExpressionContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpressionContext)) + + return p +} + +func (s *ExprLiteralContext) GetLit() ILiteralContext { return s.lit } + +func (s *ExprLiteralContext) SetLit(v ILiteralContext) { s.lit = v } + +func (s *ExprLiteralContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExprLiteralContext) Literal() ILiteralContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ILiteralContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ILiteralContext) +} + +func (s *ExprLiteralContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterExprLiteral(s) + } +} + +func (s *ExprLiteralContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitExprLiteral(s) + } +} + +type ExprVariableContext struct { + *ExpressionContext + var_ IVariableContext +} + +func NewExprVariableContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExprVariableContext { + var p = new(ExprVariableContext) + + p.ExpressionContext = NewEmptyExpressionContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpressionContext)) + + return p +} + +func (s *ExprVariableContext) GetVar_() IVariableContext { return s.var_ } + +func (s *ExprVariableContext) SetVar_(v IVariableContext) { s.var_ = v } + +func (s *ExprVariableContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExprVariableContext) Variable() IVariableContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IVariableContext); 
ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IVariableContext) +} + +func (s *ExprVariableContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterExprVariable(s) + } +} + +func (s *ExprVariableContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitExprVariable(s) + } +} + +func (p *NumScriptParser) Expression() (localctx IExpressionContext) { + return p.expression(0) +} + +func (p *NumScriptParser) expression(_p int) (localctx IExpressionContext) { + this := p + _ = this + + var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() + _parentState := p.GetState() + localctx = NewExpressionContext(p, p.GetParserRuleContext(), _parentState) + var _prevctx IExpressionContext = localctx + var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning. + _startState := 8 + p.EnterRecursionRule(localctx, 8, NumScriptParserRULE_expression, _p) + var _la int + + defer func() { + p.UnrollRecursionContexts(_parentctx) + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + var _alt int + + p.EnterOuterAlt(localctx, 1) + p.SetState(69) + p.GetErrorHandler().Sync(p) + + switch p.GetTokenStream().LA(1) { + case NumScriptParserLBRACK, NumScriptParserSTRING, NumScriptParserPORTION, NumScriptParserNUMBER, NumScriptParserACCOUNT, NumScriptParserASSET: + localctx = NewExprLiteralContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + + { + p.SetState(67) + + var _x = p.Literal() + + localctx.(*ExprLiteralContext).lit = _x + } + + case NumScriptParserVARIABLE_NAME: + localctx = NewExprVariableContext(p, localctx) + 
p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(68) + + var _x = p.Variable() + + localctx.(*ExprVariableContext).var_ = _x + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + } + p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) + p.SetState(76) + p.GetErrorHandler().Sync(p) + _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 2, p.GetParserRuleContext()) + + for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { + if _alt == 1 { + if p.GetParseListeners() != nil { + p.TriggerExitRuleEvent() + } + _prevctx = localctx + localctx = NewExprAddSubContext(p, NewExpressionContext(p, _parentctx, _parentState)) + localctx.(*ExprAddSubContext).lhs = _prevctx + + p.PushNewRecursionContext(localctx, _startState, NumScriptParserRULE_expression) + p.SetState(71) + + if !(p.Precpred(p.GetParserRuleContext(), 3)) { + panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", "")) + } + { + p.SetState(72) + + var _lt = p.GetTokenStream().LT(1) + + localctx.(*ExprAddSubContext).op = _lt + + _la = p.GetTokenStream().LA(1) + + if !(_la == NumScriptParserOP_ADD || _la == NumScriptParserOP_SUB) { + var _ri = p.GetErrorHandler().RecoverInline(p) + + localctx.(*ExprAddSubContext).op = _ri + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + { + p.SetState(73) + + var _x = p.expression(4) + + localctx.(*ExprAddSubContext).rhs = _x + } + + } + p.SetState(78) + p.GetErrorHandler().Sync(p) + _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 2, p.GetParserRuleContext()) + } + + return localctx +} + +// IAllotmentPortionContext is an interface to support dynamic dispatch. +type IAllotmentPortionContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsAllotmentPortionContext differentiates from other interfaces. 
+ IsAllotmentPortionContext() +} + +type AllotmentPortionContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyAllotmentPortionContext() *AllotmentPortionContext { + var p = new(AllotmentPortionContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_allotmentPortion + return p +} + +func (*AllotmentPortionContext) IsAllotmentPortionContext() {} + +func NewAllotmentPortionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *AllotmentPortionContext { + var p = new(AllotmentPortionContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_allotmentPortion + + return p +} + +func (s *AllotmentPortionContext) GetParser() antlr.Parser { return s.parser } + +func (s *AllotmentPortionContext) CopyFrom(ctx *AllotmentPortionContext) { + s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +} + +func (s *AllotmentPortionContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *AllotmentPortionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +type AllotmentPortionRemainingContext struct { + *AllotmentPortionContext +} + +func NewAllotmentPortionRemainingContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *AllotmentPortionRemainingContext { + var p = new(AllotmentPortionRemainingContext) + + p.AllotmentPortionContext = NewEmptyAllotmentPortionContext() + p.parser = parser + p.CopyFrom(ctx.(*AllotmentPortionContext)) + + return p +} + +func (s *AllotmentPortionRemainingContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *AllotmentPortionRemainingContext) REMAINING() antlr.TerminalNode { + return s.GetToken(NumScriptParserREMAINING, 0) +} + +func (s *AllotmentPortionRemainingContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, 
ok := listener.(NumScriptListener); ok { + listenerT.EnterAllotmentPortionRemaining(s) + } +} + +func (s *AllotmentPortionRemainingContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitAllotmentPortionRemaining(s) + } +} + +type AllotmentPortionVarContext struct { + *AllotmentPortionContext + por IVariableContext +} + +func NewAllotmentPortionVarContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *AllotmentPortionVarContext { + var p = new(AllotmentPortionVarContext) + + p.AllotmentPortionContext = NewEmptyAllotmentPortionContext() + p.parser = parser + p.CopyFrom(ctx.(*AllotmentPortionContext)) + + return p +} + +func (s *AllotmentPortionVarContext) GetPor() IVariableContext { return s.por } + +func (s *AllotmentPortionVarContext) SetPor(v IVariableContext) { s.por = v } + +func (s *AllotmentPortionVarContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *AllotmentPortionVarContext) Variable() IVariableContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IVariableContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IVariableContext) +} + +func (s *AllotmentPortionVarContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterAllotmentPortionVar(s) + } +} + +func (s *AllotmentPortionVarContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitAllotmentPortionVar(s) + } +} + +type AllotmentPortionConstContext struct { + *AllotmentPortionContext +} + +func NewAllotmentPortionConstContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *AllotmentPortionConstContext { + var p = new(AllotmentPortionConstContext) + + p.AllotmentPortionContext = NewEmptyAllotmentPortionContext() + p.parser = parser + p.CopyFrom(ctx.(*AllotmentPortionContext)) 
+ + return p +} + +func (s *AllotmentPortionConstContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *AllotmentPortionConstContext) PORTION() antlr.TerminalNode { + return s.GetToken(NumScriptParserPORTION, 0) +} + +func (s *AllotmentPortionConstContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterAllotmentPortionConst(s) + } +} + +func (s *AllotmentPortionConstContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitAllotmentPortionConst(s) + } +} + +func (p *NumScriptParser) AllotmentPortion() (localctx IAllotmentPortionContext) { + this := p + _ = this + + localctx = NewAllotmentPortionContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 10, NumScriptParserRULE_allotmentPortion) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.SetState(82) + p.GetErrorHandler().Sync(p) + + switch p.GetTokenStream().LA(1) { + case NumScriptParserPORTION: + localctx = NewAllotmentPortionConstContext(p, localctx) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(79) + p.Match(NumScriptParserPORTION) + } + + case NumScriptParserVARIABLE_NAME: + localctx = NewAllotmentPortionVarContext(p, localctx) + p.EnterOuterAlt(localctx, 2) + { + p.SetState(80) + + var _x = p.Variable() + + localctx.(*AllotmentPortionVarContext).por = _x + } + + case NumScriptParserREMAINING: + localctx = NewAllotmentPortionRemainingContext(p, localctx) + p.EnterOuterAlt(localctx, 3) + { + p.SetState(81) + p.Match(NumScriptParserREMAINING) + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + } + + return localctx +} + +// IDestinationInOrderContext is an 
interface to support dynamic dispatch. +type IDestinationInOrderContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // Get_expression returns the _expression rule contexts. + Get_expression() IExpressionContext + + // Get_keptOrDestination returns the _keptOrDestination rule contexts. + Get_keptOrDestination() IKeptOrDestinationContext + + // GetRemainingDest returns the remainingDest rule contexts. + GetRemainingDest() IKeptOrDestinationContext + + // Set_expression sets the _expression rule contexts. + Set_expression(IExpressionContext) + + // Set_keptOrDestination sets the _keptOrDestination rule contexts. + Set_keptOrDestination(IKeptOrDestinationContext) + + // SetRemainingDest sets the remainingDest rule contexts. + SetRemainingDest(IKeptOrDestinationContext) + + // GetAmounts returns the amounts rule context list. + GetAmounts() []IExpressionContext + + // GetDests returns the dests rule context list. + GetDests() []IKeptOrDestinationContext + + // SetAmounts sets the amounts rule context list. + SetAmounts([]IExpressionContext) + + // SetDests sets the dests rule context list. + SetDests([]IKeptOrDestinationContext) + + // IsDestinationInOrderContext differentiates from other interfaces. 
+ IsDestinationInOrderContext() +} + +type DestinationInOrderContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + _expression IExpressionContext + amounts []IExpressionContext + _keptOrDestination IKeptOrDestinationContext + dests []IKeptOrDestinationContext + remainingDest IKeptOrDestinationContext +} + +func NewEmptyDestinationInOrderContext() *DestinationInOrderContext { + var p = new(DestinationInOrderContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_destinationInOrder + return p +} + +func (*DestinationInOrderContext) IsDestinationInOrderContext() {} + +func NewDestinationInOrderContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *DestinationInOrderContext { + var p = new(DestinationInOrderContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_destinationInOrder + + return p +} + +func (s *DestinationInOrderContext) GetParser() antlr.Parser { return s.parser } + +func (s *DestinationInOrderContext) Get_expression() IExpressionContext { return s._expression } + +func (s *DestinationInOrderContext) Get_keptOrDestination() IKeptOrDestinationContext { + return s._keptOrDestination +} + +func (s *DestinationInOrderContext) GetRemainingDest() IKeptOrDestinationContext { + return s.remainingDest +} + +func (s *DestinationInOrderContext) Set_expression(v IExpressionContext) { s._expression = v } + +func (s *DestinationInOrderContext) Set_keptOrDestination(v IKeptOrDestinationContext) { + s._keptOrDestination = v +} + +func (s *DestinationInOrderContext) SetRemainingDest(v IKeptOrDestinationContext) { + s.remainingDest = v +} + +func (s *DestinationInOrderContext) GetAmounts() []IExpressionContext { return s.amounts } + +func (s *DestinationInOrderContext) GetDests() []IKeptOrDestinationContext { return s.dests } + +func (s *DestinationInOrderContext) SetAmounts(v 
[]IExpressionContext) { s.amounts = v } + +func (s *DestinationInOrderContext) SetDests(v []IKeptOrDestinationContext) { s.dests = v } + +func (s *DestinationInOrderContext) LBRACE() antlr.TerminalNode { + return s.GetToken(NumScriptParserLBRACE, 0) +} + +func (s *DestinationInOrderContext) AllNEWLINE() []antlr.TerminalNode { + return s.GetTokens(NumScriptParserNEWLINE) +} + +func (s *DestinationInOrderContext) NEWLINE(i int) antlr.TerminalNode { + return s.GetToken(NumScriptParserNEWLINE, i) +} + +func (s *DestinationInOrderContext) REMAINING() antlr.TerminalNode { + return s.GetToken(NumScriptParserREMAINING, 0) +} + +func (s *DestinationInOrderContext) RBRACE() antlr.TerminalNode { + return s.GetToken(NumScriptParserRBRACE, 0) +} + +func (s *DestinationInOrderContext) AllKeptOrDestination() []IKeptOrDestinationContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IKeptOrDestinationContext); ok { + len++ + } + } + + tst := make([]IKeptOrDestinationContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IKeptOrDestinationContext); ok { + tst[i] = t.(IKeptOrDestinationContext) + i++ + } + } + + return tst +} + +func (s *DestinationInOrderContext) KeptOrDestination(i int) IKeptOrDestinationContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IKeptOrDestinationContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IKeptOrDestinationContext) +} + +func (s *DestinationInOrderContext) AllMAX() []antlr.TerminalNode { + return s.GetTokens(NumScriptParserMAX) +} + +func (s *DestinationInOrderContext) MAX(i int) antlr.TerminalNode { + return s.GetToken(NumScriptParserMAX, i) +} + +func (s *DestinationInOrderContext) AllExpression() []IExpressionContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IExpressionContext); ok { + len++ 
+ } + } + + tst := make([]IExpressionContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IExpressionContext); ok { + tst[i] = t.(IExpressionContext) + i++ + } + } + + return tst +} + +func (s *DestinationInOrderContext) Expression(i int) IExpressionContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IExpressionContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IExpressionContext) +} + +func (s *DestinationInOrderContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *DestinationInOrderContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *DestinationInOrderContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterDestinationInOrder(s) + } +} + +func (s *DestinationInOrderContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitDestinationInOrder(s) + } +} + +func (p *NumScriptParser) DestinationInOrder() (localctx IDestinationInOrderContext) { + this := p + _ = this + + localctx = NewDestinationInOrderContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 12, NumScriptParserRULE_destinationInOrder) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(84) + p.Match(NumScriptParserLBRACE) + } + { + p.SetState(85) + p.Match(NumScriptParserNEWLINE) + } + p.SetState(91) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + for ok := true; ok; ok = 
_la == NumScriptParserMAX { + { + p.SetState(86) + p.Match(NumScriptParserMAX) + } + { + p.SetState(87) + + var _x = p.expression(0) + + localctx.(*DestinationInOrderContext)._expression = _x + } + localctx.(*DestinationInOrderContext).amounts = append(localctx.(*DestinationInOrderContext).amounts, localctx.(*DestinationInOrderContext)._expression) + { + p.SetState(88) + + var _x = p.KeptOrDestination() + + localctx.(*DestinationInOrderContext)._keptOrDestination = _x + } + localctx.(*DestinationInOrderContext).dests = append(localctx.(*DestinationInOrderContext).dests, localctx.(*DestinationInOrderContext)._keptOrDestination) + { + p.SetState(89) + p.Match(NumScriptParserNEWLINE) + } + + p.SetState(93) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + } + { + p.SetState(95) + p.Match(NumScriptParserREMAINING) + } + { + p.SetState(96) + + var _x = p.KeptOrDestination() + + localctx.(*DestinationInOrderContext).remainingDest = _x + } + { + p.SetState(97) + p.Match(NumScriptParserNEWLINE) + } + { + p.SetState(98) + p.Match(NumScriptParserRBRACE) + } + + return localctx +} + +// IDestinationAllotmentContext is an interface to support dynamic dispatch. +type IDestinationAllotmentContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // Get_allotmentPortion returns the _allotmentPortion rule contexts. + Get_allotmentPortion() IAllotmentPortionContext + + // Get_keptOrDestination returns the _keptOrDestination rule contexts. + Get_keptOrDestination() IKeptOrDestinationContext + + // Set_allotmentPortion sets the _allotmentPortion rule contexts. + Set_allotmentPortion(IAllotmentPortionContext) + + // Set_keptOrDestination sets the _keptOrDestination rule contexts. + Set_keptOrDestination(IKeptOrDestinationContext) + + // GetPortions returns the portions rule context list. + GetPortions() []IAllotmentPortionContext + + // GetDests returns the dests rule context list. 
+ GetDests() []IKeptOrDestinationContext + + // SetPortions sets the portions rule context list. + SetPortions([]IAllotmentPortionContext) + + // SetDests sets the dests rule context list. + SetDests([]IKeptOrDestinationContext) + + // IsDestinationAllotmentContext differentiates from other interfaces. + IsDestinationAllotmentContext() +} + +type DestinationAllotmentContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + _allotmentPortion IAllotmentPortionContext + portions []IAllotmentPortionContext + _keptOrDestination IKeptOrDestinationContext + dests []IKeptOrDestinationContext +} + +func NewEmptyDestinationAllotmentContext() *DestinationAllotmentContext { + var p = new(DestinationAllotmentContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = NumScriptParserRULE_destinationAllotment + return p +} + +func (*DestinationAllotmentContext) IsDestinationAllotmentContext() {} + +func NewDestinationAllotmentContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *DestinationAllotmentContext { + var p = new(DestinationAllotmentContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = NumScriptParserRULE_destinationAllotment + + return p +} + +func (s *DestinationAllotmentContext) GetParser() antlr.Parser { return s.parser } + +func (s *DestinationAllotmentContext) Get_allotmentPortion() IAllotmentPortionContext { + return s._allotmentPortion +} + +func (s *DestinationAllotmentContext) Get_keptOrDestination() IKeptOrDestinationContext { + return s._keptOrDestination +} + +func (s *DestinationAllotmentContext) Set_allotmentPortion(v IAllotmentPortionContext) { + s._allotmentPortion = v +} + +func (s *DestinationAllotmentContext) Set_keptOrDestination(v IKeptOrDestinationContext) { + s._keptOrDestination = v +} + +func (s *DestinationAllotmentContext) GetPortions() []IAllotmentPortionContext { return s.portions } + +func (s 
*DestinationAllotmentContext) GetDests() []IKeptOrDestinationContext { return s.dests } + +func (s *DestinationAllotmentContext) SetPortions(v []IAllotmentPortionContext) { s.portions = v } + +func (s *DestinationAllotmentContext) SetDests(v []IKeptOrDestinationContext) { s.dests = v } + +func (s *DestinationAllotmentContext) LBRACE() antlr.TerminalNode { + return s.GetToken(NumScriptParserLBRACE, 0) +} + +func (s *DestinationAllotmentContext) AllNEWLINE() []antlr.TerminalNode { + return s.GetTokens(NumScriptParserNEWLINE) +} + +func (s *DestinationAllotmentContext) NEWLINE(i int) antlr.TerminalNode { + return s.GetToken(NumScriptParserNEWLINE, i) +} + +func (s *DestinationAllotmentContext) RBRACE() antlr.TerminalNode { + return s.GetToken(NumScriptParserRBRACE, 0) +} + +func (s *DestinationAllotmentContext) AllAllotmentPortion() []IAllotmentPortionContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IAllotmentPortionContext); ok { + len++ + } + } + + tst := make([]IAllotmentPortionContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IAllotmentPortionContext); ok { + tst[i] = t.(IAllotmentPortionContext) + i++ + } + } + + return tst +} + +func (s *DestinationAllotmentContext) AllotmentPortion(i int) IAllotmentPortionContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IAllotmentPortionContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IAllotmentPortionContext) +} + +func (s *DestinationAllotmentContext) AllKeptOrDestination() []IKeptOrDestinationContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IKeptOrDestinationContext); ok { + len++ + } + } + + tst := make([]IKeptOrDestinationContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IKeptOrDestinationContext); ok { + tst[i] = 
t.(IKeptOrDestinationContext) + i++ + } + } + + return tst +} + +func (s *DestinationAllotmentContext) KeptOrDestination(i int) IKeptOrDestinationContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IKeptOrDestinationContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IKeptOrDestinationContext) +} + +func (s *DestinationAllotmentContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *DestinationAllotmentContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *DestinationAllotmentContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.EnterDestinationAllotment(s) + } +} + +func (s *DestinationAllotmentContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(NumScriptListener); ok { + listenerT.ExitDestinationAllotment(s) + } +} + +func (p *NumScriptParser) DestinationAllotment() (localctx IDestinationAllotmentContext) { + this := p + _ = this + + localctx = NewDestinationAllotmentContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 14, NumScriptParserRULE_destinationAllotment) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(100) + p.Match(NumScriptParserLBRACE) + } + { + p.SetState(101) + p.Match(NumScriptParserNEWLINE) + } + p.SetState(106) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + for ok := true; ok; ok = (((_la-38)&-(0x1f+1)) == 0 && ((1<= len(m.Resources) { + return nil, false + } + return 
&m.Resources[a], true +} + +func (m *Machine) withdrawAll(account internal.AccountAddress, asset internal.Asset, overdraft *internal.MonetaryInt) (*internal.Funding, error) { + if accBalances, ok := m.Balances[account]; ok { + if balance, ok := accBalances[asset]; ok { + amountTaken := internal.Zero + balanceWithOverdraft := balance.Add(overdraft) + if balanceWithOverdraft.Gt(internal.Zero) { + amountTaken = balanceWithOverdraft + accBalances[asset] = overdraft.Neg() + } + + return &internal.Funding{ + Asset: asset, + Parts: []internal.FundingPart{{ + Account: account, + Amount: amountTaken, + }}, + }, nil + } + } + return nil, fmt.Errorf("missing %v balance from %v", asset, account) +} + +func (m *Machine) withdrawAlways(account internal.AccountAddress, mon internal.Monetary) (*internal.Funding, error) { + if accBalance, ok := m.Balances[account]; ok { + if balance, ok := accBalance[mon.Asset]; ok { + accBalance[mon.Asset] = balance.Sub(mon.Amount) + return &internal.Funding{ + Asset: mon.Asset, + Parts: []internal.FundingPart{{ + Account: account, + Amount: mon.Amount, + }}, + }, nil + } + } + return nil, fmt.Errorf("missing %v balance from %v", mon.Asset, account) +} + +func (m *Machine) credit(account internal.AccountAddress, funding internal.Funding) { + if account == "world" { + return + } + if accBalance, ok := m.Balances[account]; ok { + if _, ok := accBalance[funding.Asset]; ok { + for _, part := range funding.Parts { + balance := accBalance[funding.Asset] + accBalance[funding.Asset] = balance.Add(part.Amount) + } + } + } +} + +func (m *Machine) repay(funding internal.Funding) { + for _, part := range funding.Parts { + if part.Account == "world" { + continue + } + balance := m.Balances[part.Account][funding.Asset] + m.Balances[part.Account][funding.Asset] = balance.Add(part.Amount) + } +} + +func (m *Machine) tick() (bool, error) { + op := m.Program.Instructions[m.P] + + if m.Debug { + fmt.Println("STATE 
---------------------------------------------------------------------") + fmt.Printf(" %v\n", aurora.Blue(m.Stack)) + fmt.Printf(" %v\n", aurora.Cyan(m.Balances)) + fmt.Printf(" %v\n", program2.OpcodeName(op)) + } + + switch op { + case program2.OP_APUSH: + bytes := m.Program.Instructions[m.P+1 : m.P+3] + v, ok := m.getResource(internal.Address(binary.LittleEndian.Uint16(bytes))) + if !ok { + return true, ErrResourceNotFound + } + m.Stack = append(m.Stack, *v) + m.P += 2 + + case program2.OP_BUMP: + n := big.Int(*pop[internal.Number](m)) + idx := len(m.Stack) - int(n.Uint64()) - 1 + v := m.Stack[idx] + m.Stack = append(m.Stack[:idx], m.Stack[idx+1:]...) + m.Stack = append(m.Stack, v) + + case program2.OP_DELETE: + n := m.popValue() + if n.GetType() == internal.TypeFunding { + return true, errorsutil.NewError(ErrInvalidScript, + errors.Errorf("wrong type: want: %v, got: %v", n.GetType(), internal.TypeFunding)) + } + + case program2.OP_IADD: + b := pop[internal.Number](m) + a := pop[internal.Number](m) + m.pushValue(a.Add(b)) + + case program2.OP_ISUB: + b := pop[internal.Number](m) + a := pop[internal.Number](m) + m.pushValue(a.Sub(b)) + + case program2.OP_PRINT: + a := m.popValue() + m.printChan <- a + + case program2.OP_FAIL: + return true, ErrScriptFailed + + case program2.OP_ASSET: + v := m.popValue() + switch v := v.(type) { + case internal.Asset: + m.pushValue(v) + case internal.Monetary: + m.pushValue(v.Asset) + case internal.Funding: + m.pushValue(v.Asset) + default: + return true, errorsutil.NewError(ErrInvalidScript, + errors.Errorf("wrong type for op asset: %v", v.GetType())) + } + + case program2.OP_MONETARY_NEW: + amount := pop[internal.Number](m) + asset := pop[internal.Asset](m) + m.pushValue(internal.Monetary{ + Asset: asset, + Amount: amount, + }) + + case program2.OP_MONETARY_ADD: + b := pop[internal.Monetary](m) + a := pop[internal.Monetary](m) + if a.Asset != b.Asset { + return true, errorsutil.NewError(ErrInvalidScript, + errors.Errorf("cannot 
add different assets: %v and %v", a.Asset, b.Asset)) + } + m.pushValue(internal.Monetary{ + Asset: a.Asset, + Amount: a.Amount.Add(b.Amount), + }) + + case program2.OP_MONETARY_SUB: + b := pop[internal.Monetary](m) + a := pop[internal.Monetary](m) + if a.Asset != b.Asset { + return true, fmt.Errorf("%s", program2.OpcodeName(op)) + } + m.pushValue(internal.Monetary{ + Asset: a.Asset, + Amount: a.Amount.Sub(b.Amount), + }) + + case program2.OP_MAKE_ALLOTMENT: + n := pop[internal.Number](m) + portions := make([]internal.Portion, n.Uint64()) + for i := uint64(0); i < n.Uint64(); i++ { + p := pop[internal.Portion](m) + portions[i] = p + } + allotment, err := internal.NewAllotment(portions) + if err != nil { + return true, errorsutil.NewError(ErrInvalidScript, err) + } + m.pushValue(*allotment) + + case program2.OP_TAKE_ALL: + overdraft := pop[internal.Monetary](m) + account := pop[internal.AccountAddress](m) + funding, err := m.withdrawAll(account, overdraft.Asset, overdraft.Amount) + if err != nil { + return true, errorsutil.NewError(ErrInvalidScript, err) + } + m.pushValue(*funding) + + case program2.OP_TAKE_ALWAYS: + mon := pop[internal.Monetary](m) + account := pop[internal.AccountAddress](m) + funding, err := m.withdrawAlways(account, mon) + if err != nil { + return true, errorsutil.NewError(ErrInvalidScript, err) + } + m.pushValue(*funding) + + case program2.OP_TAKE: + mon := pop[internal.Monetary](m) + funding := pop[internal.Funding](m) + if funding.Asset != mon.Asset { + return true, errorsutil.NewError(ErrInvalidScript, + errors.Errorf("cannot take from different assets: %v and %v", funding.Asset, mon.Asset)) + } + result, remainder, err := funding.Take(mon.Amount) + if err != nil { + return true, errorsutil.NewError(ErrInsufficientFund, err) + } + m.pushValue(remainder) + m.pushValue(result) + + case program2.OP_TAKE_MAX: + mon := pop[internal.Monetary](m) + if mon.Amount.Ltz() { + return true, fmt.Errorf( + "cannot send a monetary with a negative amount: [%s 
%s]", + string(mon.Asset), mon.Amount) + } + funding := pop[internal.Funding](m) + if funding.Asset != mon.Asset { + return true, errorsutil.NewError(ErrInvalidScript, + errors.Errorf("cannot take from different assets: %v and %v", funding.Asset, mon.Asset)) + } + missing := internal.Zero + total := funding.Total() + if mon.Amount.Gt(total) { + missing = mon.Amount.Sub(total) + } + m.pushValue(internal.Monetary{ + Asset: mon.Asset, + Amount: missing, + }) + result, remainder := funding.TakeMax(mon.Amount) + m.pushValue(remainder) + m.pushValue(result) + + case program2.OP_FUNDING_ASSEMBLE: + num := pop[internal.Number](m) + n := int(num.Uint64()) + if n == 0 { + return true, errorsutil.NewError(ErrInvalidScript, + errors.New("cannot assemble zero fundings")) + } + first := pop[internal.Funding](m) + result := internal.Funding{ + Asset: first.Asset, + } + fundings_rev := make([]internal.Funding, n) + fundings_rev[0] = first + for i := 1; i < n; i++ { + f := pop[internal.Funding](m) + if f.Asset != result.Asset { + return true, errorsutil.NewError(ErrInvalidScript, + errors.Errorf("cannot assemble different assets: %v and %v", f.Asset, result.Asset)) + } + fundings_rev[i] = f + } + for i := 0; i < n; i++ { + res, err := result.Concat(fundings_rev[n-1-i]) + if err != nil { + return true, errorsutil.NewError(ErrInvalidScript, err) + } + result = res + } + m.pushValue(result) + + case program2.OP_FUNDING_SUM: + funding := pop[internal.Funding](m) + sum := funding.Total() + m.pushValue(funding) + m.pushValue(internal.Monetary{ + Asset: funding.Asset, + Amount: sum, + }) + + case program2.OP_FUNDING_REVERSE: + funding := pop[internal.Funding](m) + result := funding.Reverse() + m.pushValue(result) + + case program2.OP_ALLOC: + allotment := pop[internal.Allotment](m) + monetary := pop[internal.Monetary](m) + total := monetary.Amount + parts := allotment.Allocate(total) + for i := len(parts) - 1; i >= 0; i-- { + m.pushValue(internal.Monetary{ + Asset: monetary.Asset, + 
Amount: parts[i], + }) + } + + case program2.OP_REPAY: + m.repay(pop[internal.Funding](m)) + + case program2.OP_SEND: + dest := pop[internal.AccountAddress](m) + funding := pop[internal.Funding](m) + m.credit(dest, funding) + for _, part := range funding.Parts { + src := part.Account + amt := part.Amount + m.Postings = append(m.Postings, Posting{ + Source: string(src), + Destination: string(dest), + Asset: string(funding.Asset), + Amount: amt, + }) + } + + case program2.OP_TX_META: + k := pop[internal.String](m) + v := m.popValue() + m.TxMeta[string(k)] = v + + case program2.OP_ACCOUNT_META: + a := pop[internal.AccountAddress](m) + k := pop[internal.String](m) + v := m.popValue() + if m.AccountsMeta[a] == nil { + m.AccountsMeta[a] = map[string]internal.Value{} + } + m.AccountsMeta[a][string(k)] = v + + case program2.OP_SAVE: + a := pop[internal.AccountAddress](m) + v := m.popValue() + switch v := v.(type) { + case internal.Asset: + m.Balances[a][v] = internal.Zero + case internal.Monetary: + m.Balances[a][v.Asset] = m.Balances[a][v.Asset].Sub(v.Amount) + default: + panic(fmt.Errorf("invalid value type: %T", v)) + } + + default: + return true, errorsutil.NewError(ErrInvalidScript, + errors.Errorf("invalid opcode: %v", op)) + } + + m.P += 1 + + if int(m.P) >= len(m.Program.Instructions) { + return true, nil + } + + return false, nil +} + +func (m *Machine) Execute() error { + go m.Printer(m.printChan) + defer close(m.printChan) + + if len(m.Resources) != len(m.UnresolvedResources) { + return ErrResourcesNotInitialized + } else if m.Balances == nil { + return ErrBalancesNotInitialized + } + + for { + finished, err := m.tick() + if finished { + if err == nil && len(m.Stack) != 0 { + return errorsutil.NewError(ErrInvalidScript, + errors.New("stack not empty after execution")) + } else { + return err + } + } + } +} + +type BalanceRequest struct { + Account string + Asset string + Response chan *internal.MonetaryInt + Error error +} + +func (m *Machine) 
ResolveBalances(ctx context.Context, store Store) error { + if len(m.Resources) != len(m.UnresolvedResources) { + return errors.New("tried to resolve balances before resources") + } + if m.setBalanceCalled { + return errors.New("tried to call ResolveBalances twice") + } + m.setBalanceCalled = true + m.Balances = make(map[internal.AccountAddress]map[internal.Asset]*internal.MonetaryInt) + + for address, resourceIndex := range m.UnresolvedResourceBalances { + monetary := m.Resources[resourceIndex].(internal.Monetary) + balance, err := store.GetBalance(ctx, address, string(monetary.Asset)) + if err != nil { + return err + } + if balance.Cmp(ledger.Zero) < 0 { + return errorsutil.NewError(ErrNegativeMonetaryAmount, fmt.Errorf( + "tried to request the balance of account %s for asset %s: received %s: monetary amounts must be non-negative", + address, monetary.Asset, balance)) + } + monetary.Amount = internal.NewMonetaryIntFromBigInt(balance) + m.Resources[resourceIndex] = monetary + } + + // for every account that we need balances of, check if it's there + for addr, neededAssets := range m.Program.NeededBalances { + account, ok := m.getResource(addr) + if !ok { + return errors.New("invalid program (resolve balances: invalid address of account)") + } + accountAddress := (*account).(internal.AccountAddress) + m.Balances[accountAddress] = make(map[internal.Asset]*internal.MonetaryInt) + // for every asset, send request + for addr := range neededAssets { + mon, ok := m.getResource(addr) + if !ok { + return errors.New("invalid program (resolve balances: invalid address of monetary)") + } + + asset := (*mon).(internal.HasAsset).GetAsset() + if string(accountAddress) == "world" { + m.Balances[accountAddress][asset] = internal.Zero + continue + } + + balance, err := store.GetBalance(ctx, string(accountAddress), string(asset)) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("could not get balance for account %q", addr)) + } + + m.Balances[accountAddress][asset] = 
internal.NewMonetaryIntFromBigInt(balance) + } + } + return nil +} + +func (m *Machine) ResolveResources(ctx context.Context, store Store) ([]string, []string, error) { + //TODO(gfyrag): Is that really required? Feel like defensive programming. + if m.resolveCalled { + return nil, nil, errors.New("tried to call ResolveResources twice") + } + + m.resolveCalled = true + involvedAccountsMap := make(map[internal.Address]string) + for len(m.Resources) != len(m.UnresolvedResources) { + idx := len(m.Resources) + res := m.UnresolvedResources[idx] + var val internal.Value + switch res := res.(type) { + case program2.Constant: + val = res.Inner + if val.GetType() == internal.TypeAccount { + involvedAccountsMap[internal.Address(idx)] = string(val.(internal.AccountAddress)) + } + case program2.Variable: + var ok bool + val, ok = m.Vars[res.Name] + if !ok { + return nil, nil, fmt.Errorf("missing variable '%s'", res.Name) + } + if val.GetType() == internal.TypeAccount { + involvedAccountsMap[internal.Address(idx)] = string(val.(internal.AccountAddress)) + } + case program2.VariableAccountMetadata: + acc, _ := m.getResource(res.Account) + addr := string((*acc).(internal.AccountAddress)) + + account, err := store.GetAccount(ctx, addr) + if err != nil { + return nil, nil, err + } + + metadata, ok := account.Metadata[res.Key] + if !ok { + return nil, nil, errorsutil.NewError(ErrResourceResolutionMissingMetadata, errors.New( + fmt.Sprintf("missing key %v in metadata for account %s", res.Key, addr))) + } + + val, err = internal.NewValueFromString(res.Typ, metadata) + if err != nil { + return nil, nil, err + } + case program2.VariableAccountBalance: + acc, _ := m.getResource(res.Account) + address := string((*acc).(internal.AccountAddress)) + involvedAccountsMap[internal.Address(idx)] = address + m.UnresolvedResourceBalances[address] = idx + + ass, ok := m.getResource(res.Asset) + if !ok { + return nil, nil, fmt.Errorf( + "variable '%s': tried to request account balance of an asset 
which has not yet been solved", + res.Name) + } + if (*ass).GetType() != internal.TypeAsset { + return nil, nil, fmt.Errorf( + "variable '%s': tried to request account balance for an asset on wrong entity: %v instead of asset", + res.Name, (*ass).GetType()) + } + + val = internal.Monetary{ + Asset: (*ass).(internal.Asset), + } + case program2.Monetary: + ass, _ := m.getResource(res.Asset) + val = internal.Monetary{ + Asset: (*ass).(internal.Asset), + Amount: res.Amount, + } + default: + panic(fmt.Errorf("type %T not implemented", res)) + } + m.Resources = append(m.Resources, val) + } + + involvedAccounts := make([]string, 0) + involvedSources := make([]string, 0) + for _, accountAddress := range involvedAccountsMap { + involvedAccounts = append(involvedAccounts, accountAddress) + } + for _, machineAddress := range m.Program.Sources { + involvedSources = append(involvedSources, involvedAccountsMap[machineAddress]) + } + + return involvedAccounts, involvedSources, nil +} + +func (m *Machine) SetVarsFromJSON(vars map[string]string) error { + v, err := m.Program.ParseVariablesJSON(vars) + if err != nil { + return errorsutil.NewError(ErrInvalidVars, err) + } + m.Vars = v + return nil +} diff --git a/internal/machine/vm/machine_kept_test.go b/internal/machine/vm/machine_kept_test.go new file mode 100644 index 000000000..b6ff67822 --- /dev/null +++ b/internal/machine/vm/machine_kept_test.go @@ -0,0 +1,118 @@ +package vm + +import ( + "testing" + + internal2 "github.com/formancehq/ledger/internal/machine/internal" +) + +func TestKeptDestinationAllotment(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = { + @a + @world + } + destination = { + 50% kept + 25% to @x + 25% to @y + } + )`) + tc.setBalance("a", "GEM", 1) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(1), + Source: "a", + Destination: "x", + }, + { + Asset: "GEM", + Amount: 
internal2.NewMonetaryInt(24), + Source: "world", + Destination: "x", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(25), + Source: "world", + Destination: "y", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestKeptComplex(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = { + @foo + @bar + @baz + } + destination = { + 50% to { + max [GEM 8] to { + 50% kept + 25% to @arst + 25% kept + } + remaining to @thing + } + 20% to @qux + 5% kept + remaining to @quz + } + )`) + tc.setBalance("foo", "GEM", 20) + tc.setBalance("bar", "GEM", 40) + tc.setBalance("baz", "GEM", 40) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(2), + Source: "foo", + Destination: "arst", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(18), + Source: "foo", + Destination: "thing", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(24), + Source: "bar", + Destination: "thing", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(16), + Source: "bar", + Destination: "qux", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(4), + Source: "baz", + Destination: "qux", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(25), + Source: "baz", + Destination: "quz", + }, + }, + Error: nil, + } + test(t, tc) +} diff --git a/internal/machine/vm/machine_overdraft_test.go b/internal/machine/vm/machine_overdraft_test.go new file mode 100644 index 000000000..a285f76a1 --- /dev/null +++ b/internal/machine/vm/machine_overdraft_test.go @@ -0,0 +1,271 @@ +package vm + +import ( + "testing" + + internal2 "github.com/formancehq/ledger/internal/machine/internal" +) + +func TestOverdraftNotEnough(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = @foo allowing overdraft up to [GEM 10] + destination = @world + )`) + tc.setBalance("foo", "GEM", 89) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + 
Postings: []Posting{}, + Error: ErrInsufficientFund, + } + test(t, tc) +} + +func TestOverdraftEnough(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = @foo allowing overdraft up to [GEM 10] + destination = @world + )`) + tc.setBalance("foo", "GEM", 90) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(100), + Source: "foo", + Destination: "world", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestOverdraftUnbounded(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 1000] ( + source = @foo allowing unbounded overdraft + destination = @world + )`) + tc.setBalance("foo", "GEM", 90) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(1000), + Source: "foo", + Destination: "world", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestOverdraftSourceAllotmentSuccess(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = { + 50% from @foo allowing overdraft up to [GEM 10] + 50% from { + @bar allowing overdraft up to [GEM 20] + @baz allowing unbounded overdraft + } + } + destination = @world + )`) + tc.setBalance("foo", "GEM", 40) + tc.setBalance("bar", "GEM", 20) + tc.setBalance("baz", "GEM", 0) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(50), + Source: "foo", + Destination: "world", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(40), + Source: "bar", + Destination: "world", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(10), + Source: "baz", + Destination: "world", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestOverdraftSourceInOrderSuccess(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = { + max [GEM 50] from { + @foo allowing overdraft up to [GEM 10] + 
@bar allowing overdraft up to [GEM 20] + @baz allowing unbounded overdraft + } + @qux allowing unbounded overdraft + } + destination = @world + )`) + tc.setBalance("foo", "GEM", 0) + tc.setBalance("bar", "GEM", 0) + tc.setBalance("baz", "GEM", 0) + tc.setBalance("qux", "GEM", 0) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(10), + Source: "foo", + Destination: "world", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(20), + Source: "bar", + Destination: "world", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(20), + Source: "baz", + Destination: "world", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(50), + Source: "qux", + Destination: "world", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestOverdraftBalanceTracking(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = @foo allowing unbounded overdraft + destination = @world + ) + send [GEM 200] ( + source = @foo allowing overdraft up to [GEM 300] + destination = @world + ) + send [GEM 300] ( + source = @foo allowing unbounded overdraft + destination = @world + ) + `) + tc.setBalance("foo", "GEM", 0) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(100), + Source: "foo", + Destination: "world", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(200), + Source: "foo", + Destination: "world", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(300), + Source: "foo", + Destination: "world", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestWorldIsUnbounded(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = @world + destination = @foo + ) + send [GEM 200] ( + source = @world + destination = @foo + ) + `) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + 
Amount: internal2.NewMonetaryInt(100), + Source: "world", + Destination: "foo", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(200), + Source: "world", + Destination: "foo", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestOverdraftComplexFailure(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = { + 50% from @foo allowing overdraft up to [GEM 10] + 50% from { + @bar allowing overdraft up to [GEM 20] + @baz + } + } + destination = @world + )`) + tc.setBalance("foo", "GEM", 40) + tc.setBalance("bar", "GEM", 20) + tc.setBalance("baz", "GEM", 0) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Error: ErrInsufficientFund, + } + test(t, tc) +} + +func TestNegativeBalance(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 100] ( + source = @foo + destination = @world + )`) + tc.setBalance("foo", "GEM", -50) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Error: ErrInsufficientFund, + } + test(t, tc) +} diff --git a/internal/machine/vm/machine_test.go b/internal/machine/vm/machine_test.go new file mode 100644 index 000000000..12968644f --- /dev/null +++ b/internal/machine/vm/machine_test.go @@ -0,0 +1,2132 @@ +package vm + +import ( + "context" + "encoding/json" + "fmt" + "math/big" + "sync" + "testing" + + ledger "github.com/formancehq/ledger/internal" + internal2 "github.com/formancehq/ledger/internal/machine/internal" + "github.com/formancehq/ledger/internal/machine/script/compiler" + "github.com/formancehq/ledger/internal/machine/vm/program" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +const ( + DEBUG bool = false +) + +type CaseResult struct { + Printed []internal2.Value + Postings []Posting + Metadata map[string]internal2.Value + Error error + ErrorContains string +} + +type TestCase struct { + program 
*program.Program + vars map[string]string + meta map[string]metadata.Metadata + balances map[string]map[string]*internal2.MonetaryInt + expected CaseResult +} + +func NewTestCase() TestCase { + return TestCase{ + vars: make(map[string]string), + meta: make(map[string]metadata.Metadata), + balances: make(map[string]map[string]*internal2.MonetaryInt), + expected: CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Metadata: make(map[string]internal2.Value), + Error: nil, + }, + } +} + +func (c *TestCase) compile(t *testing.T, code string) { + p, err := compiler.Compile(code) + if err != nil { + t.Fatalf("compile error: %v", err) + return + } + c.program = p +} + +func (c *TestCase) setVarsFromJSON(t *testing.T, str string) { + var jsonVars map[string]string + err := json.Unmarshal([]byte(str), &jsonVars) + require.NoError(t, err) + c.vars = jsonVars +} + +func (c *TestCase) setBalance(account, asset string, amount int64) { + if _, ok := c.balances[account]; !ok { + c.balances[account] = make(map[string]*internal2.MonetaryInt) + } + c.balances[account][asset] = internal2.NewMonetaryInt(amount) +} + +func test(t *testing.T, testCase TestCase) { + testImpl(t, testCase.program, testCase.expected, func(m *Machine) error { + if err := m.SetVarsFromJSON(testCase.vars); err != nil { + return err + } + + store := StaticStore{} + for account, balances := range testCase.balances { + store[account] = &AccountWithBalances{ + Account: ledger.Account{ + Address: account, + Metadata: testCase.meta[account], + }, + Balances: func() map[string]*big.Int { + ret := make(map[string]*big.Int) + for asset, balance := range balances { + ret[asset] = (*big.Int)(balance) + } + return ret + }(), + } + } + + _, _, err := m.ResolveResources(context.Background(), store) + if err != nil { + return err + } + + err = m.ResolveBalances(context.Background(), store) + if err != nil { + return err + } + + return m.Execute() + }) +} + +func testImpl(t *testing.T, prog *program.Program, 
expected CaseResult, exec func(*Machine) error) { + printed := []internal2.Value{} + + var wg sync.WaitGroup + wg.Add(1) + + require.NotNil(t, prog) + + m := NewMachine(*prog) + m.Debug = DEBUG + m.Printer = func(c chan internal2.Value) { + for v := range c { + printed = append(printed, v) + } + wg.Done() + } + + err := exec(m) + if expected.Error != nil { + require.True(t, errors.Is(err, expected.Error), "got wrong error, want: %v, got: %v", expected.Error, err) + if expected.ErrorContains != "" { + require.ErrorContains(t, err, expected.ErrorContains) + } + } else { + require.NoError(t, err) + } + if err != nil { + return + } + + if expected.Postings == nil { + expected.Postings = make([]Posting, 0) + } + if expected.Metadata == nil { + expected.Metadata = make(map[string]internal2.Value) + } + + assert.Equalf(t, expected.Postings, m.Postings, "unexpected postings output: %v", m.Postings) + assert.Equalf(t, expected.Metadata, m.TxMeta, "unexpected metadata output: %v", m.TxMeta) + + wg.Wait() + + assert.Equalf(t, expected.Printed, printed, "unexpected metadata output: %v", printed) +} + +func TestFail(t *testing.T) { + tc := NewTestCase() + tc.compile(t, "fail") + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Error: ErrScriptFailed, + } + test(t, tc) +} + +func TestPrint(t *testing.T) { + tc := NewTestCase() + tc.compile(t, "print 29 + 15 - 2") + mi := internal2.MonetaryInt(*big.NewInt(42)) + tc.expected = CaseResult{ + Printed: []internal2.Value{&mi}, + Postings: []Posting{}, + Error: nil, + } + test(t, tc) +} + +func TestSend(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [EUR/2 100] ( + source=@alice + destination=@bob + )`) + tc.setBalance("alice", "EUR/2", 100) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "EUR/2", + Amount: internal2.NewMonetaryInt(100), + Source: "alice", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) +} + +func 
TestVariables(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + account $rider + account $driver + string $description + number $nb + asset $ass + } + send [$ass 999] ( + source=$rider + destination=$driver + ) + set_tx_meta("description", $description) + set_tx_meta("ride", $nb)`) + tc.vars = map[string]string{ + "rider": "users:001", + "driver": "users:002", + "description": "midnight ride", + "nb": "1", + "ass": "EUR/2", + } + tc.setBalance("users:001", "EUR/2", 1000) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "EUR/2", + Amount: internal2.NewMonetaryInt(999), + Source: "users:001", + Destination: "users:002", + }, + }, + Metadata: map[string]internal2.Value{ + "description": internal2.String("midnight ride"), + "ride": internal2.NewMonetaryInt(1), + }, + Error: nil, + } +} + +func TestVariablesJSON(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + account $rider + account $driver + string $description + number $nb + asset $ass + } + send [$ass 999] ( + source=$rider + destination=$driver + ) + set_tx_meta("description", $description) + set_tx_meta("ride", $nb)`) + tc.setVarsFromJSON(t, `{ + "rider": "users:001", + "driver": "users:002", + "description": "midnight ride", + "nb": "1", + "ass": "EUR/2" + }`) + tc.setBalance("users:001", "EUR/2", 1000) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "EUR/2", + Amount: internal2.NewMonetaryInt(999), + Source: "users:001", + Destination: "users:002", + }, + }, + Metadata: map[string]internal2.Value{ + "description": internal2.String("midnight ride"), + "ride": internal2.NewMonetaryInt(1), + }, + Error: nil, + } + test(t, tc) +} + +func TestSource(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + account $balance + account $payment + account $seller + } + send [GEM 15] ( + source = { + $balance + $payment + } + destination = $seller + )`) + tc.setVarsFromJSON(t, `{ + "balance": 
"users:001", + "payment": "payments:001", + "seller": "users:002" + }`) + tc.setBalance("users:001", "GEM", 3) + tc.setBalance("payments:001", "GEM", 12) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(3), + Source: "users:001", + Destination: "users:002", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(12), + Source: "payments:001", + Destination: "users:002", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestAllocation(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + account $rider + account $driver + } + send [GEM 15] ( + source = $rider + destination = { + 80% to $driver + 8% to @a + 12% to @b + } + )`) + tc.setVarsFromJSON(t, `{ + "rider": "users:001", + "driver": "users:002" + }`) + tc.setBalance("users:001", "GEM", 15) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(13), + Source: "users:001", + Destination: "users:002", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(1), + Source: "users:001", + Destination: "a", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(1), + Source: "users:001", + Destination: "b", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestDynamicAllocation(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + portion $p + } + send [GEM 15] ( + source = @a + destination = { + 80% to @b + $p to @c + remaining to @d + } + )`) + tc.setVarsFromJSON(t, `{ + "p": "15%" + }`) + tc.setBalance("a", "GEM", 15) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(13), + Source: "a", + Destination: "b", + }, + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(2), + Source: "a", + Destination: "c", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestSendAll(t *testing.T) { + tc := NewTestCase() + tc.compile(t, 
`send [USD/2 *] ( + source = @users:001 + destination = @platform + )`) + tc.setBalance("users:001", "USD/2", 17) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(17), + Source: "users:001", + Destination: "platform", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestSendAllMulti(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [USD/2 *] ( + source = { + @users:001:wallet + @users:001:credit + } + destination = @platform + ) + `) + tc.setBalance("users:001:wallet", "USD/2", 19) + tc.setBalance("users:001:credit", "USD/2", 22) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(19), + Source: "users:001:wallet", + Destination: "platform", + }, + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(22), + Source: "users:001:credit", + Destination: "platform", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestInsufficientFunds(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + account $balance + account $payment + account $seller + } + send [GEM 16] ( + source = { + $balance + $payment + } + destination = $seller + )`) + tc.setVarsFromJSON(t, `{ + "balance": "users:001", + "payment": "payments:001", + "seller": "users:002" + }`) + tc.setBalance("users:001", "GEM", 3) + tc.setBalance("payments:001", "GEM", 12) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Error: ErrInsufficientFund, + } + test(t, tc) +} + +func TestWorldSource(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 15] ( + source = { + @a + @world + } + destination = @b + )`) + tc.setBalance("a", "GEM", 1) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(1), + Source: "a", + Destination: "b", + }, + { + Asset: "GEM", + Amount: 
internal2.NewMonetaryInt(14), + Source: "world", + Destination: "b", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestNoEmptyPostings(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM 2] ( + source = @world + destination = { + 90% to @a + 10% to @b + } + )`) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "GEM", + Amount: internal2.NewMonetaryInt(2), + Source: "world", + Destination: "a", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestEmptyPostings(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [GEM *] ( + source = @foo + destination = @bar + )`) + tc.setBalance("foo", "GEM", 0) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Source: "foo", + Destination: "bar", + Amount: internal2.NewMonetaryInt(0), + Asset: "GEM", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestAllocateDontTakeTooMuch(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [CREDIT 200] ( + source = { + @users:001 + @users:002 + } + destination = { + 1/2 to @foo + 1/2 to @bar + } + )`) + tc.setBalance("users:001", "CREDIT", 100) + tc.setBalance("users:002", "CREDIT", 110) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "CREDIT", + Amount: internal2.NewMonetaryInt(100), + Source: "users:001", + Destination: "foo", + }, + { + Asset: "CREDIT", + Amount: internal2.NewMonetaryInt(100), + Source: "users:002", + Destination: "bar", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestMetadata(t *testing.T) { + //commission, _ := internal.NewPortionSpecific(*big.NewRat(125, 1000)) + tc := NewTestCase() + tc.compile(t, `vars { + account $sale + account $seller = meta($sale, "seller") + portion $commission = meta($seller, "commission") + } + send [EUR/2 100] ( + source = $sale + destination = { + remaining to $seller + $commission to @platform + } + )`) + tc.setVarsFromJSON(t, `{ + "sale": "sales:042" + }`) + 
tc.meta = map[string]metadata.Metadata{ + "sales:042": { + "seller": "users:053", + }, + "users:053": { + "commission": "12.5%", + }, + } + tc.setBalance("sales:042", "EUR/2", 2500) + tc.setBalance("users:053", "EUR/2", 500) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "EUR/2", + Amount: internal2.NewMonetaryInt(88), + Source: "sales:042", + Destination: "users:053", + }, + { + Asset: "EUR/2", + Amount: internal2.NewMonetaryInt(12), + Source: "sales:042", + Destination: "platform", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestTrackBalances(t *testing.T) { + tc := NewTestCase() + tc.compile(t, ` + send [COIN 50] ( + source = @world + destination = @a + ) + send [COIN 100] ( + source = @a + destination = @b + )`) + tc.setBalance("a", "COIN", 50) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(50), + Source: "world", + Destination: "a", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(100), + Source: "a", + Destination: "b", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestTrackBalances2(t *testing.T) { + tc := NewTestCase() + tc.compile(t, ` + send [COIN 50] ( + source = @a + destination = @z + ) + send [COIN 50] ( + source = @a + destination = @z + )`) + tc.setBalance("a", "COIN", 60) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Error: ErrInsufficientFund, + } + test(t, tc) +} + +func TestTrackBalances3(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [COIN *] ( + source = @foo + destination = { + max [COIN 1000] to @bar + remaining kept + } + ) + send [COIN *] ( + source = @foo + destination = @bar + )`) + tc.setBalance("foo", "COIN", 2000) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(1000), + Source: "foo", + Destination: "bar", + }, + { + Asset: 
"COIN", + Amount: internal2.NewMonetaryInt(1000), + Source: "foo", + Destination: "bar", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestSourceAllotment(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [COIN 100] ( + source = { + 60% from @a + 35.5% from @b + 4.5% from @c + } + destination = @d + )`) + tc.setBalance("a", "COIN", 100) + tc.setBalance("b", "COIN", 100) + tc.setBalance("c", "COIN", 100) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(61), + Source: "a", + Destination: "d", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(35), + Source: "b", + Destination: "d", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(4), + Source: "c", + Destination: "d", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestSourceOverlapping(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [COIN 99] ( + source = { + 15% from { + @b + @a + } + 30% from @a + remaining from @a + } + destination = @world + )`) + tc.setBalance("a", "COIN", 99) + tc.setBalance("b", "COIN", 3) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(3), + Source: "b", + Destination: "world", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(96), + Source: "a", + Destination: "world", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestSourceComplex(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + monetary $max + } + send [COIN 200] ( + source = { + 50% from { + max [COIN 4] from @a + @b + @c + } + remaining from max $max from @d + } + destination = @platform + )`) + tc.setVarsFromJSON(t, `{ + "max": "COIN 120" + }`) + tc.setBalance("a", "COIN", 1000) + tc.setBalance("b", "COIN", 40) + tc.setBalance("c", "COIN", 1000) + tc.setBalance("d", "COIN", 1000) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + 
Asset: "COIN", + Amount: internal2.NewMonetaryInt(4), + Source: "a", + Destination: "platform", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(40), + Source: "b", + Destination: "platform", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(56), + Source: "c", + Destination: "platform", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(100), + Source: "d", + Destination: "platform", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestDestinationComplex(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `send [COIN 100] ( + source = @world + destination = { + 20% to @a + 20% kept + 60% to { + max [COIN 10] to @b + remaining to @c + } + } + )`) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(20), + Source: "world", + Destination: "a", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(10), + Source: "world", + Destination: "b", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(50), + Source: "world", + Destination: "c", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestNeededBalances(t *testing.T) { + p, err := compiler.Compile(`vars { + account $a + } + send [GEM 15] ( + source = { + $a + @b + @world + } + destination = @c + )`) + + if err != nil { + t.Fatalf("did not expect error on Compile, got: %v", err) + } + + m := NewMachine(*p) + + err = m.SetVarsFromJSON(map[string]string{ + "a": "a", + }) + if err != nil { + t.Fatalf("did not expect error on SetVars, got: %v", err) + } + _, _, err = m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.ResolveBalances(context.Background(), EmptyStore) + require.NoError(t, err) +} + +func TestSetTxMeta(t *testing.T) { + p, err := compiler.Compile(` + set_tx_meta("aaa", @platform) + set_tx_meta("bbb", GEM) + set_tx_meta("ccc", 45) + set_tx_meta("ddd", "hello") + set_tx_meta("eee", [COIN 30]) + set_tx_meta("fff", 15%) + `) + 
require.NoError(t, err) + + m := NewMachine(*p) + + _, _, err = m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + err = m.ResolveBalances(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.Execute() + require.NoError(t, err) + + expectedMeta := map[string]string{ + "aaa": "platform", + "bbb": "GEM", + "ccc": "45", + "ddd": "hello", + "eee": "COIN 30", + "fff": "3/20", + } + + resMeta := m.GetTxMetaJSON() + assert.Equal(t, 6, len(resMeta)) + + for key, val := range resMeta { + assert.Equal(t, string(expectedMeta[key]), val) + } +} + +func TestSetAccountMeta(t *testing.T) { + t.Run("all types", func(t *testing.T) { + p, err := compiler.Compile(` + set_account_meta(@platform, "aaa", @platform) + set_account_meta(@platform, "bbb", GEM) + set_account_meta(@platform, "ccc", 45) + set_account_meta(@platform, "ddd", "hello") + set_account_meta(@platform, "eee", [COIN 30]) + set_account_meta(@platform, "fff", 15%)`) + require.NoError(t, err) + + m := NewMachine(*p) + + _, _, err = m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.ResolveBalances(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.Execute() + require.NoError(t, err) + + expectedMeta := metadata.Metadata{ + "aaa": "platform", + "bbb": "GEM", + "ccc": "45", + "ddd": "hello", + "eee": "COIN 30", + "fff": "3/20", + } + + resMeta := m.GetAccountsMetaJSON() + assert.Equal(t, 1, len(resMeta)) + + for acc, meta := range resMeta { + assert.Equal(t, "platform", acc) + assert.Equal(t, 6, len(meta)) + for key, val := range meta { + assert.Equal(t, expectedMeta[key], val) + } + } + }) + + t.Run("with vars", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + account $acc + } + send [EUR/2 100] ( + source = @world + destination = $acc + ) + set_account_meta($acc, "fees", 1%) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "acc": 
"test", + })) + + _, _, err = m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.ResolveBalances(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.Execute() + require.NoError(t, err) + + expectedMeta := map[string]json.RawMessage{ + "fees": json.RawMessage("1/100"), + } + + resMeta := m.GetAccountsMetaJSON() + assert.Equal(t, 1, len(resMeta)) + + for acc, meta := range resMeta { + assert.Equal(t, "test", acc) + assert.Equal(t, 1, len(meta)) + for key, val := range meta { + assert.Equal(t, string(expectedMeta[key]), val) + } + } + }) +} + +func TestVariableBalance(t *testing.T) { + script := ` + vars { + monetary $initial = balance(@A, USD/2) + } + send [USD/2 100] ( + source = { + @A + @C + } + destination = { + max $initial to @B + remaining to @D + } + )` + + t.Run("1", func(t *testing.T) { + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("A", "USD/2", 40) + tc.setBalance("C", "USD/2", 90) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(40), + Source: "A", + Destination: "B", + }, + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(60), + Source: "C", + Destination: "D", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("2", func(t *testing.T) { + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("A", "USD/2", 400) + tc.setBalance("C", "USD/2", 90) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(100), + Source: "A", + Destination: "B", + }, + }, + Error: nil, + } + test(t, tc) + }) + + script = ` + vars { + account $acc + monetary $initial = balance($acc, USD/2) + } + send [USD/2 100] ( + source = { + $acc + @C + } + destination = { + max $initial to @B + remaining to @D + } + )` + + t.Run("3", func(t *testing.T) { + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("A", 
"USD/2", 40) + tc.setBalance("C", "USD/2", 90) + tc.setVarsFromJSON(t, `{"acc": "A"}`) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(40), + Source: "A", + Destination: "B", + }, + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(60), + Source: "C", + Destination: "D", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("4", func(t *testing.T) { + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("A", "USD/2", 400) + tc.setBalance("C", "USD/2", 90) + tc.setVarsFromJSON(t, `{"acc": "A"}`) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD/2", + Amount: internal2.NewMonetaryInt(100), + Source: "A", + Destination: "B", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("5", func(t *testing.T) { + tc := NewTestCase() + tc.compile(t, ` + vars { + monetary $max = balance(@maxAcc, COIN) + } + send [COIN 200] ( + source = { + 50% from { + max [COIN 4] from @a + @b + @c + } + remaining from max $max from @d + } + destination = @platform + )`) + tc.setBalance("maxAcc", "COIN", 120) + tc.setBalance("a", "COIN", 1000) + tc.setBalance("b", "COIN", 40) + tc.setBalance("c", "COIN", 1000) + tc.setBalance("d", "COIN", 1000) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(4), + Source: "a", + Destination: "platform", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(40), + Source: "b", + Destination: "platform", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(56), + Source: "c", + Destination: "platform", + }, + { + Asset: "COIN", + Amount: internal2.NewMonetaryInt(100), + Source: "d", + Destination: "platform", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("send negative monetary", func(t *testing.T) { + tc := NewTestCase() + script = ` + vars { + monetary $amount = balance(@world, USD/2) + 
} + send $amount ( + source = @A + destination = @B + )` + tc.compile(t, script) + tc.setBalance("world", "USD/2", -40) + tc.expected = CaseResult{ + Error: ErrNegativeMonetaryAmount, + ErrorContains: "must be non-negative", + } + test(t, tc) + }) +} + +func TestVariablesParsing(t *testing.T) { + t.Run("account", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + account $acc + } + set_tx_meta("account", $acc) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "acc": "valid:acc", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "acc": "invalid-acc", + })) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "acc": "valid:acc", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "acc": "invalid-acc", + })) + }) + + t.Run("asset", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + asset $ass + } + set_tx_meta("asset", $ass) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "ass": "USD/2", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "ass": "USD-2", + })) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "ass": "USD/2", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "ass": "USD-2", + })) + }) + + t.Run("monetary", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + monetary $mon + } + set_tx_meta("monetary", $mon) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "mon": "EUR/2 100", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "mon": "invalid-asset 100", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "mon": "EUR/2", + })) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "mon": "EUR/2 100", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "mon": "invalid-asset 100", + })) + + 
require.Error(t, m.SetVarsFromJSON(map[string]string{ + "mon": "EUR/2 null", + })) + }) + + t.Run("portion", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + portion $por + } + set_tx_meta("portion", $por) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "por": "1/2", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "por": "", + })) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "por": "1/2", + })) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "por": "50%", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "por": "3/2", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "por": "200%", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "por": "", + })) + }) + + t.Run("string", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + string $str + } + set_tx_meta("string", $str) + `) + require.NoError(t, err) + + m := NewMachine(*p) + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "str": "valid string", + })) + }) + + t.Run("number", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + number $nbr + } + set_tx_meta("number", $nbr) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.NoError(t, m.SetVarsFromJSON(map[string]string{ + "nbr": "100", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "nbr": "string", + })) + + require.Error(t, m.SetVarsFromJSON(map[string]string{ + "nbr": `nil`, + })) + }) + + t.Run("missing variable", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + number $nbr + string $str + } + set_tx_meta("number", $nbr) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.ErrorContains(t, m.SetVarsFromJSON(map[string]string{ + "nbr": "100", + }), "missing variable $str") + }) + + t.Run("extraneous variable SetVars", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + number $nbr + } 
+ set_tx_meta("number", $nbr) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.ErrorContains(t, m.SetVarsFromJSON(map[string]string{ + "nbr": "100", + "nbr2": "100", + }), "extraneous variable $nbr2") + }) + + t.Run("extraneous variable SetVarsFromJSON", func(t *testing.T) { + p, err := compiler.Compile(` + vars { + number $nbr + } + set_tx_meta("number", $nbr) + `) + require.NoError(t, err) + + m := NewMachine(*p) + + require.ErrorContains(t, m.SetVarsFromJSON(map[string]string{ + "nbr": `100`, + "nbr2": `100`, + }), "extraneous variable $nbr2") + }) +} + +func TestVariablesErrors(t *testing.T) { + tc := NewTestCase() + tc.compile(t, `vars { + monetary $mon + } + send $mon ( + source = @alice + destination = @bob + )`) + tc.setBalance("alice", "COIN", 10) + tc.vars = map[string]string{ + "mon": "COIN -1", + } + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Error: ErrInvalidVars, + ErrorContains: "negative amount", + } + test(t, tc) +} + +func TestSetVarsFromJSON(t *testing.T) { + + type testCase struct { + name string + script string + expectedError error + vars map[string]string + } + for _, tc := range []testCase{ + { + name: "missing var", + script: `vars { + account $dest + } + send [COIN 99] ( + source = @world + destination = $dest + )`, + expectedError: fmt.Errorf("missing variable $dest"), + }, + { + name: "invalid format for account", + script: `vars { + account $dest + } + send [COIN 99] ( + source = @world + destination = $dest + )`, + vars: map[string]string{ + "dest": "invalid-acc", + }, + expectedError: fmt.Errorf("invalid JSON value for variable $dest of type account: value invalid-acc: accounts should respect pattern ^[a-zA-Z_]+[a-zA-Z0-9_:]*$"), + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + p, err := compiler.Compile(tc.script) + require.NoError(t, err) + + m := NewMachine(*p) + err = m.SetVarsFromJSON(tc.vars) + if tc.expectedError != nil { + require.Error(t, 
err) + //TODO(gfyrag): refine error handling of SetVars/ResolveResources/ResolveBalances + require.Equal(t, tc.expectedError.Error(), err.Error()) + } else { + require.Nil(t, err) + } + }) + } +} + +func TestResolveResources(t *testing.T) { + + type testCase struct { + name string + script string + expectedError error + vars map[string]string + } + for _, tc := range []testCase{ + { + name: "missing metadata", + script: `vars { + account $sale + account $seller = meta($sale, "seller") + } + send [COIN *] ( + source = $sale + destination = $seller + )`, + vars: map[string]string{ + "sale": "sales:042", + }, + expectedError: ErrResourceResolutionMissingMetadata, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + p, err := compiler.Compile(tc.script) + require.NoError(t, err) + + m := NewMachine(*p) + require.NoError(t, m.SetVarsFromJSON(tc.vars)) + _, _, err = m.ResolveResources(context.Background(), EmptyStore) + if tc.expectedError != nil { + require.Error(t, err) + require.True(t, errors.Is(err, tc.expectedError)) + } else { + require.NoError(t, err) + } + }) + } +} + +func TestResolveBalances(t *testing.T) { + + type testCase struct { + name string + script string + expectedError error + vars map[string]string + store Store + } + for _, tc := range []testCase{ + { + name: "balance function with negative balance", + store: StaticStore{ + "users:001": &AccountWithBalances{ + Account: ledger.Account{ + Address: "users:001", + Metadata: metadata.Metadata{}, + }, + Balances: map[string]*big.Int{ + "COIN": big.NewInt(-100), + }, + }, + }, + script: ` + vars { + monetary $bal = balance(@users:001, COIN) + } + send $bal ( + source = @users:001 + destination = @world + )`, + expectedError: ErrNegativeMonetaryAmount, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + p, err := compiler.Compile(tc.script) + require.NoError(t, err) + + m := NewMachine(*p) + require.NoError(t, m.SetVarsFromJSON(tc.vars)) + _, _, err = 
m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + store := tc.store + if store == nil { + store = EmptyStore + } + + err = m.ResolveBalances(context.Background(), store) + if tc.expectedError != nil { + require.Error(t, err) + require.True(t, errors.Is(err, tc.expectedError)) + } else { + require.NoError(t, err) + } + }) + } +} + +func TestMachine(t *testing.T) { + p, err := compiler.Compile(` + vars { + account $dest + } + send [COIN 99] ( + source = @world + destination = $dest + )`) + require.NoError(t, err) + + t.Run("with debug", func(t *testing.T) { + m := NewMachine(*p) + m.Debug = true + + err = m.SetVarsFromJSON(map[string]string{ + "dest": "charlie", + }) + require.NoError(t, err) + + _, _, err := m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.ResolveBalances(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.Execute() + require.NoError(t, err) + }) + + t.Run("err resources", func(t *testing.T) { + m := NewMachine(*p) + err := m.Execute() + require.True(t, errors.Is(err, ErrResourcesNotInitialized)) + }) + + t.Run("err balances not initialized", func(t *testing.T) { + m := NewMachine(*p) + + err = m.SetVarsFromJSON(map[string]string{ + "dest": "charlie", + }) + require.NoError(t, err) + + _, _, err := m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.Execute() + require.True(t, errors.Is(err, ErrBalancesNotInitialized)) + }) + + t.Run("err resolve resources twice", func(t *testing.T) { + m := NewMachine(*p) + + err = m.SetVarsFromJSON(map[string]string{ + "dest": "charlie", + }) + require.NoError(t, err) + + _, _, err := m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + _, _, err = m.ResolveResources(context.Background(), EmptyStore) + require.ErrorContains(t, err, "tried to call ResolveResources twice") + }) + + t.Run("err balances before resources", func(t *testing.T) { + m := 
NewMachine(*p) + + err := m.ResolveBalances(context.Background(), EmptyStore) + require.ErrorContains(t, err, "tried to resolve balances before resources") + }) + + t.Run("err resolve balances twice", func(t *testing.T) { + m := NewMachine(*p) + + err = m.SetVarsFromJSON(map[string]string{ + "dest": "charlie", + }) + require.NoError(t, err) + + _, _, err := m.ResolveResources(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.ResolveBalances(context.Background(), EmptyStore) + require.NoError(t, err) + + err = m.ResolveBalances(context.Background(), EmptyStore) + require.ErrorContains(t, err, "tried to call ResolveBalances twice") + }) + + t.Run("err missing var", func(t *testing.T) { + m := NewMachine(*p) + + _, _, err := m.ResolveResources(context.Background(), EmptyStore) + require.Error(t, err) + }) +} + +func TestVariableAsset(t *testing.T) { + script := ` + vars { + asset $ass + monetary $bal = balance(@alice, $ass) + } + + send [$ass 15] ( + source = { + @alice + @bob + } + destination = @swap + ) + + send [$ass *] ( + source = @swap + destination = { + max $bal to @alice_2 + remaining to @bob_2 + } + )` + + tc := NewTestCase() + tc.compile(t, script) + tc.vars = map[string]string{ + "ass": "USD", + } + tc.setBalance("alice", "USD", 10) + tc.setBalance("bob", "USD", 10) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(10), + Source: "alice", + Destination: "swap", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(5), + Source: "bob", + Destination: "swap", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(10), + Source: "swap", + Destination: "alice_2", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(5), + Source: "swap", + Destination: "bob_2", + }, + }, + Error: nil, + } + test(t, tc) +} + +func TestSaveFromAccount(t *testing.T) { + t.Run("simple", func(t *testing.T) { + script := ` + save [USD 10] from @alice + + 
send [USD 30] ( + source = { + @alice + @world + } + destination = @bob + )` + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("alice", "USD", 20) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(10), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(20), + Source: "world", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("save all", func(t *testing.T) { + script := ` + save [USD *] from @alice + + send [USD 30] ( + source = { + @alice + @world + } + destination = @bob + )` + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("alice", "USD", 20) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(0), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(30), + Source: "world", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("save more than balance", func(t *testing.T) { + script := ` + save [USD 30] from @alice + + send [USD 30] ( + source = { + @alice + @world + } + destination = @bob + )` + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("alice", "USD", 20) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(0), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(30), + Source: "world", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("with asset var", func(t *testing.T) { + script := ` + vars { + asset $ass + } + save [$ass 10] from @alice + + send [$ass 30] ( + source = { + @alice + @world + } + destination = @bob + )` + tc := NewTestCase() + tc.compile(t, script) + tc.vars = map[string]string{ + "ass": "USD", + } + tc.setBalance("alice", 
"USD", 20) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(10), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(20), + Source: "world", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("with monetary var", func(t *testing.T) { + script := ` + vars { + monetary $mon + } + + save $mon from @alice + + send [USD 30] ( + source = { + @alice + @world + } + destination = @bob + )` + tc := NewTestCase() + tc.compile(t, script) + tc.vars = map[string]string{ + "mon": "USD 10", + } + tc.setBalance("alice", "USD", 20) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(10), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(20), + Source: "world", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("multi postings", func(t *testing.T) { + script := ` + send [USD 10] ( + source = @alice + destination = @bob + ) + + save [USD 5] from @alice + + send [USD 30] ( + source = { + @alice + @world + } + destination = @bob + )` + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("alice", "USD", 20) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(10), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(5), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(25), + Source: "world", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("save a different asset", func(t *testing.T) { + script := ` + save [COIN 100] from @alice + + send [USD 30] ( + source = { + @alice + @world + } + destination = @bob + )` + tc := NewTestCase() + 
tc.compile(t, script) + tc.setBalance("alice", "COIN", 100) + tc.setBalance("alice", "USD", 20) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{ + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(20), + Source: "alice", + Destination: "bob", + }, + { + Asset: "USD", + Amount: internal2.NewMonetaryInt(10), + Source: "world", + Destination: "bob", + }, + }, + Error: nil, + } + test(t, tc) + }) + + t.Run("negative amount", func(t *testing.T) { + script := ` + vars { + monetary $amt = balance(@A, USD) + } + save $amt from @A` + tc := NewTestCase() + tc.compile(t, script) + tc.setBalance("A", "USD", -100) + tc.expected = CaseResult{ + Printed: []internal2.Value{}, + Postings: []Posting{}, + Error: ErrNegativeMonetaryAmount, + } + test(t, tc) + }) +} diff --git a/internal/machine/vm/program/instructions.go b/internal/machine/vm/program/instructions.go new file mode 100644 index 000000000..8d109451e --- /dev/null +++ b/internal/machine/vm/program/instructions.go @@ -0,0 +1,86 @@ +package program + +const ( + OP_APUSH = byte(iota + 1) + OP_BUMP // *N => *N + OP_DELETE // + OP_IADD // + => + OP_ISUB // - => + OP_PRINT // + OP_FAIL // + OP_ASSET // => + OP_MONETARY_NEW // => + OP_MONETARY_ADD // + => // panics if not same asset + OP_MONETARY_SUB // - => // panics if not same asset + OP_MAKE_ALLOTMENT // *N => + OP_TAKE_ALL // => + OP_TAKE_ALWAYS // => // takes amount from account unconditionally + OP_TAKE // => // fails with EXIT_INSUFFICIENT_FUNDS if not enough + OP_TAKE_MAX // => // Doesn't fail on insufficient funds. Either missing or remaining is zero. 
+ OP_FUNDING_ASSEMBLE // *N => (first has highest priority) + OP_FUNDING_SUM // => + OP_FUNDING_REVERSE // => + OP_REPAY // + OP_ALLOC // => *N + OP_SEND // + OP_TX_META // + OP_ACCOUNT_META // + OP_SAVE +) + +func OpcodeName(op byte) string { + switch op { + case OP_APUSH: + return "OP_APUSH" + case OP_BUMP: + return "OP_BUMP" + case OP_DELETE: + return "OP_DELETE" + case OP_IADD: + return "OP_IADD" + case OP_ISUB: + return "OP_ISUB" + case OP_PRINT: + return "OP_PRINT" + case OP_FAIL: + return "OP_FAIL" + case OP_ASSET: + return "OP_ASSET" + case OP_MONETARY_NEW: + return "OP_MONETARY_NEW" + case OP_MONETARY_ADD: + return "OP_MONETARY_ADD" + case OP_MONETARY_SUB: + return "OP_MONETARY_SUB" + case OP_MAKE_ALLOTMENT: + return "OP_MAKE_ALLOTMENT" + case OP_TAKE_ALL: + return "OP_TAKE_ALL" + case OP_TAKE_ALWAYS: + return "OP_TAKE_ALWAYS" + case OP_TAKE: + return "OP_TAKE" + case OP_TAKE_MAX: + return "OP_TAKE_MAX" + case OP_FUNDING_ASSEMBLE: + return "OP_FUNDING_ASSEMBLE" + case OP_FUNDING_SUM: + return "OP_FUNDING_SUM" + case OP_FUNDING_REVERSE: + return "OP_FUNDING_REVERSE" + case OP_REPAY: + return "OP_REPAY" + case OP_ALLOC: + return "OP_ALLOC" + case OP_SEND: + return "OP_SEND" + case OP_TX_META: + return "OP_TX_META" + case OP_ACCOUNT_META: + return "OP_ACCOUNT_META" + case OP_SAVE: + return "OP_SAVE" + default: + return "Unknown opcode" + } +} diff --git a/internal/machine/vm/program/program.go b/internal/machine/vm/program/program.go new file mode 100644 index 000000000..73f75ba1c --- /dev/null +++ b/internal/machine/vm/program/program.go @@ -0,0 +1,112 @@ +package program + +import ( + "encoding/binary" + "fmt" + + internal2 "github.com/formancehq/ledger/internal/machine/internal" + "github.com/pkg/errors" +) + +type Program struct { + Instructions []byte + Resources []Resource + Sources []internal2.Address + NeededBalances map[internal2.Address]map[internal2.Address]struct{} +} + +func (p Program) String() string { + out := "Program:\nINSTRUCTIONS\n" + for 
i := 0; i < len(p.Instructions); i++ { + out += fmt.Sprintf("%02d----- ", i) + switch p.Instructions[i] { + case OP_APUSH: + out += "OP_APUSH " + address := binary.LittleEndian.Uint16(p.Instructions[i+1 : i+3]) + out += fmt.Sprintf("#%d\n", address) + i += 2 + default: + out += OpcodeName(p.Instructions[i]) + "\n" + } + } + + out += fmt.Sprintln("RESOURCES") + i := 0 + for i = 0; i < len(p.Resources); i++ { + out += fmt.Sprintf("%02d ", i) + out += fmt.Sprintf("%v\n", p.Resources[i]) + } + return out +} + +func (p *Program) ParseVariables(vars map[string]internal2.Value) (map[string]internal2.Value, error) { + variables := make(map[string]internal2.Value) + for _, res := range p.Resources { + if variable, ok := res.(Variable); ok { + if val, ok := vars[variable.Name]; ok && val.GetType() == variable.Typ { + variables[variable.Name] = val + switch val.GetType() { + case internal2.TypeAccount: + if err := internal2.ValidateAccountAddress(val.(internal2.AccountAddress)); err != nil { + return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", + variable.Name, string(val.(internal2.AccountAddress))) + } + case internal2.TypeAsset: + if err := internal2.ValidateAsset(val.(internal2.Asset)); err != nil { + return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", + variable.Name, string(val.(internal2.Asset))) + } + case internal2.TypeMonetary: + if err := internal2.ParseMonetary(val.(internal2.Monetary)); err != nil { + return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", + variable.Name, val.(internal2.Monetary).String()) + } + case internal2.TypePortion: + if err := internal2.ValidatePortionSpecific(val.(internal2.Portion)); err != nil { + return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", + variable.Name, val.(internal2.Portion).String()) + } + case internal2.TypeString: + case internal2.TypeNumber: + default: + return nil, fmt.Errorf("unsupported type for variable $%s: %s", + variable.Name, val.GetType()) + } + delete(vars, 
variable.Name) + } else if val, ok := vars[variable.Name]; ok && val.GetType() != variable.Typ { + return nil, fmt.Errorf("wrong type for variable $%s: %s instead of %s", + variable.Name, variable.Typ, val.GetType()) + } else { + return nil, fmt.Errorf("missing variable $%s", variable.Name) + } + } + } + for name := range vars { + return nil, fmt.Errorf("extraneous variable $%s", name) + } + return variables, nil +} + +func (p *Program) ParseVariablesJSON(vars map[string]string) (map[string]internal2.Value, error) { + variables := make(map[string]internal2.Value) + for _, res := range p.Resources { + if param, ok := res.(Variable); ok { + data, ok := vars[param.Name] + if !ok { + return nil, fmt.Errorf("missing variable $%s", param.Name) + } + val, err := internal2.NewValueFromString(param.Typ, data) + if err != nil { + return nil, fmt.Errorf( + "invalid JSON value for variable $%s of type %v: %w", + param.Name, param.Typ, err) + } + variables[param.Name] = val + delete(vars, param.Name) + } + } + for name := range vars { + return nil, fmt.Errorf("extraneous variable $%s", name) + } + return variables, nil +} diff --git a/internal/machine/vm/program/program_test.go b/internal/machine/vm/program/program_test.go new file mode 100644 index 000000000..12303e0d9 --- /dev/null +++ b/internal/machine/vm/program/program_test.go @@ -0,0 +1,18 @@ +package program_test + +import ( + "testing" + + "github.com/formancehq/ledger/internal/machine/script/compiler" + "github.com/stretchr/testify/require" +) + +func TestProgram_String(t *testing.T) { + p, err := compiler.Compile(` + send [COIN 99] ( + source = @world + destination = @alice + )`) + require.NoError(t, err) + _ = p.String() +} diff --git a/internal/machine/vm/program/resource.go b/internal/machine/vm/program/resource.go new file mode 100644 index 000000000..3e591592d --- /dev/null +++ b/internal/machine/vm/program/resource.go @@ -0,0 +1,59 @@ +package program + +import ( + "fmt" + + internal2 
"github.com/formancehq/ledger/internal/machine/internal" +) + +type Resource interface { + GetType() internal2.Type +} + +type Constant struct { + Inner internal2.Value +} + +func (c Constant) GetType() internal2.Type { return c.Inner.GetType() } +func (c Constant) String() string { return fmt.Sprintf("%v", c.Inner) } + +type Variable struct { + Typ internal2.Type + Name string +} + +func (p Variable) GetType() internal2.Type { return p.Typ } +func (p Variable) String() string { return fmt.Sprintf("<%v %v>", p.Typ, p.Name) } + +type VariableAccountMetadata struct { + Typ internal2.Type + Name string + Account internal2.Address + Key string +} + +func (m VariableAccountMetadata) GetType() internal2.Type { return m.Typ } +func (m VariableAccountMetadata) String() string { + return fmt.Sprintf("<%v %v meta(%v, %v)>", m.Typ, m.Name, m.Account, m.Key) +} + +type VariableAccountBalance struct { + Name string + Account internal2.Address + Asset internal2.Address +} + +func (a VariableAccountBalance) GetType() internal2.Type { return internal2.TypeMonetary } +func (a VariableAccountBalance) String() string { + return fmt.Sprintf("<%v %v balance(%v, %v)>", internal2.TypeMonetary, a.Name, a.Account, a.Asset) +} + +type Monetary struct { + Asset internal2.Address + Amount *internal2.MonetaryInt +} + +func (a Monetary) GetType() internal2.Type { return internal2.TypeMonetary } +func (a Monetary) String() string { + return fmt.Sprintf("<%v [%v %v]>", internal2.TypeMonetary, a.Asset, a.Amount) +} diff --git a/internal/machine/vm/program/resource_test.go b/internal/machine/vm/program/resource_test.go new file mode 100644 index 000000000..90c1f8127 --- /dev/null +++ b/internal/machine/vm/program/resource_test.go @@ -0,0 +1,37 @@ +package program + +import ( + "testing" + + internal2 "github.com/formancehq/ledger/internal/machine/internal" + "github.com/stretchr/testify/require" +) + +func TestResource(t *testing.T) { + c := Constant{ + Inner: internal2.NewMonetaryInt(0), + } + 
c.GetType() + require.Equal(t, "0", c.String()) + + v := Variable{ + Typ: internal2.TypeAccount, + Name: "acc", + } + require.Equal(t, "", v.String()) + + vab := VariableAccountBalance{ + Name: "name", + Account: internal2.Address(0), + Asset: internal2.Address(1), + } + require.Equal(t, "", vab.String()) + + vam := VariableAccountMetadata{ + Typ: internal2.TypeMonetary, + Name: "name", + Account: internal2.Address(0), + Key: "key", + } + require.Equal(t, "", vam.String()) +} diff --git a/internal/machine/vm/stack.go b/internal/machine/vm/stack.go new file mode 100644 index 000000000..81d3bacb3 --- /dev/null +++ b/internal/machine/vm/stack.go @@ -0,0 +1,26 @@ +package vm + +import ( + "fmt" + + "github.com/formancehq/ledger/internal/machine/internal" +) + +func (m *Machine) popValue() internal.Value { + l := len(m.Stack) + x := m.Stack[l-1] + m.Stack = m.Stack[:l-1] + return x +} + +func pop[T internal.Value](m *Machine) T { + x := m.popValue() + if v, ok := x.(T); ok { + return v + } + panic(fmt.Errorf("unexpected type '%T' on stack", x)) +} + +func (m *Machine) pushValue(v internal.Value) { + m.Stack = append(m.Stack, v) +} diff --git a/internal/machine/vm/store.go b/internal/machine/vm/store.go new file mode 100644 index 000000000..455218d4d --- /dev/null +++ b/internal/machine/vm/store.go @@ -0,0 +1,65 @@ +package vm + +import ( + "context" + "math/big" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/stack/libs/go-libs/metadata" +) + +type Store interface { + GetBalance(ctx context.Context, address, asset string) (*big.Int, error) + GetAccount(ctx context.Context, address string) (*ledger.Account, error) +} + +type emptyStore struct{} + +func (e *emptyStore) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { + return new(big.Int), nil +} + +func (e *emptyStore) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { + return &ledger.Account{ + Address: address, + Metadata: 
metadata.Metadata{}, + }, nil +} + +var _ Store = (*emptyStore)(nil) + +var EmptyStore = &emptyStore{} + +type AccountWithBalances struct { + ledger.Account + Balances map[string]*big.Int +} + +type StaticStore map[string]*AccountWithBalances + +func (s StaticStore) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { + account, ok := s[address] + if !ok { + return new(big.Int), nil + } + balance, ok := account.Balances[asset] + if !ok { + return new(big.Int), nil + } + + return balance, nil +} + +func (s StaticStore) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { + account, ok := s[address] + if !ok { + return &ledger.Account{ + Address: address, + Metadata: metadata.Metadata{}, + }, nil + } + + return &account.Account, nil +} + +var _ Store = StaticStore{} diff --git a/internal/metadata.go b/internal/metadata.go new file mode 100644 index 000000000..177e7f640 --- /dev/null +++ b/internal/metadata.go @@ -0,0 +1,37 @@ +package ledger + +import ( + "math/big" + + "github.com/formancehq/stack/libs/go-libs/metadata" +) + +const ( + formanceNamespace = "com.formance.spec/" + revertKey = "state/reverts" + + MetaTargetTypeAccount = "ACCOUNT" + MetaTargetTypeTransaction = "TRANSACTION" +) + +func SpecMetadata(name string) string { + return formanceNamespace + name +} + +func MarkReverts(m metadata.Metadata, txID *big.Int) metadata.Metadata { + return m.Merge(RevertMetadata(txID)) +} + +func RevertMetadataSpecKey() string { + return SpecMetadata(revertKey) +} + +func ComputeMetadata(key, value string) metadata.Metadata { + return metadata.Metadata{ + key: value, + } +} + +func RevertMetadata(tx *big.Int) metadata.Metadata { + return ComputeMetadata(RevertMetadataSpecKey(), tx.String()) +} diff --git a/internal/numscript.go b/internal/numscript.go new file mode 100644 index 000000000..bac95b377 --- /dev/null +++ b/internal/numscript.go @@ -0,0 +1,119 @@ +package ledger + +import ( + "fmt" + "sort" + "strings" + + 
"github.com/formancehq/stack/libs/go-libs/metadata" +) + +type variable struct { + name string + value string +} + +func TxToScriptData(txData TransactionData) RunScript { + sb := strings.Builder{} + monetaryToVars := map[string]variable{} + accountsToVars := map[string]variable{} + i := 0 + j := 0 + for _, p := range txData.Postings { + if _, ok := accountsToVars[p.Source]; !ok { + if p.Source != WORLD { + accountsToVars[p.Source] = variable{ + name: fmt.Sprintf("va%d", i), + value: p.Source, + } + i++ + } + } + if _, ok := accountsToVars[p.Destination]; !ok { + if p.Destination != WORLD { + accountsToVars[p.Destination] = variable{ + name: fmt.Sprintf("va%d", i), + value: p.Destination, + } + i++ + } + } + mon := fmt.Sprintf("[%s %s]", p.Amount.String(), p.Asset) + if _, ok := monetaryToVars[mon]; !ok { + monetaryToVars[mon] = variable{ + name: fmt.Sprintf("vm%d", j), + value: fmt.Sprintf("%s %s", p.Asset, p.Amount.String()), + } + j++ + } + } + + sb.WriteString("vars {\n") + accVars := make([]string, 0) + for _, v := range accountsToVars { + accVars = append(accVars, v.name) + } + sort.Strings(accVars) + for _, v := range accVars { + sb.WriteString(fmt.Sprintf("\taccount $%s\n", v)) + } + monVars := make([]string, 0) + for _, v := range monetaryToVars { + monVars = append(monVars, v.name) + } + sort.Strings(monVars) + for _, v := range monVars { + sb.WriteString(fmt.Sprintf("\tmonetary $%s\n", v)) + } + sb.WriteString("}\n") + + for _, p := range txData.Postings { + m := fmt.Sprintf("[%s %s]", p.Amount.String(), p.Asset) + mon, ok := monetaryToVars[m] + if !ok { + panic(fmt.Sprintf("monetary %s not found", m)) + } + sb.WriteString(fmt.Sprintf("send $%s (\n", mon.name)) + if p.Source == WORLD { + sb.WriteString("\tsource = @world\n") + } else { + src, ok := accountsToVars[p.Source] + if !ok { + panic(fmt.Sprintf("source %s not found", p.Source)) + } + sb.WriteString(fmt.Sprintf("\tsource = $%s\n", src.name)) + } + if p.Destination == WORLD { + 
sb.WriteString("\tdestination = @world\n") + } else { + dest, ok := accountsToVars[p.Destination] + if !ok { + panic(fmt.Sprintf("destination %s not found", p.Destination)) + } + sb.WriteString(fmt.Sprintf("\tdestination = $%s\n", dest.name)) + } + sb.WriteString(")\n") + } + + vars := map[string]string{} + for _, v := range accountsToVars { + vars[v.name] = v.value + } + for _, v := range monetaryToVars { + vars[v.name] = v.value + } + + if txData.Metadata == nil { + txData.Metadata = metadata.Metadata{} + } + + return RunScript{ + Script: Script{ + Plain: sb.String(), + Vars: vars, + }, + Timestamp: txData.Timestamp, + Metadata: txData.Metadata, + Reference: txData.Reference, + } +} diff --git a/internal/opentelemetry/metrics/metrics.go b/internal/opentelemetry/metrics/metrics.go new file mode 100644 index 000000000..1cc65dea0 --- /dev/null +++ b/internal/opentelemetry/metrics/metrics.go @@ -0,0 +1,89 @@ +package metrics + +import ( + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/metric/noop" +) + +type GlobalRegistry interface { + APILatencies() metric.Int64Histogram + StatusCodes() metric.Int64Counter + ActiveLedgers() metric.Int64UpDownCounter +} + +type globalRegistry struct { + // API Latencies + apiLatencies metric.Int64Histogram + statusCodes metric.Int64Counter + activeLedgers metric.Int64UpDownCounter +} + +func RegisterGlobalRegistry(meterProvider metric.MeterProvider) (GlobalRegistry, error) { + meter := meterProvider.Meter("global") + + apiLatencies, err := meter.Int64Histogram( + "ledger.api.time", + metric.WithUnit("ms"), + metric.WithDescription("Latency of API calls"), + ) + if err != nil { + return nil, err + } + + statusCodes, err := meter.Int64Counter( + "ledger.api.status", + metric.WithUnit("1"), + metric.WithDescription("Status codes of API calls"), + ) + if err != nil { + return nil, err + } + + activeLedgers, err := meter.Int64UpDownCounter( + "ledger.api.ledgers", + metric.WithUnit("1"), + metric.WithDescription("Number of 
active ledgers"), + ) + if err != nil { + return nil, err + } + + return &globalRegistry{ + apiLatencies: apiLatencies, + statusCodes: statusCodes, + activeLedgers: activeLedgers, + }, nil +} + +func (gm *globalRegistry) APILatencies() metric.Int64Histogram { + return gm.apiLatencies +} + +func (gm *globalRegistry) StatusCodes() metric.Int64Counter { + return gm.statusCodes +} + +func (gm *globalRegistry) ActiveLedgers() metric.Int64UpDownCounter { + return gm.activeLedgers +} + +type noOpRegistry struct{} + +func NewNoOpRegistry() *noOpRegistry { + return &noOpRegistry{} +} + +func (nm *noOpRegistry) APILatencies() metric.Int64Histogram { + histogram, _ := noop.NewMeterProvider().Meter("ledger").Int64Histogram("api_latencies") + return histogram +} + +func (nm *noOpRegistry) StatusCodes() metric.Int64Counter { + counter, _ := noop.NewMeterProvider().Meter("ledger").Int64Counter("status_codes") + return counter +} + +func (nm *noOpRegistry) ActiveLedgers() metric.Int64UpDownCounter { + counter, _ := noop.NewMeterProvider().Meter("ledger").Int64UpDownCounter("active_ledgers") + return counter +} diff --git a/internal/opentelemetry/tracer/tracer.go b/internal/opentelemetry/tracer/tracer.go new file mode 100644 index 000000000..97ab0fe1b --- /dev/null +++ b/internal/opentelemetry/tracer/tracer.go @@ -0,0 +1,14 @@ +package tracer + +import ( + "context" + + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/trace" +) + +var Tracer = otel.Tracer("com.formance.ledger") + +func Start(ctx context.Context, name string, opts ...trace.SpanStartOption) (context.Context, trace.Span) { + return Tracer.Start(ctx, name, opts...) 
+} diff --git a/internal/pgtesting/testing.go b/internal/pgtesting/testing.go deleted file mode 100644 index 03081cf52..000000000 --- a/internal/pgtesting/testing.go +++ /dev/null @@ -1,84 +0,0 @@ -package pgtesting - -import ( - "context" - "fmt" - "os" - "time" - - "github.com/jackc/pgx/v4" - "github.com/ory/dockertest/v3" -) - -type PGServer struct { - url string - close func() error -} - -func (s *PGServer) ConnString() string { - return s.url -} - -func (s *PGServer) Close() error { - if s.close == nil { - return nil - } - return s.close() -} - -const MaxConnections = 3 - -func PostgresServer() (*PGServer, error) { - - externalConnectionString := os.Getenv("NUMARY_STORAGE_POSTGRES_CONN_STRING") - if externalConnectionString != "" { - return &PGServer{ - url: externalConnectionString, - close: func() error { - return nil - }, - }, nil - } - - pool, err := dockertest.NewPool("") - if err != nil { - return nil, err - } - - resource, err := pool.RunWithOptions(&dockertest.RunOptions{ - Repository: "postgres", - Tag: "13.4-alpine", - Env: []string{ - "POSTGRES_USER=root", - "POSTGRES_PASSWORD=root", - "POSTGRES_DB=ledger", - }, - Entrypoint: nil, - Cmd: []string{"-c", fmt.Sprintf("max_connections=%d", MaxConnections), "-c", "superuser-reserved-connections=0"}, - }) - if err != nil { - return nil, err - } - - connString := "postgresql://root:root@localhost:" + resource.GetPort("5432/tcp") + "/ledger" - try := time.Duration(0) - delay := 200 * time.Millisecond - for try*delay < 5*time.Second { - conn, err := pgx.Connect(context.Background(), connString) - if err != nil { - try++ - <-time.After(delay) - continue - } - _ = conn.Close(context.Background()) - break - } - - return &PGServer{ - url: "postgresql://root:root@localhost:" + resource.GetPort("5432/tcp") + "/ledger", - close: func() error { - return pool.Purge(resource) - }, - }, nil - -} diff --git a/pkg/core/posting.go b/internal/posting.go similarity index 67% rename from pkg/core/posting.go rename to 
internal/posting.go index 2c0a81090..c93cd6a17 100644 --- a/pkg/core/posting.go +++ b/internal/posting.go @@ -1,32 +1,39 @@ -package core +package ledger import ( "database/sql/driver" "encoding/json" + "math/big" "regexp" "github.com/pkg/errors" ) type Posting struct { - Source string `json:"source"` - Destination string `json:"destination"` - Amount *MonetaryInt `json:"amount"` - Asset string `json:"asset"` + Source string `json:"source"` + Destination string `json:"destination"` + Amount *big.Int `json:"amount"` + Asset string `json:"asset"` +} + +func NewPosting(source string, destination string, asset string, amount *big.Int) Posting { + return Posting{ + Source: source, + Destination: destination, + Amount: amount, + Asset: asset, + } } type Postings []Posting -func (ps Postings) Reverse() { - if len(ps) == 1 { - ps[0].Source, ps[0].Destination = ps[0].Destination, ps[0].Source - return +func (p Postings) Reverse() { + for i := range p { + p[i].Source, p[i].Destination = p[i].Destination, p[i].Source } - for i := len(ps)/2 - 1; i >= 0; i-- { - opp := len(ps) - 1 - i - ps[i], ps[opp] = ps[opp], ps[i] - ps[i].Source, ps[i].Destination = ps[i].Destination, ps[i].Source - ps[opp].Source, ps[opp].Destination = ps[opp].Destination, ps[opp].Source + + for i := 0; i < len(p)/2; i++ { + p[i], p[len(p)-i-1] = p[len(p)-i-1], p[i] } } @@ -61,7 +68,7 @@ func ValidateAddress(addr string) bool { func (p Postings) Validate() (int, error) { for i, p := range p { - if p.Amount.Ltz() { + if p.Amount.Cmp(Zero) < 0 { return i, errors.New("negative amount") } if !ValidateAddress(p.Source) { diff --git a/pkg/core/posting_test.go b/internal/posting_test.go similarity index 61% rename from pkg/core/posting_test.go rename to internal/posting_test.go index 06a0b3f9d..13114815c 100644 --- a/pkg/core/posting_test.go +++ b/internal/posting_test.go @@ -1,9 +1,10 @@ -package core +package ledger import ( + "math/big" "testing" - "github.com/google/go-cmp/cmp" + 
"github.com/stretchr/testify/require" ) func TestReverseMultiple(t *testing.T) { @@ -11,13 +12,13 @@ func TestReverseMultiple(t *testing.T) { { Source: "world", Destination: "users:001", - Amount: NewMonetaryInt(100), + Amount: big.NewInt(100), Asset: "COIN", }, { Source: "users:001", Destination: "payments:001", - Amount: NewMonetaryInt(100), + Amount: big.NewInt(100), Asset: "COIN", }, } @@ -26,22 +27,19 @@ func TestReverseMultiple(t *testing.T) { { Source: "payments:001", Destination: "users:001", - Amount: NewMonetaryInt(100), + Amount: big.NewInt(100), Asset: "COIN", }, { Source: "users:001", Destination: "world", - Amount: NewMonetaryInt(100), + Amount: big.NewInt(100), Asset: "COIN", }, } p.Reverse() - - if diff := cmp.Diff(expected, p); diff != "" { - t.Errorf("Reverse() mismatch (-want +got):\n%s", diff) - } + require.Equal(t, expected, p) } func TestReverseSingle(t *testing.T) { @@ -49,7 +47,7 @@ func TestReverseSingle(t *testing.T) { { Source: "world", Destination: "users:001", - Amount: NewMonetaryInt(100), + Amount: big.NewInt(100), Asset: "COIN", }, } @@ -58,14 +56,11 @@ func TestReverseSingle(t *testing.T) { { Source: "users:001", Destination: "world", - Amount: NewMonetaryInt(100), + Amount: big.NewInt(100), Asset: "COIN", }, } p.Reverse() - - if diff := cmp.Diff(expected, p); diff != "" { - t.Errorf("Reverse() mismatch (-want +got):\n%s", diff) - } + require.Equal(t, expected, p) } diff --git a/internal/script.go b/internal/script.go new file mode 100644 index 000000000..f6840973f --- /dev/null +++ b/internal/script.go @@ -0,0 +1,17 @@ +package ledger + +import ( + "github.com/formancehq/stack/libs/go-libs/metadata" +) + +type RunScript struct { + Script + Timestamp Time `json:"timestamp"` + Metadata metadata.Metadata `json:"metadata"` + Reference string `json:"reference"` +} + +type Script struct { + Plain string `json:"plain"` + Vars map[string]string `json:"vars" swaggertype:"object"` +} diff --git a/internal/storage/driver/cli.go 
b/internal/storage/driver/cli.go new file mode 100644 index 000000000..5290220d8 --- /dev/null +++ b/internal/storage/driver/cli.go @@ -0,0 +1,67 @@ +package driver + +import ( + "context" + "io" + "time" + + storage "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/stack/libs/go-libs/health" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/uptrace/bun" + "go.uber.org/fx" +) + +// TODO(gfyrag): maybe move flag handling inside cmd/internal (as telemetry flags) +// Or make the inverse (move analytics flags to pkg/analytics) +// IMO, flags are more easily discoverable if located inside cmd/ +func InitCLIFlags(cmd *cobra.Command) { + cmd.PersistentFlags().Int(storage.StoreWorkerMaxPendingSize, 0, "Max pending size for store worker") + cmd.PersistentFlags().Int(storage.StoreWorkerMaxWriteChanSize, 1024, "Max write channel size for store worker") + cmd.PersistentFlags().String(storage.StoragePostgresConnectionStringFlag, "postgresql://localhost/postgres", "Postgres connection string") + cmd.PersistentFlags().Int(storage.StoragePostgresMaxIdleConnsFlag, 20, "Max idle connections to database") + cmd.PersistentFlags().Duration(storage.StoragePostgresConnMaxIdleTimeFlag, time.Minute, "Max idle time of idle connections") + cmd.PersistentFlags().Int(storage.StoragePostgresMaxOpenConns, 20, "Max open connections") +} + +type PostgresConfig struct { + ConnString string +} + +type ModuleConfig struct { + PostgresConnectionOptions storage.ConnectionOptions + Debug bool +} + +func CLIModule(v *viper.Viper, output io.Writer, debug bool) fx.Option { + + options := make([]fx.Option, 0) + options = append(options, fx.Provide(func(logger logging.Logger) (*bun.DB, error) { + configuration := storage.ConnectionOptionsFromFlags(v, output, debug) + logger.WithField("config", configuration).Infof("Opening connection to database...") + return storage.OpenSQLDB(configuration) + })) + options = 
append(options, fx.Provide(func(db *bun.DB) (*Driver, error) { + return New(db), nil + })) + options = append(options, health.ProvideHealthCheck(func(db *bun.DB) health.NamedCheck { + return health.NewNamedCheck("postgres", health.CheckFn(db.PingContext)) + })) + + options = append(options, fx.Invoke(func(db *bun.DB, driver *Driver, lifecycle fx.Lifecycle, logger logging.Logger) error { + lifecycle.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + logger.Infof("Initializing database...") + return driver.Initialize(ctx) + }, + OnStop: func(ctx context.Context) error { + logger.Infof("Closing database...") + return db.Close() + }, + }) + return nil + })) + return fx.Options(options...) +} diff --git a/internal/storage/driver/driver.go b/internal/storage/driver/driver.go new file mode 100644 index 000000000..6cdcf801a --- /dev/null +++ b/internal/storage/driver/driver.go @@ -0,0 +1,190 @@ +package driver + +import ( + "context" + "database/sql" + "database/sql/driver" + "fmt" + "sync" + + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/systemstore" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/pkg/errors" + "github.com/uptrace/bun" + "go.nhat.io/otelsql" +) + +const SystemSchema = "_system" + +type pgxDriver struct { + driverName string +} + +var pgxSqlDriver pgxDriver + +type otelSQLDriverWithCheckNamedValueDisabled struct { + driver.Driver +} + +func (d otelSQLDriverWithCheckNamedValueDisabled) CheckNamedValue(*driver.NamedValue) error { + return nil +} + +var _ = driver.NamedValueChecker(&otelSQLDriverWithCheckNamedValueDisabled{}) + +func init() { + // Default mapping for app driver/sql driver + pgxSqlDriver.driverName = "pgx" +} + +// todo: since se use pq, this is probably useless +func InstrumentalizeSQLDriver() { + // otelsql has a function Register which wrap the underlying driver, but does not mirror 
driver.NamedValuedChecker interface of the underlying driver + // pgx implements this interface and just return nil + // so, we need to manually wrap the driver to implements this interface and return a nil error + db, err := sql.Open("pgx", "") + if err != nil { + panic(err) + } + + dri := db.Driver() + + if err = db.Close(); err != nil { + panic(err) + } + + wrappedDriver := otelsql.Wrap(dri, + otelsql.AllowRoot(), + otelsql.TraceAll(), + ) + + pgxSqlDriver.driverName = fmt.Sprintf("otel-%s", pgxSqlDriver.driverName) + sql.Register(pgxSqlDriver.driverName, otelSQLDriverWithCheckNamedValueDisabled{ + wrappedDriver, + }) +} + +type Driver struct { + db *bun.DB + systemStore *systemstore.Store + lock sync.Mutex +} + +func (d *Driver) GetSystemStore() *systemstore.Store { + return d.systemStore +} + +func (d *Driver) newStore(name string) (*ledgerstore.Store, error) { + return ledgerstore.New(d.db, name, func(ctx context.Context) error { + return d.GetSystemStore().DeleteLedger(ctx, name) + }) +} + +func (d *Driver) createLedgerStore(ctx context.Context, name string) (*ledgerstore.Store, error) { + if name == SystemSchema { + return nil, errors.New("reserved name") + } + + exists, err := d.systemStore.Exists(ctx, name) + if err != nil { + return nil, err + } + if exists { + return nil, storage.ErrStoreAlreadyExists + } + + _, err = d.systemStore.Register(ctx, name) + if err != nil { + return nil, err + } + + store, err := d.newStore(name) + if err != nil { + return nil, err + } + + _, err = store.Migrate(ctx) + + return store, err +} + +func (d *Driver) CreateLedgerStore(ctx context.Context, name string) (*ledgerstore.Store, error) { + d.lock.Lock() + defer d.lock.Unlock() + + return d.createLedgerStore(ctx, name) +} + +func (d *Driver) GetLedgerStore(ctx context.Context, name string) (*ledgerstore.Store, error) { + d.lock.Lock() + defer d.lock.Unlock() + + exists, err := d.systemStore.Exists(ctx, name) + if err != nil { + return nil, err + } + + var store 
*ledgerstore.Store + if !exists { + store, err = d.createLedgerStore(ctx, name) + } else { + store, err = d.newStore(name) + } + if err != nil { + return nil, err + } + + return store, nil +} + +func (d *Driver) Initialize(ctx context.Context) error { + logging.FromContext(ctx).Debugf("Initialize driver") + + _, err := d.db.ExecContext(ctx, "create extension if not exists pgcrypto") + if err != nil { + return storage.PostgresError(err) + } + + _, err = d.db.ExecContext(ctx, fmt.Sprintf(`create schema if not exists "%s"`, SystemSchema)) + if err != nil { + return storage.PostgresError(err) + } + + d.systemStore = systemstore.NewStore(d.db) + + if err := d.systemStore.Initialize(ctx); err != nil { + return err + } + + return nil +} + +func (d *Driver) UpgradeAllLedgersSchemas(ctx context.Context) error { + systemStore := d.GetSystemStore() + ledgers, err := systemStore.ListLedgers(ctx) + if err != nil { + return err + } + + for _, ledger := range ledgers { + store, err := d.GetLedgerStore(ctx, ledger) + if err != nil { + return err + } + + logging.FromContext(ctx).Infof("Upgrading storage '%s'", ledger) + if _, err := store.Migrate(ctx); err != nil { + return err + } + } + + return nil +} + +func New(db *bun.DB) *Driver { + return &Driver{ + db: db, + } +} diff --git a/internal/storage/driver/driver_test.go b/internal/storage/driver/driver_test.go new file mode 100644 index 000000000..58d1146f5 --- /dev/null +++ b/internal/storage/driver/driver_test.go @@ -0,0 +1,46 @@ +package driver_test + +import ( + "context" + "testing" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/google/uuid" + + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/storagetesting" + "github.com/stretchr/testify/require" +) + +func TestConfiguration(t *testing.T) { + d := storagetesting.StorageDriver(t) + + require.NoError(t, d.GetSystemStore().InsertConfiguration(context.Background(), "foo", "bar")) + bar, err := 
d.GetSystemStore().GetConfiguration(context.Background(), "foo") + require.NoError(t, err) + require.Equal(t, "bar", bar) +} + +func TestConfigurationError(t *testing.T) { + d := storagetesting.StorageDriver(t) + + _, err := d.GetSystemStore().GetConfiguration(context.Background(), "not_existing") + require.Error(t, err) + require.True(t, storage.IsNotFoundError(err)) +} + +func TestErrorOnOutdatedSchema(t *testing.T) { + d := storagetesting.StorageDriver(t) + ctx := logging.TestingContext() + + name := uuid.NewString() + _, err := d.GetSystemStore().Register(ctx, name) + require.NoError(t, err) + + store, err := d.GetLedgerStore(ctx, name) + require.NoError(t, err) + + upToDate, err := store.IsSchemaUpToDate(ctx) + require.NoError(t, err) + require.False(t, upToDate) +} diff --git a/internal/storage/driver/main_test.go b/internal/storage/driver/main_test.go new file mode 100644 index 000000000..70c6d21da --- /dev/null +++ b/internal/storage/driver/main_test.go @@ -0,0 +1,21 @@ +package driver + +import ( + "os" + "testing" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/pgtesting" +) + +func TestMain(t *testing.M) { + if err := pgtesting.CreatePostgresServer(); err != nil { + logging.Error(err) + os.Exit(1) + } + code := t.Run() + if err := pgtesting.DestroyPostgresServer(); err != nil { + logging.Error(err) + } + os.Exit(code) +} diff --git a/internal/storage/errors.go b/internal/storage/errors.go new file mode 100644 index 000000000..3e7eeb9a1 --- /dev/null +++ b/internal/storage/errors.go @@ -0,0 +1,52 @@ +package storage + +import ( + "database/sql" + + "github.com/formancehq/stack/libs/go-libs/errorsutil" + "github.com/lib/pq" + "github.com/pkg/errors" +) + +// postgresError is an helper to wrap postgres errors into storage errors +func PostgresError(err error) error { + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return ErrNotFound + } + + switch pge := err.(type) { + case *pq.Error: + switch 
pge.Code { + case "23505": + return errorsutil.NewError(ErrStorage, + errorsutil.NewError(ErrConstraintFailed, err)) + case "53300": + return errorsutil.NewError(ErrStorage, + errorsutil.NewError(ErrTooManyClients, err)) + } + } + + return errorsutil.NewError(ErrStorage, err) + } + + return nil +} + +var ( + ErrNotFound = errors.New("not found") + ErrConstraintFailed = errors.New("23505: constraint failed") + ErrTooManyClients = errors.New("53300: too many clients") + ErrStoreAlreadyExists = errors.New("store already exists") + ErrStoreNotFound = errors.New("store not found") + + ErrStorage = errors.New("storage error") +) + +func IsNotFoundError(err error) bool { + return errors.Is(err, ErrNotFound) +} + +func IsStorageError(err error) bool { + return errors.Is(err, ErrStorage) +} diff --git a/internal/storage/flags.go b/internal/storage/flags.go new file mode 100644 index 000000000..670cc1fa1 --- /dev/null +++ b/internal/storage/flags.go @@ -0,0 +1,27 @@ +package storage + +import ( + "io" + + "github.com/spf13/viper" +) + +const ( + StoreWorkerMaxPendingSize = "store-worker-max-pending-size" + StoreWorkerMaxWriteChanSize = "store-worker-max-write-chan-size" + StoragePostgresConnectionStringFlag = "storage-postgres-conn-string" + StoragePostgresMaxIdleConnsFlag = "storage-postgres-max-idle-conns" + StoragePostgresConnMaxIdleTimeFlag = "storage-postgres-conn-max-idle-time" + StoragePostgresMaxOpenConns = "storage-postgres-max-open-conns" +) + +func ConnectionOptionsFromFlags(v *viper.Viper, output io.Writer, debug bool) ConnectionOptions { + return ConnectionOptions{ + DatabaseSourceName: v.GetString(StoragePostgresConnectionStringFlag), + Debug: debug, + Writer: output, + MaxIdleConns: v.GetInt(StoragePostgresMaxIdleConnsFlag), + ConnMaxIdleTime: v.GetDuration(StoragePostgresConnMaxIdleTimeFlag), + MaxOpenConns: v.GetInt(StoragePostgresMaxOpenConns), + } +} diff --git a/internal/storage/inmemory.go b/internal/storage/inmemory.go new file mode 100644 index 
000000000..f12289e62 --- /dev/null +++ b/internal/storage/inmemory.go @@ -0,0 +1,124 @@ +package storage + +import ( + "context" + "math/big" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/metadata" +) + +type InMemoryStore struct { + logs []*ledger.ChainedLog + transactions []*ledger.ExpandedTransaction + accounts []*ledger.Account +} + +func (m *InMemoryStore) GetTransactionByReference(ctx context.Context, ref string) (*ledger.ExpandedTransaction, error) { + filtered := collectionutils.Filter(m.transactions, func(transaction *ledger.ExpandedTransaction) bool { + return transaction.Reference == ref + }) + if len(filtered) == 0 { + return nil, ErrNotFound + } + return filtered[0], nil +} + +func (m *InMemoryStore) GetTransaction(ctx context.Context, txID *big.Int) (*ledger.Transaction, error) { + filtered := collectionutils.Filter(m.transactions, func(transaction *ledger.ExpandedTransaction) bool { + return transaction.ID.Cmp(txID) == 0 + }) + if len(filtered) == 0 { + return nil, ErrNotFound + } + return &filtered[0].Transaction, nil +} + +func (m *InMemoryStore) GetLastLog(ctx context.Context) (*ledger.ChainedLog, error) { + if len(m.logs) == 0 { + return nil, nil + } + return m.logs[len(m.logs)-1], nil +} + +func (m *InMemoryStore) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { + balance := new(big.Int) + for _, log := range m.logs { + switch payload := log.Data.(type) { + case ledger.NewTransactionLogPayload: + postings := payload.Transaction.Postings + for _, posting := range postings { + if posting.Asset != asset { + continue + } + if posting.Source == address { + balance = balance.Sub(balance, posting.Amount) + } + if posting.Destination == address { + balance = balance.Add(balance, posting.Amount) + } + } + } + } + return balance, nil +} + +func (m *InMemoryStore) GetAccount(ctx context.Context, address string) 
(*ledger.Account, error) { + account := collectionutils.Filter(m.accounts, func(account *ledger.Account) bool { + return account.Address == address + }) + if len(account) == 0 { + return &ledger.Account{ + Address: address, + Metadata: metadata.Metadata{}, + }, nil + } + return account[0], nil +} + +func (m *InMemoryStore) ReadLogWithIdempotencyKey(ctx context.Context, key string) (*ledger.ChainedLog, error) { + first := collectionutils.First(m.logs, func(log *ledger.ChainedLog) bool { + return log.IdempotencyKey == key + }) + if first == nil { + return nil, ErrNotFound + } + return first, nil +} + +func (m *InMemoryStore) InsertLogs(ctx context.Context, logs ...*ledger.ChainedLog) error { + m.logs = append(m.logs, logs...) + for _, log := range logs { + switch payload := log.Data.(type) { + case ledger.NewTransactionLogPayload: + m.transactions = append(m.transactions, &ledger.ExpandedTransaction{ + Transaction: *payload.Transaction, + // TODO + PreCommitVolumes: nil, + PostCommitVolumes: nil, + }) + case ledger.RevertedTransactionLogPayload: + tx := collectionutils.Filter(m.transactions, func(transaction *ledger.ExpandedTransaction) bool { + return transaction.ID.Cmp(payload.RevertedTransactionID) == 0 + })[0] + tx.Reverted = true + case ledger.SetMetadataLogPayload: + } + } + + return nil +} + +func (m *InMemoryStore) GetLastTransaction(ctx context.Context) (*ledger.ExpandedTransaction, error) { + if len(m.transactions) == 0 { + return nil, ErrNotFound + } + return m.transactions[len(m.transactions)-1], nil +} + +func NewInMemoryStore() *InMemoryStore { + return &InMemoryStore{ + logs: []*ledger.ChainedLog{}, + } +} diff --git a/internal/storage/ledgerstore/accounts.go b/internal/storage/ledgerstore/accounts.go new file mode 100644 index 000000000..4d7035566 --- /dev/null +++ b/internal/storage/ledgerstore/accounts.go @@ -0,0 +1,202 @@ +package ledgerstore + +import ( + "context" + "errors" + "fmt" + "regexp" + + ledger "github.com/formancehq/ledger/internal" + 
storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/pointer" + "github.com/uptrace/bun" +) + +func (store *Store) buildAccountQuery(q PITFilterWithVolumes, query *bun.SelectQuery) *bun.SelectQuery { + query = query. + DistinctOn("accounts.address"). + Column("accounts.address"). + ColumnExpr("coalesce(metadata, '{}'::jsonb) as metadata"). + Table("accounts"). + Apply(filterPIT(q.PIT, "insertion_date")). + Order("accounts.address", "revision desc") + + if q.PIT == nil { + query = query.Join("left join accounts_metadata on accounts_metadata.address = accounts.address") + } else { + query = query.Join("left join accounts_metadata on accounts_metadata.address = accounts.address and accounts_metadata.date < ?", q.PIT) + } + + if q.ExpandVolumes { + query = query. + ColumnExpr("volumes.*"). + Join("join get_account_aggregated_volumes(accounts.address, ?) volumes on true", q.PIT) + } + + if q.ExpandEffectiveVolumes { + query = query. + ColumnExpr("effective_volumes.*"). + Join("join get_account_aggregated_effective_volumes(accounts.address, ?) 
effective_volumes on true", q.PIT) + } + + return query +} + +func (store *Store) accountQueryContext(qb query.Builder) (string, []any, error) { + metadataRegex := regexp.MustCompile("metadata\\[(.+)\\]") + balanceRegex := regexp.MustCompile("balance\\[(.*)\\]") + + return qb.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "address": + // TODO: Should allow comparison operator only if segments not used + if operator != "$match" { + return "", nil, errors.New("'address' column can only be used with $match") + } + switch address := value.(type) { + case string: + return filterAccountAddress(address, "accounts.address"), nil, nil + default: + return "", nil, fmt.Errorf("unexpected type %T for column 'address'", address) + } + case metadataRegex.Match([]byte(key)): + if operator != "$match" { + return "", nil, errors.New("'account' column can only be used with $match") + } + match := metadataRegex.FindAllStringSubmatch(key, 3) + + return "metadata @> ?", []any{map[string]any{ + match[0][1]: value, + }}, nil + case balanceRegex.Match([]byte(key)): + match := balanceRegex.FindAllStringSubmatch(key, 2) + + return fmt.Sprintf(`( + select balance_from_volumes(post_commit_volumes) + from moves + where asset = ? 
and account_address = accounts.address + order by seq desc + limit 1 + ) < ?`), []any{match[0][1], value}, nil + case key == "balance": + return fmt.Sprintf(`( + select balance_from_volumes(post_commit_volumes) + from moves + where account_address = accounts.address + order by seq desc + limit 1 + ) < ?`), nil, nil + default: + return "", nil, fmt.Errorf("unknown key '%s' when building query", key) + } + })) +} + +func (store *Store) buildAccountListQuery(selectQuery *bun.SelectQuery, q *GetAccountsQuery) *bun.SelectQuery { + selectQuery = store.buildAccountQuery(q.Options.Options, selectQuery) + + if q.Options.QueryBuilder != nil { + where, args, err := store.accountQueryContext(q.Options.QueryBuilder) + if err != nil { + // TODO: handle error + panic(err) + } + return selectQuery.Where(where, args...) + } + + return selectQuery +} + +func (store *Store) GetAccountsWithVolumes(ctx context.Context, q *GetAccountsQuery) (*api.Cursor[ledger.ExpandedAccount], error) { + return paginateWithOffset[PaginatedQueryOptions[PITFilterWithVolumes], ledger.ExpandedAccount](store, ctx, + (*paginate.OffsetPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]])(q), + func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildAccountListQuery(query, q) + }, + ) +} + +func (store *Store) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { + account, err := fetch[*ledger.Account](store, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { + return query. + ColumnExpr("accounts.address"). + ColumnExpr("coalesce(metadata, '{}'::jsonb) as metadata"). + Table("accounts"). + Join("left join accounts_metadata on accounts_metadata.address = accounts.address"). + Where("accounts.address = ?", address). + Order("revision desc"). 
+ Limit(1) + }) + if err != nil { + if storageerrors.IsNotFoundError(err) { + return pointer.For(ledger.NewAccount(address)), nil + } + return nil, err + } + return account, nil +} + +func (store *Store) GetAccountWithVolumes(ctx context.Context, q GetAccountQuery) (*ledger.ExpandedAccount, error) { + account, err := fetch[*ledger.ExpandedAccount](store, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { + query = store.buildAccountQuery(q.PITFilterWithVolumes, query). + Where("accounts.address = ?", q.Addr). + Limit(1) + + return query + }) + if err != nil { + if storageerrors.IsNotFoundError(err) { + return pointer.For(ledger.NewExpandedAccount(q.Addr)), nil + } + return nil, err + } + return account, nil +} + +func (store *Store) CountAccounts(ctx context.Context, q *GetAccountsQuery) (uint64, error) { + return count(store, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildAccountListQuery(query, q) + }) +} + +type GetAccountQuery struct { + PITFilterWithVolumes + Addr string +} + +func (q GetAccountQuery) WithPIT(pit ledger.Time) GetAccountQuery { + q.PIT = &pit + + return q +} + +func (q GetAccountQuery) WithExpandVolumes() GetAccountQuery { + q.ExpandVolumes = true + + return q +} + +func (q GetAccountQuery) WithExpandEffectiveVolumes() GetAccountQuery { + q.ExpandEffectiveVolumes = true + + return q +} + +func NewGetAccountQuery(addr string) GetAccountQuery { + return GetAccountQuery{ + Addr: addr, + } +} + +type GetAccountsQuery paginate.OffsetPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] + +func NewGetAccountsQuery(opts PaginatedQueryOptions[PITFilterWithVolumes]) *GetAccountsQuery { + return &GetAccountsQuery{ + PageSize: opts.PageSize, + Order: paginate.OrderAsc, + Options: opts, + } +} diff --git a/internal/storage/ledgerstore/accounts_test.go b/internal/storage/ledgerstore/accounts_test.go new file mode 100644 index 000000000..f55a391ca --- /dev/null +++ b/internal/storage/ledgerstore/accounts_test.go @@ -0,0 
+1,314 @@ +package ledgerstore_test + +import ( + "context" + "math/big" + "testing" + "time" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/stretchr/testify/require" +) + +func TestGetAccounts(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + require.NoError(t, store.InsertLogs(context.Background(), + ledger.ChainLogs( + ledger.NewTransactionLog( + ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithDate(now), + map[string]metadata.Metadata{ + "account:1": { + "category": "4", + }, + }, + ).WithDate(now), + ledger.NewSetMetadataOnAccountLog(ledger.Now(), "account:1", metadata.Metadata{"category": "1"}).WithDate(now.Add(time.Minute)), + ledger.NewSetMetadataOnAccountLog(ledger.Now(), "account:2", metadata.Metadata{"category": "2"}).WithDate(now.Add(2*time.Minute)), + ledger.NewSetMetadataOnAccountLog(ledger.Now(), "account:3", metadata.Metadata{"category": "3"}).WithDate(now.Add(3*time.Minute)), + ledger.NewSetMetadataOnAccountLog(ledger.Now(), "orders:1", metadata.Metadata{"foo": "bar"}).WithDate(now.Add(3*time.Minute)), + ledger.NewSetMetadataOnAccountLog(ledger.Now(), "orders:2", metadata.Metadata{"foo": "bar"}).WithDate(now.Add(3*time.Minute)), + ledger.NewTransactionLog( + ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithIDUint64(1). + WithDate(now.Add(4*time.Minute)), + map[string]metadata.Metadata{}, + ).WithDate(now.Add(100*time.Millisecond)), + ledger.NewTransactionLog( + ledger.NewTransaction(). + WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). + WithDate(now.Add(3*time.Minute)). 
+ WithIDUint64(2), + map[string]metadata.Metadata{}, + ).WithDate(now.Add(200*time.Millisecond)), + )..., + )) + + t.Run("list all", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) + require.NoError(t, err) + require.Len(t, accounts.Data, 7) + }) + + t.Run("list using metadata", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[category]", "1")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + }) + + t.Run("list before date", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ + PITFilter: ledgerstore.PITFilter{ + PIT: &now, + }, + }))) + require.NoError(t, err) + require.Len(t, accounts.Data, 2) + }) + + t.Run("list with volumes", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ + ExpandVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(200, 50), + }, accounts.Data[0].Volumes) + }) + + t.Run("list with volumes using PIT", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ + PITFilter: ledgerstore.PITFilter{ + PIT: &now, + }, + ExpandVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) 
+ require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(100, 0), + }, accounts.Data[0].Volumes) + }) + + t.Run("list with effective volumes", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ + ExpandEffectiveVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(200, 50), + }, accounts.Data[0].EffectiveVolumes) + }) + + t.Run("list with effective volumes using PIT", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ + PITFilter: ledgerstore.PITFilter{ + PIT: &now, + }, + ExpandEffectiveVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(100, 0), + }, accounts.Data[0].EffectiveVolumes) + }) + + t.Run("list using filter on address", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("address", "account:")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + t.Run("list using filter on multiple address", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder( + query.Or( + query.Match("address", "account:1"), + query.Match("address", "orders:"), + ), + ), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + t.Run("list using filter on balances", func(t *testing.T) { + accounts, err := store.GetAccountsWithVolumes(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lt("balance[USD]", 0)), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) // world + }) +} + +func TestUpdateAccountsMetadata(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + metadata := metadata.Metadata{ + "foo": "bar", + } + + require.NoError(t, store.InsertLogs(context.Background(), + ledger.NewSetMetadataOnAccountLog(ledger.Now(), "bank", metadata).ChainLog(nil), + ), "account insertion should not fail") + + account, err := store.GetAccountWithVolumes(context.Background(), ledgerstore.NewGetAccountQuery("bank")) + require.NoError(t, err, "account retrieval should not fail") + + require.Equal(t, "bank", account.Address, "account address should match") + require.Equal(t, metadata, account.Metadata, "account metadata should match") +} + +func TestGetAccount(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + require.NoError(t, store.InsertLogs(context.Background(), + ledger.ChainLogs( + ledger.NewTransactionLog(ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "multi", "USD/2", big.NewInt(100)), + ).WithDate(now), map[string]metadata.Metadata{}), + ledger.NewSetMetadataLog(now.Add(time.Minute), ledger.SetMetadataLogPayload{ + TargetType: ledger.MetaTargetTypeAccount, + TargetID: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + }), + )..., + )) + + t.Run("find account", func(t *testing.T) { + account, err := store.GetAccountWithVolumes(context.Background(), ledgerstore.NewGetAccountQuery("multi")) + require.NoError(t, 
err) + require.Equal(t, ledger.ExpandedAccount{ + Account: ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + }, + }, *account) + }) + + t.Run("find account with volumes", func(t *testing.T) { + account, err := store.GetAccountWithVolumes(context.Background(), ledgerstore.NewGetAccountQuery("multi"). + WithExpandVolumes()) + require.NoError(t, err) + require.Equal(t, ledger.ExpandedAccount{ + Account: ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + }, + Volumes: ledger.VolumesByAssets{ + "USD/2": ledger.NewVolumesInt64(100, 0), + }, + }, *account) + }) + + t.Run("find account with effective volumes", func(t *testing.T) { + account, err := store.GetAccountWithVolumes(context.Background(), ledgerstore.NewGetAccountQuery("multi"). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, ledger.ExpandedAccount{ + Account: ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + }, + EffectiveVolumes: ledger.VolumesByAssets{ + "USD/2": ledger.NewVolumesInt64(100, 0), + }, + }, *account) + }) + + t.Run("find account using pit", func(t *testing.T) { + account, err := store.GetAccountWithVolumes(context.Background(), ledgerstore.NewGetAccountQuery("multi").WithPIT(now)) + require.NoError(t, err) + require.Equal(t, ledger.ExpandedAccount{ + Account: ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{}, + }, + Volumes: ledger.VolumesByAssets{}, + }, *account) + }) + + t.Run("not existent account", func(t *testing.T) { + account, err := store.GetAccountWithVolumes(context.Background(), ledgerstore.NewGetAccountQuery("account_not_existing")) + require.NoError(t, err) + require.NotNil(t, account) + }) +} + +func TestGetAccountWithVolumes(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + require.NoError(t, insertTransactions(context.Background(), store, + *ledger.NewTransaction().WithPostings( + 
ledger.NewPosting("world", "multi", "USD/2", big.NewInt(100)), + ), + )) + + accountWithVolumes, err := store.GetAccountWithVolumes(context.Background(), + ledgerstore.NewGetAccountQuery("multi").WithExpandVolumes()) + require.NoError(t, err) + require.Equal(t, &ledger.ExpandedAccount{ + Account: ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{}, + }, + Volumes: map[string]*ledger.Volumes{ + "USD/2": ledger.NewEmptyVolumes().WithInputInt64(100), + }, + }, accountWithVolumes) +} + +func TestUpdateAccountMetadata(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + require.NoError(t, store.InsertLogs(context.Background(), + ledger.NewSetMetadataOnAccountLog(ledger.Now(), "central_bank", metadata.Metadata{ + "foo": "bar", + }).ChainLog(nil), + )) + + account, err := store.GetAccountWithVolumes(context.Background(), ledgerstore.NewGetAccountQuery("central_bank")) + require.NoError(t, err) + require.EqualValues(t, "bar", account.Metadata["foo"]) +} + +func TestCountAccounts(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + require.NoError(t, insertTransactions(context.Background(), store, + *ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "central_bank", "USD/2", big.NewInt(100)), + ), + )) + + countAccounts, err := store.CountAccounts(context.Background(), ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) + require.NoError(t, err) + require.EqualValues(t, 2, countAccounts) // world + central_bank +} diff --git a/internal/storage/ledgerstore/balances.go b/internal/storage/ledgerstore/balances.go new file mode 100644 index 000000000..068e477b1 --- /dev/null +++ b/internal/storage/ledgerstore/balances.go @@ -0,0 +1,84 @@ +package ledgerstore + +import ( + "context" + "errors" + "fmt" + "math/big" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/paginate" + 
"github.com/formancehq/ledger/internal/storage/query" + "github.com/uptrace/bun" +) + +// todo: should return a cursor? +func (store *Store) GetAggregatedBalances(ctx context.Context, q *GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + + type Temp struct { + Aggregated ledger.VolumesByAssets `bun:"aggregated,type:jsonb"` + } + return fetchAndMap[*Temp, ledger.BalancesByAssets](store, ctx, + func(temp *Temp) ledger.BalancesByAssets { + return temp.Aggregated.Balances() + }, + func(selectQuery *bun.SelectQuery) *bun.SelectQuery { + moves := store.db. + NewSelect(). + Table(MovesTableName). + ColumnExpr("distinct on (moves.account_address, moves.asset) moves.*"). + Order("account_address", "asset", "moves.seq desc"). + Apply(filterPIT(q.Options.Options.PIT, "insertion_date")) // todo(gfyrag): expose capability to use effective_date + + if q.Options.QueryBuilder != nil { + subQuery, args, err := q.Options.QueryBuilder.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "address": + // TODO: Should allow comparison operator only if segments not used + if operator != "$match" { + return "", nil, errors.New("'address' column can only be used with $match") + } + switch address := value.(type) { + case string: + return filterAccountAddress(address, "account_address"), nil, nil + default: + return "", nil, fmt.Errorf("unexpected type %T for column 'address'", address) + } + default: + return "", nil, fmt.Errorf("unknown key '%s' when building query", key) + } + })) + if err != nil { + panic(err) + } + moves = moves.Where(subQuery, args...) + } + + return selectQuery. + With("moves", moves). + TableExpr("moves"). + ColumnExpr("volumes_to_jsonb((moves.asset, (sum((moves.post_commit_volumes).inputs), sum((moves.post_commit_volumes).outputs))::volumes)) as aggregated"). 
+ Group("moves.asset") + }) +} + +func (store *Store) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { + type Temp struct { + Balance *big.Int `bun:"balance,type:numeric"` + } + return fetchAndMap[*Temp, *big.Int](store, ctx, func(temp *Temp) *big.Int { + return temp.Balance + }, func(query *bun.SelectQuery) *bun.SelectQuery { + return query.TableExpr("get_account_balance(?, ?) as balance", address, asset) + }) +} + +type GetAggregatedBalanceQuery paginate.OffsetPaginatedQuery[PaginatedQueryOptions[PITFilter]] + +func NewGetAggregatedBalancesQuery(options PaginatedQueryOptions[PITFilter]) *GetAggregatedBalanceQuery { + return &GetAggregatedBalanceQuery{ + PageSize: options.PageSize, + Order: paginate.OrderAsc, + Options: options, + } +} diff --git a/internal/storage/ledgerstore/balances_test.go b/internal/storage/ledgerstore/balances_test.go new file mode 100644 index 000000000..5de91a529 --- /dev/null +++ b/internal/storage/ledgerstore/balances_test.go @@ -0,0 +1,67 @@ +package ledgerstore_test + +import ( + "context" + "math/big" + "testing" + "time" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/query" + internaltesting "github.com/formancehq/ledger/internal/testing" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/stretchr/testify/require" +) + +func TestGetBalancesAggregated(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "users:1", "USD", big.NewInt(1)), + ledger.NewPosting("world", "users:2", "USD", big.NewInt(199)), + ).WithDate(now) + + tx2 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "users:1", "USD", big.NewInt(1)), + ledger.NewPosting("world", "users:2", "USD", big.NewInt(199)), + ).WithDate(now.Add(time.Minute)).WithIDUint64(1) + + require.NoError(t, 
store.InsertLogs(context.Background(), + ledger.ChainLogs( + ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}).WithDate(tx1.Timestamp), + ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}).WithDate(tx2.Timestamp), + )...)) + + t.Run("aggregate on all", func(t *testing.T) { + q := ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilter{}).WithPageSize(10) + cursor, err := store.GetAggregatedBalances(context.Background(), ledgerstore.NewGetAggregatedBalancesQuery(q)) + require.NoError(t, err) + internaltesting.RequireEqual(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0), + }, cursor) + }) + t.Run("filter on address", func(t *testing.T) { + ret, err := store.GetAggregatedBalances(context.Background(), ledgerstore.NewGetAggregatedBalancesQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilter{}). + WithQueryBuilder(query.Match("address", "users:")). + WithPageSize(10), + )) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(400), + }, ret) + }) + t.Run("using pit", func(t *testing.T) { + ret, err := store.GetAggregatedBalances(context.Background(), ledgerstore.NewGetAggregatedBalancesQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilter{ + PIT: &now, + }). + WithQueryBuilder(query.Match("address", "users:")). 
+ WithPageSize(10))) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(200), + }, ret) + }) +} diff --git a/internal/storage/ledgerstore/logs.go b/internal/storage/ledgerstore/logs.go new file mode 100644 index 000000000..253a7d561 --- /dev/null +++ b/internal/storage/ledgerstore/logs.go @@ -0,0 +1,172 @@ +package ledgerstore + +import ( + "context" + "database/sql/driver" + "encoding/json" + "fmt" + "math/big" + + ledger "github.com/formancehq/ledger/internal" + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/lib/pq" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +const ( + LogTableName = "logs" +) + +type Logs struct { + bun.BaseModel `bun:"logs,alias:logs"` + + ID *paginate.BigInt `bun:"id,unique,type:numeric"` + Type string `bun:"type,type:log_type"` + Hash []byte `bun:"hash,type:bytea"` + Date ledger.Time `bun:"date,type:timestamptz"` + Data []byte `bun:"data,type:jsonb"` + IdempotencyKey string `bun:"idempotency_key,type:varchar(256),unique"` +} + +func (log *Logs) ToCore() *ledger.ChainedLog { + payload, err := ledger.HydrateLog(ledger.LogTypeFromString(log.Type), log.Data) + if err != nil { + panic(errors.Wrap(err, "hydrating log data")) + } + + return &ledger.ChainedLog{ + Log: ledger.Log{ + Type: ledger.LogTypeFromString(log.Type), + Data: payload, + Date: log.Date.UTC(), + IdempotencyKey: log.IdempotencyKey, + }, + ID: (*big.Int)(log.ID), + Hash: log.Hash, + } +} + +type RawMessage json.RawMessage + +func (j RawMessage) Value() (driver.Value, error) { + if j == nil { + return nil, nil + } + return string(j), nil +} + +func (store *Store) logsQueryBuilder(q PaginatedQueryOptions[any]) func(*bun.SelectQuery) *bun.SelectQuery { + return func(selectQuery *bun.SelectQuery) *bun.SelectQuery { + selectQuery = 
selectQuery.Table(LogTableName) + + if q.QueryBuilder != nil { + subQuery, args, err := q.QueryBuilder.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "date": + return fmt.Sprintf("%s %s ?", key, query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil + default: + return "", nil, fmt.Errorf("unknown key '%s' when building query", key) + } + })) + if err != nil { + panic(err) + } + selectQuery = selectQuery.Where(subQuery, args...) + } + + return selectQuery + } +} + +func (store *Store) InsertLogs(ctx context.Context, activeLogs ...*ledger.ChainedLog) error { + return store.withTransaction(ctx, func(tx bun.Tx) error { + // Beware: COPY query is not supported by bun if the pgx driver is used. + stmt, err := tx.Prepare(pq.CopyInSchema( + store.name, + LogTableName, + "id", "type", "hash", "date", "data", "idempotency_key", + )) + if err != nil { + return storageerrors.PostgresError(err) + } + + ls := make([]Logs, len(activeLogs)) + for i, chainedLogs := range activeLogs { + data, err := json.Marshal(chainedLogs.Data) + if err != nil { + return errors.Wrap(err, "marshaling log data") + } + + ls[i] = Logs{ + ID: (*paginate.BigInt)(chainedLogs.ID), + Type: chainedLogs.Type.String(), + Hash: chainedLogs.Hash, + Date: chainedLogs.Date, + Data: data, + IdempotencyKey: chainedLogs.IdempotencyKey, + } + + _, err = stmt.Exec(ls[i].ID, ls[i].Type, ls[i].Hash, ls[i].Date, RawMessage(ls[i].Data), chainedLogs.IdempotencyKey) + if err != nil { + return storageerrors.PostgresError(err) + } + } + + _, err = stmt.Exec() + if err != nil { + return storageerrors.PostgresError(err) + } + + return stmt.Close() + }) +} + +func (store *Store) GetLastLog(ctx context.Context) (*ledger.ChainedLog, error) { + return fetchAndMap[*Logs, *ledger.ChainedLog](store, ctx, (*Logs).ToCore, + func(query *bun.SelectQuery) *bun.SelectQuery { + return query. + Table(LogTableName). + OrderExpr("id desc"). 
+ Limit(1) + }) +} + +func (store *Store) GetLogs(ctx context.Context, q *GetLogsQuery) (*api.Cursor[ledger.ChainedLog], error) { + logs, err := paginateWithColumn[PaginatedQueryOptions[any], Logs](store, ctx, + (*paginate.ColumnPaginatedQuery[PaginatedQueryOptions[any]])(q), + store.logsQueryBuilder(q.Options), + ) + if err != nil { + return nil, err + } + + return api.MapCursor(logs, func(from Logs) ledger.ChainedLog { + return *from.ToCore() + }), nil +} + +func (store *Store) ReadLogWithIdempotencyKey(ctx context.Context, key string) (*ledger.ChainedLog, error) { + return fetchAndMap[*Logs, *ledger.ChainedLog](store, ctx, (*Logs).ToCore, + func(query *bun.SelectQuery) *bun.SelectQuery { + return query. + Table(LogTableName). + OrderExpr("id desc"). + Limit(1). + Where("idempotency_key = ?", key) + }) +} + +type GetLogsQuery paginate.ColumnPaginatedQuery[PaginatedQueryOptions[any]] + +func NewGetLogsQuery(options PaginatedQueryOptions[any]) *GetLogsQuery { + return &GetLogsQuery{ + PageSize: options.PageSize, + Column: "id", + Order: paginate.OrderDesc, + Options: options, + } +} diff --git a/internal/storage/ledgerstore/logs_test.go b/internal/storage/ledgerstore/logs_test.go new file mode 100644 index 000000000..f9dfaf6e9 --- /dev/null +++ b/internal/storage/ledgerstore/logs_test.go @@ -0,0 +1,321 @@ +package ledgerstore_test + +import ( + "context" + "fmt" + "math/big" + "testing" + "time" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/stretchr/testify/require" +) + +func TestGetLastLog(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + lastLog, err := store.GetLastLog(context.Background()) + require.True(t, 
storage.IsNotFoundError(err)) + require.Nil(t, lastLog) + tx1 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx1", + Timestamp: now.Add(-3 * time.Hour), + }, + }, + PostCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, + PreCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + }, + } + + logTx := ledger.NewTransactionLog(&tx1.Transaction, map[string]metadata.Metadata{}).ChainLog(nil) + appendLog(t, store, logTx) + + lastLog, err = store.GetLastLog(context.Background()) + require.NoError(t, err) + require.NotNil(t, lastLog) + + require.Equal(t, tx1.Postings, lastLog.Data.(ledger.NewTransactionLogPayload).Transaction.Postings) + require.Equal(t, tx1.Reference, lastLog.Data.(ledger.NewTransactionLogPayload).Transaction.Reference) + require.Equal(t, tx1.Timestamp, lastLog.Data.(ledger.NewTransactionLogPayload).Transaction.Timestamp) +} + +func TestReadLogWithIdempotencyKey(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + logTx := ledger.NewTransactionLog( + ledger.NewTransaction(). 
+ WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + map[string]metadata.Metadata{}, + ) + log := logTx.WithIdempotencyKey("test") + + ret := appendLog(t, store, log.ChainLog(nil)) + + lastLog, err := store.ReadLogWithIdempotencyKey(context.Background(), "test") + require.NoError(t, err) + require.NotNil(t, lastLog) + require.Equal(t, *ret, *lastLog) +} + +func TestGetLogs(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx1", + Timestamp: now.Add(-3 * time.Hour), + }, + }, + PostCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, + PreCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + }, + } + tx2 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(1), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx2", + Timestamp: now.Add(-2 * time.Hour), + }, + }, + PostCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(200), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(200), + Output: big.NewInt(0), + }, + }, + }, + PreCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + 
}, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, + } + tx3 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(2), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "central_bank", + Destination: "users:1", + Amount: big.NewInt(1), + Asset: "USD", + }, + }, + Reference: "tx3", + Metadata: metadata.Metadata{ + "priority": "high", + }, + Timestamp: now.Add(-1 * time.Hour), + }, + }, + PreCommitVolumes: ledger.AccountsAssetsVolumes{ + "central_bank": { + "USD": { + Input: big.NewInt(200), + Output: big.NewInt(0), + }, + }, + "users:1": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + }, + PostCommitVolumes: ledger.AccountsAssetsVolumes{ + "central_bank": { + "USD": { + Input: big.NewInt(200), + Output: big.NewInt(1), + }, + }, + "users:1": { + "USD": { + Input: big.NewInt(1), + Output: big.NewInt(0), + }, + }, + }, + } + + var previousLog *ledger.ChainedLog + for _, tx := range []ledger.ExpandedTransaction{tx1, tx2, tx3} { + newLog := ledger.NewTransactionLog(&tx.Transaction, map[string]metadata.Metadata{}). + WithDate(tx.Timestamp). 
+ ChainLog(previousLog) + appendLog(t, store, newLog) + previousLog = newLog + } + + cursor, err := store.GetLogs(context.Background(), ledgerstore.NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil))) + require.NoError(t, err) + require.Equal(t, paginate.QueryDefaultPageSize, cursor.PageSize) + + require.Equal(t, 3, len(cursor.Data)) + require.Equal(t, big.NewInt(2), cursor.Data[0].ID) + require.Equal(t, tx3.Postings, cursor.Data[0].Data.(ledger.NewTransactionLogPayload).Transaction.Postings) + require.Equal(t, tx3.Reference, cursor.Data[0].Data.(ledger.NewTransactionLogPayload).Transaction.Reference) + require.Equal(t, tx3.Timestamp, cursor.Data[0].Data.(ledger.NewTransactionLogPayload).Transaction.Timestamp) + + cursor, err = store.GetLogs(context.Background(), ledgerstore.NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil).WithPageSize(1))) + require.NoError(t, err) + // Should get only the first log. + require.Equal(t, 1, cursor.PageSize) + require.Equal(t, big.NewInt(2), cursor.Data[0].ID) + + cursor, err = store.GetLogs(context.Background(), ledgerstore.NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil). + WithQueryBuilder(query.And( + query.Gte("date", now.Add(-2*time.Hour)), + query.Lt("date", now.Add(-time.Hour)), + )). + WithPageSize(10), + )) + require.NoError(t, err) + require.Equal(t, 10, cursor.PageSize) + // Should get only the second log, as StartTime is inclusive and EndTime exclusive. 
+ require.Len(t, cursor.Data, 1) + require.Equal(t, big.NewInt(1), cursor.Data[0].ID) +} + +func TestGetBalance(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + const ( + batchNumber = 100 + batchSize = 10 + input = 100 + output = 10 + ) + + logs := make([]*ledger.ChainedLog, 0) + var previousLog *ledger.ChainedLog + for i := 0; i < batchNumber; i++ { + for j := 0; j < batchSize; j++ { + chainedLog := ledger.NewTransactionLog( + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", fmt.Sprintf("account:%d", j), "EUR/2", big.NewInt(input)), + ledger.NewPosting(fmt.Sprintf("account:%d", j), "starbucks", "EUR/2", big.NewInt(output)), + ).WithIDUint64(uint64(i*batchSize+j)), + map[string]metadata.Metadata{}, + ).ChainLog(previousLog) + logs = append(logs, chainedLog) + previousLog = chainedLog + } + } + err := store.InsertLogs(context.Background(), logs...) + require.NoError(t, err) + + balance, err := store.GetBalance(context.Background(), "account:1", "EUR/2") + require.NoError(t, err) + require.Equal(t, big.NewInt((input-output)*batchNumber), balance) +} diff --git a/internal/storage/ledgerstore/main_test.go b/internal/storage/ledgerstore/main_test.go new file mode 100644 index 000000000..57be485d4 --- /dev/null +++ b/internal/storage/ledgerstore/main_test.go @@ -0,0 +1,59 @@ +package ledgerstore_test + +import ( + "context" + "os" + "testing" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/pgtesting" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestMain(m *testing.M) { + if err := pgtesting.CreatePostgresServer(); err != nil { + logging.Error(err) + os.Exit(1) + } + + code := m.Run() + if err := pgtesting.DestroyPostgresServer(); err != nil 
{ + logging.Error(err) + } + os.Exit(code) +} + +func newLedgerStore(t *testing.T) *ledgerstore.Store { + t.Helper() + + pgServer := pgtesting.NewPostgresDatabase(t) + db, err := storage.OpenSQLDB(storage.ConnectionOptions{ + DatabaseSourceName: pgServer.ConnString(), + Debug: testing.Verbose(), + }, + //&explainHook{}, + ) + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, db.Close()) + }) + + ctx := logging.TestingContext() + driver := driver.New(db) + require.NoError(t, driver.Initialize(ctx)) + ledgerStore, err := driver.CreateLedgerStore(ctx, uuid.NewString()) + require.NoError(t, err) + + return ledgerStore +} + +func appendLog(t *testing.T, store *ledgerstore.Store, log *ledger.ChainedLog) *ledger.ChainedLog { + err := store.InsertLogs(context.Background(), log) + require.NoError(t, err) + return log +} diff --git a/internal/storage/ledgerstore/migrations.go b/internal/storage/ledgerstore/migrations.go new file mode 100644 index 000000000..c1d89cd4d --- /dev/null +++ b/internal/storage/ledgerstore/migrations.go @@ -0,0 +1,89 @@ +package ledgerstore + +import ( + "context" + _ "embed" + "fmt" + + "github.com/formancehq/stack/libs/go-libs/migrations" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +func (store *Store) getMigrator() *migrations.Migrator { + migrator := migrations.NewMigrator(migrations.WithSchema(store.Name(), true)) + registerMigrations(migrator, store.name) + return migrator +} + +func (store *Store) Migrate(ctx context.Context) (bool, error) { + migrator := store.getMigrator() + + if err := migrator.Up(ctx, store.db); err != nil { + return false, err + } + + // TODO: Update migrations package to return modifications + return false, nil +} + +func (store *Store) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + return store.getMigrator().GetMigrations(ctx, store.db) +} + +//go:embed migrations/0-init-schema.sql +var initSchema string + +func registerMigrations(migrator *migrations.Migrator, name 
string) { + migrator.RegisterMigrations( + migrations.Migration{ + Name: "Init schema", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + + needV1Upgrade := false + row := tx.QueryRowContext(ctx, `select exists ( + select from pg_tables + where schemaname = ? and tablename = 'log' + )`, name) + if row.Err() != nil { + return row.Err() + } + var ret string + if err := row.Scan(&ret); err != nil { + panic(err) + } + needV1Upgrade = ret != "false" + + oldSchemaRenamed := fmt.Sprintf(name + oldSchemaRenameSuffix) + if needV1Upgrade { + _, err := tx.ExecContext(ctx, fmt.Sprintf(`alter schema "%s" rename to "%s"`, name, oldSchemaRenamed)) + if err != nil { + return errors.Wrap(err, "renaming old schema") + } + _, err = tx.ExecContext(ctx, fmt.Sprintf(`create schema if not exists "%s"`, name)) + if err != nil { + return errors.Wrap(err, "creating new schema") + } + } + + _, err := tx.ExecContext(ctx, initSchema) + if err != nil { + return errors.Wrap(err, "initializing new schema") + } + + if needV1Upgrade { + if err := migrateLogs(ctx, oldSchemaRenamed, name, tx); err != nil { + return errors.Wrap(err, "migrating logs") + } + + _, err = tx.ExecContext(ctx, fmt.Sprintf(`create table goose_db_version as table "%s".goose_db_version with no data`, oldSchemaRenamed)) + if err != nil { + return err + } + } + + return nil + }, + }, + ) +} diff --git a/internal/storage/ledgerstore/migrations/0-init-schema.sql b/internal/storage/ledgerstore/migrations/0-init-schema.sql new file mode 100644 index 000000000..6d03402ba --- /dev/null +++ b/internal/storage/ledgerstore/migrations/0-init-schema.sql @@ -0,0 +1,611 @@ +/** + Some utils + */ +create aggregate aggregate_objects(jsonb) ( + sfunc = jsonb_concat, + stype = jsonb, + initcond = '{}' +); + +create function first_agg (anyelement, anyelement) + returns anyelement + language sql + immutable + strict + parallel safe +as $$ + select $1 +$$; + +create aggregate first (anyelement) ( + sfunc = first_agg, + stype = 
anyelement, + parallel = safe +); + +create function array_distinct(anyarray) + returns anyarray + language sql + immutable +as $$ + select array_agg(distinct x) + from unnest($1) t(x); +$$; + +/** Define types **/ +create type account_with_volumes as ( + address varchar, + metadata jsonb, + volumes jsonb +); + +create type volumes as ( + inputs numeric, + outputs numeric +); + +create type volumes_with_asset as ( + asset varchar, + volumes volumes +); + +/** Define tables **/ +create table transactions ( + id numeric not null primary key, + timestamp timestamp without time zone not null, + reference varchar, + reverted_at timestamp without time zone, + postings varchar not null +); + +create table transactions_metadata ( + transaction_id numeric not null references transactions(id), + revision numeric default 0 not null, + date timestamp not null, + metadata jsonb not null default '{}'::jsonb, + + primary key (transaction_id, revision) +); + +create table accounts ( + address varchar primary key, + address_array jsonb not null, + insertion_date timestamp not null +); + +create table accounts_metadata ( + address varchar references accounts(address), + metadata jsonb default '{}'::jsonb, + revision numeric default 0, + date timestamp +); + +create table moves ( + seq serial not null primary key , + transaction_id numeric not null references transactions(id), + account_address varchar not null, + account_address_array jsonb not null, + asset varchar not null, + amount numeric not null, + insertion_date timestamp not null, + effective_date timestamp not null, + post_commit_volumes volumes not null, + post_commit_effective_volumes volumes default null, + is_source boolean not null +); + +create type log_type as enum ( + 'NEW_TRANSACTION', + 'REVERTED_TRANSACTION', + 'SET_METADATA', + 'DELETE_METADATA' +); + +create table logs ( + id numeric not null primary key, + type log_type not null, + hash bytea not null, + date timestamp not null, + data jsonb not null, + 
idempotency_key varchar(255) +); + +/** Define index **/ + +create function balance_from_volumes(v volumes) + returns numeric + language sql + immutable +as $$ + select v.inputs - v.outputs +$$; + +/** Index required for write part */ +create index moves_range_dates on moves (account_address, asset, effective_date); + +/** Index requires for read */ +create index transactions_date on transactions (timestamp); +create index transactions_metadata_metadata on transactions_metadata using gin (metadata); +--create unique index transactions_revisions on transactions_metadata(id desc, revision desc); + +create index moves_account_address on moves (account_address); +create index moves_account_address_array on moves using gin (account_address_array jsonb_ops); +create index moves_account_address_array_length on moves (jsonb_array_length(account_address_array)); +create index moves_date on moves (effective_date); +create index moves_asset on moves (asset); +create index moves_balance on moves (balance_from_volumes(post_commit_volumes)); +create index moves_post_commit_volumes on moves(account_address, asset, seq); +create index moves_effective_post_commit_volumes on moves(account_address, asset, effective_date desc, seq desc); +create index moves_transactions_id on moves (transaction_id); + +create index accounts_address_array on accounts using gin (address_array jsonb_ops); +create index accounts_address_array_length on accounts (jsonb_array_length(address_array)); + +create unique index accounts_metadata_revisions on accounts_metadata(address asc, revision desc); + +/** Define write functions **/ +create function insert_new_account(_address varchar, _date timestamp) + returns bool + language plpgsql +as $$ + declare + _account accounts; + begin + insert into accounts(address, address_array, insertion_date) + values (_address, to_json(string_to_array(_address, ':')), _date) + on conflict do nothing + returning * into _account; + + return _account is not null; + end; +$$; + 
+create function get_account(_account_address varchar, _before timestamp default null) + returns setof accounts_metadata + language sql + stable +as $$ + select distinct on (address) * + from accounts_metadata t + where (_before is null or t.date <= _before) + and t.address = _account_address + order by address, revision desc + limit 1; +$$; + +create function get_transaction(_id numeric, _before timestamp default null) + returns setof transactions + language sql + stable +as $$ + select * + from transactions t + where (_before is null or t.timestamp <= _before) and t.id = _id + order by id desc + limit 1; +$$; + +-- a simple 'select distinct asset from moves' would be more simple +-- but Postgres is extremely inefficient with distinct +-- so the query implementation use a "hack" to emulate skip scan feature which Postgres lack natively +-- see https://wiki.postgresql.org/wiki/Loose_indexscan for more information +create function get_all_assets() + returns setof varchar + language sql +as $$ + with recursive t as ( + select min(asset) as asset + from moves + union all + select ( + select min(asset) + from moves + where asset > t.asset + ) + from t + where t.asset is not null + ) + select asset from t where asset is not null + union all + select null where exists(select 1 from moves where asset is null) +$$; + +create function get_latest_move_for_account_and_asset(_account_address varchar, _asset varchar, _before timestamp default null) + returns setof moves + language sql + stable +as $$ + select * + from moves s + where (_before is null or s.effective_date <= _before) and s.account_address = _account_address and s.asset = _asset + order by effective_date desc, seq desc + limit 1; +$$; + +create function update_account_metadata(_address varchar, _metadata jsonb, _date timestamp) + returns void + language sql +as $$ + select insert_new_account(_address, _date); + + insert into accounts_metadata (address, metadata, date, revision) + ( + select _address, 
accounts_metadata.metadata || _metadata, _date, accounts_metadata.revision + 1 + from accounts_metadata + where address = _address + order by revision desc + limit 1 + ) + union all -- if no metdata + select _address, _metadata, _date, 0 + limit 1; +$$; + +create function delete_account_metadata(_address varchar, _key varchar, _date timestamp) + returns void + language sql +as $$ + insert into accounts_metadata (address, metadata, date, revision) + select _address, accounts_metadata.metadata - _key, _date, accounts_metadata.revision + 1 + from accounts_metadata + where address = _address + order by revision desc + limit 1 +$$; + +create function update_transaction_metadata(_id numeric, _metadata jsonb, _date timestamp) + returns void + language sql +as $$ + insert into transactions_metadata (transaction_id, metadata, date, revision) + ( + select originalTX.transaction_id, + originalTX.metadata || _metadata, + _date, + originalTX.revision + 1 + from transactions_metadata originalTX + where transaction_id = _id + order by revision desc + limit 1 + ) + union all ( + select _id, '{}'::jsonb, null, -1 + ) + limit 1 +$$; + +create function delete_transaction_metadata(_id numeric, _key varchar, _date timestamp) + returns void + language sql +as $$ + insert into transactions_metadata (transaction_id, metadata, date, revision) + select originalTX.transaction_id, + originalTX.metadata - _key, + _date, + originalTX.revision + 1 + from transactions_metadata originalTX + where transaction_id = _id + order by revision desc + limit 1; +$$; + +create function revert_transaction(_id numeric, _date timestamp) + returns void + language sql +as $$ + update transactions + set reverted_at = _date + where id = _id; +$$; + +create or replace function insert_move(_transaction_id numeric, _insertion_date timestamp without time zone, + _effective_date timestamp without time zone, _account_address varchar, _asset varchar, _amount numeric, _is_source bool, _new_account bool) + returns void + 
language plpgsql +as $$ + declare + _post_commit_volumes volumes = (0, 0)::volumes; + _effective_post_commit_volumes volumes = (0, 0)::volumes; + _seq numeric; + begin + + -- todo: lock if we enable parallelism + -- perform * + -- from accounts + -- where address = _account_address + -- for update; + + if not _new_account then + select (post_commit_volumes).inputs, (post_commit_volumes).outputs into _post_commit_volumes + from moves + where account_address = _account_address + and asset = _asset + order by seq desc + limit 1; + + if not found then + _post_commit_volumes = (0, 0)::volumes; + _effective_post_commit_volumes = (0, 0)::volumes; + else + select (post_commit_effective_volumes).inputs, (post_commit_effective_volumes).outputs into _effective_post_commit_volumes + from moves + where account_address = _account_address + and asset = _asset and effective_date <= _effective_date + order by effective_date desc, seq desc + limit 1; + end if; + end if; + + if _is_source then + _post_commit_volumes.outputs = _post_commit_volumes.outputs + _amount; + _effective_post_commit_volumes.outputs = _effective_post_commit_volumes.outputs + _amount; + else + _post_commit_volumes.inputs = _post_commit_volumes.inputs + _amount; + _effective_post_commit_volumes.inputs = _effective_post_commit_volumes.inputs + _amount; + end if; + + insert into moves ( + insertion_date, + effective_date, + account_address, + asset, + transaction_id, + amount, + is_source, + account_address_array, + post_commit_volumes, + post_commit_effective_volumes + ) values (_insertion_date, _effective_date, _account_address, _asset, _transaction_id, + _amount, _is_source, (select to_json(string_to_array(_account_address, ':'))), + _post_commit_volumes, _effective_post_commit_volumes) + returning seq into _seq; + + if not _new_account then + update moves + set post_commit_effective_volumes = ( + (post_commit_effective_volumes).inputs + case when _is_source then 0 else _amount end, + 
(post_commit_effective_volumes).outputs + case when _is_source then _amount else 0 end + ) + where account_address = _account_address and asset = _asset and effective_date > _effective_date; + + update moves + set post_commit_effective_volumes = ( + (post_commit_effective_volumes).inputs + case when _is_source then 0 else _amount end, + (post_commit_effective_volumes).outputs + case when _is_source then _amount else 0 end + ) + where account_address = _account_address and asset = _asset and effective_date = _effective_date and seq > _seq; + end if; + end; +$$; + +create function insert_posting(_transaction_id numeric, _insertion_date timestamp without time zone, _effective_date timestamp without time zone, posting jsonb) + returns void + language plpgsql +as $$ + declare + source_created bool; + destination_created bool; + begin + select insert_new_account(posting->>'source', _insertion_date) into source_created; + select insert_new_account(posting->>'destination', _insertion_date) into destination_created; + + -- todo: sometimes the balance is known at commit time (for sources != world), we need to forward the value to populate the pre_commit_aggregated_input and output + perform insert_move(_transaction_id, _insertion_date, _effective_date, + posting->>'source', posting->>'asset', (posting->>'amount')::numeric, true, source_created); + perform insert_move(_transaction_id, _insertion_date, _effective_date, + posting->>'destination', posting->>'asset', (posting->>'amount')::numeric, false, destination_created); + end; +$$; + +-- todo: maybe we could avoid plpgsql functions +create function insert_transaction(data jsonb, _date timestamp without time zone) + returns void + language plpgsql +as $$ + declare + posting jsonb; + begin + insert into transactions (id, timestamp, reference, postings) + values ((data->>'id')::numeric, + (data->>'timestamp')::timestamp without time zone, + data->>'reference', + jsonb_pretty(data->'postings')); + + for posting in (select 
jsonb_array_elements(data->'postings')) loop + -- todo: sometimes the balance is known at commit time (for sources != world), we need to forward the value to populate the pre_commit_aggregated_input and output + perform insert_posting((data->>'id')::numeric, _date, (data->>'timestamp')::timestamp without time zone, posting); + end loop; + + if data->'metadata' is not null and data->>'metadata' <> '()' then + insert into transactions_metadata (transaction_id, revision, date, metadata) values ( + (data->>'id')::numeric, + 0, + (data->>'timestamp')::timestamp without time zone, + coalesce(data->'metadata', '{}'::jsonb) + ); + end if; + end +$$; + +create function handle_log() returns trigger + security definer + language plpgsql +as $$ + declare + _key varchar; + _value jsonb; + begin + if new.type = 'NEW_TRANSACTION' then + perform insert_transaction(new.data->'transaction', new.date); + for _key, _value in (select * from jsonb_each_text(new.data->'accountMetadata')) loop + perform update_account_metadata(_key, _value, (new.data->'transaction'->>'timestamp')::timestamp); + end loop; + end if; + if new.type = 'REVERTED_TRANSACTION' then + perform insert_transaction(new.data->'transaction', new.date); + perform revert_transaction((new.data->>'revertedTransactionID')::numeric, (new.data->'transaction'->>'timestamp')::timestamp); + end if; + if new.type = 'SET_METADATA' then + if new.data->>'targetType' = 'TRANSACTION' then + perform update_transaction_metadata((new.data->>'targetId')::numeric, new.data->'metadata', new.date); + else + perform update_account_metadata((new.data->>'targetId')::varchar, new.data ->'metadata', new.date); + end if; + end if; + if new.type = 'DELETE_METADATA' then + if new.data->>'targetType' = 'TRANSACTION' then + perform delete_transaction_metadata((new.data->>'targetId')::numeric, new.data->>'key', new.date); + else + perform delete_account_metadata((new.data->>'targetId')::varchar, new.data ->>'key', new.date); + end if; + end if; + + 
return new; + end; +$$; + +/** Define the trigger which populate table in response to new logs **/ +create trigger insert_log after insert on logs + for each row execute procedure handle_log(); + +create or replace function get_all_account_effective_volumes(_account varchar, _before timestamp default null) + returns setof volumes_with_asset + language sql + stable +as $$ + with + all_assets as ( + select v.v as asset + from get_all_assets() v + ), + moves as ( + select m.* + from all_assets assets + join lateral ( + select * + from moves s + where (_before is null or s.effective_date <= _before) and s.account_address = _account and s.asset = assets.asset + order by effective_date desc, seq desc + limit 1 + ) m on true + ) + select moves.asset, moves.post_commit_effective_volumes + from moves +$$; + +create or replace function get_all_account_volumes(_account varchar, _before timestamp default null) + returns setof volumes_with_asset + language sql + stable +as $$ + with + all_assets as ( + select v.v as asset + from get_all_assets() v + ), + moves as ( + select m.* + from all_assets assets + join lateral ( + select * + from moves s + where (_before is null or s.insertion_date <= _before) and s.account_address = _account and s.asset = assets.asset + order by seq desc + limit 1 + ) m on true + ) + select moves.asset, moves.post_commit_volumes + from moves +$$; + +create function volumes_to_jsonb(v volumes_with_asset) + returns jsonb + language sql + immutable +as $$ + select ('{"' || v.asset || '": {"input": ' || (v.volumes).inputs || ', "output": ' || (v.volumes).outputs || '}}')::jsonb +$$; + +create function get_account_aggregated_effective_volumes(_account_address varchar, _before timestamp default null) + returns jsonb + language sql + stable +as $$ + select aggregate_objects(volumes_to_jsonb(volumes_with_asset)) + from get_all_account_effective_volumes(_account_address, _before := _before) volumes_with_asset +$$; + +create function 
get_account_aggregated_volumes(_account_address varchar, _before timestamp default null) + returns jsonb + language sql + stable + parallel safe +as $$ + select aggregate_objects(volumes_to_jsonb(volumes_with_asset)) + from get_all_account_volumes(_account_address, _before := _before) volumes_with_asset +$$; + +create function get_account_balance(_account varchar, _asset varchar, _before timestamp default null) + returns numeric + language sql + stable +as $$ + select (post_commit_volumes).inputs - (post_commit_volumes).outputs + from moves s + where (_before is null or s.effective_date <= _before) and s.account_address = _account and s.asset = _asset + order by seq desc + limit 1 +$$; + +create function aggregate_ledger_volumes( + _before timestamp default null, + _accounts varchar[] default null, + _assets varchar[] default null +) + returns setof volumes_with_asset + language sql + stable +as $$ + with + moves as ( + select distinct on (m.account_address, m.asset) m.* + from moves m + where (_before is null or m.effective_date <= _before) and + (_accounts is null or account_address = any(_accounts)) and + (_assets is null or asset = any(_assets)) + order by account_address, asset, m.seq desc + ) + select v.asset, (sum((v.post_commit_effective_volumes).inputs), sum((v.post_commit_effective_volumes).outputs)) + from moves v + group by v.asset +$$; + +create function get_aggregated_effective_volumes_for_transaction(tx transactions) returns jsonb + stable + language sql +as +$$ +select aggregate_objects(jsonb_build_object(data.account_address, data.aggregated)) +from ( + select distinct on (move.account_address, move.asset) move.account_address, + volumes_to_jsonb((move.asset, first(move.post_commit_effective_volumes))) as aggregated + from moves move + where move.transaction_id = tx.id + group by move.account_address, move.asset +) data +$$; + +create function get_aggregated_volumes_for_transaction(tx transactions) returns jsonb + stable + language sql +as +$$ 
+select aggregate_objects(jsonb_build_object(data.account_address, data.aggregated)) +from ( + select distinct on (move.account_address, move.asset) move.account_address, + volumes_to_jsonb((move.asset, first(move.post_commit_volumes))) as aggregated + from moves move + where move.transaction_id = tx.id + group by move.account_address, move.asset +) data +$$; diff --git a/internal/storage/ledgerstore/migrations_v1.go b/internal/storage/ledgerstore/migrations_v1.go new file mode 100644 index 000000000..bca7f14ba --- /dev/null +++ b/internal/storage/ledgerstore/migrations_v1.go @@ -0,0 +1,200 @@ +package ledgerstore + +import ( + "context" + "encoding/json" + "fmt" + "math/big" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/lib/pq" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +var ( + batchSize uint64 = 10000 + oldSchemaRenameSuffix = "_save_v2_0_0" +) + +type LogV1 struct { + ID uint64 `bun:"id,unique,type:bigint"` + Type string `bun:"type,type:varchar"` + Hash string `bun:"hash,type:varchar"` + Date ledger.Time `bun:"date,type:timestamptz"` + Data json.RawMessage `bun:"data,type:jsonb"` +} + +func readLogsRange( + ctx context.Context, + schema string, + sqlTx bun.Tx, + idMin, idMax uint64, +) ([]LogV1, error) { + rawLogs := make([]LogV1, 0) + if err := sqlTx. + NewSelect(). + Table(fmt.Sprintf(`%s.log`, schema)). + Where("id >= ?", idMin). + Where("id < ?", idMax). 
+ Scan(ctx, &rawLogs); err != nil { + return nil, err + } + + return rawLogs, nil +} + +func convertMetadata(ret map[string]any) map[string]any { + oldMetadata := ret["metadata"].(map[string]any) + newMetadata := make(map[string]string) + for k, v := range oldMetadata { + switch v := v.(type) { + case map[string]any: + if len(v) == 2 && v["type"] != nil && v["value"] != nil { + switch v["type"] { + case "asset", "string", "account": + newMetadata[k] = v["value"].(string) + case "monetary": + newMetadata[k] = fmt.Sprintf("%s %d", + v["value"].(map[string]any)["asset"].(string), + int(v["value"].(map[string]any)["amount"].(float64)), + ) + case "portion": + newMetadata[k] = v["value"].(map[string]any)["specific"].(string) + case "number": + newMetadata[k] = fmt.Sprint(v["value"]) + } + } else { + newMetadata[k] = fmt.Sprint(v) + } + default: + newMetadata[k] = fmt.Sprint(v) + } + } + ret["metadata"] = newMetadata + + return ret +} + +func convertTransaction(ret map[string]any) map[string]any { + ret = convertMetadata(ret) + ret["id"] = ret["txid"] + delete(ret, "txid") + + return ret +} + +func (l *LogV1) ToLogsV2() (Logs, error) { + logType := ledger.LogTypeFromString(l.Type) + + ret := make(map[string]any) + if err := json.Unmarshal(l.Data, &ret); err != nil { + panic(err) + } + + var data any + switch logType { + case ledger.NewTransactionLogType: + data = map[string]any{ + "transaction": convertTransaction(ret), + "accountMetadata": map[string]any{}, + } + case ledger.SetMetadataLogType: + data = convertMetadata(ret) + case ledger.RevertedTransactionLogType: + data = l.Data + default: + panic("unknown type " + logType.String()) + } + + asJson, err := json.Marshal(data) + if err != nil { + panic(err) + } + + return Logs{ + ID: (*paginate.BigInt)(big.NewInt(int64(l.ID))), + Type: logType.String(), + Hash: []byte(l.Hash), + Date: l.Date, + Data: asJson, + }, nil +} + +func batchLogs( + ctx context.Context, + schema string, + sqlTx bun.Tx, + logs []Logs, +) error { + 
// Beware: COPY query is not supported by bun if the pgx driver is used.
+	stmt, err := sqlTx.PrepareContext(ctx, pq.CopyInSchema(
+		schema,
+		"logs",
+		"id", "type", "hash", "date", "data",
+	))
+	if err != nil {
+		return err
+	}
+
+	for _, l := range logs {
+		_, err = stmt.ExecContext(ctx, l.ID, l.Type, l.Hash, l.Date, RawMessage(l.Data))
+		if err != nil {
+			return err
+		}
+	}
+
+	_, err = stmt.ExecContext(ctx)
+	if err != nil {
+		return err
+	}
+
+	err = stmt.Close()
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func migrateLogs(
+	ctx context.Context,
+	schemaV1Name string,
+	schemaV2Name string,
+	sqlTx bun.Tx,
+) error {
+
+	var idMin uint64
+	var idMax = idMin + batchSize
+	for {
+		logs, err := readLogsRange(ctx, schemaV1Name, sqlTx, idMin, idMax)
+		if err != nil {
+			return errors.Wrap(err, "reading logs from old table")
+		}
+
+		if len(logs) == 0 {
+			break
+		}
+
+		logsV2 := make([]Logs, 0, len(logs))
+		for _, l := range logs {
+			logV2, err := l.ToLogsV2()
+			if err != nil {
+				return err
+			}
+
+			logsV2 = append(logsV2, logV2)
+		}
+
+		err = batchLogs(ctx, schemaV2Name, sqlTx, logsV2)
+		if err != nil {
+			return err
+		}
+
+		idMin = idMax
+		idMax = idMin + batchSize
+	}
+
+	return nil
+}
diff --git a/internal/storage/ledgerstore/store.go b/internal/storage/ledgerstore/store.go
new file mode 100644
index 000000000..935ef572d
--- /dev/null
+++ b/internal/storage/ledgerstore/store.go
+package ledgerstore
+
+import (
+	"context"
+	"database/sql"
+	"fmt"
+
+	"github.com/formancehq/ledger/internal/storage"
+	_ "github.com/jackc/pgx/v5/stdlib"
+	"github.com/pkg/errors"
+	"github.com/uptrace/bun"
+)
+
+type Store struct {
+	db       *bun.DB
+	onDelete func(ctx context.Context) error
+
+	name string
+}
+
+func (store *Store) Name() string {
+	return store.name
+}
+
+func (store *Store) GetDatabase() *bun.DB {
+	return store.db
+}
+
+func (store *Store) Delete(ctx context.Context) error {
+	_, err := store.db.ExecContext(ctx, "drop schema ? 
cascade", store.name) + if err != nil { + return err + } + return errors.Wrap(store.onDelete(ctx), "deleting ledger store") +} + +func (store *Store) prepareTransaction(ctx context.Context) (bun.Tx, error) { + txOptions := &sql.TxOptions{} + + tx, err := store.db.BeginTx(ctx, txOptions) + if err != nil { + return tx, err + } + if _, err := tx.Exec(fmt.Sprintf(`set search_path = "%s"`, store.Name())); err != nil { + return tx, err + } + return tx, nil +} + +func (store *Store) withTransaction(ctx context.Context, callback func(tx bun.Tx) error) error { + tx, err := store.prepareTransaction(ctx) + if err != nil { + return err + } + if err := callback(tx); err != nil { + _ = tx.Rollback() + return storage.PostgresError(err) + } + return tx.Commit() +} + +func (store *Store) IsSchemaUpToDate(ctx context.Context) (bool, error) { + return store.getMigrator().IsUpToDate(ctx, store.db) +} + +func New( + db *bun.DB, + name string, + onDelete func(ctx context.Context) error, +) (*Store, error) { + return &Store{ + db: db, + name: name, + onDelete: onDelete, + }, nil +} diff --git a/internal/storage/ledgerstore/store_test.go b/internal/storage/ledgerstore/store_test.go new file mode 100644 index 000000000..a336af022 --- /dev/null +++ b/internal/storage/ledgerstore/store_test.go @@ -0,0 +1,35 @@ +package ledgerstore_test + +import ( + "context" + "testing" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/stretchr/testify/require" +) + +func TestInitializeStore(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + modified, err := store.Migrate(context.Background()) + require.NoError(t, err) + require.False(t, modified) + + migrationInfos, err := store.GetMigrationsInfo(context.Background()) + require.NoError(t, err) + require.Len(t, migrationInfos, 1) +} + +// TODO: remove that 
+func insertTransactions(ctx context.Context, s *ledgerstore.Store, txs ...ledger.Transaction) error { + var previous *ledger.ChainedLog + logs := collectionutils.Map(txs, func(from ledger.Transaction) *ledger.ChainedLog { + previous = ledger.NewTransactionLog(&from, map[string]metadata.Metadata{}).ChainLog(previous) + return previous + }) + return s.InsertLogs(ctx, logs...) +} diff --git a/internal/storage/ledgerstore/transactions.go b/internal/storage/ledgerstore/transactions.go new file mode 100644 index 000000000..b0d6e286a --- /dev/null +++ b/internal/storage/ledgerstore/transactions.go @@ -0,0 +1,322 @@ +package ledgerstore + +import ( + "context" + "database/sql/driver" + "encoding/json" + "errors" + "fmt" + "math/big" + "regexp" + "strings" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/uptrace/bun" +) + +const ( + MovesTableName = "moves" +) + +type Transaction struct { + bun.BaseModel `bun:"transactions,alias:transactions"` + + ID *paginate.BigInt `bun:"id,type:numeric"` + Timestamp ledger.Time `bun:"timestamp,type:timestamp without time zone"` + Reference string `bun:"reference,type:varchar,unique,nullzero"` + Postings []ledger.Posting `bun:"postings,type:jsonb"` + Metadata metadata.Metadata `bun:"metadata,type:jsonb,default:'{}'"` + PostCommitEffectiveVolumes ledger.AccountsAssetsVolumes `bun:"post_commit_effective_volumes,type:jsonb"` + PostCommitVolumes ledger.AccountsAssetsVolumes `bun:"post_commit_volumes,type:jsonb"` + RevertedAt *ledger.Time `bun:"reverted_at"` + LastUpdate *ledger.Time `bun:"last_update"` +} + +func (t *Transaction) toCore() *ledger.ExpandedTransaction { + var ( + preCommitEffectiveVolumes ledger.AccountsAssetsVolumes + preCommitVolumes ledger.AccountsAssetsVolumes + ) + if 
t.PostCommitEffectiveVolumes != nil { + preCommitEffectiveVolumes = t.PostCommitEffectiveVolumes.Copy() + for _, posting := range t.Postings { + preCommitEffectiveVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + preCommitEffectiveVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + } + } + if t.PostCommitVolumes != nil { + preCommitVolumes = t.PostCommitVolumes.Copy() + for _, posting := range t.Postings { + preCommitVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + preCommitVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + } + } + return &ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + TransactionData: ledger.TransactionData{ + Reference: t.Reference, + Metadata: t.Metadata, + Timestamp: t.Timestamp, + Postings: t.Postings, + }, + ID: (*big.Int)(t.ID), + Reverted: t.RevertedAt != nil && !t.RevertedAt.IsZero(), + }, + PreCommitEffectiveVolumes: preCommitEffectiveVolumes, + PostCommitEffectiveVolumes: t.PostCommitEffectiveVolumes, + PreCommitVolumes: preCommitVolumes, + PostCommitVolumes: t.PostCommitVolumes, + } +} + +type account string + +var _ driver.Valuer = account("") + +func (m1 account) Value() (driver.Value, error) { + ret, err := json.Marshal(strings.Split(string(m1), ":")) + if err != nil { + return nil, err + } + return string(ret), nil +} + +// Scan - Implement the database/sql scanner interface +func (m1 *account) Scan(value interface{}) error { + if value == nil { + return nil + } + v, err := driver.String.ConvertValue(value) + if err != nil { + return err + } + + array := make([]string, 0) + switch vv := v.(type) { + case []uint8: + err = json.Unmarshal(vv, &array) + case string: + err = json.Unmarshal([]byte(vv), &array) + default: + panic("not handled type") + } + if err != nil { + return err + } + *m1 = account(strings.Join(array, ":")) + return nil +} + +func (store *Store) 
buildTransactionQuery(p PITFilterWithVolumes, query *bun.SelectQuery) *bun.SelectQuery { + + selectMetadata := query.NewSelect(). + Table("transactions_metadata"). + Where("transactions.id = transactions_metadata.transaction_id"). + Order("revision desc"). + Limit(1) + + if p.PIT != nil && !p.PIT.IsZero() { + selectMetadata = selectMetadata.Where("date <= ?", p.PIT) + } + + query = query. + Table("transactions"). + ColumnExpr("distinct on(transactions.id) transactions.*, transactions_metadata.metadata"). + Join("join moves m on transactions.id = m.transaction_id"). + Join(fmt.Sprintf(`left join lateral (%s) as transactions_metadata on true`, selectMetadata.String())) + + if p.PIT != nil && !p.PIT.IsZero() { + query = query. + Where("timestamp <= ?", p.PIT). + ColumnExpr(fmt.Sprintf("case when reverted_at is not null and reverted_at > '%s' then null else reverted_at end", p.PIT.Format(ledger.DateFormat))) + } + + if p.ExpandEffectiveVolumes { + query = query.ColumnExpr("get_aggregated_effective_volumes_for_transaction(transactions) as post_commit_effective_volumes") + } + if p.ExpandVolumes { + query = query.ColumnExpr("get_aggregated_volumes_for_transaction(transactions) as post_commit_volumes") + } + return query +} + +func (store *Store) transactionQueryContext(qb query.Builder) (string, []any, error) { + metadataRegex := regexp.MustCompile("metadata\\[(.+)\\]") + + return qb.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "reference" || key == "timestamp": + return fmt.Sprintf("%s %s ?", key, query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil + case key == "account": + // TODO: Should allow comparison operator only if segments not used + if operator != "$match" { + return "", nil, errors.New("'account' column can only be used with $match") + } + switch address := value.(type) { + case string: + return filterAccountAddress(address, "m.account_address"), nil, nil + default: + return 
"", nil, fmt.Errorf("unexpected type %T for column 'account'", address) + } + case key == "source": + // TODO: Should allow comparison operator only if segments not used + if operator != "$match" { + return "", nil, errors.New("'source' column can only be used with $match") + } + switch address := value.(type) { + case string: + return fmt.Sprintf("(%s) and m.is_source", filterAccountAddress(address, "m.account_address")), nil, nil + default: + return "", nil, fmt.Errorf("unexpected type %T for column 'source'", address) + } + case key == "destination": + // TODO: Should allow comparison operator only if segments not used + if operator != "$match" { + return "", nil, errors.New("'destination' column can only be used with $match") + } + switch address := value.(type) { + case string: + return fmt.Sprintf("(%s) and not m.is_source", filterAccountAddress(address, "m.account_address")), nil, nil + default: + return "", nil, fmt.Errorf("unexpected type %T for column 'destination'", address) + } + case metadataRegex.Match([]byte(key)): + if operator != "$match" { + return "", nil, errors.New("'account' column can only be used with $match") + } + match := metadataRegex.FindAllStringSubmatch(key, 3) + + return "metadata @> ?", []any{map[string]any{ + match[0][1]: value, + }}, nil + default: + return "", nil, fmt.Errorf("unknown key '%s' when building query", key) + } + })) +} + +func (store *Store) buildTransactionListQuery(selectQuery *bun.SelectQuery, q PaginatedQueryOptions[PITFilterWithVolumes]) *bun.SelectQuery { + + selectQuery = store.buildTransactionQuery(q.Options, selectQuery) + + if q.QueryBuilder != nil { + where, args, err := store.transactionQueryContext(q.QueryBuilder) + if err != nil { + // TODO: handle error + panic(err) + } + return selectQuery.Where(where, args...) 
+ } + + return selectQuery +} + +func (store *Store) GetTransactions(ctx context.Context, q *GetTransactionsQuery) (*api.Cursor[ledger.ExpandedTransaction], error) { + transactions, err := paginateWithColumn[PaginatedQueryOptions[PITFilterWithVolumes], Transaction](store, ctx, + (*paginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]])(q), + func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildTransactionListQuery(query, q.Options) + }, + ) + if err != nil { + return nil, err + } + return api.MapCursor(transactions, func(from Transaction) ledger.ExpandedTransaction { + return *from.toCore() + }), nil +} + +func (store *Store) CountTransactions(ctx context.Context, q *GetTransactionsQuery) (uint64, error) { + return count(store, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildTransactionListQuery(query, q.Options) + }) +} + +func (store *Store) GetTransactionWithVolumes(ctx context.Context, filter GetTransactionQuery) (*ledger.ExpandedTransaction, error) { + return fetchAndMap[*Transaction, *ledger.ExpandedTransaction](store, ctx, + (*Transaction).toCore, + func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildTransactionQuery(filter.PITFilterWithVolumes, query). + Where("transactions.id = ?", filter.ID). + Limit(1) + }) +} + +func (store *Store) GetTransaction(ctx context.Context, txId *big.Int) (*ledger.Transaction, error) { + return fetch[*ledger.Transaction](store, ctx, + func(query *bun.SelectQuery) *bun.SelectQuery { + return query. + Table("transactions"). + ColumnExpr(`transactions.id, transactions.reference, transactions.postings, transactions.timestamp, transactions.reverted_at, tm.metadata`). + Join("left join transactions_metadata tm on tm.transaction_id = transactions.id"). + Where("transactions.id = ?", (*paginate.BigInt)(txId)). + Order("tm.revision desc"). 
+ Limit(1) + }) +} + +func (store *Store) GetTransactionByReference(ctx context.Context, ref string) (*ledger.ExpandedTransaction, error) { + return fetchAndMap[*Transaction, *ledger.ExpandedTransaction](store, ctx, + (*Transaction).toCore, + func(query *bun.SelectQuery) *bun.SelectQuery { + return query. + Table("transactions"). + ColumnExpr(`transactions.id, transactions.reference, transactions.postings, transactions.timestamp, transactions.reverted_at, tm.metadata`). + Join("left join transactions_metadata tm on tm.transaction_id = transactions.id"). + Where("transactions.reference = ?", ref). + Order("tm.revision desc"). + Limit(1) + }) +} + +func (store *Store) GetLastTransaction(ctx context.Context) (*ledger.ExpandedTransaction, error) { + return fetchAndMap[*Transaction, *ledger.ExpandedTransaction](store, ctx, + (*Transaction).toCore, + func(query *bun.SelectQuery) *bun.SelectQuery { + return query. + Table("transactions"). + ColumnExpr(`transactions.id, transactions.reference, transactions.postings, transactions.timestamp, transactions.reverted_at, tm.metadata`). + Join("left join transactions_metadata tm on tm.transaction_id = transactions.id"). + Order("transactions.id desc", "tm.revision desc"). 
+ Limit(1) + }) +} + +type GetTransactionsQuery paginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] + +func NewGetTransactionsQuery(options PaginatedQueryOptions[PITFilterWithVolumes]) *GetTransactionsQuery { + return &GetTransactionsQuery{ + PageSize: options.PageSize, + Column: "id", + Order: paginate.OrderDesc, + Options: options, + } +} + +type GetTransactionQuery struct { + PITFilterWithVolumes + ID *big.Int +} + +func (q GetTransactionQuery) WithExpandVolumes() GetTransactionQuery { + q.ExpandVolumes = true + + return q +} + +func (q GetTransactionQuery) WithExpandEffectiveVolumes() GetTransactionQuery { + q.ExpandEffectiveVolumes = true + + return q +} + +func NewGetTransactionQuery(id *big.Int) GetTransactionQuery { + return GetTransactionQuery{ + PITFilterWithVolumes: PITFilterWithVolumes{}, + ID: id, + } +} diff --git a/internal/storage/ledgerstore/transactions_test.go b/internal/storage/ledgerstore/transactions_test.go new file mode 100644 index 000000000..f1fd6d358 --- /dev/null +++ b/internal/storage/ledgerstore/transactions_test.go @@ -0,0 +1,1024 @@ +package ledgerstore_test + +import ( + "context" + "math/big" + "testing" + "time" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/pointer" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/query" + internaltesting "github.com/formancehq/ledger/internal/testing" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/stretchr/testify/require" +) + +func expandLogs(logs ...*ledger.Log) []ledger.ExpandedTransaction { + ret := make([]ledger.ExpandedTransaction, 0) + accumulatedVolumes := ledger.AccountsAssetsVolumes{} + + appendTx := func(tx *ledger.Transaction) { + expandedTx := &ledger.ExpandedTransaction{ + Transaction: *tx, + } + for _, posting := range 
tx.Postings { + expandedTx.PreCommitVolumes.AddInput(posting.Destination, posting.Asset, accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Input) + expandedTx.PreCommitVolumes.AddOutput(posting.Destination, posting.Asset, accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Output) + expandedTx.PreCommitVolumes.AddOutput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Output) + expandedTx.PreCommitVolumes.AddInput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Input) + } + for _, posting := range tx.Postings { + accumulatedVolumes.AddOutput(posting.Source, posting.Asset, posting.Amount) + accumulatedVolumes.AddInput(posting.Destination, posting.Asset, posting.Amount) + } + for _, posting := range tx.Postings { + expandedTx.PostCommitVolumes.AddInput(posting.Destination, posting.Asset, accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Input) + expandedTx.PostCommitVolumes.AddOutput(posting.Destination, posting.Asset, accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Output) + expandedTx.PostCommitVolumes.AddOutput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Output) + expandedTx.PostCommitVolumes.AddInput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Input) + } + ret = append(ret, *expandedTx) + } + + for _, log := range logs { + switch payload := log.Data.(type) { + case ledger.NewTransactionLogPayload: + appendTx(payload.Transaction) + case ledger.RevertedTransactionLogPayload: + appendTx(payload.RevertTransaction) + ret[payload.RevertedTransactionID.Uint64()].Reverted = true + case ledger.SetMetadataLogPayload: + ret[payload.TargetID.(*big.Int).Uint64()].Metadata = ret[payload.TargetID.(*big.Int).Uint64()].Metadata.Merge(payload.Metadata) + } + } + + return ret +} + +func Reverse[T any](values ...T) []T { + ret := make([]T, 
len(values)) + for i := 0; i < len(values)/2; i++ { + ret[i], ret[len(values)-i-1] = values[len(values)-i-1], values[i] + } + if len(values)%2 == 1 { + ret[(len(values)-1)/2] = values[(len(values)-1)/2] + } + return ret +} + +func TestGetTransactionWithVolumes(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + ctx := logging.TestingContext() + + tx1 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx1", + Timestamp: now.Add(-3 * time.Hour), + }, + }, + PostCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, + PreCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + }, + } + tx2 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(1), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx2", + Timestamp: now.Add(-2 * time.Hour), + }, + }, + PostCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(200), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(200), + Output: big.NewInt(0), + }, + }, + }, + PreCommitVolumes: ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, + } + + 
require.NoError(t, insertTransactions(ctx, store, tx1.Transaction, tx2.Transaction)) + + tx, err := store.GetTransactionWithVolumes(ctx, ledgerstore.NewGetTransactionQuery(tx1.ID). + WithExpandVolumes(). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, tx1.Postings, tx.Postings) + require.Equal(t, tx1.Reference, tx.Reference) + require.Equal(t, tx1.Timestamp, tx.Timestamp) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, tx.PostCommitVolumes) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + }, + }, + }, tx.PreCommitVolumes) + + tx, err = store.GetTransactionWithVolumes(ctx, ledgerstore.NewGetTransactionQuery(tx2.ID). + WithExpandVolumes(). 
+ WithExpandEffectiveVolumes()) + require.Equal(t, tx2.Postings, tx.Postings) + require.Equal(t, tx2.Reference, tx.Reference) + require.Equal(t, tx2.Timestamp, tx.Timestamp) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(200), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(200), + Output: big.NewInt(0), + }, + }, + }, tx.PostCommitVolumes) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, tx.PreCommitVolumes) +} + +func TestGetTransaction(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx1", + Timestamp: now.Add(-3 * time.Hour), + }, + } + tx2 := ledger.Transaction{ + ID: big.NewInt(1), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx2", + Timestamp: now.Add(-2 * time.Hour), + }, + } + + require.NoError(t, insertTransactions(context.Background(), store, tx1, tx2)) + + tx, err := store.GetTransaction(context.Background(), tx1.ID) + require.NoError(t, err) + require.Equal(t, tx1.Postings, tx.Postings) + require.Equal(t, tx1.Reference, tx.Reference) + require.Equal(t, tx1.Timestamp, tx.Timestamp) +} + +func TestGetTransactionByReference(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: 
"world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx1", + Timestamp: now.Add(-3 * time.Hour), + }, + } + tx2 := ledger.Transaction{ + ID: big.NewInt(1), + TransactionData: ledger.TransactionData{ + Postings: []ledger.Posting{ + { + Source: "world", + Destination: "central_bank", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Reference: "tx2", + Timestamp: now.Add(-2 * time.Hour), + }, + } + + require.NoError(t, insertTransactions(context.Background(), store, tx1, tx2)) + + tx, err := store.GetTransactionByReference(context.Background(), "tx1") + require.NoError(t, err) + require.Equal(t, tx1.Postings, tx.Postings) + require.Equal(t, tx1.Reference, tx.Reference) + require.Equal(t, tx1.Timestamp, tx.Timestamp) +} + +func TestInsertTransactions(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + t.Run("success inserting transaction", func(t *testing.T) { + tx1 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Timestamp: now.Add(-3 * time.Hour), + Metadata: metadata.Metadata{}, + }, + }, + PreCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + "alice": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + }, + PostCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), + }, + "alice": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithInputInt64(100), + }, + }, + } + + err := insertTransactions(context.Background(), store, tx1.Transaction) + require.NoError(t, err, "inserting transaction should not fail") + + tx, err := 
store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(big.NewInt(0)).
+			WithExpandVolumes())
+		require.NoError(t, err, "getting transaction should not fail")
+		internaltesting.RequireEqual(t, tx1, *tx)
+	})
+
+	t.Run("success inserting multiple transactions", func(t *testing.T) {
+		tx2 := ledger.ExpandedTransaction{
+			Transaction: ledger.Transaction{
+				ID: big.NewInt(1),
+				TransactionData: ledger.TransactionData{
+					Postings: ledger.Postings{
+						{
+							Source:      "world",
+							Destination: "polo",
+							Amount:      big.NewInt(200),
+							Asset:       "USD",
+						},
+					},
+					Timestamp: now.Add(-2 * time.Hour),
+					Metadata:  metadata.Metadata{},
+				},
+			},
+			PreCommitVolumes: map[string]ledger.VolumesByAssets{
+				"world": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes().WithOutputInt64(100),
+				},
+				"polo": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes(),
+				},
+			},
+			PostCommitVolumes: map[string]ledger.VolumesByAssets{
+				"world": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes().WithOutputInt64(300),
+				},
+				"polo": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes().WithInputInt64(200),
+				},
+			},
+		}
+
+		tx3 := ledger.ExpandedTransaction{
+			Transaction: ledger.Transaction{
+				ID: big.NewInt(2),
+				TransactionData: ledger.TransactionData{
+					Postings: ledger.Postings{
+						{
+							Source:      "world",
+							Destination: "gfyrag",
+							Amount:      big.NewInt(150),
+							Asset:       "USD",
+						},
+					},
+					Timestamp: now.Add(-1 * time.Hour),
+					Metadata:  metadata.Metadata{},
+				},
+			},
+			PreCommitVolumes: map[string]ledger.VolumesByAssets{
+				"world": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes().WithOutputInt64(300),
+				},
+				"gfyrag": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes(),
+				},
+			},
+			PostCommitVolumes: map[string]ledger.VolumesByAssets{
+				"world": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes().WithOutputInt64(450),
+				},
+				"gfyrag": map[string]*ledger.Volumes{
+					"USD": ledger.NewEmptyVolumes().WithInputInt64(150),
+				},
+			},
+		}
+
+		require.NoError(t,
store.InsertLogs(context.Background(), + ledger.NewTransactionLog(&tx2.Transaction, map[string]metadata.Metadata{}).ChainLog(nil).WithID(2), + ledger.NewTransactionLog(&tx3.Transaction, map[string]metadata.Metadata{}).ChainLog(nil).WithID(3), + )) + + tx, err := store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(big.NewInt(1)).WithExpandVolumes()) + require.NoError(t, err, "getting transaction should not fail") + internaltesting.RequireEqual(t, tx2, *tx) + + tx, err = store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(big.NewInt(2)).WithExpandVolumes()) + require.NoError(t, err, "getting transaction should not fail") + internaltesting.RequireEqual(t, tx3, *tx) + }) +} + +func TestCountTransactions(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Timestamp: now.Add(-3 * time.Hour), + Metadata: metadata.Metadata{}, + }, + }, + PreCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + "alice": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + }, + PostCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), + }, + "alice": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithInputInt64(100), + }, + }, + } + tx2 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(1), + TransactionData: ledger.TransactionData{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "polo", + Amount: big.NewInt(200), + Asset: "USD", + }, + }, + Timestamp: now.Add(-2 * time.Hour), + 
Metadata: metadata.Metadata{}, + }, + }, + PreCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), + }, + "polo": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + }, + PostCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(300), + }, + "polo": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithInputInt64(200), + }, + }, + } + + tx3 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(2), + TransactionData: ledger.TransactionData{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "gfyrag", + Amount: big.NewInt(150), + Asset: "USD", + }, + }, + Timestamp: now.Add(-1 * time.Hour), + Metadata: metadata.Metadata{}, + }, + }, + PreCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(300), + }, + "gfyrag": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + }, + PostCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(450), + }, + "gfyrag": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithInputInt64(150), + }, + }, + } + + err := insertTransactions(context.Background(), store, tx1.Transaction, tx2.Transaction, tx3.Transaction) + require.NoError(t, err, "inserting transaction should not fail") + + count, err := store.CountTransactions(context.Background(), ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) + require.NoError(t, err, "counting transactions should not fail") + require.Equal(t, uint64(3), count, "count should be equal") +} + +func TestUpdateTransactionsMetadata(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := 
ledger.Now() + + tx1 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Timestamp: now.Add(-3 * time.Hour), + Metadata: metadata.Metadata{}, + }, + }, + PreCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + "alice": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + }, + PostCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), + }, + "alice": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithInputInt64(100), + }, + }, + } + tx2 := ledger.ExpandedTransaction{ + Transaction: ledger.Transaction{ + ID: big.NewInt(1), + TransactionData: ledger.TransactionData{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "polo", + Amount: big.NewInt(200), + Asset: "USD", + }, + }, + Timestamp: now.Add(-2 * time.Hour), + Metadata: metadata.Metadata{}, + }, + }, + PreCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), + }, + "polo": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes(), + }, + }, + PostCommitVolumes: map[string]ledger.VolumesByAssets{ + "world": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithOutputInt64(300), + }, + "polo": map[string]*ledger.Volumes{ + "USD": ledger.NewEmptyVolumes().WithInputInt64(200), + }, + }, + } + + err := insertTransactions(context.Background(), store, tx1.Transaction, tx2.Transaction) + require.NoError(t, err, "inserting transaction should not fail") + + err = store.InsertLogs(context.Background(), + ledger.NewSetMetadataOnTransactionLog(ledger.Now(), tx1.ID, metadata.Metadata{"foo1": 
"bar2"}).ChainLog(nil).WithID(3), + ledger.NewSetMetadataOnTransactionLog(ledger.Now(), tx2.ID, metadata.Metadata{"foo2": "bar2"}).ChainLog(nil).WithID(4), + ) + require.NoError(t, err, "updating multiple transaction metadata should not fail") + + tx, err := store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(big.NewInt(0)).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err, "getting transaction should not fail") + require.Equal(t, tx.Metadata, metadata.Metadata{"foo1": "bar2"}, "metadata should be equal") + + tx, err = store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(big.NewInt(1)).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err, "getting transaction should not fail") + require.Equal(t, tx.Metadata, metadata.Metadata{"foo2": "bar2"}, "metadata should be equal") +} + +func TestDeleteTransactionsMetadata(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.Transaction{ + ID: big.NewInt(0), + TransactionData: ledger.TransactionData{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "USD", + }, + }, + Timestamp: now.Add(-3 * time.Hour), + Metadata: metadata.Metadata{}, + }, + } + + require.NoError(t, store.InsertLogs(context.Background(), + ledger.NewTransactionLog(&tx1, map[string]metadata.Metadata{}).ChainLog(nil).WithID(1), + ledger.NewSetMetadataOnTransactionLog(ledger.Now(), tx1.ID, metadata.Metadata{"foo1": "bar1", "foo2": "bar2"}).ChainLog(nil).WithID(2), + )) + + tx, err := store.GetTransaction(context.Background(), tx1.ID) + require.NoError(t, err) + require.Equal(t, tx.Metadata, metadata.Metadata{"foo1": "bar1", "foo2": "bar2"}) + + require.NoError(t, store.InsertLogs(context.Background(), + ledger.NewDeleteMetadataLog(ledger.Now(), ledger.DeleteMetadataLogPayload{ + TargetType: ledger.MetaTargetTypeTransaction, + TargetID: 
tx1.ID, + Key: "foo1", + }).ChainLog(nil).WithID(3), + )) + + tx, err = store.GetTransaction(context.Background(), tx1.ID) + require.NoError(t, err) + require.Equal(t, metadata.Metadata{"foo2": "bar2"}, tx.Metadata) +} + +func TestInsertTransactionInPast(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + ).WithDate(now) + + tx2 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(50)), + ).WithDate(now.Add(time.Hour)).WithIDUint64(1) + + // Insert in past must modify pre/post commit volumes of tx2 + tx3 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(50)), + ).WithDate(now.Add(30 * time.Minute)).WithIDUint64(2) + + require.NoError(t, store.InsertLogs(context.Background(), + ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}).ChainLog(nil).WithID(1), + ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}).ChainLog(nil).WithID(2), + ledger.NewTransactionLog(tx3, map[string]metadata.Metadata{}).ChainLog(nil).WithID(3), + )) + + tx2FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err) + + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 50), + }, + "user1": { + "USD/2": ledger.NewVolumesInt64(0, 0), + }, + }, tx2FromDatabase.PreCommitEffectiveVolumes) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 100), + }, + "user1": { + "USD/2": ledger.NewVolumesInt64(50, 0), + }, + }, tx2FromDatabase.PostCommitEffectiveVolumes) +} + +func TestInsertTransactionInPastInOneBatch(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + 
tx1 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + ).WithDate(now) + + tx2 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(50)), + ).WithDate(now.Add(time.Hour)).WithIDUint64(1) + + // Insert in past must modify pre/post commit volumes of tx2 + tx3 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(50)), + ).WithDate(now.Add(30 * time.Minute)).WithIDUint64(2) + + require.NoError(t, insertTransactions(context.Background(), store, *tx1, *tx2, *tx3)) + + tx2FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err) + + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 50), + }, + "user1": { + "USD/2": ledger.NewVolumesInt64(0, 0), + }, + }, tx2FromDatabase.PreCommitEffectiveVolumes) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 100), + }, + "user1": { + "USD/2": ledger.NewVolumesInt64(50, 0), + }, + }, tx2FromDatabase.PostCommitEffectiveVolumes) +} + +func TestInsertTwoTransactionAtSameDateInSameBatch(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + ).WithDate(now.Add(-time.Hour)) + + tx2 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(10)), + ).WithDate(now).WithIDUint64(1) + + tx3 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(10)), + ).WithDate(now).WithIDUint64(2) + + require.NoError(t, insertTransactions(context.Background(), store, *tx1, *tx2, *tx3)) + + tx2FromDatabase, err := 
store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err) + + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 10), + }, + "user1": { + "USD/2": ledger.NewVolumesInt64(10, 0), + }, + }, tx2FromDatabase.PostCommitVolumes) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 0), + }, + "user1": { + "USD/2": ledger.NewVolumesInt64(0, 0), + }, + }, tx2FromDatabase.PreCommitVolumes) + + tx3FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(tx3.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err) + + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 10), + }, + "user2": { + "USD/2": ledger.NewVolumesInt64(0, 0), + }, + }, tx3FromDatabase.PreCommitVolumes) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 20), + }, + "user2": { + "USD/2": ledger.NewVolumesInt64(10, 0), + }, + }, tx3FromDatabase.PostCommitVolumes) +} + +func TestInsertTwoTransactionAtSameDateInTwoBatch(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + + tx1 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + ).WithDate(now.Add(-time.Hour)) + + tx2 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(10)), + ).WithDate(now).WithIDUint64(1) + + require.NoError(t, insertTransactions(context.Background(), store, *tx1, *tx2)) + + tx3 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(10)), + ).WithDate(now).WithIDUint64(2) + + require.NoError(t, store.InsertLogs(context.Background(), + 
ledger.NewTransactionLog(tx3, map[string]metadata.Metadata{}).ChainLog(nil).WithID(3), + )) + + tx3FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), ledgerstore.NewGetTransactionQuery(tx3.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err) + + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 10), + }, + "user2": { + "USD/2": ledger.NewVolumesInt64(0, 0), + }, + }, tx3FromDatabase.PreCommitVolumes) + internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(100, 20), + }, + "user2": { + "USD/2": ledger.NewVolumesInt64(10, 0), + }, + }, tx3FromDatabase.PostCommitVolumes) +} + +func TestListTransactions(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := ledger.Now() + ctx := logging.TestingContext() + + tx1 := ledger.NewTransaction(). + WithIDUint64(0). + WithPostings( + ledger.NewPosting("world", "alice", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "1"}). + WithDate(now.Add(-3 * time.Hour)) + tx2 := ledger.NewTransaction(). + WithIDUint64(1). + WithPostings( + ledger.NewPosting("world", "bob", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "2"}). + WithDate(now.Add(-2 * time.Hour)) + tx3 := ledger.NewTransaction(). + WithIDUint64(2). + WithPostings( + ledger.NewPosting("world", "users:marley", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "3"}). + WithDate(now.Add(-time.Hour)) + tx4 := ledger.NewTransaction(). + WithIDUint64(3). + WithPostings( + ledger.NewPosting("users:marley", "world", "USD", big.NewInt(100)), + ). 
+ WithDate(now) + + logs := []*ledger.Log{ + ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}), + ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}), + ledger.NewTransactionLog(tx3, map[string]metadata.Metadata{}), + ledger.NewRevertedTransactionLog(ledger.Now(), tx3.ID, tx4), + ledger.NewSetMetadataOnTransactionLog(ledger.Now(), tx3.ID, metadata.Metadata{ + "additional_metadata": "true", + }), + } + + require.NoError(t, store.InsertLogs(ctx, ledger.ChainLogs(logs...)...)) + + type testCase struct { + name string + query ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] + expected *api.Cursor[ledger.ExpandedTransaction] + } + testCases := []testCase{ + { + name: "nominal", + query: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), + expected: &api.Cursor[ledger.ExpandedTransaction]{ + PageSize: 15, + HasMore: false, + Data: Reverse(expandLogs(logs...)...), + }, + }, + { + name: "address filter", + query: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "bob")), + expected: &api.Cursor[ledger.ExpandedTransaction]{ + PageSize: 15, + HasMore: false, + Data: expandLogs(logs...)[1:2], + }, + }, + { + name: "address filter using segment", + query: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "users:")), + expected: &api.Cursor[ledger.ExpandedTransaction]{ + PageSize: 15, + HasMore: false, + Data: Reverse(expandLogs(logs...)[2:]...), + }, + }, + { + name: "filter using metadata", + query: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("metadata[category]", "2")), + expected: &api.Cursor[ledger.ExpandedTransaction]{ + PageSize: 15, + HasMore: false, + Data: expandLogs(logs...)[1:2], + }, + }, + { + name: "using point in time", + query: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ + PITFilter: ledgerstore.PITFilter{ + PIT: pointer.For(now.Add(-time.Hour)), + }, + }), + expected: &api.Cursor[ledger.ExpandedTransaction]{ + PageSize: 15, + HasMore: false, + Data: Reverse(expandLogs(logs[:3]...)...), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + tc.query.Options.ExpandVolumes = true + tc.query.Options.ExpandEffectiveVolumes = false + cursor, err := store.GetTransactions(ctx, ledgerstore.NewGetTransactionsQuery(tc.query)) + require.NoError(t, err) + internaltesting.RequireEqual(t, *tc.expected, *cursor) + + count, err := store.CountTransactions(ctx, ledgerstore.NewGetTransactionsQuery(tc.query)) + require.NoError(t, err) + require.EqualValues(t, len(tc.expected.Data), count) + }) + } +} diff --git a/internal/storage/ledgerstore/utils.go b/internal/storage/ledgerstore/utils.go new file mode 100644 index 000000000..217280211 --- /dev/null +++ b/internal/storage/ledgerstore/utils.go @@ -0,0 +1,171 @@ +package ledgerstore + +import ( + "context" + "fmt" + "reflect" + "strings" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/ledger/internal/storage/query" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/uptrace/bun" +) + +func fetch[T any](s *Store, ctx context.Context, builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (T, error) { + var ret T + ret = reflect.New(reflect.TypeOf(ret).Elem()).Interface().(T) + err := s.withTransaction(ctx, func(tx bun.Tx) error { + query := s.db.NewSelect().Conn(tx) + for _, builder := range builders { + query 
= query.Apply(builder) + } + if query.GetTableName() == "" && query.GetModel() == nil { + //query = query.Model(ret) + } + + return storage.PostgresError(query.Scan(ctx, ret)) + }) + return ret, err +} + +func fetchAndMap[T any, TO any](s *Store, ctx context.Context, + mapper func(T) TO, + builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (TO, error) { + ret, err := fetch[T](s, ctx, builders...) + if err != nil { + var zero TO + return zero, storage.PostgresError(err) + } + return mapper(ret), nil +} + +func paginateWithOffset[FILTERS any, RETURN any](s *Store, ctx context.Context, + q *paginate.OffsetPaginatedQuery[FILTERS], builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (*api.Cursor[RETURN], error) { + tx, err := s.prepareTransaction(ctx) + if err != nil { + return nil, err + } + defer tx.Rollback() + + var ret RETURN + query := s.db.NewSelect().Conn(tx) + for _, builder := range builders { + query = query.Apply(builder) + } + if query.GetModel() == nil && query.GetTableName() == "" { + query = query.Model(ret) + } + + return paginate.UsingOffset[FILTERS, RETURN](ctx, query, *q) +} + +func paginateWithColumn[FILTERS any, RETURN any](s *Store, ctx context.Context, q *paginate.ColumnPaginatedQuery[FILTERS], builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (*api.Cursor[RETURN], error) { + tx, err := s.prepareTransaction(ctx) + if err != nil { + return nil, err + } + defer tx.Rollback() + + query := s.db.NewSelect().Conn(tx) + for _, builder := range builders { + query = query.Apply(builder) + } + + return paginate.UsingColumn[FILTERS, RETURN](ctx, query, *q) +} + +func count(s *Store, ctx context.Context, builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (uint64, error) { + var ( + count int + err error + ) + if err := s.withTransaction(ctx, func(tx bun.Tx) error { + query := s.db.NewSelect() + for _, builder := range builders { + query = query.Apply(builder) + } + count, err = s.db.NewSelect(). 
+ TableExpr("(" + query.String() + ") data"). + Conn(tx). + Count(ctx) + return err + }); err != nil { + return 0, err + } + return uint64(count), nil +} + +func filterAccountAddress(address, key string) string { + parts := make([]string, 0) + src := strings.Split(address, ":") + + needSegmentCheck := false + for _, segment := range src { + needSegmentCheck = segment == "" + if needSegmentCheck { + break + } + } + + if needSegmentCheck { + parts = append(parts, fmt.Sprintf("jsonb_array_length(%s_array) = %d", key, len(src))) + + for i, segment := range src { + if len(segment) == 0 { + continue + } + parts = append(parts, fmt.Sprintf("%s_array @@ ('$[%d] == \"%s\"')::jsonpath", key, i, segment)) + } + } else { + parts = append(parts, fmt.Sprintf("%s = '%s'", key, address)) + } + + return strings.Join(parts, " and ") +} + +func filterPIT(pit *ledger.Time, column string) func(query *bun.SelectQuery) *bun.SelectQuery { + return func(query *bun.SelectQuery) *bun.SelectQuery { + if pit == nil || pit.IsZero() { + return query + } + return query.Where(fmt.Sprintf("%s <= ?", column), pit) + } +} + +type PaginatedQueryOptions[T any] struct { + QueryBuilder query.Builder `json:"qb"` + PageSize uint64 `json:"pageSize"` + Options T `json:"options"` +} + +func (opts PaginatedQueryOptions[T]) WithQueryBuilder(qb query.Builder) PaginatedQueryOptions[T] { + opts.QueryBuilder = qb + + return opts +} + +func (opts PaginatedQueryOptions[T]) WithPageSize(pageSize uint64) PaginatedQueryOptions[T] { + opts.PageSize = pageSize + + return opts +} + +func NewPaginatedQueryOptions[T any](options T) PaginatedQueryOptions[T] { + return PaginatedQueryOptions[T]{ + Options: options, + PageSize: paginate.QueryDefaultPageSize, + } +} + +type PITFilter struct { + PIT *ledger.Time `json:"pit"` +} + +type PITFilterWithVolumes struct { + PITFilter + ExpandVolumes bool `json:"volumes"` + ExpandEffectiveVolumes bool `json:"effectiveVolumes"` +} diff --git a/internal/storage/paginate/bigint.go 
b/internal/storage/paginate/bigint.go new file mode 100644 index 000000000..29d4693a8 --- /dev/null +++ b/internal/storage/paginate/bigint.go @@ -0,0 +1,92 @@ +package paginate + +import ( + "database/sql" + "database/sql/driver" + "encoding/json" + "fmt" + "math/big" +) + +type BigInt big.Int + +func (i *BigInt) MarshalJSON() ([]byte, error) { + return json.Marshal(i.ToMathBig()) +} + +func (i *BigInt) UnmarshalJSON(bytes []byte) error { + v, err := i.FromString(string(bytes)) + if err != nil { + return err + } + *i = *v + return nil +} + +func NewInt() *BigInt { + return new(BigInt) +} +func newBigint(x *big.Int) *BigInt { + return (*BigInt)(x) +} + +// same as NewBigint() +func FromMathBig(x *big.Int) *BigInt { + return (*BigInt)(x) +} + +func FromInt64(x int64) *BigInt { + return FromMathBig(big.NewInt(x)) +} + +func (i *BigInt) FromString(x string) (*BigInt, error) { + if x == "" { + return FromInt64(0), nil + } + a := big.NewInt(0) + b, ok := a.SetString(x, 10) + + if !ok { + return nil, fmt.Errorf("cannot create Int from string") + } + + return newBigint(b), nil +} + +func (b *BigInt) Value() (driver.Value, error) { + return (*big.Int)(b).String(), nil +} + +func (b *BigInt) Set(v *BigInt) *BigInt { + return (*BigInt)((*big.Int)(b).Set((*big.Int)(v))) +} + +func (b *BigInt) Sub(x *BigInt, y *BigInt) *BigInt { + return (*BigInt)((*big.Int)(b).Sub((*big.Int)(x), (*big.Int)(y))) +} + +func (b *BigInt) Scan(value interface{}) error { + + var i sql.NullString + + if err := i.Scan(value); err != nil { + return err + } + + if _, ok := (*big.Int)(b).SetString(i.String, 10); ok { + return nil + } + + return fmt.Errorf("Error converting type %T into Bigint", value) +} + +func (b *BigInt) ToMathBig() *big.Int { + return (*big.Int)(b) +} + +func (i *BigInt) Cmp(bottom *BigInt) int { + return (*big.Int)(i).Cmp((*big.Int)(bottom)) +} + +var _ json.Unmarshaler = (*BigInt)(nil) +var _ json.Marshaler = (*BigInt)(nil) diff --git a/internal/storage/paginate/main_test.go 
b/internal/storage/paginate/main_test.go new file mode 100644 index 000000000..20bab814d --- /dev/null +++ b/internal/storage/paginate/main_test.go @@ -0,0 +1,22 @@ +package paginate_test + +import ( + "os" + "testing" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/pgtesting" +) + +func TestMain(m *testing.M) { + if err := pgtesting.CreatePostgresServer(); err != nil { + logging.Error(err) + os.Exit(1) + } + + code := m.Run() + if err := pgtesting.DestroyPostgresServer(); err != nil { + logging.Error(err) + } + os.Exit(code) +} diff --git a/internal/storage/paginate/pagination.go b/internal/storage/paginate/pagination.go new file mode 100644 index 000000000..b05411162 --- /dev/null +++ b/internal/storage/paginate/pagination.go @@ -0,0 +1,99 @@ +package paginate + +import ( + "encoding/base64" + "encoding/json" + "math/big" +) + +const ( + OrderAsc = iota + OrderDesc + + QueryDefaultPageSize = 15 +) + +type Order int + +func (o Order) String() string { + switch o { + case OrderAsc: + return "ASC" + case OrderDesc: + return "DESC" + } + panic("should not happen") +} + +func (o Order) Reverse() Order { + return (o + 1) % 2 +} + +type ColumnPaginatedQuery[OPTIONS any] struct { + PageSize uint64 `json:"pageSize"` + Bottom *big.Int `json:"bottom"` + Column string `json:"column"` + PaginationID *big.Int `json:"paginationID"` + Order Order `json:"order"` + Options OPTIONS `json:"filters"` + Reverse bool `json:"reverse"` +} + +func (q *ColumnPaginatedQuery[PAYLOAD]) EncodeAsCursor() string { + return encodeCursor(q) +} + +func (a *ColumnPaginatedQuery[PAYLOAD]) WithPageSize(pageSize uint64) *ColumnPaginatedQuery[PAYLOAD] { + if pageSize != 0 { + a.PageSize = pageSize + } + + return a +} + +type OffsetPaginatedQuery[OPTIONS any] struct { + Offset uint64 `json:"offset"` + Order Order `json:"order"` + PageSize uint64 `json:"pageSize"` + Options OPTIONS `json:"filters"` +} + +func (q *OffsetPaginatedQuery[PAYLOAD]) 
EncodeAsCursor() string { + return encodeCursor(q) +} + +func (a *OffsetPaginatedQuery[PAYLOAD]) WithPageSize(pageSize uint64) *OffsetPaginatedQuery[PAYLOAD] { + if pageSize != 0 { + a.PageSize = pageSize + } + + return a +} + +func encodeCursor[T any](v *T) string { + if v == nil { + return "" + } + return EncodeCursor(*v) +} + +func EncodeCursor[T any](v T) string { + data, err := json.Marshal(v) + if err != nil { + panic(err) + } + return base64.RawURLEncoding.EncodeToString(data) +} + +func UnmarshalCursor(v string, to any) error { + res, err := base64.RawURLEncoding.DecodeString(v) + if err != nil { + return err + } + + if err := json.Unmarshal(res, &to); err != nil { + return err + } + + return nil +} diff --git a/internal/storage/paginate/pagination_column.go b/internal/storage/paginate/pagination_column.go new file mode 100644 index 000000000..1860e1f90 --- /dev/null +++ b/internal/storage/paginate/pagination_column.go @@ -0,0 +1,118 @@ +package paginate + +import ( + "context" + "fmt" + "math/big" + "reflect" + "strings" + + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/uptrace/bun" +) + +func UsingColumn[FILTERS any, ENTITY any](ctx context.Context, + sb *bun.SelectQuery, + query ColumnPaginatedQuery[FILTERS]) (*api.Cursor[ENTITY], error) { + ret := make([]ENTITY, 0) + + sb = sb.Limit(int(query.PageSize) + 1) // Fetch one additional item to find the next token + order := query.Order + if query.Reverse { + order = order.Reverse() + } + sb = sb.OrderExpr(fmt.Sprintf("%s %s", query.Column, order)) + + if query.PaginationID != nil { + if query.Reverse { + switch query.Order { + case OrderAsc: + sb = sb.Where(fmt.Sprintf("%s < ?", query.Column), query.PaginationID) + case OrderDesc: + sb = sb.Where(fmt.Sprintf("%s > ?", query.Column), query.PaginationID) + } + } else { + switch query.Order { + case OrderAsc: + sb = sb.Where(fmt.Sprintf("%s >= ?", query.Column), query.PaginationID) + 
case OrderDesc: + sb = sb.Where(fmt.Sprintf("%s <= ?", query.Column), query.PaginationID) + } + } + } + + if err := sb.Scan(ctx, &ret); err != nil { + return nil, storageerrors.PostgresError(err) + } + var ( + paginatedColumnIndex = 0 + ) + typeOfT := reflect.TypeOf(ret).Elem() + for ; paginatedColumnIndex < typeOfT.NumField(); paginatedColumnIndex++ { + field := typeOfT.Field(paginatedColumnIndex) + tag := field.Tag.Get("bun") + column := strings.Split(tag, ",")[0] + if column == query.Column { + break + } + } + + var ( + paginationIDs = make([]*BigInt, 0) + ) + for _, t := range ret { + paginationID := reflect.ValueOf(t). + Field(paginatedColumnIndex). + Interface().(*BigInt) + if query.Bottom == nil { + query.Bottom = (*big.Int)(paginationID) + } + paginationIDs = append(paginationIDs, paginationID) + } + + hasMore := len(ret) > int(query.PageSize) + if hasMore { + ret = ret[:len(ret)-1] + } + if query.Reverse { + for i := 0; i < len(ret)/2; i++ { + ret[i], ret[len(ret)-i-1] = ret[len(ret)-i-1], ret[i] + } + } + + var previous, next *ColumnPaginatedQuery[FILTERS] + + if query.Reverse { + cp := query + cp.Reverse = false + next = &cp + + if hasMore { + cp := query + cp.PaginationID = (*big.Int)(paginationIDs[len(paginationIDs)-2]) + previous = &cp + } + } else { + if hasMore { + cp := query + cp.PaginationID = (*big.Int)(paginationIDs[len(paginationIDs)-1]) + next = &cp + } + if query.PaginationID != nil { + if (query.Order == OrderAsc && query.PaginationID.Cmp(query.Bottom) > 0) || (query.Order == OrderDesc && query.PaginationID.Cmp(query.Bottom) < 0) { + cp := query + cp.Reverse = true + previous = &cp + } + } + } + + return &api.Cursor[ENTITY]{ + PageSize: int(query.PageSize), + HasMore: next != nil, + Previous: previous.EncodeAsCursor(), + Next: next.EncodeAsCursor(), + Data: ret, + }, nil +} diff --git a/internal/storage/paginate/pagination_column_test.go b/internal/storage/paginate/pagination_column_test.go new file mode 100644 index 000000000..2acaf5bae 
--- /dev/null +++ b/internal/storage/paginate/pagination_column_test.go @@ -0,0 +1,343 @@ +package paginate_test + +import ( + "context" + "math/big" + "testing" + + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/stack/libs/go-libs/pgtesting" + "github.com/stretchr/testify/require" +) + +func TestColumnPagination(t *testing.T) { + t.Parallel() + + pgServer := pgtesting.NewPostgresDatabase(t) + db, err := storage.OpenSQLDB(storage.ConnectionOptions{ + DatabaseSourceName: pgServer.ConnString(), + Debug: testing.Verbose(), + }) + require.NoError(t, err) + + _, err = db.Exec(` + CREATE TABLE "models" (id int, pair boolean); + `) + require.NoError(t, err) + + type model struct { + ID *paginate.BigInt `bun:"id,type:numeric"` + Pair bool `bun:"pair"` + } + + models := make([]model, 0) + for i := 0; i < 100; i++ { + models = append(models, model{ + ID: (*paginate.BigInt)(big.NewInt(int64(i))), + Pair: i%2 == 0, + }) + } + + _, err = db.NewInsert(). + Model(&models). 
+ Exec(context.Background()) + require.NoError(t, err) + + type testCase struct { + name string + query paginate.ColumnPaginatedQuery[bool] + expectedNext *paginate.ColumnPaginatedQuery[bool] + expectedPrevious *paginate.ColumnPaginatedQuery[bool] + expectedNumberOfItems int64 + } + testCases := []testCase{ + { + name: "asc first page", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + Order: paginate.OrderAsc, + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(10)), + Order: paginate.OrderAsc, + Bottom: big.NewInt(int64(0)), + }, + expectedNumberOfItems: 10, + }, + { + name: "asc second page using next cursor", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(10)), + Order: paginate.OrderAsc, + Bottom: big.NewInt(int64(0)), + }, + expectedPrevious: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + Order: paginate.OrderAsc, + Bottom: big.NewInt(int64(0)), + PaginationID: big.NewInt(int64(10)), + Reverse: true, + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(20)), + Order: paginate.OrderAsc, + Bottom: big.NewInt(int64(0)), + }, + expectedNumberOfItems: 10, + }, + { + name: "asc last page using next cursor", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(90)), + Order: paginate.OrderAsc, + Bottom: big.NewInt(int64(0)), + }, + expectedPrevious: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + Order: paginate.OrderAsc, + PaginationID: big.NewInt(int64(90)), + Bottom: big.NewInt(int64(0)), + Reverse: true, + }, + expectedNumberOfItems: 10, + }, + { + name: "desc first page", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + Order: paginate.OrderDesc, + }, + expectedNext: 
&paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", + PaginationID: big.NewInt(int64(89)), + Order: paginate.OrderDesc, + }, + expectedNumberOfItems: 10, + }, + { + name: "desc second page using next cursor", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", + PaginationID: big.NewInt(int64(89)), + Order: paginate.OrderDesc, + }, + expectedPrevious: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", + PaginationID: big.NewInt(int64(89)), + Order: paginate.OrderDesc, + Reverse: true, + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", + PaginationID: big.NewInt(int64(79)), + Order: paginate.OrderDesc, + }, + expectedNumberOfItems: 10, + }, + { + name: "desc last page using next cursor", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", + PaginationID: big.NewInt(int64(9)), + Order: paginate.OrderDesc, + }, + expectedPrevious: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", + PaginationID: big.NewInt(int64(9)), + Order: paginate.OrderDesc, + Reverse: true, + }, + expectedNumberOfItems: 10, + }, + { + name: "asc first page using previous cursor", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(0)), + Column: "id", + PaginationID: big.NewInt(int64(10)), + Order: paginate.OrderAsc, + Reverse: true, + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(0)), + Column: "id", + PaginationID: big.NewInt(int64(10)), + Order: paginate.OrderAsc, + }, + expectedNumberOfItems: 10, + }, + { + name: "desc first page using previous cursor", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", 
+ PaginationID: big.NewInt(int64(89)), + Order: paginate.OrderDesc, + Reverse: true, + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Bottom: big.NewInt(int64(99)), + Column: "id", + PaginationID: big.NewInt(int64(89)), + Order: paginate.OrderDesc, + }, + expectedNumberOfItems: 10, + }, + { + name: "asc first page with filter", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + Order: paginate.OrderAsc, + Options: true, + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(20)), + Order: paginate.OrderAsc, + Options: true, + Bottom: big.NewInt(int64(0)), + }, + expectedNumberOfItems: 10, + }, + { + name: "asc second page with filter", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(20)), + Order: paginate.OrderAsc, + Options: true, + Bottom: big.NewInt(int64(0)), + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(40)), + Order: paginate.OrderAsc, + Options: true, + Bottom: big.NewInt(int64(0)), + }, + expectedPrevious: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(20)), + Order: paginate.OrderAsc, + Options: true, + Bottom: big.NewInt(int64(0)), + Reverse: true, + }, + expectedNumberOfItems: 10, + }, + { + name: "desc first page with filter", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + Order: paginate.OrderDesc, + Options: true, + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(78)), + Order: paginate.OrderDesc, + Options: true, + Bottom: big.NewInt(int64(98)), + }, + expectedNumberOfItems: 10, + }, + { + name: "desc second page with filter", + query: paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: 
big.NewInt(int64(78)), + Order: paginate.OrderDesc, + Options: true, + Bottom: big.NewInt(int64(98)), + }, + expectedNext: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(58)), + Order: paginate.OrderDesc, + Options: true, + Bottom: big.NewInt(int64(98)), + }, + expectedPrevious: &paginate.ColumnPaginatedQuery[bool]{ + PageSize: 10, + Column: "id", + PaginationID: big.NewInt(int64(78)), + Order: paginate.OrderDesc, + Options: true, + Bottom: big.NewInt(int64(98)), + Reverse: true, + }, + expectedNumberOfItems: 10, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + models := make([]model, 0) + query := db.NewSelect().Model(&models).Column("id") + if tc.query.Options { + query = query.Where("pair = ?", true) + } + cursor, err := paginate.UsingColumn[bool, model](context.Background(), query, tc.query) + require.NoError(t, err) + + if tc.expectedNext == nil { + require.Empty(t, cursor.Next) + } else { + require.NotEmpty(t, cursor.Next) + + q := paginate.ColumnPaginatedQuery[bool]{} + require.NoError(t, paginate.UnmarshalCursor(cursor.Next, &q)) + require.EqualValues(t, *tc.expectedNext, q) + } + + if tc.expectedPrevious == nil { + require.Empty(t, cursor.Previous) + } else { + require.NotEmpty(t, cursor.Previous) + + q := paginate.ColumnPaginatedQuery[bool]{} + require.NoError(t, paginate.UnmarshalCursor(cursor.Previous, &q)) + require.EqualValues(t, *tc.expectedPrevious, q) + } + }) + } +} diff --git a/internal/storage/paginate/pagination_offset.go b/internal/storage/paginate/pagination_offset.go new file mode 100644 index 000000000..9630b3d01 --- /dev/null +++ b/internal/storage/paginate/pagination_offset.go @@ -0,0 +1,48 @@ +package paginate + +import ( + "context" + + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/uptrace/bun" +) + +func UsingOffset[Q any, T any](ctx context.Context, sb *bun.SelectQuery, query OffsetPaginatedQuery[Q]) (*api.Cursor[T], 
error) { + ret := make([]T, 0) + + sb = sb.Offset(int(query.Offset)) + sb = sb.Limit(int(query.PageSize) + 1) + + if err := sb.Scan(ctx, &ret); err != nil { + return nil, err + } + + var previous, next *OffsetPaginatedQuery[Q] + + // Page with transactions before + if query.Offset > 0 { + cp := query + offset := int(query.Offset) - int(query.PageSize) + if offset < 0 { + offset = 0 + } + cp.Offset = uint64(offset) + previous = &cp + } + + // Page with transactions after + if len(ret) > int(query.PageSize) { + cp := query + cp.Offset = query.Offset + query.PageSize + next = &cp + ret = ret[:len(ret)-1] + } + + return &api.Cursor[T]{ + PageSize: int(query.PageSize), + HasMore: next != nil, + Previous: previous.EncodeAsCursor(), + Next: next.EncodeAsCursor(), + Data: ret, + }, nil +} diff --git a/internal/storage/paginate/pagination_offset_test.go b/internal/storage/paginate/pagination_offset_test.go new file mode 100644 index 000000000..24abfed0d --- /dev/null +++ b/internal/storage/paginate/pagination_offset_test.go @@ -0,0 +1,170 @@ +package paginate_test + +import ( + "context" + "testing" + + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/paginate" + "github.com/formancehq/stack/libs/go-libs/pgtesting" + "github.com/stretchr/testify/require" +) + +func TestOffsetPagination(t *testing.T) { + t.Parallel() + + pgServer := pgtesting.NewPostgresDatabase(t) + db, err := storage.OpenSQLDB(storage.ConnectionOptions{ + DatabaseSourceName: pgServer.ConnString(), + Debug: testing.Verbose(), + }) + require.NoError(t, err) + + _, err = db.Exec(` + CREATE TABLE "models" (id int, pair boolean); + `) + require.NoError(t, err) + + type model struct { + ID uint64 `bun:"id"` + Pair bool `bun:"pair"` + } + + models := make([]model, 0) + for i := 0; i < 100; i++ { + models = append(models, model{ + ID: uint64(i), + Pair: i%2 == 0, + }) + } + + _, err = db.NewInsert(). + Model(&models). 
+ Exec(context.Background()) + require.NoError(t, err) + + type testCase struct { + name string + query paginate.OffsetPaginatedQuery[bool] + expectedNext *paginate.OffsetPaginatedQuery[bool] + expectedPrevious *paginate.OffsetPaginatedQuery[bool] + expectedNumberOfItems uint64 + } + testCases := []testCase{ + { + name: "asc first page", + query: paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Order: paginate.OrderAsc, + }, + expectedNext: &paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Offset: 10, + Order: paginate.OrderAsc, + }, + expectedNumberOfItems: 10, + }, + { + name: "asc second page using next cursor", + query: paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Offset: 10, + Order: paginate.OrderAsc, + }, + expectedPrevious: &paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Order: paginate.OrderAsc, + Offset: 0, + }, + expectedNext: &paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Order: paginate.OrderAsc, + Offset: 20, + }, + expectedNumberOfItems: 10, + }, + { + name: "asc last page using next cursor", + query: paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Offset: 90, + Order: paginate.OrderAsc, + }, + expectedPrevious: &paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Order: paginate.OrderAsc, + Offset: 80, + }, + expectedNumberOfItems: 10, + }, + { + name: "asc last page partial", + query: paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Offset: 95, + Order: paginate.OrderAsc, + }, + expectedPrevious: &paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Order: paginate.OrderAsc, + Offset: 85, + }, + expectedNumberOfItems: 10, + }, + { + name: "asc fist page partial", + query: paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Offset: 5, + Order: paginate.OrderAsc, + }, + expectedPrevious: &paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Order: paginate.OrderAsc, + Offset: 0, + }, + expectedNext: &paginate.OffsetPaginatedQuery[bool]{ + PageSize: 10, + Order: paginate.OrderAsc, + Offset: 
15, + }, + expectedNumberOfItems: 10, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + + query := db.NewSelect().Model(&models).Column("id") + if tc.query.Options { + query = query.Where("pair = ?", true) + } + cursor, err := paginate.UsingOffset[bool, model]( + context.Background(), + query, + tc.query) + require.NoError(t, err) + + if tc.expectedNext == nil { + require.Empty(t, cursor.Next) + } else { + require.NotEmpty(t, cursor.Next) + + q := paginate.OffsetPaginatedQuery[bool]{} + require.NoError(t, paginate.UnmarshalCursor(cursor.Next, &q)) + require.EqualValues(t, *tc.expectedNext, q) + } + + if tc.expectedPrevious == nil { + require.Empty(t, cursor.Previous) + } else { + require.NotEmpty(t, cursor.Previous) + + q := paginate.OffsetPaginatedQuery[bool]{} + require.NoError(t, paginate.UnmarshalCursor(cursor.Previous, &q)) + require.EqualValues(t, *tc.expectedPrevious, q) + } + }) + } +} diff --git a/internal/storage/query/expression.go b/internal/storage/query/expression.go new file mode 100644 index 000000000..47a2eb052 --- /dev/null +++ b/internal/storage/query/expression.go @@ -0,0 +1,223 @@ +package query + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/pkg/errors" +) + +type Context interface { + BuildMatcher(key, operator string, value any) (string, []any, error) +} +type ContextFn func(key, operator string, value any) (string, []any, error) + +func (fn ContextFn) BuildMatcher(key, operator string, value any) (string, []any, error) { + return fn(key, operator, value) +} + +type Builder interface { + Build(Context) (string, []any, error) +} + +type set struct { + operator string + items []Builder +} + +var _ Builder = (*set)(nil) + +func (set set) Build(ctx Context) (string, []any, error) { + clauses := make([]string, 0) + args := make([]any, 0) + for _, builder := range set.items { + clause, clauseArgs, err := builder.Build(ctx) + if err != nil { + return "", nil, err + } + clauses = 
append(clauses, clause) + args = append(args, clauseArgs...) + } + return "(" + strings.Join(clauses, fmt.Sprintf(") %s (", set.operator)) + ")", args, nil +} + +type keyValue struct { + operator string + key string + value any +} + +var _ Builder = (*keyValue)(nil) + +func (k keyValue) Build(ctx Context) (string, []any, error) { + return ctx.BuildMatcher(k.key, k.operator, k.value) +} + +type not struct { + expression Builder +} + +var _ Builder = (*not)(nil) + +func (n not) Build(context Context) (string, []any, error) { + sub, args, err := n.expression.Build(context) + if err != nil { + return "", nil, err + } + return fmt.Sprintf("not (%s)", sub), args, nil +} + +func Not(expr Builder) not { + return not{ + expression: expr, + } +} + +func Match(key string, value any) keyValue { + return keyValue{ + operator: "$match", + key: key, + value: value, + } +} + +func Or(items ...Builder) set { + return set{ + operator: "or", + items: items, + } +} + +func And(items ...Builder) set { + return set{ + operator: "and", + items: items, + } +} + +func Lt(key string, value any) keyValue { + return keyValue{ + operator: "$lt", + key: key, + value: value, + } +} + +func Lte(key string, value any) keyValue { + return keyValue{ + operator: "$lte", + key: key, + value: value, + } +} + +func Gt(key string, value any) keyValue { + return keyValue{ + operator: "$gt", + key: key, + value: value, + } +} + +func Gte(key string, value any) keyValue { + return keyValue{ + operator: "$gte", + key: key, + value: value, + } +} + +func singleKey(m map[string]any) (string, any, error) { + switch { + case len(m) == 0: + return "", nil, fmt.Errorf("expected single key, found none") + case len(m) > 1: + return "", nil, fmt.Errorf("expected single key, found more then one") + default: + var ( + key string + value any + ) + for key, value = range m { + } + return key, value, nil + } +} + +func parseSet(operator string, value any) (set, error) { + set := set{ + operator: operator[1:], + } + switch 
value := value.(type) { + case []any: + for ind, sub := range value { + switch sub := sub.(type) { + case map[string]any: + subExpression, err := mapMapToExpression(sub) + if err != nil { + return set, err + } + set.items = append(set.items, subExpression) + default: + return set, fmt.Errorf("unexpected type %T when decoding %s clause at index %d", value, operator, ind) + } + } + return set, nil + default: + return set, fmt.Errorf("unexpected type %T", value) + } +} + +func parseKeyValue(operator string, m any) (keyValue, error) { + kv := keyValue{ + operator: operator, + } + switch m := m.(type) { + case map[string]any: + key, value, err := singleKey(m) + if err != nil { + return kv, err + } + kv.key = key + kv.value = value + return kv, nil + default: + return kv, fmt.Errorf("unexpected type %T", m) + } +} + +func mapMapToExpression(m map[string]any) (Builder, error) { + operator, value, err := singleKey(m) + if err != nil { + return nil, err + } + switch operator { + case "$and", "$or": + and, err := parseSet(operator, value) + if err != nil { + return nil, errors.Wrap(err, "parsing $and") + } + return and, nil + case "$match", "$gte", "$lte", "$gt", "$lt": + match, err := parseKeyValue(operator, value) + if err != nil { + return nil, errors.Wrapf(err, "parsing %s", operator) + } + return match, nil + default: + return nil, fmt.Errorf("unexpected operator %s", operator) + } +} + +func ParseJSON(data string) (Builder, error) { + if len(data) == 0 { + return nil, nil + } + m := make(map[string]any) + if err := json.Unmarshal([]byte(data), &m); err != nil { + panic(err) + } + + return mapMapToExpression(m) +} diff --git a/internal/storage/query/expression_test.go b/internal/storage/query/expression_test.go new file mode 100644 index 000000000..8057630b3 --- /dev/null +++ b/internal/storage/query/expression_test.go @@ -0,0 +1,41 @@ +package query + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestParseExpression(t *testing.T) { 
+ json := `{ + "$and": [ + { + "$match": { + "account": "accounts::pending" + } + }, + { + "$or": [ + { + "$gte": { + "balance": 1000 + } + }, + { + "$match": { + "metadata[category]": "gold" + } + } + ] + } + ] +}` + expr, err := ParseJSON(json) + require.NoError(t, err) + + _, _, err = expr.Build(ContextFn(func(key, operator string, value any) (string, []any, error) { + return fmt.Sprintf("%s %s ?", key, DefaultComparisonOperatorsMapping[operator]), []any{value}, nil + })) + require.NoError(t, err) +} diff --git a/internal/storage/query/mapping.go b/internal/storage/query/mapping.go new file mode 100644 index 000000000..366a29508 --- /dev/null +++ b/internal/storage/query/mapping.go @@ -0,0 +1,9 @@ +package query + +var DefaultComparisonOperatorsMapping = map[string]string{ + "$match": "=", + "$gte": ">=", + "$gt": ">", + "$lte": "<=", + "$lt": "<", +} diff --git a/internal/storage/storagetesting/storage.go b/internal/storage/storagetesting/storage.go new file mode 100644 index 000000000..8edca816c --- /dev/null +++ b/internal/storage/storagetesting/storage.go @@ -0,0 +1,35 @@ +package storagetesting + +import ( + "context" + "testing" + "time" + + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/stack/libs/go-libs/pgtesting" + "github.com/stretchr/testify/require" +) + +func StorageDriver(t pgtesting.TestingT) *driver.Driver { + pgServer := pgtesting.NewPostgresDatabase(t) + + db, err := storage.OpenSQLDB(storage.ConnectionOptions{ + DatabaseSourceName: pgServer.ConnString(), + Debug: testing.Verbose(), + MaxIdleConns: 40, + MaxOpenConns: 40, + ConnMaxIdleTime: time.Minute, + }) + require.NoError(t, err) + + t.Cleanup(func() { + _ = db.Close() + }) + + d := driver.New(db) + + require.NoError(t, d.Initialize(context.Background())) + + return d +} diff --git a/internal/storage/systemstore/configuration.go b/internal/storage/systemstore/configuration.go new file mode 100644 index 
000000000..4a54cd6c7 --- /dev/null +++ b/internal/storage/systemstore/configuration.go @@ -0,0 +1,51 @@ +package systemstore + +import ( + "context" + + ledger "github.com/formancehq/ledger/internal" + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/uptrace/bun" +) + +type configuration struct { + bun.BaseModel `bun:"_system.configuration,alias:configuration"` + + Key string `bun:"key,type:varchar(255),pk"` // Primary key + Value string `bun:"value,type:text"` + AddedAt ledger.Time `bun:"addedAt,type:timestamp"` +} + +func (s *Store) GetConfiguration(ctx context.Context, key string) (string, error) { + query := s.db.NewSelect(). + Model((*configuration)(nil)). + Column("value"). + Where("key = ?", key). + Limit(1). + String() + + row := s.db.QueryRowContext(ctx, query) + if row.Err() != nil { + return "", storageerrors.PostgresError(row.Err()) + } + var value string + if err := row.Scan(&value); err != nil { + return "", storageerrors.PostgresError(err) + } + + return value, nil +} + +func (s *Store) InsertConfiguration(ctx context.Context, key, value string) error { + config := &configuration{ + Key: key, + Value: value, + AddedAt: ledger.Now(), + } + + _, err := s.db.NewInsert(). + Model(config). 
+ Exec(ctx) + + return storageerrors.PostgresError(err) +} diff --git a/internal/storage/systemstore/ledgers.go b/internal/storage/systemstore/ledgers.go new file mode 100644 index 000000000..942df66a6 --- /dev/null +++ b/internal/storage/systemstore/ledgers.go @@ -0,0 +1,92 @@ +package systemstore + +import ( + "context" + + ledger "github.com/formancehq/ledger/internal" + storageerrors "github.com/formancehq/ledger/internal/storage" + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +type Ledgers struct { + bun.BaseModel `bun:"_system.ledgers,alias:ledgers"` + + Ledger string `bun:"ledger,type:varchar(255),pk"` // Primary key + AddedAt ledger.Time `bun:"addedat,type:timestamp"` +} + +func (s *Store) ListLedgers(ctx context.Context) ([]string, error) { + query := s.db.NewSelect(). + Model((*Ledgers)(nil)). + Column("ledger"). + String() + + rows, err := s.db.QueryContext(ctx, query) + if err != nil { + return nil, storageerrors.PostgresError(err) + } + defer rows.Close() + + res := make([]string, 0) + for rows.Next() { + var ledger string + if err := rows.Scan(&ledger); err != nil { + return nil, storageerrors.PostgresError(err) + } + res = append(res, ledger) + } + return res, nil +} + +func (s *Store) DeleteLedger(ctx context.Context, name string) error { + _, err := s.db.NewDelete(). + Model((*Ledgers)(nil)). + Where("ledger = ?", name). + Exec(ctx) + + return errors.Wrap(storageerrors.PostgresError(err), "delete ledger from system store") +} + +func (s *Store) Register(ctx context.Context, ledgerName string) (bool, error) { + l := &Ledgers{ + Ledger: ledgerName, + AddedAt: ledger.Now(), + } + + ret, err := s.db.NewInsert(). + Model(l). + Ignore(). 
+ Exec(ctx) + if err != nil { + return false, storageerrors.PostgresError(err) + } + + affected, err := ret.RowsAffected() + if err != nil { + return false, storageerrors.PostgresError(err) + } + + return affected > 0, nil +} + +func (s *Store) Exists(ctx context.Context, ledger string) (bool, error) { + query := s.db.NewSelect(). + Model((*Ledgers)(nil)). + Column("ledger"). + Where("ledger = ?", ledger). + String() + + ret := s.db.QueryRowContext(ctx, query) + if ret.Err() != nil { + return false, nil + } + + var t string + _ = ret.Scan(&t) // Trigger close + + if t == "" { + return false, nil + } + return true, nil +} diff --git a/internal/storage/systemstore/migrations.go b/internal/storage/systemstore/migrations.go new file mode 100644 index 000000000..e572a38ee --- /dev/null +++ b/internal/storage/systemstore/migrations.go @@ -0,0 +1,34 @@ +package systemstore + +import ( + "context" + + "github.com/formancehq/ledger/internal/storage" + "github.com/formancehq/stack/libs/go-libs/migrations" + "github.com/uptrace/bun" +) + +func (s *Store) getMigrator() *migrations.Migrator { + migrator := migrations.NewMigrator(migrations.WithSchema("_system", true)) + migrator.RegisterMigrations( + migrations.Migration{ + Name: "Init schema", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.NewCreateTable(). + Model((*Ledgers)(nil)). + IfNotExists(). + Exec(ctx) + if err != nil { + return storage.PostgresError(err) + } + + _, err = s.db.NewCreateTable(). + Model((*configuration)(nil)). + IfNotExists(). 
+ Exec(ctx) + return storage.PostgresError(err) + }, + }, + ) + return migrator +} diff --git a/internal/storage/systemstore/store.go b/internal/storage/systemstore/store.go new file mode 100644 index 000000000..124f8824a --- /dev/null +++ b/internal/storage/systemstore/store.go @@ -0,0 +1,20 @@ +package systemstore + +import ( + "context" + + "github.com/formancehq/ledger/internal/storage" + "github.com/uptrace/bun" +) + +type Store struct { + db *bun.DB +} + +func NewStore(db *bun.DB) *Store { + return &Store{db: db} +} + +func (s *Store) Initialize(ctx context.Context) error { + return storage.PostgresError(s.getMigrator().Up(ctx, s.db)) +} diff --git a/internal/storage/utils.go b/internal/storage/utils.go new file mode 100644 index 000000000..c2721a8c3 --- /dev/null +++ b/internal/storage/utils.go @@ -0,0 +1,58 @@ +package storage + +import ( + "database/sql" + "fmt" + "io" + "time" + + "github.com/formancehq/stack/libs/go-libs/bun/bundebug" + "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/pgdialect" + "github.com/uptrace/bun/extra/bunotel" +) + +type ConnectionOptions struct { + DatabaseSourceName string + Debug bool + Writer io.Writer + MaxIdleConns int + MaxOpenConns int + ConnMaxIdleTime time.Duration +} + +func (opts ConnectionOptions) String() string { + return fmt.Sprintf("dsn=%s, debug=%v, max-idle-conns=%d, max-open-conns=%d, conn-max-idle-time=%s", + opts.DatabaseSourceName, opts.Debug, opts.MaxIdleConns, opts.MaxOpenConns, opts.ConnMaxIdleTime) +} + +func OpenSQLDB(options ConnectionOptions, hooks ...bun.QueryHook) (*bun.DB, error) { + sqldb, err := sql.Open("postgres", options.DatabaseSourceName) + if err != nil { + return nil, err + } + if options.MaxIdleConns != 0 { + sqldb.SetMaxIdleConns(options.MaxIdleConns) + } + if options.ConnMaxIdleTime != 0 { + sqldb.SetConnMaxIdleTime(options.ConnMaxIdleTime) + } + if options.MaxOpenConns != 0 { + sqldb.SetMaxOpenConns(options.MaxOpenConns) + } + + db := bun.NewDB(sqldb, pgdialect.New(), 
bun.WithDiscardUnknownColumns()) + if options.Debug { + db.AddQueryHook(bundebug.NewQueryHook()) + } + db.AddQueryHook(bunotel.NewQueryHook()) + for _, hook := range hooks { + db.AddQueryHook(hook) + } + + if err := db.Ping(); err != nil { + return nil, err + } + + return db, nil +} diff --git a/internal/testing/compare.go b/internal/testing/compare.go new file mode 100644 index 000000000..0e978c8c2 --- /dev/null +++ b/internal/testing/compare.go @@ -0,0 +1,20 @@ +package testing + +import ( + "math/big" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/require" +) + +func bigIntComparer(v1 *big.Int, v2 *big.Int) bool { + return v1.String() == v2.String() +} + +func RequireEqual(t *testing.T, expected, actual any) { + t.Helper() + if diff := cmp.Diff(expected, actual, cmp.Comparer(bigIntComparer)); diff != "" { + require.Failf(t, "Content not matching", diff) + } +} diff --git a/internal/time.go b/internal/time.go new file mode 100644 index 000000000..1361aee20 --- /dev/null +++ b/internal/time.go @@ -0,0 +1,117 @@ +package ledger + +import ( + "database/sql/driver" + "fmt" + "time" + + "github.com/pkg/errors" +) + +const ( + DatePrecision = time.Microsecond + DateFormat = time.RFC3339Nano +) + +type Time struct { + time.Time +} + +func (t *Time) Scan(src interface{}) (err error) { + switch src := src.(type) { + case time.Time: + *t = Time{ + Time: src.UTC(), + } + return nil + case string: + *t = Time{} + t.Time, err = time.ParseInLocation(DateFormat, src, time.UTC) + return err + case []byte: + *t = Time{} + t.Time, err = time.ParseInLocation(DateFormat, string(src), time.UTC) + return err + case nil: + *t = Time{} + t.Time = time.Time{} + return nil + default: + return fmt.Errorf("unsupported data type: %T", src) + } +} + +func (t Time) Value() (driver.Value, error) { + return t.Format(DateFormat), nil +} + +func (t Time) Before(t2 Time) bool { + return t.Time.Before(t2.Time) +} + +func (t Time) After(t2 Time) bool { + return 
t.Time.After(t2.Time) +} + +func (t Time) Sub(t2 Time) time.Duration { + return t.Time.Sub(t2.Time) +} + +func (t Time) Add(d time.Duration) Time { + return Time{ + Time: t.Time.Add(d), + } +} + +func (t Time) UTC() Time { + return Time{ + Time: t.Time.UTC(), + } +} + +func (t Time) Round(precision time.Duration) Time { + return Time{ + Time: t.Time.Round(precision), + } +} + +func (t Time) Equal(t2 Time) bool { + return t.Time.Equal(t2.Time) +} + +func (t Time) MarshalJSON() ([]byte, error) { + return []byte(fmt.Sprintf(`"%s"`, t.Format(DateFormat))), nil +} + +func (t *Time) UnmarshalJSON(data []byte) error { + if len(data) == 0 { + *t = Time{} + return nil + } + if data[0] != '"' || data[len(data)-1] != '"' { + return errors.New("invalid date format") + } + + parsed, err := ParseTime(string(data[1 : len(data)-1])) + if err != nil { + return err + } + *t = parsed + return nil +} + +func Now() Time { + return Time{ + Time: time.Now().UTC().Round(DatePrecision), + } +} + +func ParseTime(v string) (Time, error) { + t, err := time.Parse(DateFormat, v) + if err != nil { + return Time{}, err + } + return Time{ + Time: t.Round(DatePrecision), + }, nil +} diff --git a/internal/transaction.go b/internal/transaction.go new file mode 100644 index 000000000..b96173f07 --- /dev/null +++ b/internal/transaction.go @@ -0,0 +1,116 @@ +package ledger + +import ( + "math/big" + + "github.com/formancehq/stack/libs/go-libs/metadata" +) + +type Transactions struct { + Transactions []TransactionData `json:"transactions"` +} + +type TransactionData struct { + Postings Postings `json:"postings"` + Metadata metadata.Metadata `json:"metadata"` + Timestamp Time `json:"timestamp"` + Reference string `json:"reference"` +} + +func (d TransactionData) WithPostings(postings ...Posting) TransactionData { + d.Postings = append(d.Postings, postings...) 
+ return d +} + +func NewTransactionData() TransactionData { + return TransactionData{ + Metadata: metadata.Metadata{}, + } +} + +func (t *TransactionData) Reverse() TransactionData { + postings := make(Postings, len(t.Postings)) + copy(postings, t.Postings) + postings.Reverse() + + return TransactionData{ + Postings: postings, + } +} + +func (d TransactionData) WithDate(now Time) TransactionData { + d.Timestamp = now + + return d +} + +type Transaction struct { + TransactionData + ID *big.Int `json:"id"` + Reverted bool `json:"reverted"` +} + +func (t *Transaction) WithPostings(postings ...Posting) *Transaction { + t.TransactionData = t.TransactionData.WithPostings(postings...) + return t +} + +func (t *Transaction) WithReference(ref string) *Transaction { + t.Reference = ref + return t +} + +func (t *Transaction) WithDate(ts Time) *Transaction { + t.Timestamp = ts + return t +} + +func (t *Transaction) WithIDUint64(id uint64) *Transaction { + t.ID = big.NewInt(int64(id)) + return t +} + +func (t *Transaction) WithID(id *big.Int) *Transaction { + t.ID = id + return t +} + +func (t *Transaction) WithMetadata(m metadata.Metadata) *Transaction { + t.Metadata = m + return t +} + +func NewTransaction() *Transaction { + return &Transaction{ + ID: big.NewInt(0), + TransactionData: NewTransactionData(). 
+ WithDate(Now()), + } +} + +type ExpandedTransaction struct { + Transaction + PreCommitVolumes AccountsAssetsVolumes `json:"preCommitVolumes,omitempty"` + PostCommitVolumes AccountsAssetsVolumes `json:"postCommitVolumes,omitempty"` + PreCommitEffectiveVolumes AccountsAssetsVolumes `json:"preCommitEffectiveVolumes,omitempty"` + PostCommitEffectiveVolumes AccountsAssetsVolumes `json:"postCommitEffectiveVolumes,omitempty"` +} + +func (t *ExpandedTransaction) AppendPosting(p Posting) { + t.Postings = append(t.Postings, p) +} + +func ExpandTransaction(tx *Transaction, preCommitVolumes AccountsAssetsVolumes) ExpandedTransaction { + postCommitVolumes := preCommitVolumes.Copy() + for _, posting := range tx.Postings { + preCommitVolumes.AddInput(posting.Destination, posting.Asset, Zero) + preCommitVolumes.AddOutput(posting.Source, posting.Asset, Zero) + postCommitVolumes.AddOutput(posting.Source, posting.Asset, posting.Amount) + postCommitVolumes.AddInput(posting.Destination, posting.Asset, posting.Amount) + } + return ExpandedTransaction{ + Transaction: *tx, + PreCommitVolumes: preCommitVolumes, + PostCommitVolumes: postCommitVolumes, + } +} diff --git a/internal/transaction_test.go b/internal/transaction_test.go new file mode 100644 index 000000000..0263c8ca2 --- /dev/null +++ b/internal/transaction_test.go @@ -0,0 +1,154 @@ +package ledger + +import ( + "math/big" + "testing" + + "github.com/formancehq/stack/libs/go-libs/metadata" + "github.com/stretchr/testify/require" +) + +func TestReverseTransaction(t *testing.T) { + t.Run("1 posting", func(t *testing.T) { + tx := &ExpandedTransaction{ + Transaction: Transaction{ + TransactionData: TransactionData{ + Postings: Postings{ + { + Source: "world", + Destination: "users:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + Reference: "foo", + }, + }, + } + + expected := TransactionData{ + Postings: Postings{ + { + Source: "users:001", + Destination: "world", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + 
} + require.Equal(t, expected, tx.Reverse()) + }) + + t.Run("2 postings", func(t *testing.T) { + tx := &ExpandedTransaction{ + Transaction: Transaction{ + TransactionData: TransactionData{ + Postings: Postings{ + { + Source: "world", + Destination: "users:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + { + Source: "users:001", + Destination: "payments:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + Reference: "foo", + }, + }, + } + + expected := TransactionData{ + Postings: Postings{ + { + Source: "payments:001", + Destination: "users:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + { + Source: "users:001", + Destination: "world", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + } + require.Equal(t, expected, tx.Reverse()) + }) + + t.Run("3 postings", func(t *testing.T) { + tx := &ExpandedTransaction{ + Transaction: Transaction{ + TransactionData: TransactionData{ + Postings: Postings{ + { + Source: "world", + Destination: "users:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + { + Source: "users:001", + Destination: "payments:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + { + Source: "payments:001", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + Reference: "foo", + }, + }, + } + + expected := TransactionData{ + Postings: Postings{ + { + Source: "alice", + Destination: "payments:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + { + Source: "payments:001", + Destination: "users:001", + Amount: big.NewInt(100), + Asset: "COIN", + }, + { + Source: "users:001", + Destination: "world", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + } + require.Equal(t, expected, tx.Reverse()) + }) +} + +func BenchmarkHash(b *testing.B) { + logs := make([]ChainedLog, b.N) + var previous *ChainedLog + for i := 0; i < b.N; i++ { + newLog := NewTransactionLog(NewTransaction().WithPostings( + NewPosting("world", "bank", "USD", big.NewInt(100)), + ), 
map[string]metadata.Metadata{}).ChainLog(previous) + previous = newLog + logs = append(logs, *newLog) + } + + b.ResetTimer() + for i := 1; i < b.N; i++ { + logs[i].ComputeHash(&logs[i-1]) + } +} diff --git a/internal/volumes.go b/internal/volumes.go new file mode 100644 index 000000000..479422dd3 --- /dev/null +++ b/internal/volumes.go @@ -0,0 +1,249 @@ +package ledger + +import ( + "database/sql/driver" + "encoding/json" + "math/big" +) + +type Volumes struct { + Input *big.Int `json:"input"` + Output *big.Int `json:"output"` +} + +func (v Volumes) CopyWithZerosIfNeeded() *Volumes { + var input *big.Int + if v.Input == nil { + input = &big.Int{} + } else { + input = new(big.Int).Set(v.Input) + } + var output *big.Int + if v.Output == nil { + output = &big.Int{} + } else { + output = new(big.Int).Set(v.Output) + } + return &Volumes{ + Input: input, + Output: output, + } +} + +func (v Volumes) WithInput(input *big.Int) *Volumes { + v.Input = input + return &v +} + +func (v Volumes) WithInputInt64(value int64) *Volumes { + v.Input = big.NewInt(value) + return &v +} + +func (v Volumes) WithOutput(output *big.Int) *Volumes { + v.Output = output + return &v +} + +func (v Volumes) WithOutputInt64(value int64) *Volumes { + v.Output = big.NewInt(value) + return &v +} + +func NewEmptyVolumes() *Volumes { + return &Volumes{ + Input: new(big.Int), + Output: new(big.Int), + } +} + +func NewVolumesInt64(input, output int64) *Volumes { + return &Volumes{ + Input: big.NewInt(input), + Output: big.NewInt(output), + } +} + +type VolumesWithBalance struct { + Input *big.Int `json:"input"` + Output *big.Int `json:"output"` + Balance *big.Int `json:"balance"` +} + +func (v Volumes) MarshalJSON() ([]byte, error) { + return json.Marshal(VolumesWithBalance{ + Input: v.Input, + Output: v.Output, + Balance: v.Balance(), + }) +} + +func (v Volumes) Balance() *big.Int { + input := v.Input + if input == nil { + input = Zero + } + output := v.Output + if output == nil { + output = Zero + } + 
return new(big.Int).Sub(input, output) +} + +func (v Volumes) copy() *Volumes { + return &Volumes{ + Input: new(big.Int).Set(v.Input), + Output: new(big.Int).Set(v.Output), + } +} + +type BalancesByAssets map[string]*big.Int + +type VolumesByAssets map[string]*Volumes + +type BalancesByAssetsByAccounts map[string]BalancesByAssets + +func (v VolumesByAssets) Balances() BalancesByAssets { + balances := BalancesByAssets{} + for asset, vv := range v { + balances[asset] = new(big.Int).Sub(vv.Input, vv.Output) + } + return balances +} + +func (v VolumesByAssets) copy() VolumesByAssets { + ret := VolumesByAssets{} + for key, volumes := range v { + ret[key] = volumes.copy() + } + return ret +} + +type AccountsAssetsVolumes map[string]VolumesByAssets + +func (a AccountsAssetsVolumes) GetVolumes(account, asset string) *Volumes { + if a == nil { + return &Volumes{ + Input: &big.Int{}, + Output: &big.Int{}, + } + } + if assetsVolumes, ok := a[account]; !ok { + return &Volumes{ + Input: &big.Int{}, + Output: &big.Int{}, + } + } else { + return &Volumes{ + Input: assetsVolumes[asset].Input, + Output: assetsVolumes[asset].Output, + } + } +} + +func (a *AccountsAssetsVolumes) SetVolumes(account, asset string, volumes *Volumes) { + if *a == nil { + *a = AccountsAssetsVolumes{} + } + if assetsVolumes, ok := (*a)[account]; !ok { + (*a)[account] = map[string]*Volumes{ + asset: volumes.CopyWithZerosIfNeeded(), + } + } else { + assetsVolumes[asset] = volumes.CopyWithZerosIfNeeded() + } +} + +func (a *AccountsAssetsVolumes) AddInput(account, asset string, input *big.Int) { + if *a == nil { + *a = AccountsAssetsVolumes{} + } + if assetsVolumes, ok := (*a)[account]; !ok { + (*a)[account] = map[string]*Volumes{ + asset: { + Input: input, + Output: &big.Int{}, + }, + } + } else { + volumes := assetsVolumes[asset].CopyWithZerosIfNeeded() + volumes.Input.Add(volumes.Input, input) + assetsVolumes[asset] = volumes + } +} + +func (a *AccountsAssetsVolumes) AddOutput(account, asset string, output 
*big.Int) { + if *a == nil { + *a = AccountsAssetsVolumes{} + } + if assetsVolumes, ok := (*a)[account]; !ok { + (*a)[account] = map[string]*Volumes{ + asset: { + Output: output, + Input: &big.Int{}, + }, + } + } else { + volumes := assetsVolumes[asset].CopyWithZerosIfNeeded() + volumes.Output.Add(volumes.Output, output) + assetsVolumes[asset] = volumes + } +} + +func (a AccountsAssetsVolumes) HasAccount(account string) bool { + if a == nil { + return false + } + _, ok := a[account] + return ok +} + +func (a AccountsAssetsVolumes) HasAccountAndAsset(account, asset string) bool { + if a == nil { + return false + } + volumesByAsset, ok := a[account] + if !ok { + return false + } + _, ok = volumesByAsset[asset] + return ok +} + +// Scan - Implement the database/sql scanner interface +func (a *AccountsAssetsVolumes) Scan(value interface{}) error { + if value == nil { + return nil + } + + val, err := driver.String.ConvertValue(value) + if err != nil { + return err + } + + *a = AccountsAssetsVolumes{} + switch val := val.(type) { + case []uint8: + return json.Unmarshal(val, a) + case string: + return json.Unmarshal([]byte(val), a) + default: + panic("not handled type") + } +} + +func (a AccountsAssetsVolumes) Copy() AccountsAssetsVolumes { + ret := AccountsAssetsVolumes{} + for key, volumes := range a { + ret[key] = volumes.copy() + } + return ret +} + +func (a AccountsAssetsVolumes) Balances() BalancesByAssetsByAccounts { + ret := BalancesByAssetsByAccounts{} + for account, volumesByAssets := range a { + ret[account] = volumesByAssets.Balances() + } + return ret +} diff --git a/libs/.gitignore b/libs/.gitignore new file mode 100644 index 000000000..2a9b1e54c --- /dev/null +++ b/libs/.gitignore @@ -0,0 +1,3 @@ +.idea +vendor +coverage.* diff --git a/libs/.pre-commit-config.yaml b/libs/.pre-commit-config.yaml new file mode 100644 index 000000000..a4c584c91 --- /dev/null +++ b/libs/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +exclude: client +fail_fast: true +repos: +- repo: 
https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + exclude: .cloud + - id: check-added-large-files +- repo: https://github.com/formancehq/pre-commit-hooks + rev: dd079f7c30ad72446d615f55a000d4f875e79633 + hooks: + - id: gogenerate + files: swagger.yaml + - id: gomodtidy + - id: goimports + - id: gofmt + - id: golangci-lint + - id: gotests + - id: commitlint diff --git a/libs/README.md b/libs/README.md new file mode 100644 index 000000000..8babdfce6 --- /dev/null +++ b/libs/README.md @@ -0,0 +1 @@ +# go-libs diff --git a/libs/Taskfile.yml b/libs/Taskfile.yml new file mode 100644 index 000000000..a9b2d374c --- /dev/null +++ b/libs/Taskfile.yml @@ -0,0 +1,44 @@ +version: '3' + +vars: + PKG: "./..." + FAILFAST: "-failfast" + TIMEOUT: "1m" + RUN: "''" + +tasks: + default: + cmds: + - task: lint + - task: tests:local + + lint: + cmds: + - golangci-lint run --fix --allow-parallel-runners --config ./../../.golangci.yml + + tests: + cmds: + - go test -v -coverpkg {{.PKG}} -coverprofile coverage.out -covermode atomic {{.PKG}} + + tests:local: + cmds: + - > + go test -v {{.FAILFAST}} -coverpkg {{.PKG}} -coverprofile coverage.out + -run {{.RUN}} -timeout {{.TIMEOUT}} {{.PKG}} | + sed ''/PASS/s//$(printf "\033[32mPASS\033[0m")/'' | + sed ''/FAIL/s//$(printf "\033[31mFAIL\033[0m")/'' | + sed ''/RUN/s//$(printf "\033[34mRUN\033[0m")/'' + - task: coverage + + coverage: + cmds: + - go tool cover -html=coverage.out -o coverage.html + - echo "To open the html coverage file, use one of the following commands:" + - echo "open coverage.html on mac" + - echo "xdg-open coverage.html on linux" + silent: true + + sync:ledger: + cmds: + - rm -rf ./../../components/ledger/libs/* + - cp -R * ./../../components/ledger/libs/ diff --git a/libs/analytics/cli.go b/libs/analytics/cli.go new file mode 100644 index 000000000..b5d65543d --- /dev/null +++ b/libs/analytics/cli.go @@ -0,0 +1,67 @@ +package 
analytics + +import ( + "time" + + "github.com/coreos/go-semver/semver" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "go.uber.org/fx" +) + +const ( + // deprecated + segmentEnabledFlag = "segment-enabled" + // deprecated + segmentWriteKeyFlag = "segment-write-flagKey" + // deprecated + segmentApplicationIdFlag = "segment-application-id" + // deprecated + segmentHeartbeatIntervalFlag = "segment-heartbeat-interval" + + telemetryEnabledFlag = "telemetry-enabled" + telemetryWriteKeyFlag = "telemetry-write-flagKey" + telemetryApplicationIdFlag = "telemetry-application-id" + telemetryHeartbeatIntervalFlag = "telemetry-heartbeat-interval" +) + +func InitAnalyticsFlags(cmd *cobra.Command, defaultWriteKey string, useDeprecatedFlags bool) { + if useDeprecatedFlags { + cmd.PersistentFlags().Bool(segmentEnabledFlag, false, "Is segment enabled") + cmd.PersistentFlags().String(segmentApplicationIdFlag, "", "Segment application id") + cmd.PersistentFlags().String(segmentWriteKeyFlag, defaultWriteKey, "Segment write flagKey") + cmd.PersistentFlags().Duration(segmentHeartbeatIntervalFlag, 4*time.Hour, "Segment heartbeat interval") + } + cmd.PersistentFlags().Bool(telemetryEnabledFlag, true, "Is telemetry enabled") + cmd.PersistentFlags().String(telemetryApplicationIdFlag, "", "telemetry application id") + cmd.PersistentFlags().String(telemetryWriteKeyFlag, defaultWriteKey, "telemetry write flagKey") + cmd.PersistentFlags().Duration(telemetryHeartbeatIntervalFlag, 4*time.Hour, "telemetry heartbeat interval") +} + +func NewAnalyticsModule(logger logging.Logger, v *viper.Viper, version string, useDeprecatedFlags bool) fx.Option { + if v.GetBool(telemetryEnabledFlag) || (useDeprecatedFlags && v.GetBool(segmentEnabledFlag)) { + + writeKey := viper.GetString(telemetryWriteKeyFlag) + if writeKey == "" && useDeprecatedFlags { + writeKey = viper.GetString(segmentWriteKeyFlag) + } + interval := 
viper.GetDuration(telemetryHeartbeatIntervalFlag) + if interval == 0 && useDeprecatedFlags { + interval = viper.GetDuration(segmentHeartbeatIntervalFlag) + } + if writeKey == "" { + logger.Infof("telemetry enabled but no write flagKey provided") + } else if interval == 0 { + logger.Error("telemetry heartbeat interval is 0") + } else { + _, err := semver.NewVersion(version) + if err != nil { + logger.Infof("telemetry enabled but version '%s' is not semver, skip", version) + } else { + return NewHeartbeatModule(version, writeKey, interval) + } + } + } + return fx.Options() +} diff --git a/libs/analytics/cli_test.go b/libs/analytics/cli_test.go new file mode 100644 index 000000000..742861898 --- /dev/null +++ b/libs/analytics/cli_test.go @@ -0,0 +1,103 @@ +package analytics + +import ( + "fmt" + "reflect" + "testing" + "time" + + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" +) + +func TestAnalyticsFlags(t *testing.T) { + type testCase struct { + name string + flagKey string + flagValue string + viperMethod interface{} + expectedValue interface{} + } + + for _, testCase := range []testCase{ + { + name: "using deprecated segment enabled flag", + flagKey: segmentEnabledFlag, + flagValue: "true", + viperMethod: (*viper.Viper).GetBool, + expectedValue: true, + }, + { + name: "using deprecated segment write flagKey flag", + flagKey: segmentWriteKeyFlag, + flagValue: "foo:bar", + viperMethod: (*viper.Viper).GetString, + expectedValue: "foo:bar", + }, + { + name: "using deprecated segment heartbeat interval flag", + flagKey: segmentHeartbeatIntervalFlag, + flagValue: "10s", + viperMethod: (*viper.Viper).GetDuration, + expectedValue: 10 * time.Second, + }, + { + name: "using deprecated segment application id flag", + flagKey: segmentApplicationIdFlag, + flagValue: "foo:bar", + viperMethod: (*viper.Viper).GetString, + expectedValue: "foo:bar", + }, + { + name: "using telemetry enabled flag", + flagKey: telemetryEnabledFlag, + 
flagValue: "true", + viperMethod: (*viper.Viper).GetBool, + expectedValue: true, + }, + { + name: "using telemetry write flagKey flag", + flagKey: telemetryWriteKeyFlag, + flagValue: "foo:bar", + viperMethod: (*viper.Viper).GetString, + expectedValue: "foo:bar", + }, + { + name: "using telemetry heartbeat interval flag", + flagKey: telemetryHeartbeatIntervalFlag, + flagValue: "10s", + viperMethod: (*viper.Viper).GetDuration, + expectedValue: 10 * time.Second, + }, + { + name: "using telemetry application id flag", + flagKey: telemetryApplicationIdFlag, + flagValue: "foo:bar", + viperMethod: (*viper.Viper).GetString, + expectedValue: "foo:bar", + }, + } { + t.Run(testCase.name, func(t *testing.T) { + v := viper.GetViper() + cmd := &cobra.Command{ + Run: func(cmd *cobra.Command, args []string) { + ret := reflect.ValueOf(testCase.viperMethod).Call([]reflect.Value{ + reflect.ValueOf(v), + reflect.ValueOf(testCase.flagKey), + }) + require.Len(t, ret, 1) + + rValue := ret[0].Interface() + require.Equal(t, testCase.expectedValue, rValue) + }, + } + InitAnalyticsFlags(cmd, "xxx", true) + + cmd.SetArgs([]string{fmt.Sprintf("--%s", testCase.flagKey), testCase.flagValue}) + + require.NoError(t, v.BindPFlags(cmd.PersistentFlags())) + require.NoError(t, cmd.Execute()) + }) + } +} diff --git a/libs/analytics/module.go b/libs/analytics/module.go new file mode 100644 index 000000000..055c42e9c --- /dev/null +++ b/libs/analytics/module.go @@ -0,0 +1,55 @@ +package analytics + +import ( + "context" + "time" + + "github.com/google/uuid" + "github.com/segmentio/analytics-go" + "go.uber.org/fx" +) + +const ( + FXTagPropertiesEnrichers = `group:"enrichers"` +) + +func NewHeartbeatModule(version, writeKey string, interval time.Duration) fx.Option { + defaultAppId := uuid.NewString() + return fx.Options( + fx.Supply(analytics.Config{}), // Provide empty config to be able to replace (use fx.Replace) if necessary + fx.Provide(func(cfg analytics.Config) (analytics.Client, error) { + return 
analytics.NewWithConfig(writeKey, cfg) + }), + fx.Provide(fx.Annotate(func(client analytics.Client, provider AppIdProvider, enrichers []PropertiesEnricher) *heartbeat { + return newHeartbeat(provider, client, version, interval, enrichers...) + }, fx.ParamTags("", "", FXTagPropertiesEnrichers))), + fx.Provide(func() AppIdProvider { + return AppIdProviderFn(func(ctx context.Context) (string, error) { + return defaultAppId, nil + }) + }), + fx.Invoke(func(m *heartbeat, lc fx.Lifecycle) { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + go func() { + err := m.Run(context.Background()) + if err != nil { + panic(err) + } + }() + return nil + }, + OnStop: func(ctx context.Context) error { + return m.Stop(ctx) + }, + }) + }), + fx.Invoke(func(lc fx.Lifecycle, client analytics.Client) { + lc.Append(fx.Hook{ + OnStop: func(ctx context.Context) error { + return client.Close() + }, + }) + }), + ) +} diff --git a/libs/analytics/module_test.go b/libs/analytics/module_test.go new file mode 100644 index 000000000..998d59133 --- /dev/null +++ b/libs/analytics/module_test.go @@ -0,0 +1,85 @@ +package analytics + +import ( + "context" + "encoding/json" + "net/http" + "testing" + "time" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/segmentio/analytics-go" + "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + "go.uber.org/fx" +) + +type roundTripperFn func(req *http.Request) (*http.Response, error) + +func (fn roundTripperFn) RoundTrip(req *http.Request) (*http.Response, error) { + return fn(req) +} + +func TestAnalyticsModule(t *testing.T) { + v := viper.GetViper() + v.Set(telemetryEnabledFlag, true) + v.Set(telemetryWriteKeyFlag, "XXX") + v.Set(telemetryApplicationIdFlag, "appId") + v.Set(telemetryHeartbeatIntervalFlag, 10*time.Second) + + handled := make(chan *analytics.Track, 1) + + module := NewAnalyticsModule(logging.NewLogrus(logrus.New()), v, "1.0.0", true) + app := fx.New( + module, + 
fx.NopLogger, + fx.Supply(fx.Annotate(PropertiesEnricherFn(func(ctx context.Context, p analytics.Properties) error { + p.Set("additionalProperty", "test") + return nil + }), fx.As(new(PropertiesEnricher)), fx.ResultTags(FXTagPropertiesEnrichers))), + fx.Replace(analytics.Config{ + BatchSize: 1, + Transport: roundTripperFn(func(req *http.Request) (*http.Response, error) { + select { + case <-handled: + // Nothing to do, the chan has already been closed + default: + type batch struct { + Messages []*analytics.Track `json:"batch"` + } + b := batch{} + if err := json.NewDecoder(req.Body).Decode(&b); err != nil { + panic(err) + } + handled <- b.Messages[0] + close(handled) + } + return &http.Response{ + StatusCode: http.StatusOK, + }, nil + }), + })) + require.NoError(t, app.Start(context.Background())) + defer func() { + require.NoError(t, app.Stop(context.Background())) + }() + + select { + case <-time.After(time.Second): + require.Fail(t, "Timeout waiting first stats from analytics module") + case track := <-handled: + require.Equal(t, "test", track.Properties["additionalProperty"]) + } + +} + +func TestAnalyticsModuleDisabled(t *testing.T) { + v := viper.GetViper() + v.Set(telemetryEnabledFlag, false) + + module := NewAnalyticsModule(logging.NewLogrus(logrus.New()), v, "1.0.0", true) + app := fx.New(module, fx.NopLogger) + require.NoError(t, app.Start(context.Background())) + require.NoError(t, app.Stop(context.Background())) +} diff --git a/libs/analytics/segment.go b/libs/analytics/segment.go new file mode 100644 index 000000000..f89389170 --- /dev/null +++ b/libs/analytics/segment.go @@ -0,0 +1,126 @@ +package analytics + +import ( + "context" + "runtime" + "time" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/pbnjay/memory" + "github.com/segmentio/analytics-go" +) + +const ( + ApplicationStats = "Application stats" + + VersionProperty = "version" + OSProperty = "os" + ArchProperty = "arch" + TimeZoneProperty = "tz" + CPUCountProperty = 
"cpuCount" + TotalMemoryProperty = "totalMemory" +) + +type AppIdProvider interface { + AppID(ctx context.Context) (string, error) +} +type AppIdProviderFn func(ctx context.Context) (string, error) + +func (fn AppIdProviderFn) AppID(ctx context.Context) (string, error) { + return fn(ctx) +} + +type PropertiesEnricher interface { + Enrich(ctx context.Context, p analytics.Properties) error +} +type PropertiesEnricherFn func(ctx context.Context, p analytics.Properties) error + +func (fn PropertiesEnricherFn) Enrich(ctx context.Context, p analytics.Properties) error { + return fn(ctx, p) +} + +type heartbeat struct { + version string + interval time.Duration + client analytics.Client + stopChan chan chan struct{} + appIdProvider AppIdProvider + enrichers []PropertiesEnricher +} + +func (m *heartbeat) Run(ctx context.Context) error { + + enqueue := func() { + err := m.enqueue(ctx) + if err != nil { + logging.FromContext(ctx).WithFields(map[string]interface{}{ + "error": err, + }).Error("enqueuing analytics") + } + } + + enqueue() + for { + select { + case ch := <-m.stopChan: + ch <- struct{}{} + return nil + case <-ctx.Done(): + return ctx.Err() + case <-time.After(m.interval): + enqueue() + } + } +} + +func (m *heartbeat) Stop(ctx context.Context) error { + ch := make(chan struct{}) + m.stopChan <- ch + select { + case <-ctx.Done(): + return ctx.Err() + case <-ch: + return nil + } +} + +func (m *heartbeat) enqueue(ctx context.Context) error { + + appId, err := m.appIdProvider.AppID(ctx) + if err != nil { + return err + } + + tz, _ := time.Now().Local().Zone() + + properties := analytics.NewProperties(). + Set(VersionProperty, m.version). + Set(OSProperty, runtime.GOOS). + Set(ArchProperty, runtime.GOARCH). + Set(TimeZoneProperty, tz). + Set(CPUCountProperty, runtime.NumCPU()). 
+ Set(TotalMemoryProperty, memory.TotalMemory()/1024/1024) + + for _, enricher := range m.enrichers { + if err := enricher.Enrich(ctx, properties); err != nil { + logging.FromContext(ctx).Errorf("Enricher return error: %s", err) + } + } + + return m.client.Enqueue(&analytics.Track{ + AnonymousId: appId, + Event: ApplicationStats, + Properties: properties, + }) +} + +func newHeartbeat(appIdProvider AppIdProvider, client analytics.Client, version string, interval time.Duration, enrichers ...PropertiesEnricher) *heartbeat { + return &heartbeat{ + version: version, + interval: interval, + client: client, + appIdProvider: appIdProvider, + stopChan: make(chan chan struct{}, 1), + enrichers: enrichers, + } +} diff --git a/pkg/analytics/segment_test.go b/libs/analytics/segment_test.go similarity index 76% rename from pkg/analytics/segment_test.go rename to libs/analytics/segment_test.go index b7d89a807..ba53164ad 100644 --- a/pkg/analytics/segment_test.go +++ b/libs/analytics/segment_test.go @@ -7,18 +7,13 @@ import ( "errors" "io" "net/http" - "os" "sync" "testing" "time" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pborman/uuid" + "github.com/segmentio/analytics-go" "github.com/stretchr/testify/require" "go.uber.org/fx" - "gopkg.in/segmentio/analytics-go.v3" ) type transport func(*http.Request) (*http.Response, error) @@ -80,20 +75,17 @@ const ( var ( module = fx.Options( + fx.NopLogger, NewHeartbeatModule(version, writeKey, interval), - fx.Provide(func() AppIdProvider { + fx.Replace(func() AppIdProvider { return AppIdProviderFn(func(ctx context.Context) (string, error) { - return "foo", nil + return applicationId, nil }) }), - fx.Provide(func(lc fx.Lifecycle) (storage.Driver[ledger.Store], error) { - id := uuid.New() - driver := sqlstorage.NewDriver("sqlite", sqlstorage.NewSQLiteDB(os.TempDir(), id)) - lc.Append(fx.Hook{ - OnStart: driver.Initialize, - }) - return 
sqlstorage.NewLedgerStorageDriverFromRawDriver(driver), nil - }), + fx.Supply(fx.Annotate(PropertiesEnricherFn(func(ctx context.Context, p analytics.Properties) error { + p.Set("foo", "bar") + return nil + }), fx.ResultTags(FXTagPropertiesEnrichers), fx.As(new(PropertiesEnricher)))), ) ) @@ -148,24 +140,21 @@ func TestSegment(t *testing.T) { track := batch.Batch[0] require.Equal(t, ApplicationStats, track.Event) - require.Equal(t, version, track.Properties[VersionProperty]) - require.Equal(t, applicationId, track.AnonymousId) + require.Equal(t, "bar", track.Properties["foo"]) } }) }) t.Run("With error on the backend", func(t *testing.T) { - firstCallChan := make(chan struct{}) + firstCall := true queue := NewQueue[*http.Request]() app := newApp(module, func(request *http.Request) (*http.Response, error) { - select { - case <-firstCallChan: // Enter this case only if the chan is closed - queue.Put(request) - return emptyHttpResponse, nil - default: - close(firstCallChan) - return nil, errors.New("general error") + if firstCall { + firstCall = false + return nil, errors.New("error on the first try") } + queue.Put(request) + return emptyHttpResponse, nil }) withApp(t, app, func(t *testing.T) { EventuallyQueueNotEmpty(t, queue) diff --git a/libs/api/apitesting/utils.go b/libs/api/apitesting/utils.go new file mode 100644 index 000000000..13fe5b27b --- /dev/null +++ b/libs/api/apitesting/utils.go @@ -0,0 +1,32 @@ +package apitesting + +import ( + "encoding/json" + "net/http/httptest" + "reflect" + "testing" + + "github.com/formancehq/stack/libs/go-libs/api" + "github.com/stretchr/testify/require" +) + +func ReadErrorResponse(t *testing.T, rec *httptest.ResponseRecorder) *api.ErrorResponse { + t.Helper() + ret := &api.ErrorResponse{} + require.NoError(t, json.NewDecoder(rec.Body).Decode(ret)) + return ret +} + +func ReadResponse[T any](t *testing.T, rec *httptest.ResponseRecorder, to T) { + t.Helper() + ret := &api.BaseResponse[T]{} + require.NoError(t, 
json.NewDecoder(rec.Body).Decode(ret)) + reflect.ValueOf(to).Elem().Set(reflect.ValueOf(*ret.Data).Elem()) +} + +func ReadCursor[T any](t *testing.T, rec *httptest.ResponseRecorder, to *api.Cursor[T]) { + t.Helper() + ret := &api.BaseResponse[T]{} + require.NoError(t, json.NewDecoder(rec.Body).Decode(ret)) + reflect.ValueOf(to).Elem().Set(reflect.ValueOf(ret.Cursor).Elem()) +} diff --git a/libs/api/handler_info.go b/libs/api/handler_info.go new file mode 100644 index 000000000..19f18cf66 --- /dev/null +++ b/libs/api/handler_info.go @@ -0,0 +1,18 @@ +package api + +import ( + "encoding/json" + "net/http" +) + +type ServiceInfo struct { + Version string `json:"version"` +} + +func InfoHandler(info ServiceInfo) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if err := json.NewEncoder(w).Encode(info); err != nil { + panic(err) + } + } +} diff --git a/libs/api/response.go b/libs/api/response.go new file mode 100644 index 000000000..202f1d8a9 --- /dev/null +++ b/libs/api/response.go @@ -0,0 +1,77 @@ +package api + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + + "github.com/formancehq/stack/libs/go-libs/collectionutils" + "github.com/pkg/errors" +) + +type BaseResponse[T any] struct { + Data *T `json:"data,omitempty"` + Cursor *Cursor[T] `json:"cursor,omitempty"` +} + +type Cursor[T any] struct { + PageSize int `json:"pageSize,omitempty"` + HasMore bool `json:"hasMore"` + Previous string `json:"previous,omitempty"` + Next string `json:"next,omitempty"` + Data []T `json:"data"` +} + +func MapCursor[FROM any, TO any](cursor *Cursor[FROM], mapper func(FROM) TO) *Cursor[TO] { + return &Cursor[TO]{ + PageSize: cursor.PageSize, + HasMore: cursor.HasMore, + Previous: cursor.Previous, + Next: cursor.Next, + Data: collectionutils.Map(cursor.Data, mapper), + } +} + +type ErrorResponse struct { + ErrorCode string `json:"errorCode,omitempty"` + ErrorMessage string `json:"errorMessage,omitempty"` + Details string 
`json:"details,omitempty"` +} + +func FetchAllPaginated[T any](ctx context.Context, client *http.Client, _url string, queryParams url.Values) ([]T, error) { + ret := make([]T, 0) + + var nextToken string + for { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, _url, nil) + if err != nil { + return nil, err + } + if nextToken == "" { + req.URL.RawQuery = queryParams.Encode() + } else { + req.URL.RawQuery = url.Values{ + "cursor": []string{nextToken}, + }.Encode() + } + rsp, err := client.Do(req) + if err != nil { + return nil, err + } + if rsp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("unexpected status code %d while waiting for %d", rsp.StatusCode, http.StatusOK) + } + apiResponse := BaseResponse[T]{} + if err := json.NewDecoder(rsp.Body).Decode(&apiResponse); err != nil { + return nil, errors.Wrap(err, "decoding cursir") + } + ret = append(ret, apiResponse.Cursor.Data...) + if !apiResponse.Cursor.HasMore { + break + } + nextToken = apiResponse.Cursor.Next + } + return ret, nil +} diff --git a/libs/api/response_test.go b/libs/api/response_test.go new file mode 100644 index 000000000..0c4df75fd --- /dev/null +++ b/libs/api/response_test.go @@ -0,0 +1,28 @@ +package api + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCursor(t *testing.T) { + c := Cursor[int64]{ + Data: []int64{1, 2, 3}, + } + by, err := json.Marshal(c) + require.NoError(t, err) + assert.Equal(t, `{"hasMore":false,"data":[1,2,3]}`, string(by)) + + c = Cursor[int64]{ + Data: []int64{1, 2, 3}, + HasMore: true, + } + by, err = json.Marshal(c) + require.NoError(t, err) + assert.Equal(t, + `{"hasMore":true,"data":[1,2,3]}`, + string(by)) +} diff --git a/libs/api/response_utils.go b/libs/api/response_utils.go new file mode 100644 index 000000000..f861f27ca --- /dev/null +++ b/libs/api/response_utils.go @@ -0,0 +1,37 @@ +package api + +import ( + "bytes" + "encoding/json" + "io" + + 
"github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Encode(t require.TestingT, v interface{}) []byte { + data, err := json.Marshal(v) + assert.NoError(t, err) + return data +} + +func Buffer(t require.TestingT, v interface{}) *bytes.Buffer { + return bytes.NewBuffer(Encode(t, v)) +} + +func Decode(t require.TestingT, reader io.Reader, v interface{}) { + err := json.NewDecoder(reader).Decode(v) + require.NoError(t, err) +} + +func DecodeSingleResponse[T any](t require.TestingT, reader io.Reader) (T, bool) { + res := BaseResponse[T]{} + Decode(t, reader, &res) + return *res.Data, true +} + +func DecodeCursorResponse[T any](t require.TestingT, reader io.Reader) *Cursor[T] { + res := BaseResponse[T]{} + Decode(t, reader, &res) + return res.Cursor +} diff --git a/libs/api/utils.go b/libs/api/utils.go new file mode 100644 index 000000000..c49a1b59e --- /dev/null +++ b/libs/api/utils.go @@ -0,0 +1,153 @@ +package api + +import ( + "encoding/json" + "net/http" + "strconv" + "strings" + + "github.com/formancehq/stack/libs/go-libs/logging" +) + +const ( + defaultLimit = 15 + + ErrorCodeNotFound = "NOT_FOUND" +) + +func writeJSON(w http.ResponseWriter, statusCode int, v any) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(statusCode) + if v != nil { + if err := json.NewEncoder(w).Encode(v); err != nil { + panic(err) + } + } +} + +func NotFound(w http.ResponseWriter) { + writeJSON(w, http.StatusNotFound, ErrorResponse{ + ErrorCode: ErrorCodeNotFound, + ErrorMessage: "resource not found", + }) +} + +func NoContent(w http.ResponseWriter) { + writeJSON(w, http.StatusNoContent, nil) +} + +func BadRequest(w http.ResponseWriter, code string, err error) { + writeJSON(w, http.StatusBadRequest, ErrorResponse{ + ErrorCode: code, + ErrorMessage: err.Error(), + }) +} + +func InternalServerError(w http.ResponseWriter, r *http.Request, err error) { + logging.FromContext(r.Context()).Error(err) + + writeJSON(w, 
http.StatusInternalServerError, ErrorResponse{ + ErrorCode: "INTERNAL_ERROR", + ErrorMessage: err.Error(), + }) +} + +func Created(w http.ResponseWriter, v any) { + writeJSON(w, http.StatusCreated, BaseResponse[any]{ + Data: &v, + }) +} + +func RawOk(w http.ResponseWriter, v any) { + writeJSON(w, http.StatusOK, v) +} + +func Ok(w http.ResponseWriter, v any) { + writeJSON(w, http.StatusOK, BaseResponse[any]{ + Data: &v, + }) +} + +func RenderCursor[T any](w http.ResponseWriter, v Cursor[T]) { + writeJSON(w, http.StatusOK, BaseResponse[T]{ + Cursor: &v, + }) +} + +func WriteResponse(w http.ResponseWriter, status int, body []byte) { + w.WriteHeader(status) + if _, err := w.Write(body); err != nil { + panic(err) + } +} + +func CursorFromListResponse[T any, V any](w http.ResponseWriter, query ListQuery[V], response *ListResponse[T]) { + RenderCursor(w, Cursor[T]{ + PageSize: query.Limit, + HasMore: response.HasMore, + Previous: response.Previous, + Next: response.Next, + Data: response.Data, + }) +} + +func ParsePaginationToken(r *http.Request) string { + return r.URL.Query().Get("paginationToken") +} + +func ParsePageSize(r *http.Request) int { + pageSize := r.URL.Query().Get("pageSize") + if pageSize == "" { + return defaultLimit + } + + v, err := strconv.ParseInt(pageSize, 10, 32) + if err != nil { + panic(err) + } + return int(v) +} + +func ReadPaginatedRequest[T any](r *http.Request, f func(r *http.Request) T) ListQuery[T] { + var payload T + if f != nil { + payload = f(r) + } + return ListQuery[T]{ + Pagination: Pagination{ + Limit: ParsePageSize(r), + PaginationToken: ParsePaginationToken(r), + }, + Payload: payload, + } +} + +func GetQueryMap(m map[string][]string, key string) map[string]string { + dicts := make(map[string]string) + for k, v := range m { + if i := strings.IndexByte(k, '['); i >= 1 && k[0:i] == key { + if j := strings.IndexByte(k[i+1:], ']'); j >= 1 { + dicts[k[i+1:][:j]] = v[0] + } + } + } + return dicts +} + +type ListResponse[T any] struct { + 
Data []T + Next, Previous string + HasMore bool +} + +type Pagination struct { + Limit int + PaginationToken string +} + +type ListQuery[T any] struct { + Pagination + Payload T +} + +type Mapper[SRC any, DST any] func(src SRC) DST diff --git a/libs/ballast/ballast.go b/libs/ballast/ballast.go new file mode 100644 index 000000000..be5108fec --- /dev/null +++ b/libs/ballast/ballast.go @@ -0,0 +1,12 @@ +package ballast + +//lint:ignore U1000 this var is actually used to allocate some memory. +var ballast []byte + +func Allocate(sizeInBytes uint) { + ballast = make([]byte, 0, sizeInBytes) +} + +func ReleaseForGC() { + ballast = nil +} diff --git a/libs/ballast/module.go b/libs/ballast/module.go new file mode 100644 index 000000000..89a440a71 --- /dev/null +++ b/libs/ballast/module.go @@ -0,0 +1,28 @@ +package ballast + +import ( + "context" + + "go.uber.org/fx" +) + +func Module(ballastSizeInBytes uint) fx.Option { + if ballastSizeInBytes == 0 { + return fx.Options() + } + + return fx.Options( + fx.Invoke(func(lc fx.Lifecycle) { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + Allocate(ballastSizeInBytes) + return nil + }, + OnStop: func(ctx context.Context) error { + ReleaseForGC() + return nil + }, + }) + }), + ) +} diff --git a/libs/bun/bundebug/debug_hook.go b/libs/bun/bundebug/debug_hook.go new file mode 100644 index 000000000..236177352 --- /dev/null +++ b/libs/bun/bundebug/debug_hook.go @@ -0,0 +1,41 @@ +package bundebug + +import ( + "context" + "fmt" + "time" + + "github.com/formancehq/stack/libs/go-libs/logging" + + "github.com/uptrace/bun" +) + +type QueryHook struct{} + +var _ bun.QueryHook = (*QueryHook)(nil) + +func NewQueryHook() *QueryHook { + return &QueryHook{} +} + +func (h *QueryHook) BeforeQuery( + ctx context.Context, event *bun.QueryEvent, +) context.Context { + return ctx +} + +func (h *QueryHook) AfterQuery(ctx context.Context, event *bun.QueryEvent) { + dur := time.Since(event.StartTime) + + fields := map[string]any{ + 
"component": "bun", + "operation": event.Operation(), + "duration": fmt.Sprintf("%s", dur.Round(time.Microsecond)), + } + + if event.Err != nil { + fields["err"] = event.Err.Error() + } + + logging.FromContext(ctx).WithFields(fields).Debug(event.Query) +} diff --git a/libs/bun/bunexplain/explain_hook.go b/libs/bun/bunexplain/explain_hook.go new file mode 100644 index 000000000..5767cc30a --- /dev/null +++ b/libs/bun/bunexplain/explain_hook.go @@ -0,0 +1,50 @@ +package bunexplain + +import ( + "context" + "database/sql" + "fmt" + "strings" + + "github.com/uptrace/bun" +) + +//nolint:unused +type explainHook struct{} + +//nolint:unused +func (h *explainHook) AfterQuery(ctx context.Context, event *bun.QueryEvent) {} + +//nolint:unused +func (h *explainHook) BeforeQuery(ctx context.Context, event *bun.QueryEvent) context.Context { + lowerQuery := strings.ToLower(event.Query) + if strings.HasPrefix(lowerQuery, "explain") || + strings.HasPrefix(lowerQuery, "create") || + strings.HasPrefix(lowerQuery, "begin") || + strings.HasPrefix(lowerQuery, "alter") || + strings.HasPrefix(lowerQuery, "rollback") || + strings.HasPrefix(lowerQuery, "commit") { + return ctx + } + + event.DB.RunInTx(context.Background(), &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + rows, err := tx.Query("explain analyze " + event.Query) + if err != nil { + return err + } + defer rows.Next() + + for rows.Next() { + var line string + if err := rows.Scan(&line); err != nil { + return err + } + fmt.Println(line) + } + + return tx.Rollback() + + }) + + return ctx +} diff --git a/libs/collectionutils/linked_list.go b/libs/collectionutils/linked_list.go new file mode 100644 index 000000000..b6ecb8ff9 --- /dev/null +++ b/libs/collectionutils/linked_list.go @@ -0,0 +1,142 @@ +package collectionutils + +import ( + "sync" +) + +type LinkedListNode[T any] struct { + object T + list *LinkedList[T] + previousNode, nextNode *LinkedListNode[T] +} + +func (n *LinkedListNode[T]) Next() 
*LinkedListNode[T] { + return n.nextNode +} + +func (n *LinkedListNode[T]) Value() T { + return n.object +} + +func (n *LinkedListNode[T]) Remove() { + if n.previousNode != nil { + n.previousNode.nextNode = n.nextNode + } + if n.nextNode != nil { + n.nextNode.previousNode = n.previousNode + } + if n == n.list.firstNode { + n.list.firstNode = n.nextNode + } + if n == n.list.lastNode { + n.list.lastNode = n.previousNode + } +} + +type LinkedList[T any] struct { + mu sync.Mutex + firstNode, lastNode *LinkedListNode[T] +} + +func (r *LinkedList[T]) Append(objects ...T) { + r.mu.Lock() + defer r.mu.Unlock() + + for _, object := range objects { + if r.firstNode == nil { + r.firstNode = &LinkedListNode[T]{ + object: object, + list: r, + } + r.lastNode = r.firstNode + continue + } + r.lastNode = &LinkedListNode[T]{ + object: object, + previousNode: r.lastNode, + list: r, + } + r.lastNode.previousNode.nextNode = r.lastNode + } +} + +func (r *LinkedList[T]) RemoveFirst(cmp func(T) bool) *LinkedListNode[T] { + r.mu.Lock() + defer r.mu.Unlock() + + node := r.firstNode + for node != nil { + if cmp(node.object) { + node.Remove() + return node + } + node = node.nextNode + } + + return nil +} + +func (r *LinkedList[T]) RemoveValue(t T) *LinkedListNode[T] { + return r.RemoveFirst(func(t2 T) bool { + return (any)(t) == (any)(t2) + }) +} + +func (r *LinkedList[T]) TakeFirst() T { + var t T + if r.firstNode == nil { + return t + } + ret := r.firstNode.object + if r.firstNode.nextNode == nil { + r.firstNode = nil + } else { + r.firstNode = r.firstNode.nextNode + r.firstNode.previousNode = nil + } + return ret +} + +func (r *LinkedList[T]) Length() int { + r.mu.Lock() + defer r.mu.Unlock() + + count := 0 + + node := r.firstNode + for node != nil { + count++ + node = node.nextNode + } + + return count +} + +func (r *LinkedList[T]) ForEach(f func(t T)) { + r.mu.Lock() + defer r.mu.Unlock() + + node := r.firstNode + for node != nil { + f(node.object) + node = node.nextNode + } +} + +func 
(r *LinkedList[T]) Slice() []T { + ret := make([]T, 0) + node := r.firstNode + for node != nil { + ret = append(ret, node.object) + node = node.nextNode + } + return ret +} + +func (r *LinkedList[T]) FirstNode() *LinkedListNode[T] { + return r.firstNode +} + +func NewLinkedList[T any]() *LinkedList[T] { + return &LinkedList[T]{} +} diff --git a/libs/collectionutils/map.go b/libs/collectionutils/map.go new file mode 100644 index 000000000..5fde51e1c --- /dev/null +++ b/libs/collectionutils/map.go @@ -0,0 +1,9 @@ +package collectionutils + +func Keys[K comparable, V any](m map[K]V) []K { + ret := make([]K, 0) + for k := range m { + ret = append(ret, k) + } + return ret +} diff --git a/libs/collectionutils/slice.go b/libs/collectionutils/slice.go new file mode 100644 index 000000000..58d2faead --- /dev/null +++ b/libs/collectionutils/slice.go @@ -0,0 +1,84 @@ +package collectionutils + +import ( + "reflect" +) + +func Map[FROM any, TO any](input []FROM, mapper func(FROM) TO) []TO { + ret := make([]TO, len(input)) + for i, input := range input { + ret[i] = mapper(input) + } + return ret +} + +func CopyMap[KEY comparable, VALUE any](m map[KEY]VALUE) map[KEY]VALUE { + ret := make(map[KEY]VALUE) + for k, v := range m { + ret[k] = v + } + return ret +} + +func Filter[TYPE any](input []TYPE, filter func(TYPE) bool) []TYPE { + ret := make([]TYPE, 0) + for _, i := range input { + if filter(i) { + ret = append(ret, i) + } + } + return ret +} + +func Flatten[TYPE any](input [][]TYPE) []TYPE { + ret := make([]TYPE, 0) + for _, types := range input { + ret = append(ret, types...) 
+ } + return ret +} + +func First[TYPE any](input []TYPE, filter func(TYPE) bool) TYPE { + var zero TYPE + ret := Filter(input, filter) + if len(ret) >= 1 { + return ret[0] + } + return zero +} + +func FilterEq[T any](t T) func(T) bool { + return func(t2 T) bool { + return reflect.DeepEqual(t, t2) + } +} + +func FilterNot[T any](t func(T) bool) func(T) bool { + return func(t2 T) bool { + return !t(t2) + } +} + +func Contains[T any](slice []T, t T) bool { + for _, t2 := range slice { + if reflect.DeepEqual(t, t2) { + return true + } + } + return false +} + +type Set[T comparable] map[T]struct{} + +func (s Set[T]) Put(t T) { + s[t] = struct{}{} +} + +func (s Set[T]) Contains(t T) bool { + _, ok := s[t] + return ok +} + +func NewSet[T comparable]() Set[T] { + return make(Set[T], 0) +} diff --git a/libs/commitlint.config.js b/libs/commitlint.config.js new file mode 100644 index 000000000..3580f6021 --- /dev/null +++ b/libs/commitlint.config.js @@ -0,0 +1,5 @@ +module.exports = { + extends: [ + '@commitlint/config-conventional' + ] +} diff --git a/libs/contextutil/contextutil.go b/libs/contextutil/contextutil.go new file mode 100644 index 000000000..7aaa323a4 --- /dev/null +++ b/libs/contextutil/contextutil.go @@ -0,0 +1,40 @@ +package contextutil + +import ( + "context" + "time" +) + +type detachedContext struct { + parent context.Context +} + +var _ context.Context = (*detachedContext)(nil) + +func (c *detachedContext) Done() <-chan struct{} { + return nil +} + +func (c *detachedContext) Deadline() (deadline time.Time, ok bool) { + return c.parent.Deadline() +} + +func (c *detachedContext) Err() error { + return c.parent.Err() +} + +func (c *detachedContext) Value(key interface{}) interface{} { + return c.parent.Value(key) +} + +func Detached(parent context.Context) (context.Context, context.CancelFunc) { + c := &detachedContext{parent: parent} + if deadline, ok := parent.Deadline(); ok { + return context.WithDeadline(c, deadline) + } + return context.WithCancel(c) +} 
+ +func DetachedWithTimeout(parent context.Context, timeout time.Duration) (context.Context, context.CancelFunc) { + return context.WithTimeout(&detachedContext{parent: parent}, timeout) +} diff --git a/libs/doc.go b/libs/doc.go new file mode 100644 index 000000000..c23ab8ad8 --- /dev/null +++ b/libs/doc.go @@ -0,0 +1 @@ +package go_libs diff --git a/libs/errorsutil/errorsutil.go b/libs/errorsutil/errorsutil.go new file mode 100644 index 000000000..4bb1b11d7 --- /dev/null +++ b/libs/errorsutil/errorsutil.go @@ -0,0 +1,38 @@ +package errorsutil + +// Error is a wrapper that allows to wrap an error with another error. +// Let's take an example: imagine that you have multiple errors in a storage +// package, and you want to wrap them with a storage error. You will be able to +// do it with this wrapper and not loose the original error. +type Error struct { + wrappingError error + originalErr error +} + +// Error returns the original error. +func (e *Error) Error() string { + return e.originalErr.Error() +} + +// Implements the Causer interface from the github.com/pkg/errors package. +func (e *Error) Cause() error { + return e.originalErr +} + +// Unwrap returns the original error in order to be able to use the errors.Is +// function easily. 
+func (e *Error) Unwrap() error { + return e.originalErr +} + +// Is implements the Is interface of errors +func (e *Error) Is(err error) bool { + return e.wrappingError == err +} + +func NewError(wrappingError, originalErr error) *Error { + return &Error{ + wrappingError: wrappingError, + originalErr: originalErr, + } +} diff --git a/libs/errorsutil/errorsutil_test.go b/libs/errorsutil/errorsutil_test.go new file mode 100644 index 000000000..f94848e26 --- /dev/null +++ b/libs/errorsutil/errorsutil_test.go @@ -0,0 +1,32 @@ +package errorsutil_test + +import ( + "errors" + "testing" + + "github.com/formancehq/stack/libs/go-libs/errorsutil" + pkgError "github.com/pkg/errors" + "github.com/stretchr/testify/require" +) + +var ( + ErrStorageNotFound = errors.New("not found") + ErrNotFound = errors.New("not found") +) + +func TestError(t *testing.T) { + basicError := errors.New("got an error") + + wrapError1 := errorsutil.NewError(ErrStorageNotFound, basicError) + wrapError2 := errorsutil.NewError(ErrNotFound, wrapError1) + pkgWrapError := pkgError.Wrap(wrapError2, "pkg wrap") + + require.True(t, errors.Is(wrapError2, ErrNotFound)) + require.True(t, errors.Is(wrapError2, ErrStorageNotFound)) + + require.True(t, errors.Is(pkgWrapError, ErrNotFound)) + require.True(t, errors.Is(pkgWrapError, ErrStorageNotFound)) + + require.Equal(t, pkgError.Cause(wrapError2), basicError) + require.Equal(t, pkgError.Cause(pkgWrapError), basicError) +} diff --git a/libs/go.mod b/libs/go.mod new file mode 100644 index 000000000..8631a570b --- /dev/null +++ b/libs/go.mod @@ -0,0 +1,161 @@ +module github.com/formancehq/stack/libs/go-libs + +go 1.19 + +require ( + github.com/Shopify/sarama v1.38.1 + github.com/ThreeDotsLabs/watermill v1.2.0 + github.com/ThreeDotsLabs/watermill-http v1.1.4 + github.com/ThreeDotsLabs/watermill-kafka/v2 v2.2.2 + github.com/ThreeDotsLabs/watermill-nats/v2 v2.0.0 + github.com/coreos/go-semver v0.3.0 + github.com/dgraph-io/ristretto v0.1.1 + github.com/google/uuid 
v1.3.0 + github.com/imdario/mergo v0.3.13 + github.com/jackc/pgx/v5 v5.3.0 + github.com/lib/pq v1.10.7 + github.com/nats-io/nats-server/v2 v2.9.8 + github.com/nats-io/nats.go v1.23.0 + github.com/ory/dockertest/v3 v3.9.1 + github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 + github.com/pkg/errors v0.9.1 + github.com/segmentio/analytics-go v3.1.0+incompatible + github.com/sirupsen/logrus v1.9.0 + github.com/spf13/cobra v1.6.1 + github.com/spf13/pflag v1.0.5 + github.com/spf13/viper v1.15.0 + github.com/stretchr/testify v1.8.3 + github.com/uptrace/bun v1.1.14 + github.com/uptrace/bun/dialect/pgdialect v1.1.14 + github.com/uptrace/bun/extra/bundebug v1.1.14 + github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.21 + github.com/xdg-go/scram v1.1.2 + go.opentelemetry.io/contrib/instrumentation/host v0.42.0 + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 + go.opentelemetry.io/contrib/instrumentation/runtime v0.42.0 + go.opentelemetry.io/contrib/propagators/b3 v1.13.0 + go.opentelemetry.io/otel v1.16.0 + go.opentelemetry.io/otel/exporters/jaeger v1.16.0 + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0 + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.16.0 + go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0 + go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 + go.opentelemetry.io/otel/metric v1.16.0 + go.opentelemetry.io/otel/sdk v1.16.0 + go.opentelemetry.io/otel/sdk/metric v0.39.0 + go.opentelemetry.io/otel/trace v1.16.0 + go.uber.org/fx v1.19.1 +) + +require ( + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/Microsoft/go-winio v0.6.0 // indirect + github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // 
indirect + github.com/ajg/form v1.5.1 // indirect + github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/containerd/continuity v0.3.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/docker/cli v20.10.17+incompatible // indirect + github.com/docker/docker v20.10.17+incompatible // indirect + github.com/docker/go-connections v0.4.0 // indirect + github.com/docker/go-units v0.4.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/eapache/go-resiliency v1.3.0 // indirect + github.com/eapache/go-xerial-snappy v0.0.0-20230111030713-bf00bc1b83b6 // indirect + github.com/eapache/queue v1.1.0 // indirect + github.com/fatih/color v1.15.0 // indirect + github.com/felixge/httpsnoop v1.0.3 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/go-chi/chi v4.1.2+incompatible // indirect + github.com/go-chi/render v1.0.2 // indirect + github.com/go-logr/logr v1.2.4 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/glog v1.1.0 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-uuid v1.0.3 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jcmturner/aescts/v2 v2.0.0 
// indirect + github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect + github.com/jcmturner/gofork v1.7.6 // indirect + github.com/jcmturner/gokrb5/v8 v8.4.3 // indirect + github.com/jcmturner/rpc/v2 v2.0.3 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/klauspost/compress v1.15.15 // indirect + github.com/lithammer/shortuuid/v3 v3.0.7 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/minio/highwayhash v1.0.2 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae // indirect + github.com/nats-io/jwt/v2 v2.3.0 // indirect + github.com/nats-io/nkeys v0.3.0 // indirect + github.com/nats-io/nuid v1.0.1 // indirect + github.com/oklog/ulid v1.3.1 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.0.2 // indirect + github.com/opencontainers/runc v1.1.3 // indirect + github.com/pelletier/go-toml/v2 v2.0.6 // indirect + github.com/pierrec/lz4/v4 v4.1.17 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/segmentio/backo-go v1.0.1 // indirect + github.com/shirou/gopsutil/v3 v3.23.4 // indirect + github.com/shoenig/go-m1cpu v0.1.5 // indirect + github.com/spf13/afero v1.9.3 // indirect + github.com/spf13/cast v1.5.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/subosito/gotenv v1.4.2 // indirect + github.com/tklauser/go-sysconf v0.3.11 // indirect + github.com/tklauser/numcpus v0.6.0 // indirect + github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect + 
github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.21 // indirect + github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/stringprep v1.0.4 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect + github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect + github.com/yusufpapurcu/wmi v1.2.2 // indirect + go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.42.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0 // indirect + go.opentelemetry.io/proto/otlp v0.19.0 // indirect + go.uber.org/atomic v1.10.0 // indirect + go.uber.org/dig v1.16.1 // indirect + go.uber.org/multierr v1.9.0 // indirect + go.uber.org/zap v1.24.0 // indirect + golang.org/x/crypto v0.6.0 // indirect + golang.org/x/mod v0.8.0 // indirect + golang.org/x/net v0.10.0 // indirect + golang.org/x/sys v0.8.0 // indirect + golang.org/x/text v0.9.0 // indirect + golang.org/x/time v0.2.0 // indirect + golang.org/x/tools v0.6.0 // indirect + google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 // indirect + google.golang.org/grpc v1.55.0 // indirect + google.golang.org/protobuf v1.30.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/libs/go.sum b/libs/go.sum new file mode 100644 index 000000000..7a1f662cd --- /dev/null +++ b/libs/go.sum @@ -0,0 +1,919 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod 
h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod 
h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Microsoft/go-winio v0.6.0 h1:slsWYD/zyx7lCXoZVlvQrj0hPTM1HI4+v1sIda2yDvg= +github.com/Microsoft/go-winio v0.6.0/go.mod h1:cTAf44im0RAYeL23bpB+fzCyDH2MJiz2BO69KH/soAE= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod 
h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/Shopify/sarama v1.38.1 h1:lqqPUPQZ7zPqYlWpTh+LQ9bhYNu2xJL6k1SJN4WVe2A= +github.com/Shopify/sarama v1.38.1/go.mod h1:iwv9a67Ha8VNa+TifujYoWGxWnu2kNVAQdSdZ4X2o5g= +github.com/Shopify/toxiproxy/v2 v2.5.0 h1:i4LPT+qrSlKNtQf5QliVjdP08GyAH8+BUIc9gT0eahc= +github.com/ThreeDotsLabs/watermill v1.1.0/go.mod h1:Qd1xNFxolCAHCzcMrm6RnjW0manbvN+DJVWc1MWRFlI= +github.com/ThreeDotsLabs/watermill v1.2.0 h1:TU3TML1dnQ/ifK09F2+4JQk2EKhmhXe7Qv7eb5ZpTS8= +github.com/ThreeDotsLabs/watermill v1.2.0/go.mod h1:IuVxGk/kgCN0cex2S94BLglUiB0PwOm8hbUhm6g2Nx4= +github.com/ThreeDotsLabs/watermill-http v1.1.4 h1:wRM54z/BPnIWjGbXMrOnwOlrCAESzoSNxTAHiLysFA4= +github.com/ThreeDotsLabs/watermill-http v1.1.4/go.mod h1:mkQ9CC0pxTZerNwr281rBoOy355vYt/lePkmYSX/BRg= +github.com/ThreeDotsLabs/watermill-kafka/v2 v2.2.2 h1:COB5neqVL8jGwoz1Y9dawQ7Xhxid1XXX8+1CI/PebVU= +github.com/ThreeDotsLabs/watermill-kafka/v2 v2.2.2/go.mod h1:U001oyrHo+df3Q7hIXgKqxY2OW6woz64+GNuIxZokbM= +github.com/ThreeDotsLabs/watermill-nats/v2 v2.0.0 h1:ZbdQ+cHwOZmXByEoKUH8SS6qR/erNQfrsNpvH5z/gfk= +github.com/ThreeDotsLabs/watermill-nats/v2 v2.0.0/go.mod h1:X6pcl579pScj4mII3KM/WJ+bcOqORqiCToy92f4gqJ4= +github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU= +github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= 
+github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= +github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/cilium/ebpf v0.7.0/go.mod h1:/oI2+1shJiTGAMgl6/RgJr36Eo1jzrRcAWbcXO2usCA= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod 
h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= +github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= +github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM= +github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= +github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgraph-io/ristretto v0.1.1 
h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= +github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= +github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/docker/cli v20.10.17+incompatible h1:eO2KS7ZFeov5UJeaDmIs1NFEDRf32PaqRpvoEkKBy5M= +github.com/docker/cli v20.10.17+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/docker v20.10.17+incompatible h1:JYCuMrWaVNophQTOrMMoSwudOVEfcegoZZrleKc1xwE= +github.com/docker/docker v20.10.17+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw= +github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/eapache/go-resiliency v1.3.0 h1:RRL0nge+cWGlxXbUzJ7yMcq6w2XBEr19dCN6HECGaT0= +github.com/eapache/go-resiliency v1.3.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= +github.com/eapache/go-xerial-snappy v0.0.0-20230111030713-bf00bc1b83b6 h1:8yY/I9ndfrgrXUbOGObLHKBR4Fl3nZXwM2c7OYTT8hM= +github.com/eapache/go-xerial-snappy v0.0.0-20230111030713-bf00bc1b83b6/go.mod h1:YvSRo5mw33fLEx1+DlK6L2VV43tJt5Eyel9n9XBcR+0= +github.com/eapache/queue v1.1.0 
h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk= +github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= +github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-chi/chi v4.0.2+incompatible/go.mod 
h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= +github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/go-chi/render v1.0.1/go.mod h1:pq4Rr7HbnsdaeHagklXub+p6Wd16Af5l9koip1OvJns= +github.com/go-chi/render v1.0.2 h1:4ER/udB0+fMWB2Jlf15RV3F4A2FDuYi/9f+lFttR/Lg= +github.com/go-chi/render v1.0.2/go.mod h1:/gr3hVkmYR0YlEy3LxCuVRFzEu9Ruok+gFqbIofjao0= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/godbus/dbus/v5 v5.0.6/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf 
v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= +github.com/golang/glog v1.1.0 h1:/d3pCKDPWNnvIWe0vVUpNP32qc8U3PDVxySP/y360qE= +github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP3NQ= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod 
h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod 
h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod 
h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.0 h1:1JYBfzqrWPcCclBwxFCPAou9n+q86mfnu7NAeHfte7A= +github.com/grpc-ecosystem/grpc-gateway/v2 
v2.15.0/go.mod h1:YDZoGHuwE+ov0c8smSH49WLF3F2LaWnYYuDVd+EWrc0= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= +github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= +github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jackc/pgpassfile v1.0.0 
h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.3.0 h1:/NQi8KHMpKWHInxXesC8yD4DhkXPrVhmnwYkjp9AmBA= +github.com/jackc/pgx/v5 v5.3.0/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8= +github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.3 h1:iTonLeSJOn7MVUtyMT+arAn5AKAPrkilzhGw8wE/Tq8= +github.com/jcmturner/gokrb5/v8 v8.4.3/go.mod h1:dqRwJGXznQrzw6cWmyo6kH+E7jksEQG/CyVWsJEsJO0= +github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod 
h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.15.15 h1:EF27CXIuDsYJ6mmvtBRlEuB2UVOqHG1tAXgZ7yIO+lw= +github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= +github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lithammer/shortuuid/v3 v3.0.4/go.mod h1:RviRjexKqIzx/7r1peoAITm6m7gnif/h+0zmolKJjzw= +github.com/lithammer/shortuuid/v3 v3.0.7 h1:trX0KTHy4Pbwo/6ia8fscyHoGA+mf1jWbPJVuvyJQQ8= +github.com/lithammer/shortuuid/v3 v3.0.7/go.mod h1:vMk8ke37EmiewwolSO1NLW8vP4ZaKlRuDIi8tWWmAts= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 
h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= +github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae h1:O4SWKdcHVCvYqyDV+9CJA1fcDN2L11Bule0iFy3YlAI= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod 
h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nats-io/jwt/v2 v2.3.0 h1:z2mA1a7tIf5ShggOFlR1oBPgd6hGqcDYsISxZByUzdI= +github.com/nats-io/jwt/v2 v2.3.0/go.mod h1:0tqz9Hlu6bCBFLWAASKhE5vUA4c24L9KPUUgvwumE/k= +github.com/nats-io/nats-server/v2 v2.9.8 h1:jgxZsv+A3Reb3MgwxaINcNq/za8xZInKhDg9Q0cGN1o= +github.com/nats-io/nats-server/v2 v2.9.8/go.mod h1:AB6hAnGZDlYfqb7CTAm66ZKMZy9DpfierY1/PbpvI2g= +github.com/nats-io/nats.go v1.23.0 h1:lR28r7IX44WjYgdiKz9GmUeW0uh/m33uD3yEjLZ2cOE= +github.com/nats-io/nats.go v1.23.0/go.mod h1:ki/Scsa23edbh8IRZbCuNXR9TDcbvfaSijKtaqQgw+Q= +github.com/nats-io/nkeys v0.3.0 h1:cgM5tL53EvYRU+2YLXIK0G2mJtK12Ft9oeooSZMA2G8= +github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4= +github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= +github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/runc v1.1.3 h1:vIXrkId+0/J2Ymu2m7VjGvbSlAId9XNRPhn2p4b+d8w= +github.com/opencontainers/runc v1.1.3/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= +github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/selinux v1.10.0/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= +github.com/ory/dockertest/v3 v3.9.1 h1:v4dkG+dlu76goxMiTT2j8zV7s4oPPEppKT8K8p2f1kY= +github.com/ory/dockertest/v3 
v3.9.1/go.mod h1:42Ir9hmvaAPm0Mgibk6mBPi7SFvTXxEcnztDYOJ//uM= +github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= +github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= +github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU= +github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek= +github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= +github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model 
v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= +github.com/segmentio/analytics-go v3.1.0+incompatible h1:IyiOfUgQFVHvsykKKbdI7ZsH374uv3/DfZUo9+G0Z80= +github.com/segmentio/analytics-go v3.1.0+incompatible/go.mod h1:C7CYBtQWk4vRk2RyLu0qOcbHJ18E3F1HV2C/8JvKN48= +github.com/segmentio/backo-go v1.0.1 h1:68RQccglxZeyURy93ASB/2kc9QudzgIDexJ927N++y4= +github.com/segmentio/backo-go v1.0.1/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc= +github.com/shirou/gopsutil/v3 v3.23.4 h1:hZwmDxZs7Ewt75DV81r4pFMqbq+di2cbt9FsQBqLD2o= +github.com/shirou/gopsutil/v3 v3.23.4/go.mod h1:ZcGxyfzAMRevhUR2+cfhXDH6gQdFYE/t8j1nsU4mPI8= +github.com/shoenig/go-m1cpu v0.1.5 h1:LF57Z/Fpb/WdGLjt2HZilNnmZOxg/q2bSKTQhgbrLrQ= +github.com/shoenig/go-m1cpu v0.1.5/go.mod h1:Wwvst4LR89UxjeFtLRMrpgRiyY4xPsejnVZym39dbAQ= 
+github.com/shoenig/test v0.6.3 h1:GVXWJFk9PiOjN0KoJ7VrJGH6uLPnqxR7/fe3HUPfE0c= +github.com/shoenig/test v0.6.3/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.9.3 h1:41FoI0fD7OR7mGcKE/aOiLkGreyf8ifIOQmJANWogMk= +github.com/spf13/afero v1.9.3/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= +github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.15.0 h1:js3yy885G8xwJa6iOISGFwd+qlUo5AvyXb7CiihdtiU= +github.com/spf13/viper v1.15.0/go.mod h1:fFcTBJxvhhzSJiZy8n+PeW6t8l+KeT/uTARa0jHOQLA= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= +github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM= +github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI= +github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms= +github.com/tklauser/numcpus v0.6.0/go.mod 
h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4= +github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo= +github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs= +github.com/uptrace/bun v1.1.14 h1:S5vvNnjEynJ0CvnrBOD7MIRW7q/WbtvFXrdfy0lddAM= +github.com/uptrace/bun v1.1.14/go.mod h1:RHk6DrIisO62dv10pUOJCz5MphXThuOTpVNYEYv7NI8= +github.com/uptrace/bun/dialect/pgdialect v1.1.14 h1:b7+V1KDJPQSFYgkG/6YLXCl2uvwEY3kf/GSM7hTHRDY= +github.com/uptrace/bun/dialect/pgdialect v1.1.14/go.mod h1:v6YiaXmnKQ2FlhRD2c0ZfKd+QXH09pYn4H8ojaavkKk= +github.com/uptrace/bun/extra/bundebug v1.1.14 h1:9OCGfP9ZDlh41u6OLerWdhBtJAVGXHr0xtxO4xWi6t0= +github.com/uptrace/bun/extra/bundebug v1.1.14/go.mod h1:lto3guzS2v6mnQp1+akyE+ecBLOltevDDe324NXEYdw= +github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.21 h1:OXsouNDvuET5o1A4uvoCnAXuuNke8JlfZWceciyUlC8= +github.com/uptrace/opentelemetry-go-extra/otellogrus v0.1.21/go.mod h1:Xm3wlRGm5xzdAGPOvqydXPiGj0Da1q0OlUNm7Utoda4= +github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.21 h1:HCqo51kNF8wxDMDhxcN5S6DlfZXigMtptRpkvjBCeVc= +github.com/uptrace/opentelemetry-go-extra/otelutil v0.1.21/go.mod h1:2MNqrUmDrt5E0glMuoJI/9FyGVpBKo1FqjSH60UOZFg= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= +github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= +github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU= +github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= +github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= +github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/xdg-go/pbkdf2 
v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= +github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= +github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c h1:3lbZUMbMiGUW/LMkfsEABsc5zNT9+b1CvsJx47JzJ8g= +github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c/go.mod h1:UrdRz5enIKZ63MEE3IF9l2/ebyx59GyGgPi+tICQdmM= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yusufpapurcu/wmi v1.2.2 
h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg= +github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.42.0 h1:bdoQBGxf0fxHaJo/Ry+RCi8k2ug7T5pqD/NdrwnbyyE= +go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.42.0/go.mod h1:TDCzwfNfOr8Of31UVortnNAvRrgMyMS0zAXw9I3hoSc= +go.opentelemetry.io/contrib/instrumentation/host v0.42.0 h1:/GMlvboQJd4LWxNX/oGYLv06J5a/M/flauLruM/3U2g= +go.opentelemetry.io/contrib/instrumentation/host v0.42.0/go.mod h1:w6v1mVemRjTTdfejACjf+LgVA6zKtHOWmdAIf3icx7A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0/go.mod h1:XiYsayHc36K3EByOO6nbAXnAWbrUxdjUROCEeeROOH8= +go.opentelemetry.io/contrib/instrumentation/runtime v0.42.0 h1:EbmAUG9hEAMXyfWEasIt2kmh/WmXUznUksChApTgBGc= +go.opentelemetry.io/contrib/instrumentation/runtime v0.42.0/go.mod h1:rD9feqRYP24P14t5kmhNMqsqm1jvKmpx2H2rKVw52V8= +go.opentelemetry.io/contrib/propagators/b3 v1.13.0 h1:f17PBmZK60RoHvOpJVqEka8oS2EXjpjHquESD/8zZ50= +go.opentelemetry.io/contrib/propagators/b3 v1.13.0/go.mod h1:zy2hz1TpGUoJzSwlBchVGvVAFQS8s2pglKLbrAFZ+Sc= +go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s= +go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4= 
+go.opentelemetry.io/otel/exporters/jaeger v1.16.0 h1:YhxxmXZ011C0aDZKoNw+juVWAmEfv/0W2XBOv9aHTaA= +go.opentelemetry.io/otel/exporters/jaeger v1.16.0/go.mod h1:grYbBo/5afWlPpdPZYhyn78Bk04hnvxn2+hvxQhKIQM= +go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 h1:t4ZwRPU+emrcvM2e9DHd0Fsf0JTPVcbfa/BhTDF03d0= +go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0/go.mod h1:vLarbg68dH2Wa77g71zmKQqlQ8+8Rq3GRG31uc0WcWI= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0 h1:f6BwB2OACc3FCbYVznctQ9V6KK7Vq6CjmYXJ7DeSs4E= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0/go.mod h1:UqL5mZ3qs6XYhDnZaW1Ps4upD+PX6LipH40AoeuIlwU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0 h1:rm+Fizi7lTM2UefJ1TO347fSRcwmIsUAaZmYmIGBRAo= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0/go.mod h1:sWFbI3jJ+6JdjOVepA5blpv/TJ20Hw+26561iMbWcwU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0 h1:IZXpCEtI7BbX01DRQEWTGDkvjMB6hEhiEZXS+eg2YqY= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0/go.mod h1:xY111jIZtWb+pUUgT4UiiSonAaY2cD2Ts5zvuKLki3o= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0 h1:cbsD4cUcviQGXdw8+bo5x2wazq10SKz8hEbtCRPcU78= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0/go.mod h1:JgXSGah17croqhJfhByOLVY719k1emAXC8MVhCIJlRs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0 h1:TVQp/bboR4mhZSav+MdgXB8FaRho1RC8UwVn3T0vjVc= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0/go.mod h1:I33vtIe0sR96wfrUcilIzLoA3mLHhRmz9S9Te0S3gDo= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.16.0 h1:iqjq9LAB8aK++sKVcELezzn655JnBNdsDhghU4G/So8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.16.0/go.mod h1:hGXzO5bhhSHZnKvrDaXB82Y9DRFour0Nz/KrBh7reWw= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0 h1:fl2WmyenEf6LYYlfHAtCUEDyGcpwJNqD4dHGO7PVm4w= 
+go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0/go.mod h1:csyQxQ0UHHKVA8KApS7eUO/klMO5sd/av5CNZNU4O6w= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 h1:+XWJd3jf75RXJq29mxbuXhCXFDG3S3R4vBUeSI2P7tE= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0/go.mod h1:hqgzBPTf4yONMFgdZvL/bK42R/iinTyVQtiWihs3SZc= +go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo= +go.opentelemetry.io/otel/metric v1.16.0/go.mod h1:QE47cpOmkwipPiefDwo2wDzwJrlfxxNYodqc4xnGCo4= +go.opentelemetry.io/otel/sdk v1.16.0 h1:Z1Ok1YsijYL0CSJpHt4cS3wDDh7p572grzNrBMiMWgE= +go.opentelemetry.io/otel/sdk v1.16.0/go.mod h1:tMsIuKXuuIWPBAOrH+eHtvhTL+SntFtXF9QD68aP6p4= +go.opentelemetry.io/otel/sdk/metric v0.39.0 h1:Kun8i1eYf48kHH83RucG93ffz0zGV1sh46FAScOTuDI= +go.opentelemetry.io/otel/sdk/metric v0.39.0/go.mod h1:piDIRgjcK7u0HCL5pCA4e74qpK/jk3NiUoAHATVAmiI= +go.opentelemetry.io/otel/trace v1.16.0 h1:8JRpaObFoW0pxuVPapkgH8UhHQj+bJW8jJsCZEu5MQs= +go.opentelemetry.io/otel/trace v1.16.0/go.mod h1:Yt9vYq1SdNz3xdjZZK7wcXv1qv2pwLkqr2QVwea0ef0= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.opentelemetry.io/proto/otlp v0.19.0 h1:IVN6GR+mhC4s5yfcTbmzHYODqvWAp3ZedA2SJPI1Nnw= +go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= +go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= +go.uber.org/dig v1.16.1 h1:+alNIBsl0qfY0j6epRubp/9obgtrObRAc5aD+6jbWY8= +go.uber.org/dig v1.16.1/go.mod h1:557JTAUZT5bUK0SvCwikmLPPtdQhfvLYtO5tJgQSbnk= +go.uber.org/fx v1.19.1 h1:JwYIYAQzXBuBBwSZ1/tn/95pnQO/Sp3yE8lWj9eSAzI= +go.uber.org/fx v1.19.1/go.mod h1:bGK+AEy7XUwTBkqCsK/vDyFF0JJOA6X5KWpNC0e6qTA= +go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= +go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= 
+go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= +go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= +go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= +golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp 
v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= 
+golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220725212005-46097bf591d3/go.mod h1:AaygXjzTFtRAg2ttMY5RMuhpJ3cNnI0XpyFJD1iQRSM= +golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod 
h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.2.0 
h1:52I/1L54xyEQAYdtcSuxtiT84KGYTBGXwayxmIpNJhE= +golang.org/x/time v0.2.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= 
+golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools 
v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/xerrors 
v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api 
v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= 
+google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 h1:DdoeryqhaXp1LtT/emMP1BRJPHHKFi5akj/nbx/zNTA= +google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= 
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.55.0 h1:3Oj82/tFSCeUrRTg/5E/7d/W5A1tj6Ky1ABAuZuv5ag= +google.golang.org/grpc v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod 
h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= +google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= +gotest.tools/v3 v3.2.0 h1:I0DwBVMGAx26dttAj1BtJLAkVGncrkkUXfJLC4Flt/I= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/libs/health/controller.go b/libs/health/controller.go new file mode 100644 index 000000000..a01efbde5 --- /dev/null +++ b/libs/health/controller.go @@ -0,0 +1,66 @@ +package health + +import ( + "encoding/json" + "net/http" + "sync" +) + +type HealthController struct { + Checks []NamedCheck +} + +type result struct { + Check NamedCheck + Err error +} + +func (ctrl *HealthController) Check(w http.ResponseWriter, r *http.Request) { + sg := sync.WaitGroup{} + sg.Add(len(ctrl.Checks)) + + results := make(chan result, len(ctrl.Checks)) + for _, ch := range ctrl.Checks { + go func(ch NamedCheck) { + 
defer sg.Done() + select { + case <-r.Context().Done(): + return + case results <- result{ + Check: ch, + Err: ch.Do(r.Context()), + }: + } + }(ch) + } + sg.Wait() + close(results) + + response := map[string]string{} + hasError := false + for r := range results { + if r.Err != nil { + hasError = true + response[r.Check.Name()] = r.Err.Error() + } else { + response[r.Check.Name()] = "OK" + } + } + + if hasError { + w.WriteHeader(http.StatusInternalServerError) + } else { + w.WriteHeader(http.StatusOK) + } + + err := json.NewEncoder(w).Encode(response) + if err != nil { + panic(err) + } +} + +func NewHealthController(checks []NamedCheck) *HealthController { + return &HealthController{ + Checks: checks, + } +} diff --git a/libs/health/controller_test.go b/libs/health/controller_test.go new file mode 100644 index 000000000..a02abd473 --- /dev/null +++ b/libs/health/controller_test.go @@ -0,0 +1,87 @@ +package health_test + +import ( + "context" + "encoding/json" + "errors" + "net/http" + "net/http/httptest" + "testing" + + "github.com/formancehq/stack/libs/go-libs/health" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.uber.org/fx" +) + +func TestHealthController(t *testing.T) { + type testCase struct { + name string + healthChecksProvider []any + expectedStatus int + expectedResult map[string]string + } + + var tests = []testCase{ + { + name: "all-ok", + healthChecksProvider: []any{ + func() health.NamedCheck { + return health.NewNamedCheck("test1", health.CheckFn(func(ctx context.Context) error { + return nil + })) + }, + func() health.NamedCheck { + return health.NewNamedCheck("test2", health.CheckFn(func(ctx context.Context) error { + return nil + })) + }, + }, + expectedStatus: http.StatusOK, + expectedResult: map[string]string{ + "test1": "OK", + "test2": "OK", + }, + }, + { + name: "one-failing", + healthChecksProvider: []any{ + func() health.NamedCheck { + return health.NewNamedCheck("test1", health.CheckFn(func(ctx 
context.Context) error { + return nil + })) + }, + func() health.NamedCheck { + return health.NewNamedCheck("test2", health.CheckFn(func(ctx context.Context) error { + return errors.New("failure") + })) + }, + }, + expectedStatus: http.StatusInternalServerError, + expectedResult: map[string]string{ + "test1": "OK", + "test2": "failure", + }, + }, + } + + for _, tc := range tests { + options := make([]fx.Option, 0) + options = append(options, health.Module(), fx.NopLogger) + for _, p := range tc.healthChecksProvider { + options = append(options, health.ProvideHealthCheck(p)) + } + options = append(options, fx.Invoke(func(ctrl *health.HealthController) { + rec := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodGet, "/_health", nil) + ctrl.Check(rec, req) + assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) + + ret := make(map[string]string) + assert.NoError(t, json.NewDecoder(rec.Result().Body).Decode(&ret)) + assert.Equal(t, tc.expectedResult, ret) + })) + app := fx.New(options...) 
+ require.NoError(t, app.Err()) + } +} diff --git a/libs/health/health.go b/libs/health/health.go new file mode 100644 index 000000000..3ae283d13 --- /dev/null +++ b/libs/health/health.go @@ -0,0 +1,33 @@ +package health + +import "context" + +type Check interface { + Do(ctx context.Context) error +} +type CheckFn func(ctx context.Context) error + +func (fn CheckFn) Do(ctx context.Context) error { + return fn(ctx) +} + +type NamedCheck interface { + Check + Name() string +} + +type simpleNamedCheck struct { + Check + name string +} + +func (c *simpleNamedCheck) Name() string { + return c.name +} + +func NewNamedCheck(name string, check Check) *simpleNamedCheck { + return &simpleNamedCheck{ + Check: check, + name: name, + } +} diff --git a/libs/health/module.go b/libs/health/module.go new file mode 100644 index 000000000..323129385 --- /dev/null +++ b/libs/health/module.go @@ -0,0 +1,15 @@ +package health + +import "go.uber.org/fx" + +const HealthCheckKey = `group:"_healthCheck"` + +func ProvideHealthCheck(provider any) fx.Option { + return fx.Provide( + fx.Annotate(provider, fx.ResultTags(HealthCheckKey)), + ) +} + +func Module() fx.Option { + return fx.Provide(fx.Annotate(NewHealthController, fx.ParamTags(HealthCheckKey))) +} diff --git a/libs/httpclient/debug.go b/libs/httpclient/debug.go new file mode 100644 index 000000000..6cff96644 --- /dev/null +++ b/libs/httpclient/debug.go @@ -0,0 +1,41 @@ +package httpclient + +import ( + "net/http" + "net/http/httputil" + + "github.com/formancehq/stack/libs/go-libs/logging" +) + +type httpTransport struct { + underlying http.RoundTripper +} + +func (h httpTransport) RoundTrip(request *http.Request) (*http.Response, error) { + data, err := httputil.DumpRequest(request, true) + if err != nil { + panic(err) + } + logging.FromContext(request.Context()).Debug(string(data)) + + rsp, err := h.underlying.RoundTrip(request) + if err != nil { + return nil, err + } + + data, err = httputil.DumpResponse(rsp, true) + if err != nil { 
+ panic(err) + } + logging.FromContext(request.Context()).Debug(string(data)) + + return rsp, nil +} + +var _ http.RoundTripper = &httpTransport{} + +func NewDebugHTTPTransport(underlying http.RoundTripper) *httpTransport { + return &httpTransport{ + underlying: underlying, + } +} diff --git a/libs/httpserver/serverport.go b/libs/httpserver/serverport.go new file mode 100644 index 000000000..d1e096d06 --- /dev/null +++ b/libs/httpserver/serverport.go @@ -0,0 +1,109 @@ +package httpserver + +import ( + "context" + "net" + "net/http" + "strconv" + + "go.uber.org/fx" +) + +type serverInfo struct { + started chan struct{} + port int +} + +type serverInfoContextKey string + +var serverInfoKey serverInfoContextKey = "_serverInfo" + +func GetActualServerInfo(ctx context.Context) *serverInfo { + siAsAny := ctx.Value(serverInfoKey) + if siAsAny == nil { + return nil + } + return siAsAny.(*serverInfo) +} + +func ContextWithServerInfo(ctx context.Context) context.Context { + return context.WithValue(ctx, serverInfoKey, &serverInfo{ + started: make(chan struct{}), + }) +} + +func Started(ctx context.Context) chan struct{} { + si := GetActualServerInfo(ctx) + if si == nil { + return nil + } + return si.started +} + +func Port(ctx context.Context) int { + si := GetActualServerInfo(ctx) + if si == nil { + return 0 + } + return si.port +} + +func StartedServer(ctx context.Context, listener net.Listener) { + si := GetActualServerInfo(ctx) + if si == nil { + return + } + + _, portAsString, _ := net.SplitHostPort(listener.Addr().String()) + port, _ := strconv.ParseInt(portAsString, 10, 32) + + si.port = int(port) + close(si.started) +} + +func StartServer(ctx context.Context, bind string, handler http.Handler, options ...func(server *http.Server)) (func(ctx context.Context) error, error) { + listener, err := net.Listen("tcp", bind) + if err != nil { + return func(ctx context.Context) error { + return nil + }, err + } + StartedServer(ctx, listener) + + srv := &http.Server{ + Handler: 
handler, + } + for _, option := range options { + option(srv) + } + + go func() { + err := srv.Serve(listener) + if err != nil && err != http.ErrServerClosed { + panic(err) + } + }() + + return func(ctx context.Context) error { + return srv.Shutdown(ctx) + }, nil +} + +func NewHook(addr string, handler http.Handler, options ...func(server *http.Server)) fx.Hook { + var ( + close func(ctx context.Context) error + err error + ) + return fx.Hook{ + OnStart: func(ctx context.Context) error { + close, err = StartServer(ctx, addr, handler, options...) + return err + }, + OnStop: func(ctx context.Context) error { + if close == nil { + return nil + } + return close(ctx) + }, + } +} diff --git a/libs/logging/context.go b/libs/logging/context.go new file mode 100644 index 000000000..fcf9fb20c --- /dev/null +++ b/libs/logging/context.go @@ -0,0 +1,37 @@ +package logging + +import ( + "context" + + "github.com/sirupsen/logrus" +) + +type contextKey string + +var loggerKey contextKey = "_logger" + +func FromContext(ctx context.Context) Logger { + l := ctx.Value(loggerKey) + if l == nil { + return NewLogrus(logrus.New()) + } + return l.(Logger) +} + +func ContextWithLogger(ctx context.Context, l Logger) context.Context { + return context.WithValue(ctx, loggerKey, l) +} + +func ContextWithFields(ctx context.Context, fields map[string]any) context.Context { + return ContextWithLogger(ctx, FromContext(ctx).WithFields(fields)) +} + +func ContextWithField(ctx context.Context, key string, value any) context.Context { + return ContextWithLogger(ctx, FromContext(ctx).WithFields(map[string]any{ + key: value, + })) +} + +func TestingContext() context.Context { + return ContextWithLogger(context.Background(), Testing()) +} diff --git a/libs/logging/logging.go b/libs/logging/logging.go new file mode 100644 index 000000000..f161d462d --- /dev/null +++ b/libs/logging/logging.go @@ -0,0 +1,37 @@ +package logging + +import "context" + +type Logger interface { + Debugf(fmt string, args ...any) + 
Infof(fmt string, args ...any) + Errorf(fmt string, args ...any) + Debug(args ...any) + Info(args ...any) + Error(args ...any) + WithFields(map[string]any) Logger + WithField(key string, value any) Logger + WithContext(ctx context.Context) Logger +} + +func Debugf(fmt string, args ...any) { + FromContext(context.TODO()).Debugf(fmt, args...) +} +func Infof(fmt string, args ...any) { + FromContext(context.TODO()).Infof(fmt, args...) +} +func Errorf(fmt string, args ...any) { + FromContext(context.TODO()).Errorf(fmt, args...) +} +func Debug(args ...any) { + FromContext(context.TODO()).Debug(args...) +} +func Info(args ...any) { + FromContext(context.TODO()).Info(args...) +} +func Error(args ...any) { + FromContext(context.TODO()).Error(args...) +} +func WithFields(fields map[string]any) Logger { + return FromContext(context.TODO()).WithFields(fields) +} diff --git a/libs/logging/logrus.go b/libs/logging/logrus.go new file mode 100644 index 000000000..2ac5356f1 --- /dev/null +++ b/libs/logging/logrus.go @@ -0,0 +1,86 @@ +package logging + +import ( + "context" + "flag" + "io" + "os" + "testing" + + "github.com/sirupsen/logrus" +) + +type logrusLogger struct { + entry interface { + Debugf(format string, args ...any) + Debug(args ...any) + Infof(format string, args ...any) + Info(args ...any) + Errorf(format string, args ...any) + Error(args ...any) + WithFields(fields logrus.Fields) *logrus.Entry + WithField(key string, value any) *logrus.Entry + WithContext(ctx context.Context) *logrus.Entry + } +} + +func (l *logrusLogger) WithContext(ctx context.Context) Logger { + return &logrusLogger{ + l.entry.WithContext(ctx), + } +} + +func (l *logrusLogger) Debug(args ...any) { + l.entry.Debug(args...) +} +func (l *logrusLogger) Debugf(fmt string, args ...any) { + l.entry.Debugf(fmt, args...) +} +func (l *logrusLogger) Infof(fmt string, args ...any) { + l.entry.Infof(fmt, args...) +} +func (l *logrusLogger) Info(args ...any) { + l.entry.Info(args...) 
+} +func (l *logrusLogger) Errorf(fmt string, args ...any) { + l.entry.Errorf(fmt, args...) +} +func (l *logrusLogger) Error(args ...any) { + l.entry.Error(args...) +} +func (l *logrusLogger) WithFields(fields map[string]any) Logger { + return &logrusLogger{ + entry: l.entry.WithFields(fields), + } +} + +func (l *logrusLogger) WithField(key string, value any) Logger { + return l.WithFields(map[string]any{ + key: value, + }) +} + +var _ Logger = &logrusLogger{} + +func NewLogrus(logger *logrus.Logger) *logrusLogger { + return &logrusLogger{ + entry: logger, + } +} + +func Testing() *logrusLogger { + logger := logrus.New() + logger.SetOutput(io.Discard) + flag.Parse() + if testing.Verbose() { + logger.SetOutput(os.Stdout) + logger.SetLevel(logrus.DebugLevel) + } + + textFormatter := new(logrus.TextFormatter) + textFormatter.TimestampFormat = "15-01-2018 15:04:05.000000" + textFormatter.FullTimestamp = true + logger.SetFormatter(textFormatter) + + return NewLogrus(logger) +} diff --git a/libs/metadata/metadata.go b/libs/metadata/metadata.go new file mode 100644 index 000000000..31c36a839 --- /dev/null +++ b/libs/metadata/metadata.go @@ -0,0 +1,83 @@ +package metadata + +import ( + "database/sql/driver" + "encoding/json" + "reflect" + + "github.com/imdario/mergo" +) + +type Metadata map[string]string + +// IsEquivalentTo allow to compare to metadata object. 
+func (m1 Metadata) IsEquivalentTo(m2 Metadata) bool { + return reflect.DeepEqual(m1, m2) +} + +func (m1 Metadata) Merge(m2 Metadata) Metadata { + ret := Metadata{} + if err := mergo.Merge(&ret, m1, mergo.WithOverride); err != nil { + panic(err) + } + if err := mergo.Merge(&ret, m2, mergo.WithOverride); err != nil { + panic(err) + } + return ret +} + +// Scan - Implement the database/sql scanner interface +func (m1 *Metadata) Scan(value interface{}) error { + if value == nil { + return nil + } + v, err := driver.String.ConvertValue(value) + if err != nil { + return err + } + + *m1 = Metadata{} + switch vv := v.(type) { + case []uint8: + return json.Unmarshal(vv, m1) + case string: + return json.Unmarshal([]byte(vv), m1) + default: + panic("not handled type") + } +} + +func (m1 Metadata) ConvertValue(v interface{}) (driver.Value, error) { + return json.Marshal(v) +} + +func (m1 Metadata) Copy() Metadata { + ret := Metadata{} + for k, v := range m1 { + ret[k] = v + } + return ret +} + +func ComputeMetadata(key, value string) Metadata { + return Metadata{ + key: value, + } +} + +func MarshalValue(v any) string { + vv, err := json.Marshal(v) + if err != nil { + panic(err) + } + + return string(vv) +} + +func UnmarshalValue[TO any](value string) TO { + var ret TO + if err := json.Unmarshal([]byte(value), &ret); err != nil { + panic(err) + } + return ret +} diff --git a/libs/metadata/owner.go b/libs/metadata/owner.go new file mode 100644 index 000000000..4444f2a35 --- /dev/null +++ b/libs/metadata/owner.go @@ -0,0 +1,5 @@ +package metadata + +type Owner interface { + GetMetadata() Metadata +} diff --git a/libs/migrations/migration.go b/libs/migrations/migration.go new file mode 100644 index 000000000..1405f18bd --- /dev/null +++ b/libs/migrations/migration.go @@ -0,0 +1,13 @@ +package migrations + +import ( + "context" + + "github.com/uptrace/bun" +) + +type Migration struct { + Name string + Up func(tx bun.Tx) error + UpWithContext func(ctx context.Context, tx bun.Tx) 
error +} diff --git a/libs/migrations/migrator.go b/libs/migrations/migrator.go new file mode 100644 index 000000000..77fb68d1b --- /dev/null +++ b/libs/migrations/migrator.go @@ -0,0 +1,250 @@ +package migrations + +import ( + "context" + "database/sql" + "fmt" + "github.com/lib/pq" + "time" + + "github.com/pkg/errors" + "github.com/uptrace/bun" +) + +const ( + // Keep goose name to keep backward compatibility + migrationTable = "goose_db_version" +) + +type Info struct { + bun.BaseModel `bun:"goose_db_version"` + + Version string `json:"version" bun:"version_id"` + Name string `json:"name" bun:"-"` + State string `json:"state,omitempty" bun:"-"` + Date time.Time `json:"date,omitempty" bun:"tstamp"` +} + +type Migrator struct { + migrations []Migration + schema string + createSchema bool +} + +type option func(m *Migrator) + +func WithSchema(schema string, create bool) option { + return func(m *Migrator) { + m.schema = schema + m.createSchema = create + } +} + +func (m *Migrator) RegisterMigrations(migrations ...Migration) *Migrator { + m.migrations = append(m.migrations, migrations...) 
+ return m +} + +func (m *Migrator) createVersionTable(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, fmt.Sprintf(`create table if not exists %s ( + id serial primary key, + version_id bigint not null, + is_applied boolean not null, + tstamp timestamp default now() + );`, migrationTable)) + if err != nil { + return err + } + + lastVersion, err := m.getLastVersion(ctx, tx) + if err != nil { + return err + } + + if lastVersion == -1 { + if err := m.insertVersion(ctx, tx, 0); err != nil { + return err + } + } + + return err +} + +func (m *Migrator) getLastVersion(ctx context.Context, querier interface { + QueryRowContext(ctx context.Context, query string, args ...any) *sql.Row +}) (int64, error) { + row := querier.QueryRowContext(ctx, fmt.Sprintf(`select max(version_id) from "%s";`, migrationTable)) + if err := row.Err(); err != nil { + switch { + case err == sql.ErrNoRows: + return -1, nil + default: + switch err := err.(type) { + case *pq.Error: + switch err.Code { + case "42P01": // Table not exists + return -1, nil + } + } + } + + return -1, errors.Wrap(err, "selecting max id from version table") + } + var number sql.NullInt64 + if err := row.Scan(&number); err != nil { + return 0, err + } + + if !number.Valid { + return -1, nil + } + + return number.Int64, nil +} + +func (m *Migrator) insertVersion(ctx context.Context, tx bun.Tx, version int) error { + _, err := tx.ExecContext(ctx, + fmt.Sprintf(`INSERT INTO "%s" (version_id, is_applied, tstamp) VALUES (?, ?, ?)`, migrationTable), + version, true, time.Now()) + return err +} + +func (m *Migrator) GetDBVersion(ctx context.Context, db *bun.DB) (int64, error) { + tx, err := m.newTx(ctx, db) + if err != nil { + return -1, err + } + defer func() { + _ = tx.Rollback() + }() + + return m.getLastVersion(ctx, tx) +} + +func (m *Migrator) newTx(ctx context.Context, db bun.IDB) (bun.Tx, error) { + tx, err := db.BeginTx(ctx, &sql.TxOptions{}) + if err != nil { + return bun.Tx{}, err + } + + if 
m.schema != "" { + _, err := tx.ExecContext(ctx, fmt.Sprintf(`set search_path = "%s"`, m.schema)) + if err != nil { + return bun.Tx{}, err + } + } + + return tx, err +} + +func (m *Migrator) Up(ctx context.Context, db bun.IDB) error { + tx, err := m.newTx(ctx, db) + if err != nil { + return err + } + defer func() { + _ = tx.Rollback() + }() + + if m.schema != "" && m.createSchema { + _, err := tx.ExecContext(ctx, fmt.Sprintf(`create schema if not exists "%s"`, m.schema)) + if err != nil { + return err + } + } + + if err := m.createVersionTable(ctx, tx); err != nil { + return err + } + + lastMigration, err := m.getLastVersion(ctx, tx) + if err != nil { + return err + } + + if len(m.migrations) > int(lastMigration)-1 { + for ind, migration := range m.migrations[lastMigration:] { + if migration.UpWithContext != nil { + if err := migration.UpWithContext(ctx, tx); err != nil { + return err + } + } else if migration.Up != nil { + if err := migration.Up(tx); err != nil { + return err + } + } else { + return errors.New("no code defined for migration") + } + + if err := m.insertVersion(ctx, tx, int(lastMigration)+ind+1); err != nil { + return err + } + } + } + + return tx.Commit() +} + +func (m *Migrator) GetMigrations(ctx context.Context, db bun.IDB) ([]Info, error) { + tx, err := m.newTx(ctx, db) + if err != nil { + return nil, err + } + defer func() { + _ = tx.Rollback() + }() + + migrationTableName := migrationTable + if m.schema != "" { + migrationTableName = fmt.Sprintf(`"%s".%s`, m.schema, migrationTableName) + } + + ret := make([]Info, 0) + if err := tx.NewSelect(). + TableExpr(migrationTableName). + Order("version_id"). + Where("version_id >= 1"). + Column("version_id", "tstamp"). 
+ Scan(ctx, &ret); err != nil { + return nil, err + } + + for i := 0; i < len(ret); i++ { + ret[i].Name = m.migrations[i].Name + ret[i].State = "DONE" + } + + for i := len(ret); i < len(m.migrations); i++ { + ret = append(ret, Info{ + Version: fmt.Sprint(i), + Name: m.migrations[i].Name, + State: "TO DO", + }) + } + + return ret, nil +} + +func (m *Migrator) IsUpToDate(ctx context.Context, db *bun.DB) (bool, error) { + tx, err := m.newTx(ctx, db) + if err != nil { + return false, err + } + defer func() { + _ = tx.Rollback() + }() + version, err := m.getLastVersion(ctx, tx) + if err != nil { + return false, err + } + + return int(version) == len(m.migrations), nil +} + +func NewMigrator(opts ...option) *Migrator { + ret := &Migrator{} + for _, opt := range opts { + opt(ret) + } + return ret +} diff --git a/libs/migrations/migrator_test.go b/libs/migrations/migrator_test.go new file mode 100644 index 000000000..ee6ff066b --- /dev/null +++ b/libs/migrations/migrator_test.go @@ -0,0 +1,48 @@ +package migrations + +import ( + "context" + "database/sql" + "testing" + + "github.com/formancehq/stack/libs/go-libs/pgtesting" + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/stretchr/testify/require" + "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/pgdialect" + "github.com/uptrace/bun/extra/bundebug" +) + +func TestMigrations(t *testing.T) { + require.NoError(t, pgtesting.CreatePostgresServer()) + defer func() { + require.NoError(t, pgtesting.DestroyPostgresServer()) + }() + + migrator := NewMigrator() + migrator.RegisterMigrations( + Migration{ + Up: func(tx bun.Tx) error { + _, err := tx.Exec(`CREATE TABLE "foo" (id varchar)`) + return err + }, + }, + ) + + db := pgtesting.NewPostgresDatabase(t) + sqlDB, err := sql.Open("pgx", db.ConnString()) + require.NoError(t, err) + + bunDB := bun.NewDB(sqlDB, pgdialect.New()) + if testing.Verbose() { + bunDB.AddQueryHook(bundebug.NewQueryHook( + bundebug.WithVerbose(true), + bundebug.FromEnv("BUNDEBUG"), + )) + } + + 
require.NoError(t, migrator.Up(context.Background(), bunDB)) + version, err := migrator.GetDBVersion(context.Background(), bunDB) + require.NoError(t, err) + require.EqualValues(t, 1, version) +} diff --git a/libs/moon.yml b/libs/moon.yml new file mode 100644 index 000000000..0e3d91c6e --- /dev/null +++ b/libs/moon.yml @@ -0,0 +1,13 @@ +type: 'library' +language: 'go' + +tasks: + sync: + deps: + - go-libs:sync/ledger + + sync/ledger: + command: 'task sync:ledger' + platform: 'system' + inputs: + - '**' diff --git a/libs/oauth2/oauth2introspect/introspect.go b/libs/oauth2/oauth2introspect/introspect.go new file mode 100644 index 000000000..e0c1303e5 --- /dev/null +++ b/libs/oauth2/oauth2introspect/introspect.go @@ -0,0 +1,99 @@ +package oauth2introspect + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "time" + + "github.com/dgraph-io/ristretto" + "github.com/pkg/errors" +) + +type IntrospecterOption interface { + apply(introspecter *Introspecter) +} +type IntrospecterOptionFn func(introspecter *Introspecter) + +func (fn IntrospecterOptionFn) apply(introspecter *Introspecter) { + fn(introspecter) +} + +func WithClient(client *http.Client) IntrospecterOptionFn { + return func(introspecter *Introspecter) { + introspecter.client = client + } +} + +func WithCache(cache *ristretto.Cache, cacheTTL time.Duration) IntrospecterOptionFn { + return func(introspecter *Introspecter) { + introspecter.cache = cache + introspecter.cacheTTL = cacheTTL + } +} + +type Introspecter struct { + introspectUrl string + client *http.Client + cache *ristretto.Cache + cacheTTL time.Duration +} + +func (i *Introspecter) Introspect(ctx context.Context, bearer string) (bool, error) { + + if i.cache != nil { + v, ok := i.cache.Get(bearer) + if ok { + return v.(bool), nil + } + } + + form := url.Values{} + form.Set("token", bearer) + + checkAuthReq, err := http.NewRequest(http.MethodPost, i.introspectUrl, bytes.NewBufferString(form.Encode())) + if err != nil { 
+ return false, errors.Wrap(err, "creating introspection request") + } + checkAuthReq.Header.Add("Content-Type", "application/x-www-form-urlencoded") + checkAuthReq = checkAuthReq.WithContext(ctx) + + rsp, err := i.client.Do(checkAuthReq) + if err != nil { + return false, errors.Wrap(err, "making introspection request") + } + + switch rsp.StatusCode { + case http.StatusOK: + type X struct { + Active bool `json:"active"` + } + x := X{} + err = json.NewDecoder(rsp.Body).Decode(&x) + if err != nil { + return false, errors.Wrap(err, "decoding introspection response") + } + + if i.cache != nil { + _ = i.cache.SetWithTTL(bearer, x.Active, 1, i.cacheTTL) + } + + return x.Active, nil + default: + return false, fmt.Errorf("unexpected status code %d on introspection request", rsp.StatusCode) + } +} + +func NewIntrospecter(url string, options ...IntrospecterOption) *Introspecter { + i := &Introspecter{ + introspectUrl: url, + client: http.DefaultClient, + } + for _, opt := range options { + opt.apply(i) + } + return i +} diff --git a/libs/oauth2/oauth2introspect/middleware.go b/libs/oauth2/oauth2introspect/middleware.go new file mode 100644 index 000000000..dfd58c576 --- /dev/null +++ b/libs/oauth2/oauth2introspect/middleware.go @@ -0,0 +1,31 @@ +package oauth2introspect + +import ( + "net/http" + "strings" +) + +func NewMiddleware(i *Introspecter) func(handler http.Handler) http.Handler { + return func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + authHeader := r.Header.Get("Authorization") + if !strings.HasPrefix(strings.ToUpper(authHeader), "BEARER ") { + w.WriteHeader(http.StatusUnauthorized) + return + } + + bearer := authHeader[7:] + active, err := i.Introspect(r.Context(), bearer) + if err != nil { + panic(err) + } + + if !active { + w.WriteHeader(http.StatusUnauthorized) + } + + handler.ServeHTTP(w, r) + }) + } +} diff --git a/libs/otlp/cli.go b/libs/otlp/cli.go new file mode 100644 index 
000000000..f12f49fca --- /dev/null +++ b/libs/otlp/cli.go @@ -0,0 +1,54 @@ +package otlp + +import ( + "fmt" + "strings" + "sync" + + flag "github.com/spf13/pflag" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/sdk/resource" + "go.uber.org/fx" +) + +var ( + onceInitOTLPFlags sync.Once + OnceLoadResources sync.Once +) + +const ( + OtelResourceAttributes = "otel-resource-attributes" + OtelServiceName = "otel-service-name" +) + +func InitOTLPFlags(flags *flag.FlagSet) { + onceInitOTLPFlags.Do(func() { + flags.String(OtelServiceName, "", "OpenTelemetry service name") + flags.StringSlice(OtelResourceAttributes, []string{}, "Additional OTLP resource attributes") + }) +} + +func LoadResource(serviceName string, resourceAttributes []string) fx.Option { + options := make([]fx.Option, 0) + OnceLoadResources.Do(func() { + options = append(options, + fx.Provide(func() (*resource.Resource, error) { + defaultResource := resource.Default() + attributes := make([]attribute.KeyValue, 0) + if serviceName != "" { + attributes = append(attributes, attribute.String("service.name", serviceName)) + } + for _, ra := range resourceAttributes { + parts := strings.SplitN(ra, "=", 2) + if len(parts) < 2 { + return nil, fmt.Errorf("malformed otlp attribute: %s", ra) + } + attributes = append(attributes, attribute.String(parts[0], parts[1])) + } + return resource.Merge(defaultResource, resource.NewSchemaless(attributes...)) + }), + ) + }) + + return fx.Options(options...) 
+} diff --git a/libs/otlp/error.go b/libs/otlp/error.go new file mode 100644 index 000000000..c282f7c79 --- /dev/null +++ b/libs/otlp/error.go @@ -0,0 +1,30 @@ +package otlp + +import ( + "context" + "fmt" + + "go.opentelemetry.io/otel/codes" + "go.opentelemetry.io/otel/trace" +) + +func RecordError(ctx context.Context, e error) { + if e == nil { + return + } + span := trace.SpanFromContext(ctx) + span.SetStatus(codes.Error, e.Error()) + span.RecordError(e, trace.WithStackTrace(true)) +} + +func RecordAsError(ctx context.Context, e any) { + if e == nil { + return + } + switch ee := e.(type) { + case error: + RecordError(ctx, ee) + default: + RecordError(ctx, fmt.Errorf("%s", e)) + } +} diff --git a/libs/otlp/http_client.go b/libs/otlp/http_client.go new file mode 100644 index 000000000..8b8aac0ae --- /dev/null +++ b/libs/otlp/http_client.go @@ -0,0 +1,43 @@ +package otlp + +import ( + "net/http" + "net/http/httputil" + + "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" +) + +type WithBodiesTracingHTTPTransport struct { + underlying http.RoundTripper +} + +func (t WithBodiesTracingHTTPTransport) RoundTrip(req *http.Request) (*http.Response, error) { + span := trace.SpanFromContext(req.Context()) + rawRequest, err := httputil.DumpRequest(req, true) + if err != nil { + panic(err) + } + + span.SetAttributes(attribute.String("raw-request", string(rawRequest))) + + rsp, err := t.underlying.RoundTrip(req) + if rsp != nil { + rawResponse, err := httputil.DumpResponse(rsp, true) + if err != nil { + panic(err) + } + + span.SetAttributes(attribute.String("raw-response", string(rawResponse))) + } + return rsp, err +} + +func NewRoundTripper(debug bool) http.RoundTripper { + var transport = http.DefaultTransport + if debug { + transport = WithBodiesTracingHTTPTransport{transport} + } + return otelhttp.NewTransport(transport) +} diff --git a/libs/otlp/otlp.go b/libs/otlp/otlp.go new file 
mode 100644 index 000000000..85662f725 --- /dev/null +++ b/libs/otlp/otlp.go @@ -0,0 +1,6 @@ +package otlp + +const ( + ModeGRPC = "grpc" + ModeHTTP = "http" +) diff --git a/libs/otlp/otlpmetrics/cli.go b/libs/otlp/otlpmetrics/cli.go new file mode 100644 index 000000000..98e8072c7 --- /dev/null +++ b/libs/otlp/otlpmetrics/cli.go @@ -0,0 +1,54 @@ +package otlpmetrics + +import ( + "time" + + "github.com/formancehq/stack/libs/go-libs/otlp" + flag "github.com/spf13/pflag" + "github.com/spf13/viper" + "go.uber.org/fx" +) + +const ( + OtelMetricsFlag = "otel-metrics" + OtelMetricsExporterPushIntervalFlag = "otel-metrics-exporter-push-interval" + OtelMetricsRuntimeFlag = "otel-metrics-runtime" + OtelMetricsRuntimeMinimumReadMemStatsIntervalFlag = "otel-metrics-runtime-minimum-read-mem-stats-interval" + OtelMetricsExporterFlag = "otel-metrics-exporter" + OtelMetricsExporterOTLPModeFlag = "otel-metrics-exporter-otlp-mode" + OtelMetricsExporterOTLPEndpointFlag = "otel-metrics-exporter-otlp-endpoint" + OtelMetricsExporterOTLPInsecureFlag = "otel-metrics-exporter-otlp-insecure" +) + +func InitOTLPMetricsFlags(flags *flag.FlagSet) { + otlp.InitOTLPFlags(flags) + + flags.Bool(OtelMetricsFlag, false, "Enable OpenTelemetry traces support") + flags.Duration(OtelMetricsExporterPushIntervalFlag, 10*time.Second, "OpenTelemetry metrics exporter push interval") + flags.Bool(OtelMetricsRuntimeFlag, false, "Enable OpenTelemetry runtime metrics") + flags.Duration(OtelMetricsRuntimeMinimumReadMemStatsIntervalFlag, 15*time.Second, "OpenTelemetry runtime metrics minimum read mem stats interval") + flags.String(OtelMetricsExporterFlag, "stdout", "OpenTelemetry metrics exporter") + flags.String(OtelMetricsExporterOTLPModeFlag, "grpc", "OpenTelemetry traces OTLP exporter mode (grpc|http)") + flags.String(OtelMetricsExporterOTLPEndpointFlag, "", "OpenTelemetry traces grpc endpoint") + flags.Bool(OtelMetricsExporterOTLPInsecureFlag, false, "OpenTelemetry traces grpc insecure") +} + +func 
CLIMetricsModule(v *viper.Viper) fx.Option { + if v.GetBool(OtelMetricsFlag) { + return MetricsModule(ModuleConfig{ + ServiceName: v.GetString(otlp.OtelServiceName), + ServiceVersion: "develop", + OTLPConfig: &OTLPConfig{ + Mode: v.GetString(OtelMetricsExporterOTLPModeFlag), + Endpoint: v.GetString(OtelMetricsExporterOTLPEndpointFlag), + Insecure: v.GetBool(OtelMetricsExporterOTLPInsecureFlag), + }, + Exporter: v.GetString(OtelMetricsExporterFlag), + RuntimeMetrics: v.GetBool(OtelMetricsRuntimeFlag), + MinimumReadMemStatsInterval: v.GetDuration(OtelMetricsRuntimeMinimumReadMemStatsIntervalFlag), + PushInterval: v.GetDuration(OtelMetricsExporterPushIntervalFlag), + ResourceAttributes: v.GetStringSlice(otlp.OtelResourceAttributes), + }) + } + return fx.Options() +} diff --git a/libs/otlp/otlpmetrics/module.go b/libs/otlp/otlpmetrics/module.go new file mode 100644 index 000000000..77366f863 --- /dev/null +++ b/libs/otlp/otlpmetrics/module.go @@ -0,0 +1,160 @@ +package otlpmetrics + +import ( + "context" + "fmt" + "time" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/otlp" + "go.opentelemetry.io/contrib/instrumentation/host" + "go.opentelemetry.io/contrib/instrumentation/runtime" + "go.opentelemetry.io/contrib/propagators/b3" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp" + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/propagation" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + "go.uber.org/fx" +) + +const ( + metricsProviderOptionKey = `group:"_metricsProviderOption"` + metricsRuntimeOptionKey = `group:"_metricsRuntimeOption"` + + StdoutExporter = "stdout" + OTLPExporter = "otlp" +) + +type ModuleConfig struct { + ServiceName string + ServiceVersion string + + RuntimeMetrics bool + MinimumReadMemStatsInterval time.Duration + + Exporter string + OTLPConfig *OTLPConfig + 
PushInterval time.Duration + ResourceAttributes []string +} + +type OTLPConfig struct { + Mode string + Endpoint string + Insecure bool +} + +func ProvideMetricsProviderOption(v any, annotations ...fx.Annotation) fx.Option { + annotations = append(annotations, fx.ResultTags(metricsProviderOptionKey)) + return fx.Provide(fx.Annotate(v, annotations...)) +} + +func ProvideRuntimeMetricsOption(v any, annotations ...fx.Annotation) fx.Option { + annotations = append(annotations, fx.ResultTags(metricsRuntimeOptionKey)) + return fx.Provide(fx.Annotate(v, annotations...)) + +} + +func MetricsModule(cfg ModuleConfig) fx.Option { + options := make([]fx.Option, 0) + options = append(options, + fx.Supply(cfg), + otlp.LoadResource(cfg.ServiceName, cfg.ResourceAttributes), + fx.Decorate(fx.Annotate(func(mp *sdkmetric.MeterProvider) metric.MeterProvider { return mp }, fx.As(new(metric.MeterProvider)))), + fx.Provide(fx.Annotate(func(options ...sdkmetric.Option) *sdkmetric.MeterProvider { + fmt.Println("run meter provider with options", options) + return sdkmetric.NewMeterProvider(options...) + }, fx.ParamTags(metricsProviderOptionKey))), + fx.Invoke(func(lc fx.Lifecycle, metricProvider *sdkmetric.MeterProvider, options ...runtime.Option) { + fmt.Println("start meter provider") + // set global propagator to tracecontext (the default is no-op). + otel.SetTextMapPropagator(propagation.NewCompositeTextMapPropagator( + b3.New(), propagation.TraceContext{})) // B3 format is common and used by zipkin. Always enabled right now. 
+ lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + otel.SetMeterProvider(metricProvider) + if cfg.RuntimeMetrics { + if err := runtime.Start(options...); err != nil { + return err + } + if err := host.Start(); err != nil { + return err + } + } + return nil + }, + OnStop: func(ctx context.Context) error { + logging.FromContext(ctx).Infof("Flush metrics") + if err := metricProvider.ForceFlush(ctx); err != nil { + logging.FromContext(ctx).Errorf("unable to flush metrics: %s", err) + } + logging.FromContext(ctx).Infof("Shutting down metrics provider") + if err := metricProvider.Shutdown(ctx); err != nil { + logging.FromContext(ctx).Errorf("unable to shutdown metrics provider: %s", err) + } + logging.FromContext(ctx).Infof("Metrics provider stopped") + return nil + }, + }) + }), + ProvideMetricsProviderOption(sdkmetric.WithResource), + ProvideMetricsProviderOption(sdkmetric.WithReader), + fx.Provide( + fx.Annotate(sdkmetric.NewPeriodicReader, fx.ParamTags(``, OTLPMetricsPeriodicReaderOptionsKey), fx.As(new(sdkmetric.Reader))), + ), + ProvideOTLPMetricsPeriodicReaderOption(func() sdkmetric.PeriodicReaderOption { + return sdkmetric.WithInterval(cfg.PushInterval) + }), + ProvideRuntimeMetricsOption(func() runtime.Option { + return runtime.WithMinimumReadMemStatsInterval(cfg.MinimumReadMemStatsInterval) + }), + ) + + switch cfg.Exporter { + case StdoutExporter: + options = append(options, StdoutMetricsModule()) + case OTLPExporter: + mode := otlp.ModeGRPC + if cfg.OTLPConfig != nil { + if cfg.OTLPConfig.Mode != "" { + mode = cfg.OTLPConfig.Mode + } + } + switch mode { + case otlp.ModeGRPC: + if cfg.OTLPConfig != nil { + if cfg.OTLPConfig.Endpoint != "" { + options = append(options, ProvideOTLPMetricsGRPCOption(func() otlpmetricgrpc.Option { + return otlpmetricgrpc.WithEndpoint(cfg.OTLPConfig.Endpoint) + })) + } + if cfg.OTLPConfig.Insecure { + options = append(options, ProvideOTLPMetricsGRPCOption(func() otlpmetricgrpc.Option { + return 
otlpmetricgrpc.WithInsecure() + })) + } + } + + options = append(options, ProvideOTLPMetricsGRPCExporter()) + case otlp.ModeHTTP: + if cfg.OTLPConfig != nil { + if cfg.OTLPConfig.Endpoint != "" { + options = append(options, ProvideOTLPMetricsHTTPOption(func() otlpmetrichttp.Option { + return otlpmetrichttp.WithEndpoint(cfg.OTLPConfig.Endpoint) + })) + } + if cfg.OTLPConfig.Insecure { + options = append(options, ProvideOTLPMetricsHTTPOption(func() otlpmetrichttp.Option { + return otlpmetrichttp.WithInsecure() + })) + } + } + + options = append(options, ProvideOTLPMetricsHTTPExporter()) + } + } + + return fx.Options(options...) +} diff --git a/libs/otlp/otlpmetrics/otlpexporter.go b/libs/otlp/otlpmetrics/otlpexporter.go new file mode 100644 index 000000000..03868dd52 --- /dev/null +++ b/libs/otlp/otlpmetrics/otlpexporter.go @@ -0,0 +1,60 @@ +package otlpmetrics + +import ( + "context" + + "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + "go.uber.org/fx" +) + +func LoadOTLPMetricsGRPCExporter(options ...otlpmetricgrpc.Option) (sdkmetric.Exporter, error) { + // TODO(polo): context.Background() is not ideal here + return otlpmetricgrpc.New(context.Background(), options...) +} + +func ProvideOTLPMetricsGRPCExporter() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate(LoadOTLPMetricsGRPCExporter, fx.ParamTags(OTLPMetricsGRPCOptionsKey), fx.As(new(sdkmetric.Exporter))), + ), + ) +} + +func LoadOTLPMetricsHTTPExporter(options ...otlpmetrichttp.Option) (sdkmetric.Exporter, error) { + // TODO(polo): context.Background() is not ideal here + return otlpmetrichttp.New(context.Background(), options...) 
+} + +func ProvideOTLPMetricsHTTPExporter() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate(LoadOTLPMetricsHTTPExporter, fx.ParamTags(OTLPMetricsHTTPOptionsKey), fx.As(new(sdkmetric.Exporter))), + ), + ) +} + +const OTLPMetricsGRPCOptionsKey = `group:"_otlpMetricsGrpcOptions"` + +func ProvideOTLPMetricsGRPCOption(provider any) fx.Option { + return fx.Provide( + fx.Annotate(provider, fx.ResultTags(OTLPMetricsGRPCOptionsKey), fx.As(new(otlpmetricgrpc.Option))), + ) +} + +const OTLPMetricsHTTPOptionsKey = `group:"_otlpMetricsHTTPOptions"` + +func ProvideOTLPMetricsHTTPOption(provider any) fx.Option { + return fx.Provide( + fx.Annotate(provider, fx.ResultTags(OTLPMetricsHTTPOptionsKey), fx.As(new(otlpmetrichttp.Option))), + ) +} + +const OTLPMetricsPeriodicReaderOptionsKey = `group:"_otlpMetricsPeriodicReaderOptions"` + +func ProvideOTLPMetricsPeriodicReaderOption(provider any) fx.Option { + return fx.Provide( + fx.Annotate(provider, fx.ResultTags(OTLPMetricsPeriodicReaderOptionsKey), fx.As(new(sdkmetric.PeriodicReaderOption))), + ) +} diff --git a/libs/otlp/otlpmetrics/stdoutexporter.go b/libs/otlp/otlpmetrics/stdoutexporter.go new file mode 100644 index 000000000..258080c8c --- /dev/null +++ b/libs/otlp/otlpmetrics/stdoutexporter.go @@ -0,0 +1,19 @@ +package otlpmetrics + +import ( + "go.opentelemetry.io/otel/exporters/stdout/stdoutmetric" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + "go.uber.org/fx" +) + +func LoadStdoutMetricsProvider() (sdkmetric.Exporter, error) { + return stdoutmetric.New() +} + +func StdoutMetricsModule() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate(LoadStdoutMetricsProvider, fx.As(new(sdkmetric.Exporter))), + ), + ) +} diff --git a/libs/otlp/otlptraces/cli.go b/libs/otlp/otlptraces/cli.go new file mode 100644 index 000000000..04a4bacc0 --- /dev/null +++ b/libs/otlp/otlptraces/cli.go @@ -0,0 +1,66 @@ +package otlptraces + +import ( + "github.com/formancehq/stack/libs/go-libs/otlp" + flag 
"github.com/spf13/pflag" + "github.com/spf13/viper" + "go.uber.org/fx" +) + +const ( + OtelTracesFlag = "otel-traces" + OtelTracesBatchFlag = "otel-traces-batch" + OtelTracesExporterFlag = "otel-traces-exporter" + OtelTracesExporterJaegerEndpointFlag = "otel-traces-exporter-jaeger-endpoint" + OtelTracesExporterJaegerUserFlag = "otel-traces-exporter-jaeger-user" + OtelTracesExporterJaegerPasswordFlag = "otel-traces-exporter-jaeger-password" + OtelTracesExporterOTLPModeFlag = "otel-traces-exporter-otlp-mode" + OtelTracesExporterOTLPEndpointFlag = "otel-traces-exporter-otlp-endpoint" + OtelTracesExporterOTLPInsecureFlag = "otel-traces-exporter-otlp-insecure" +) + +func InitOTLPTracesFlags(flags *flag.FlagSet) { + otlp.InitOTLPFlags(flags) + + flags.Bool(OtelTracesFlag, false, "Enable OpenTelemetry traces support") + flags.Bool(OtelTracesBatchFlag, false, "Use OpenTelemetry batching") + flags.String(OtelTracesExporterFlag, "stdout", "OpenTelemetry traces exporter") + flags.String(OtelTracesExporterJaegerEndpointFlag, "", "OpenTelemetry traces Jaeger exporter endpoint") + flags.String(OtelTracesExporterJaegerUserFlag, "", "OpenTelemetry traces Jaeger exporter user") + flags.String(OtelTracesExporterJaegerPasswordFlag, "", "OpenTelemetry traces Jaeger exporter password") + flags.String(OtelTracesExporterOTLPModeFlag, "grpc", "OpenTelemetry traces OTLP exporter mode (grpc|http)") + flags.String(OtelTracesExporterOTLPEndpointFlag, "", "OpenTelemetry traces grpc endpoint") + flags.Bool(OtelTracesExporterOTLPInsecureFlag, false, "OpenTelemetry traces grpc insecure") +} + +func CLITracesModule(v *viper.Viper) fx.Option { + if v.GetBool(OtelTracesFlag) { + return TracesModule(ModuleConfig{ + Batch: v.GetBool(OtelTracesBatchFlag), + Exporter: v.GetString(OtelTracesExporterFlag), + JaegerConfig: func() *JaegerConfig { + if v.GetString(OtelTracesExporterFlag) != JaegerExporter { + return nil + } + return &JaegerConfig{ + Endpoint: 
v.GetString(OtelTracesExporterJaegerEndpointFlag), + User: v.GetString(OtelTracesExporterJaegerUserFlag), + Password: v.GetString(OtelTracesExporterJaegerPasswordFlag), + } + }(), + OTLPConfig: func() *OTLPConfig { + if v.GetString(OtelTracesExporterFlag) != OTLPExporter { + return nil + } + return &OTLPConfig{ + Mode: v.GetString(OtelTracesExporterOTLPModeFlag), + Endpoint: v.GetString(OtelTracesExporterOTLPEndpointFlag), + Insecure: v.GetBool(OtelTracesExporterOTLPInsecureFlag), + } + }(), + ServiceName: v.GetString(otlp.OtelServiceName), + ResourceAttributes: v.GetStringSlice(otlp.OtelResourceAttributes), + }) + } + return fx.Options() +} diff --git a/libs/otlp/otlptraces/cli_test.go b/libs/otlp/otlptraces/cli_test.go new file mode 100644 index 000000000..fdaeab757 --- /dev/null +++ b/libs/otlp/otlptraces/cli_test.go @@ -0,0 +1,86 @@ +package otlptraces + +import ( + "context" + "errors" + "fmt" + "reflect" + "sync" + "testing" + + "github.com/formancehq/stack/libs/go-libs/otlp" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/exporters/jaeger" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace" + tracesdk "go.opentelemetry.io/otel/sdk/trace" + "go.uber.org/fx" +) + +func TestOTLPTracesModule(t *testing.T) { + type testCase struct { + name string + args []string + expectedSpanExporter tracesdk.SpanExporter + } + + for _, testCase := range []testCase{ + { + name: "jaeger", + args: []string{ + fmt.Sprintf("--%s", OtelTracesFlag), + fmt.Sprintf("--%s=%s", OtelTracesExporterFlag, "jaeger"), + }, + expectedSpanExporter: &jaeger.Exporter{}, + }, + { + name: "otlp", + args: []string{ + fmt.Sprintf("--%s", OtelTracesFlag), + fmt.Sprintf("--%s=%s", OtelTracesExporterFlag, "otlp"), + }, + expectedSpanExporter: &otlptrace.Exporter{}, + }, + } { + t.Run(testCase.name, func(t *testing.T) { + cmd := &cobra.Command{ + PreRunE: func(cmd *cobra.Command, args []string) error { 
+ // Since we are doing multiple tests with the same otlp + // package, we have to reset the once variables. + otlp.OnceLoadResources = sync.Once{} + return viper.BindPFlags(cmd.Flags()) + }, + RunE: func(cmd *cobra.Command, args []string) error { + app := fx.New( + fx.NopLogger, + CLITracesModule(viper.GetViper()), + fx.Invoke(func(lc fx.Lifecycle, spanExporter tracesdk.SpanExporter) { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + if !reflect.TypeOf(otel.GetTracerProvider()). + AssignableTo(reflect.TypeOf(&tracesdk.TracerProvider{})) { + return errors.New("otel.GetTracerProvider() should return a *tracesdk.TracerProvider instance") + } + if !reflect.TypeOf(spanExporter). + AssignableTo(reflect.TypeOf(testCase.expectedSpanExporter)) { + return fmt.Errorf("span exporter should be of type %t", testCase.expectedSpanExporter) + } + return nil + }, + }) + })) + require.NoError(t, app.Start(cmd.Context())) + require.NoError(t, app.Err()) + return nil + }, + } + InitOTLPTracesFlags(cmd.Flags()) + + cmd.SetArgs(testCase.args) + + require.NoError(t, cmd.Execute()) + }) + } +} diff --git a/libs/otlp/otlptraces/jaegerexporter.go b/libs/otlp/otlptraces/jaegerexporter.go new file mode 100644 index 000000000..a7dd32696 --- /dev/null +++ b/libs/otlp/otlptraces/jaegerexporter.go @@ -0,0 +1,29 @@ +package otlptraces + +import ( + "go.opentelemetry.io/otel/exporters/jaeger" + "go.opentelemetry.io/otel/sdk/trace" + "go.uber.org/fx" +) + +func LoadJaegerTracerExporter(options ...jaeger.CollectorEndpointOption) (*jaeger.Exporter, error) { + return jaeger.New(jaeger.WithCollectorEndpoint(options...)) +} + +const ( + JaegerCollectorEndpointGroupKey = `group:"_tracerCollectorEndpointOptions"` +) + +func ProvideJaegerTracerCollectorEndpoint(provider any) fx.Option { + return fx.Provide(fx.Annotate(provider, fx.ResultTags(JaegerCollectorEndpointGroupKey))) +} + +func JaegerTracerModule() fx.Option { + return fx.Options( + fx.Provide( + 
fx.Annotate(LoadJaegerTracerExporter, fx.ParamTags( + JaegerCollectorEndpointGroupKey, + ), fx.As(new(trace.SpanExporter))), + ), + ) +} diff --git a/libs/otlp/otlptraces/otlpexporter.go b/libs/otlp/otlptraces/otlpexporter.go new file mode 100644 index 000000000..aca8407c5 --- /dev/null +++ b/libs/otlp/otlptraces/otlpexporter.go @@ -0,0 +1,63 @@ +package otlptraces + +import ( + "context" + + "go.opentelemetry.io/otel/exporters/otlp/otlptrace" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + "go.opentelemetry.io/otel/sdk/trace" + "go.uber.org/fx" +) + +func LoadOTLPTracerProvider(client otlptrace.Client) (*otlptrace.Exporter, error) { + return otlptrace.New(context.Background(), client) +} + +func LoadOTLPTracerGRPCClient(options ...otlptracegrpc.Option) otlptrace.Client { + return otlptracegrpc.NewClient(options...) +} + +func LoadOTLPTracerHTTPClient(options ...otlptracehttp.Option) otlptrace.Client { + return otlptracehttp.NewClient(options...) 
+} + +func OTLPTracerModule() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate(LoadOTLPTracerProvider, fx.As(new(trace.SpanExporter))), + ), + ) +} + +const OTLPTracerGRPCOptionsKey = `group:"_otlpTracerGrpcOptions"` + +func ProvideOTLPTracerGRPCClientOption(provider any) fx.Option { + return fx.Provide( + fx.Annotate(provider, fx.ResultTags(OTLPTracerGRPCOptionsKey), fx.As(new(otlptracegrpc.Option))), + ) +} + +func OTLPTracerGRPCClientModule() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate(LoadOTLPTracerGRPCClient, fx.ParamTags(OTLPTracerGRPCOptionsKey)), + ), + ) +} + +const OTLPTracerHTTPOptionsKey = `group:"_otlpTracerHTTPOptions"` + +func ProvideOTLPTracerHTTPClientOption(provider any) fx.Option { + return fx.Provide( + fx.Annotate(provider, fx.ResultTags(OTLPTracerHTTPOptionsKey), fx.As(new(otlptracehttp.Option))), + ) +} + +func OTLPTracerHTTPClientModule() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate(LoadOTLPTracerHTTPClient, fx.ParamTags(OTLPTracerHTTPOptionsKey)), + ), + ) +} diff --git a/libs/otlp/otlptraces/stdoutexporter.go b/libs/otlp/otlptraces/stdoutexporter.go new file mode 100644 index 000000000..4cbf1c6ae --- /dev/null +++ b/libs/otlp/otlptraces/stdoutexporter.go @@ -0,0 +1,23 @@ +package otlptraces + +import ( + "os" + + "go.opentelemetry.io/otel/exporters/stdout/stdouttrace" + "go.opentelemetry.io/otel/sdk/trace" + "go.uber.org/fx" +) + +func LoadStdoutTracerProvider() (*stdouttrace.Exporter, error) { + return stdouttrace.New( + stdouttrace.WithWriter(os.Stdout), + ) +} + +func StdoutTracerModule() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate(LoadStdoutTracerProvider, fx.As(new(trace.SpanExporter))), + ), + ) +} diff --git a/libs/otlp/otlptraces/traces.go b/libs/otlp/otlptraces/traces.go new file mode 100644 index 000000000..f0e8eb172 --- /dev/null +++ b/libs/otlp/otlptraces/traces.go @@ -0,0 +1,147 @@ +package otlptraces + +import ( + "context" + + 
"github.com/formancehq/stack/libs/go-libs/otlp" + "go.opentelemetry.io/contrib/propagators/b3" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/exporters/jaeger" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + "go.opentelemetry.io/otel/propagation" + tracesdk "go.opentelemetry.io/otel/sdk/trace" + "go.opentelemetry.io/otel/trace" + "go.uber.org/fx" +) + +const ( + JaegerExporter = "jaeger" + StdoutExporter = "stdout" + OTLPExporter = "otlp" + + TracerProviderOptionKey = `group:"_tracerProviderOption"` +) + +type JaegerConfig struct { + Endpoint string + User string + Password string +} + +type OTLPConfig struct { + Mode string + Endpoint string + Insecure bool +} + +type ModuleConfig struct { + Exporter string + Batch bool + JaegerConfig *JaegerConfig + OTLPConfig *OTLPConfig + ResourceAttributes []string + ServiceName string +} + +func ProvideTracerProviderOption(v any, annotations ...fx.Annotation) fx.Option { + annotations = append(annotations, fx.ResultTags(TracerProviderOptionKey)) + return fx.Provide(fx.Annotate(v, annotations...)) +} + +func TracesModule(cfg ModuleConfig) fx.Option { + options := make([]fx.Option, 0) + options = append(options, + fx.Supply(cfg), + otlp.LoadResource(cfg.ServiceName, cfg.ResourceAttributes), + fx.Provide(func(tp *tracesdk.TracerProvider) trace.TracerProvider { return tp }), + fx.Provide(fx.Annotate(func(options ...tracesdk.TracerProviderOption) *tracesdk.TracerProvider { + return tracesdk.NewTracerProvider(options...) + }, fx.ParamTags(TracerProviderOptionKey))), + fx.Invoke(func(lc fx.Lifecycle, tracerProvider *tracesdk.TracerProvider) { + // set global propagator to tracecontext (the default is no-op). + otel.SetTextMapPropagator(propagation.NewCompositeTextMapPropagator( + b3.New(), propagation.TraceContext{})) // B3 format is common and used by zipkin. Always enabled right now. 
+ lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + otel.SetTracerProvider(tracerProvider) + return nil + }, + OnStop: func(ctx context.Context) error { + return tracerProvider.Shutdown(ctx) + }, + }) + }), + ProvideTracerProviderOption(tracesdk.WithResource), + ) + if cfg.Batch { + options = append(options, ProvideTracerProviderOption(tracesdk.WithBatcher, fx.ParamTags(``, `group:"_batchOptions"`))) + } else { + options = append(options, ProvideTracerProviderOption(tracesdk.WithSyncer)) + } + + switch cfg.Exporter { + case JaegerExporter: + options = append(options, JaegerTracerModule()) + if cfg.JaegerConfig != nil { + if v := cfg.JaegerConfig.Endpoint; v != "" { + options = append(options, ProvideJaegerTracerCollectorEndpoint(func() jaeger.CollectorEndpointOption { + return jaeger.WithEndpoint(v) + })) + } + + if v := cfg.JaegerConfig.User; v != "" { + options = append(options, ProvideJaegerTracerCollectorEndpoint(func() jaeger.CollectorEndpointOption { + return jaeger.WithUsername(v) + })) + } + + if v := cfg.JaegerConfig.Password; v != "" { + options = append(options, ProvideJaegerTracerCollectorEndpoint(func() jaeger.CollectorEndpointOption { + return jaeger.WithPassword(v) + })) + } + } + case StdoutExporter: + options = append(options, StdoutTracerModule()) + case OTLPExporter: + options = append(options, OTLPTracerModule()) + mode := otlp.ModeGRPC + if cfg.OTLPConfig != nil { + if cfg.OTLPConfig.Mode != "" { + mode = cfg.OTLPConfig.Mode + } + switch mode { + case otlp.ModeGRPC: + if cfg.OTLPConfig.Endpoint != "" { + options = append(options, ProvideOTLPTracerGRPCClientOption(func() otlptracegrpc.Option { + return otlptracegrpc.WithEndpoint(cfg.OTLPConfig.Endpoint) + })) + } + if cfg.OTLPConfig.Insecure { + options = append(options, ProvideOTLPTracerGRPCClientOption(func() otlptracegrpc.Option { + return otlptracegrpc.WithInsecure() + })) + } + case otlp.ModeHTTP: + if cfg.OTLPConfig.Endpoint != "" { + options = append(options, 
ProvideOTLPTracerHTTPClientOption(func() otlptracehttp.Option { + return otlptracehttp.WithEndpoint(cfg.OTLPConfig.Endpoint) + })) + } + if cfg.OTLPConfig.Insecure { + options = append(options, ProvideOTLPTracerHTTPClientOption(func() otlptracehttp.Option { + return otlptracehttp.WithInsecure() + })) + } + } + } + switch mode { + case otlp.ModeGRPC: + options = append(options, OTLPTracerGRPCClientModule()) + case otlp.ModeHTTP: + options = append(options, OTLPTracerHTTPClientModule()) + } + } + return fx.Options(options...) +} diff --git a/libs/otlp/otlptraces/traces_test.go b/libs/otlp/otlptraces/traces_test.go new file mode 100644 index 000000000..bc5964a03 --- /dev/null +++ b/libs/otlp/otlptraces/traces_test.go @@ -0,0 +1,95 @@ +package otlptraces + +import ( + "context" + "sync" + "testing" + + "github.com/formancehq/stack/libs/go-libs/otlp" + "github.com/stretchr/testify/assert" + "go.uber.org/fx" +) + +func TestTracesModule(t *testing.T) { + type testCase struct { + name string + config ModuleConfig + } + + tests := []testCase{ + { + name: "otlp-exporter", + config: ModuleConfig{ + Exporter: OTLPExporter, + }, + }, + { + name: "otlp-exporter-with-grpc-config", + config: ModuleConfig{ + Exporter: OTLPExporter, + OTLPConfig: &OTLPConfig{ + Mode: otlp.ModeGRPC, + Endpoint: "remote:8080", + Insecure: true, + }, + }, + }, + { + name: "otlp-exporter-with-http-config", + config: ModuleConfig{ + Exporter: OTLPExporter, + OTLPConfig: &OTLPConfig{ + Mode: otlp.ModeHTTP, + Endpoint: "remote:8080", + Insecure: true, + }, + }, + }, + { + name: "jaeger-exporter", + config: ModuleConfig{ + Exporter: JaegerExporter, + }, + }, + { + name: "jaeger-exporter-with-config", + config: ModuleConfig{ + Exporter: JaegerExporter, + JaegerConfig: &JaegerConfig{}, + }, + }, + { + name: "stdout-exporter", + config: ModuleConfig{ + Exporter: StdoutExporter, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + // Since we are doing multiple tests with the 
same otlp + // package, we have to reset the once variables. + otlp.OnceLoadResources = sync.Once{} + + options := []fx.Option{TracesModule(test.config)} + if !testing.Verbose() { + options = append(options, fx.NopLogger) + } + options = append(options, fx.Provide(func() *testing.T { + return t + })) + assert.NoError(t, fx.ValidateApp(options...)) + + app := fx.New(options...) + assert.NoError(t, app.Start(context.Background())) + defer func(app *fx.App, ctx context.Context) { + err := app.Stop(ctx) + if err != nil { + panic(err) + } + }(app, context.Background()) + }) + } + +} diff --git a/libs/otlp/recover.go b/libs/otlp/recover.go new file mode 100644 index 000000000..a97fe97ca --- /dev/null +++ b/libs/otlp/recover.go @@ -0,0 +1,16 @@ +package otlp + +import ( + "context" +) + +func RecordErrorOnRecover(ctx context.Context, forwardPanic bool) func() { + return func() { + if e := recover(); e != nil { + RecordAsError(ctx, e) + if forwardPanic { + panic(e) + } + } + } +} diff --git a/libs/pgtesting/README.md b/libs/pgtesting/README.md new file mode 100644 index 000000000..b51174961 --- /dev/null +++ b/libs/pgtesting/README.md @@ -0,0 +1,3 @@ +# How to use ? + +See [example](./pkg/postgres_test.go) for an example. 
diff --git a/libs/pgtesting/postgres.go b/libs/pgtesting/postgres.go new file mode 100644 index 000000000..c77151916 --- /dev/null +++ b/libs/pgtesting/postgres.go @@ -0,0 +1,257 @@ +package pgtesting + +import ( + "context" + "database/sql" + "fmt" + "os" + "strconv" + "sync" + "time" + + "github.com/google/uuid" + _ "github.com/lib/pq" + "github.com/ory/dockertest/v3" + "github.com/ory/dockertest/v3/docker" + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/pgdialect" +) + +type TestingT interface { + require.TestingT + Cleanup(func()) +} + +type pgDatabase struct { + url string +} + +func (s *pgDatabase) ConnString() string { + return s.url +} + +type pgServer struct { + destroy func() error + lock sync.Mutex + db *bun.DB + port string + config config +} + +func (s *pgServer) GetPort() int { + v, err := strconv.ParseInt(s.port, 10, 64) + if err != nil { + panic(err) + } + return int(v) +} + +func (s *pgServer) GetHost() string { + return "localhost" +} + +func (s *pgServer) GetUsername() string { + return s.config.initialUsername +} + +func (s *pgServer) GetPassword() string { + return s.config.initialUserPassword +} + +func (s *pgServer) GetDSN() string { + return s.GetDatabaseDSN(s.config.initialDatabaseName) +} + +func (s *pgServer) GetDatabaseDSN(databaseName string) string { + return fmt.Sprintf("postgresql://%s:%s@localhost:%s/%s?sslmode=disable", s.config.initialUsername, + s.config.initialUserPassword, s.port, databaseName) +} + +func (s *pgServer) NewDatabase(t TestingT) *pgDatabase { + s.lock.Lock() + defer s.lock.Unlock() + + databaseName := uuid.NewString() + _, err := s.db.ExecContext(context.Background(), fmt.Sprintf(`CREATE DATABASE "%s"`, databaseName)) + require.NoError(t, err) + + if os.Getenv("NO_CLEANUP") != "true" { + t.Cleanup(func() { + s.lock.Lock() + defer s.lock.Unlock() + + _, _ = s.db.ExecContext(context.Background(), fmt.Sprintf(`DROP DATABASE "%s"`, 
databaseName)) + }) + } + + return &pgDatabase{ + url: s.GetDatabaseDSN(databaseName), + } +} + +func (s *pgServer) Close() error { + if s.db == nil { + return nil + } + if os.Getenv("NO_CLEANUP") == "true" { + return nil + } + if err := s.db.Close(); err != nil { + return err + } + if err := s.destroy(); err != nil { + return err + } + return nil +} + +var srv *pgServer + +func Server() *pgServer { + return srv +} + +func NewPostgresDatabase(t TestingT) *pgDatabase { + return srv.NewDatabase(t) +} + +func DestroyPostgresServer() error { + return srv.Close() +} + +type config struct { + initialDatabaseName string + initialUserPassword string + initialUsername string + statusCheckInterval time.Duration + maximumWaitingTime time.Duration + context context.Context + hostConfigOptions []func(hostConfig *docker.HostConfig) +} + +func (c config) validate() error { + if c.statusCheckInterval == 0 { + return errors.New("status check interval must be greater than 0") + } + if c.initialUsername == "" { + return errors.New("initial username must be defined") + } + if c.initialUserPassword == "" { + return errors.New("initial user password must be defined") + } + if c.initialDatabaseName == "" { + return errors.New("initial database name must be defined") + } + return nil +} + +type option func(opts *config) + +func WithInitialDatabaseName(name string) option { + return func(opts *config) { + opts.initialDatabaseName = name + } +} + +func WithInitialUser(username, pwd string) option { + return func(opts *config) { + opts.initialUserPassword = pwd + opts.initialUsername = username + } +} + +func WithStatusCheckInterval(d time.Duration) option { + return func(opts *config) { + opts.statusCheckInterval = d + } +} + +func WithMaximumWaitingTime(d time.Duration) option { + return func(opts *config) { + opts.maximumWaitingTime = d + } +} + +func WithContext(ctx context.Context) option { + return func(opts *config) { + opts.context = ctx + } +} + +func WithDockerHostConfigOption(opt 
func(hostConfig *docker.HostConfig)) option { + return func(opts *config) { + opts.hostConfigOptions = append(opts.hostConfigOptions, opt) + } +} + +var defaultOptions = []option{ + WithStatusCheckInterval(200 * time.Millisecond), + WithInitialUser("root", "root"), + WithMaximumWaitingTime(15 * time.Second), + WithInitialDatabaseName("formance"), + WithContext(context.Background()), +} + +func CreatePostgresServer(opts ...option) error { + cfg := config{} + for _, opt := range append(defaultOptions, opts...) { + opt(&cfg) + } + + if err := cfg.validate(); err != nil { + return errors.Wrap(err, "validating config") + } + + pool, err := dockertest.NewPool("") + if err != nil { + return errors.Wrap(err, "unable to start docker containers pool") + } + + resource, err := pool.RunWithOptions(&dockertest.RunOptions{ + Repository: "postgres", + Tag: "15-alpine", + Env: []string{ + fmt.Sprintf("POSTGRES_USER=%s", cfg.initialUsername), + fmt.Sprintf("POSTGRES_PASSWORD=%s", cfg.initialUserPassword), + fmt.Sprintf("POSTGRES_DB=%s", cfg.initialDatabaseName), + }, + Entrypoint: nil, + Cmd: []string{"-c", "superuser-reserved-connections=0"}, + }, cfg.hostConfigOptions...) 
+ if err != nil { + return errors.Wrap(err, "unable to start postgres server container") + } + + srv = &pgServer{ + port: resource.GetPort("5432/tcp"), + destroy: func() error { + return pool.Purge(resource) + }, + config: cfg, + } + + try := time.Duration(0) + sqldb, err := sql.Open("postgres", srv.GetDatabaseDSN(cfg.initialDatabaseName)) + if err != nil { + return err + } + srv.db = bun.NewDB(sqldb, pgdialect.New()) + for try*cfg.statusCheckInterval < cfg.maximumWaitingTime { + err := srv.db.Ping() + if err != nil { + try++ + select { + case <-cfg.context.Done(): + return cfg.context.Err() + case <-time.After(cfg.statusCheckInterval): + } + continue + } + + return nil + } + + return errors.New("timeout waiting for server ready") +} diff --git a/libs/pgtesting/postgres_test.go b/libs/pgtesting/postgres_test.go new file mode 100644 index 000000000..352fa025a --- /dev/null +++ b/libs/pgtesting/postgres_test.go @@ -0,0 +1,35 @@ +package pgtesting + +import ( + "context" + "fmt" + "log" + "os" + "testing" + + "github.com/jackc/pgx/v5" + "github.com/stretchr/testify/require" +) + +func TestMain(m *testing.M) { + if err := CreatePostgresServer(); err != nil { + log.Fatal(err) + } + code := m.Run() + if err := DestroyPostgresServer(); err != nil { + log.Fatal(err) + } + os.Exit(code) +} + +func TestPostgres(t *testing.T) { + for i := 0; i < 10; i++ { + t.Run(fmt.Sprintf("test%d", i), func(t *testing.T) { + t.Parallel() + database := NewPostgresDatabase(t) + conn, err := pgx.Connect(context.Background(), database.ConnString()) + require.NoError(t, err) + require.NoError(t, conn.Close(context.Background())) + }) + } +} diff --git a/libs/pointer/utils.go b/libs/pointer/utils.go new file mode 100644 index 000000000..837c3991b --- /dev/null +++ b/libs/pointer/utils.go @@ -0,0 +1,5 @@ +package pointer + +func For[T any](t T) *T { + return &t +} diff --git a/libs/publish/cli.go b/libs/publish/cli.go new file mode 100644 index 000000000..7fa7b1911 --- /dev/null +++ 
b/libs/publish/cli.go @@ -0,0 +1,110 @@ +package publish + +import ( + "fmt" + "strings" + + "github.com/Shopify/sarama" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/xdg-go/scram" + "go.uber.org/fx" +) + +const ( + // General configuration + PublisherTopicMappingFlag = "publisher-topic-mapping" + // Kafka configuration + PublisherKafkaEnabledFlag = "publisher-kafka-enabled" + PublisherKafkaBrokerFlag = "publisher-kafka-broker" + PublisherKafkaSASLEnabled = "publisher-kafka-sasl-enabled" + PublisherKafkaSASLUsername = "publisher-kafka-sasl-username" + PublisherKafkaSASLPassword = "publisher-kafka-sasl-password" + PublisherKafkaSASLMechanism = "publisher-kafka-sasl-mechanism" + PublisherKafkaSASLScramSHASize = "publisher-kafka-sasl-scram-sha-size" + PublisherKafkaTLSEnabled = "publisher-kafka-tls-enabled" + // HTTP configuration + PublisherHttpEnabledFlag = "publisher-http-enabled" + // Nats configuration + PublisherNatsEnabledFlag = "publisher-nats-enabled" + PublisherNatsClientIDFlag = "publisher-nats-client-id" + PublisherNatsURLFlag = "publisher-nats-url" +) + +func InitCLIFlags(cmd *cobra.Command) { + cmd.PersistentFlags().Bool(PublisherKafkaEnabledFlag, false, "Publish write events to kafka") + cmd.PersistentFlags().StringSlice(PublisherKafkaBrokerFlag, []string{"localhost:9092"}, "Kafka address is kafka enabled") + cmd.PersistentFlags().StringSlice(PublisherTopicMappingFlag, []string{}, "Define mapping between internal event types and topics") + cmd.PersistentFlags().Bool(PublisherHttpEnabledFlag, false, "Sent write event to http endpoint") + cmd.PersistentFlags().Bool(PublisherKafkaSASLEnabled, false, "Enable SASL authentication on kafka publisher") + cmd.PersistentFlags().String(PublisherKafkaSASLUsername, "", "SASL username") + cmd.PersistentFlags().String(PublisherKafkaSASLPassword, "", "SASL password") + cmd.PersistentFlags().String(PublisherKafkaSASLMechanism, "", "SASL authentication mechanism") + 
cmd.PersistentFlags().Int(PublisherKafkaSASLScramSHASize, 512, "SASL SCRAM SHA size") + cmd.PersistentFlags().Bool(PublisherKafkaTLSEnabled, false, "Enable TLS to connect on kafka") + cmd.PersistentFlags().Bool(PublisherNatsEnabledFlag, false, "Publish write events to nats") + cmd.PersistentFlags().String(PublisherNatsClientIDFlag, "", "Nats client ID") + cmd.PersistentFlags().String(PublisherNatsURLFlag, "", "Nats url") +} + +func CLIPublisherModule(v *viper.Viper, serviceName string) fx.Option { + options := make([]fx.Option, 0) + + topics := v.GetStringSlice(PublisherTopicMappingFlag) + mapping := make(map[string]string) + for _, topic := range topics { + parts := strings.SplitN(topic, ":", 2) + if len(parts) != 2 { + panic(fmt.Sprintf("unable to parse topic '%s', must be two parts, separated by a colon", topic)) + } + mapping[parts[0]] = parts[1] + } + + options = append(options, Module(mapping)) + switch { + case v.GetBool(PublisherHttpEnabledFlag): + // Currently don't expose http listener, so pass addr == "" + options = append(options, httpModule("")) + case v.GetBool(PublisherNatsEnabledFlag): + options = append(options, NatsModule( + v.GetString(PublisherNatsClientIDFlag), v.GetString(PublisherNatsURLFlag), serviceName)) + case v.GetBool(PublisherKafkaEnabledFlag): + options = append(options, + kafkaModule(clientId(serviceName), serviceName, v.GetStringSlice(PublisherKafkaBrokerFlag)...), + ProvideSaramaOption( + WithConsumerReturnErrors(), + WithProducerReturnSuccess(), + ), + ) + if v.GetBool(PublisherKafkaTLSEnabled) { + options = append(options, ProvideSaramaOption(WithTLS())) + } + if v.GetBool(PublisherKafkaSASLEnabled) { + options = append(options, ProvideSaramaOption( + WithSASLEnabled(), + WithSASLCredentials( + v.GetString(PublisherKafkaSASLUsername), + v.GetString(PublisherKafkaSASLPassword), + ), + WithSASLMechanism(sarama.SASLMechanism(v.GetString(PublisherKafkaSASLMechanism))), + WithSASLScramClient(func() sarama.SCRAMClient { + var fn 
scram.HashGeneratorFcn + switch v.GetInt(PublisherKafkaSASLScramSHASize) { + case 512: + fn = SHA512 + case 256: + fn = SHA256 + default: + panic("sha size not handled") + } + return &XDGSCRAMClient{ + HashGeneratorFcn: fn, + } + }), + )) + } + default: + options = append(options, GoChannelModule()) + } + return fx.Options(options...) +} diff --git a/libs/publish/http.go b/libs/publish/http.go new file mode 100644 index 000000000..4a5dcf4fb --- /dev/null +++ b/libs/publish/http.go @@ -0,0 +1,90 @@ +package publish + +import ( + "context" + "net/http" + + "github.com/ThreeDotsLabs/watermill" + wHttp "github.com/ThreeDotsLabs/watermill-http/pkg/http" + "github.com/ThreeDotsLabs/watermill/message" + "go.uber.org/fx" +) + +func newHTTPPublisher(logger watermill.LoggerAdapter, config wHttp.PublisherConfig) (*wHttp.Publisher, error) { + return wHttp.NewPublisher(config, logger) +} + +func newHTTPSubscriber(logger watermill.LoggerAdapter, addr string, config wHttp.SubscriberConfig) (*wHttp.Subscriber, error) { + return wHttp.NewSubscriber(addr, config, logger) +} + +func newHTTPPublisherConfig(httpClient *http.Client, m wHttp.MarshalMessageFunc) wHttp.PublisherConfig { + return wHttp.PublisherConfig{ + MarshalMessageFunc: m, + Client: httpClient, + } +} + +func newHTTPSubscriberConfig(m wHttp.UnmarshalMessageFunc) wHttp.SubscriberConfig { + return wHttp.SubscriberConfig{ + Router: nil, + UnmarshalMessageFunc: m, + } +} + +func defaultHTTPMarshalMessageFunc() wHttp.MarshalMessageFunc { + return func(url string, msg *message.Message) (*http.Request, error) { + req, err := wHttp.DefaultMarshalMessageFunc(url, msg) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/json") + return req, nil + } +} + +func defaultHTTPUnmarshalMessageFunc() wHttp.UnmarshalMessageFunc { + return wHttp.DefaultUnmarshalMessageFunc +} + +func httpModule(addr string) fx.Option { + options := []fx.Option{ + fx.Provide(newHTTPPublisher), + 
fx.Provide(newHTTPPublisherConfig), + fx.Provide(defaultHTTPMarshalMessageFunc), + fx.Supply(http.DefaultClient), + fx.Provide(func(p *wHttp.Publisher) message.Publisher { + return p + }), + } + if addr != "" { + options = append(options, + fx.Provide(newHTTPSubscriberConfig), + fx.Provide(func(logger watermill.LoggerAdapter, config wHttp.SubscriberConfig, lc fx.Lifecycle) (*wHttp.Subscriber, error) { + ret, err := newHTTPSubscriber(logger, addr, config) + if err != nil { + return nil, err + } + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + go func() { + if err := ret.StartHTTPServer(); err != nil && err != http.ErrServerClosed { + panic(err) + } + }() + return nil + }, + OnStop: func(ctx context.Context) error { + return ret.Close() + }, + }) + return ret, nil + }), + fx.Provide(defaultHTTPUnmarshalMessageFunc), + fx.Provide(func(p *wHttp.Subscriber) message.Subscriber { + return p + }), + ) + } + return fx.Options(options...) +} diff --git a/libs/publish/kafka.go b/libs/publish/kafka.go new file mode 100644 index 000000000..38329adec --- /dev/null +++ b/libs/publish/kafka.go @@ -0,0 +1,161 @@ +package publish + +import ( + "context" + "crypto/tls" + + "github.com/Shopify/sarama" + "github.com/ThreeDotsLabs/watermill" + "github.com/ThreeDotsLabs/watermill-kafka/v2/pkg/kafka" + "github.com/ThreeDotsLabs/watermill/message" + "go.uber.org/fx" +) + +type SaramaOption interface { + Apply(config *sarama.Config) +} +type SaramaOptionFn func(config *sarama.Config) + +func (fn SaramaOptionFn) Apply(config *sarama.Config) { + fn(config) +} + +func WithConsumerOffsetsInitial(v int64) SaramaOptionFn { + return func(config *sarama.Config) { + config.Consumer.Offsets.Initial = v + } +} + +func WithConsumerReturnErrors() SaramaOptionFn { + return func(config *sarama.Config) { + config.Consumer.Return.Errors = true + } +} + +func WithProducerReturnSuccess() SaramaOptionFn { + return func(config *sarama.Config) { + config.Producer.Return.Successes = true + } +} 
+ +func WithSASLEnabled() SaramaOptionFn { + return func(config *sarama.Config) { + config.Net.SASL.Enable = true + } +} + +func WithSASLMechanism(mechanism sarama.SASLMechanism) SaramaOptionFn { + return func(config *sarama.Config) { + config.Net.SASL.Mechanism = mechanism + } +} + +func WithSASLScramClient(fn func() sarama.SCRAMClient) SaramaOptionFn { + return func(config *sarama.Config) { + config.Net.SASL.SCRAMClientGeneratorFunc = fn + } +} + +func WithSASLCredentials(user, pwd string) SaramaOptionFn { + return func(config *sarama.Config) { + config.Net.SASL.User = user + config.Net.SASL.Password = pwd + } +} + +func WithTLS() SaramaOptionFn { + return func(config *sarama.Config) { + config.Net.TLS = struct { + Enable bool + Config *tls.Config + }{ + Enable: true, + Config: &tls.Config{}, + } + } +} + +func ProvideSaramaOption(options ...SaramaOption) fx.Option { + fxOptions := make([]fx.Option, 0) + for _, opt := range options { + opt := opt + fxOptions = append(fxOptions, fx.Provide(fx.Annotate(func() SaramaOption { + return opt + }, fx.ResultTags(`group:"saramaOptions"`), fx.As(new(SaramaOption))))) + } + return fx.Options(fxOptions...) 
+} + +type clientId string + +func newSaramaConfig(clientId clientId, version sarama.KafkaVersion, options ...SaramaOption) *sarama.Config { + config := sarama.NewConfig() + config.ClientID = string(clientId) + config.Version = version + + for _, opt := range options { + opt.Apply(config) + } + + return config +} + +func newKafkaPublisher(logger watermill.LoggerAdapter, config *sarama.Config, marshaller kafka.Marshaler, brokers ...string) (*kafka.Publisher, error) { + return kafka.NewPublisher(kafka.PublisherConfig{ + Brokers: brokers, + Marshaler: marshaller, + OverwriteSaramaConfig: config, + OTELEnabled: true, + }, logger) +} + +func newKafkaSubscriber(logger watermill.LoggerAdapter, config *sarama.Config, + unmarshaler kafka.Unmarshaler, consumerGroup string, brokers ...string) (*kafka.Subscriber, error) { + return kafka.NewSubscriber(kafka.SubscriberConfig{ + Brokers: brokers, + OverwriteSaramaConfig: config, + Unmarshaler: unmarshaler, + OTELEnabled: true, + ConsumerGroup: consumerGroup, + }, logger) +} + +func kafkaModule(clientId clientId, consumerGroup string, brokers ...string) fx.Option { + return fx.Options( + fx.Supply(clientId), + fx.Supply(sarama.V1_0_0_0), + fx.Supply(fx.Annotate(kafka.DefaultMarshaler{}, fx.As(new(kafka.Marshaler)))), + fx.Supply(fx.Annotate(kafka.DefaultMarshaler{}, fx.As(new(kafka.Unmarshaler)))), + fx.Provide(fx.Annotate(newSaramaConfig, fx.ParamTags(``, ``, `group:"saramaOptions"`))), + fx.Provide(func(lc fx.Lifecycle, logger watermill.LoggerAdapter, marshaller kafka.Marshaler, config *sarama.Config) (*kafka.Publisher, error) { + ret, err := newKafkaPublisher(logger, config, marshaller, brokers...) 
+ if err != nil { + return nil, err + } + lc.Append(fx.Hook{ + OnStop: func(ctx context.Context) error { + return ret.Close() + }, + }) + return ret, nil + }), + fx.Provide(func(lc fx.Lifecycle, logger watermill.LoggerAdapter, unmarshaler kafka.Unmarshaler, config *sarama.Config) (*kafka.Subscriber, error) { + ret, err := newKafkaSubscriber(logger, config, unmarshaler, consumerGroup, brokers...) + if err != nil { + return nil, err + } + lc.Append(fx.Hook{ + OnStop: func(ctx context.Context) error { + return ret.Close() + }, + }) + return ret, nil + }), + fx.Provide(func(kafkaPublisher *kafka.Publisher) message.Publisher { + return kafkaPublisher + }), + fx.Provide(func(kafkaSubscriber *kafka.Subscriber) message.Subscriber { + return kafkaSubscriber + }), + ) +} diff --git a/libs/publish/kafka_scram.go b/libs/publish/kafka_scram.go new file mode 100644 index 000000000..62a411b12 --- /dev/null +++ b/libs/publish/kafka_scram.go @@ -0,0 +1,37 @@ +package publish + +import ( + "crypto/sha256" + "crypto/sha512" + + "github.com/xdg-go/scram" +) + +var ( + SHA256 scram.HashGeneratorFcn = sha256.New + SHA512 scram.HashGeneratorFcn = sha512.New +) + +type XDGSCRAMClient struct { + *scram.Client + *scram.ClientConversation + scram.HashGeneratorFcn +} + +func (x *XDGSCRAMClient) Begin(userName, password, authzID string) (err error) { + x.Client, err = x.HashGeneratorFcn.NewClient(userName, password, authzID) + if err != nil { + return err + } + x.ClientConversation = x.Client.NewConversation() + return nil +} + +func (x *XDGSCRAMClient) Step(challenge string) (response string, err error) { + response, err = x.ClientConversation.Step(challenge) + return +} + +func (x *XDGSCRAMClient) Done() bool { + return x.ClientConversation.Done() +} diff --git a/libs/publish/logging.go b/libs/publish/logging.go new file mode 100644 index 000000000..77be72378 --- /dev/null +++ b/libs/publish/logging.go @@ -0,0 +1,45 @@ +package publish + +import ( + "github.com/ThreeDotsLabs/watermill" + 
"github.com/formancehq/stack/libs/go-libs/logging" + "go.uber.org/fx" +) + +type watermillLoggerAdapter struct { + logging.Logger +} + +func (w watermillLoggerAdapter) Error(msg string, err error, fields watermill.LogFields) { + w.WithFields(fields).WithFields(map[string]any{ + "err": err, + }).Error(msg) +} + +func (w watermillLoggerAdapter) Info(msg string, fields watermill.LogFields) { + w.WithFields(fields).Info(msg) +} + +func (w watermillLoggerAdapter) Debug(msg string, fields watermill.LogFields) { + w.WithFields(fields).Debug(msg) +} + +func (w watermillLoggerAdapter) Trace(msg string, fields watermill.LogFields) { + w.WithFields(fields).Debug(msg) +} + +func (w watermillLoggerAdapter) With(fields watermill.LogFields) watermill.LoggerAdapter { + return watermillLoggerAdapter{ + Logger: w.Logger.WithFields(fields), + } +} + +var _ watermill.LoggerAdapter = &watermillLoggerAdapter{} + +func defaultLoggingModule() fx.Option { + return fx.Provide(func(logger logging.Logger) watermill.LoggerAdapter { + return watermillLoggerAdapter{ + Logger: logger, + } + }) +} diff --git a/libs/publish/messages.go b/libs/publish/messages.go new file mode 100644 index 000000000..773932823 --- /dev/null +++ b/libs/publish/messages.go @@ -0,0 +1,19 @@ +package publish + +import ( + "context" + "encoding/json" + + "github.com/ThreeDotsLabs/watermill/message" + "github.com/google/uuid" +) + +func NewMessage(ctx context.Context, m any) *message.Message { + data, err := json.Marshal(m) + if err != nil { + panic(err) + } + msg := message.NewMessage(uuid.NewString(), data) + msg.SetContext(ctx) + return msg +} diff --git a/libs/publish/module.go b/libs/publish/module.go new file mode 100644 index 000000000..1f0df0bf9 --- /dev/null +++ b/libs/publish/module.go @@ -0,0 +1,98 @@ +package publish + +import ( + "context" + + "github.com/ThreeDotsLabs/watermill" + "github.com/ThreeDotsLabs/watermill/message" + "github.com/ThreeDotsLabs/watermill/pubsub/gochannel" + "go.uber.org/fx" +) + 
+func newGoChannel() *gochannel.GoChannel { + return gochannel.NewGoChannel( + gochannel.Config{ + BlockPublishUntilSubscriberAck: true, + }, + watermill.NopLogger{}, + ) +} + +func GoChannelModule() fx.Option { + return fx.Options( + fx.Provide(newGoChannel), + fx.Provide(func(ch *gochannel.GoChannel) message.Subscriber { + return ch + }), + fx.Provide(func(ch *gochannel.GoChannel) message.Publisher { + return ch + }), + fx.Invoke(func(lc fx.Lifecycle, channel *gochannel.GoChannel) { + lc.Append(fx.Hook{ + OnStop: func(ctx context.Context) error { + return channel.Close() + }, + }) + }), + ) +} + +func Module(topics map[string]string) fx.Option { + options := fx.Options( + defaultLoggingModule(), + fx.Supply(message.RouterConfig{}), + fx.Provide(message.NewRouter), + fx.Invoke(func(router *message.Router, lc fx.Lifecycle) error { + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + go func() { + if err := router.Run(context.Background()); err != nil { + panic(err) + } + }() + select { + case <-ctx.Done(): + return ctx.Err() + case <-router.Running(): + } + return nil + }, + OnStop: func(ctx context.Context) error { + return router.Close() + }, + }) + return nil + }), + fx.Decorate(func(publisher message.Publisher) message.Publisher { + return NewTopicMapperPublisherDecorator(publisher, topics) + }), + ) + return options +} + +type topicMapperPublisherDecorator struct { + message.Publisher + topics map[string]string +} + +func (p topicMapperPublisherDecorator) Publish(topic string, messages ...*message.Message) error { + mappedTopic, ok := p.topics[topic] + if ok { + return p.Publisher.Publish(mappedTopic, messages...) + } + mappedTopic, ok = p.topics["*"] + if ok { + return p.Publisher.Publish(mappedTopic, messages...) + } + + return p.Publisher.Publish(topic, messages...) 
+} + +var _ message.Publisher = &topicMapperPublisherDecorator{} + +func NewTopicMapperPublisherDecorator(publisher message.Publisher, topics map[string]string) *topicMapperPublisherDecorator { + return &topicMapperPublisherDecorator{ + Publisher: publisher, + topics: topics, + } +} diff --git a/libs/publish/module_test.go b/libs/publish/module_test.go new file mode 100644 index 000000000..8f1065f89 --- /dev/null +++ b/libs/publish/module_test.go @@ -0,0 +1,190 @@ +package publish + +import ( + "context" + "fmt" + "io" + "os" + "testing" + "time" + + "github.com/Shopify/sarama" + "github.com/ThreeDotsLabs/watermill/message" + "github.com/formancehq/stack/libs/go-libs/logging" + natsServer "github.com/nats-io/nats-server/v2/server" + "github.com/ory/dockertest/v3" + "github.com/ory/dockertest/v3/docker" + "github.com/stretchr/testify/require" + "go.uber.org/fx" + "go.uber.org/fx/fxtest" +) + +func createRedpandaServer(t *testing.T) string { + + pool, err := dockertest.NewPool("") + require.NoError(t, err) + + resource, err := pool.RunWithOptions(&dockertest.RunOptions{ + Repository: "docker.redpanda.com/vectorized/redpanda", + Tag: "v22.3.11", + Tty: true, + Cmd: []string{ + "redpanda", "start", + "--smp", "1", + "--overprovisioned", + "--kafka-addr", "PLAINTEXT://0.0.0.0:9092", + "--advertise-kafka-addr", "PLAINTEXT://localhost:9092", + "--pandaproxy-addr", "0.0.0.0:8082", + "--advertise-pandaproxy-addr", "localhost:8082", + }, + PortBindings: map[docker.Port][]docker.PortBinding{ + "9092/tcp": {{ + HostIP: "0.0.0.0", + HostPort: "9092", + }}, + "9644/tcp": {{ + HostIP: "0.0.0.0", + HostPort: "9644", + }}, + }, + }) + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, pool.Purge(resource)) + }) + + stdout := io.Discard + stderr := io.Discard + if testing.Verbose() { + stdout = os.Stdout + stderr = os.Stderr + } + exitCode, err := resource.Exec([]string{ + "rpk", + "cluster", + "config", + "set", + "auto_create_topics_enabled", + "true", + }, 
dockertest.ExecOptions{ + StdOut: stdout, + StdErr: stderr, + }) + require.NoError(t, err) + require.Equal(t, 0, exitCode) + + return "9092" +} + +func TestModule(t *testing.T) { + t.Parallel() + + type moduleTestCase struct { + name string + setup func(t *testing.T) fx.Option + topicMapping map[string]string + topic string + } + + testCases := []moduleTestCase{ + { + name: "go-channels", + setup: func(t *testing.T) fx.Option { + return GoChannelModule() + }, + topic: "topic", + }, + { + name: "kafka", + setup: func(t *testing.T) fx.Option { + port := createRedpandaServer(t) + return fx.Options( + kafkaModule("client-id", "consumer-group", fmt.Sprintf("localhost:%s", port)), + fx.Replace(sarama.V0_11_0_0), + ProvideSaramaOption( + WithProducerReturnSuccess(), + WithConsumerReturnErrors(), + WithConsumerOffsetsInitial(sarama.OffsetOldest), + ), + ) + }, + topic: "topic", + }, + { + name: "http", + setup: func(t *testing.T) fx.Option { + return fx.Options( + httpModule("localhost:8888"), + ) + }, + topicMapping: map[string]string{ + "*": "http://localhost:8888", + }, + topic: "/", + }, + { + name: "nats", + setup: func(t *testing.T) fx.Option { + server, err := natsServer.NewServer(&natsServer.Options{ + Host: "0.0.0.0", + Port: 4322, + JetStream: true, + }) + require.NoError(t, err) + + server.Start() + + t.Cleanup(server.Shutdown) + + return fx.Options( + NatsModule("example", "nats://127.0.0.1:4322", "testing"), + ) + }, + topicMapping: map[string]string{}, + topic: "topic", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + var ( + publisher message.Publisher + router *message.Router + messageHandled = make(chan struct{}) + ) + options := []fx.Option{ + Module(tc.topicMapping), + tc.setup(t), + fx.Populate(&publisher, &router), + fx.Supply(fx.Annotate(logging.Testing(), fx.As(new(logging.Logger)))), + fx.Invoke(func(r *message.Router, subscriber message.Subscriber) { + r.AddNoPublisherHandler("testing", tc.topic, 
subscriber, func(msg *message.Message) error { + require.Equal(t, "\"baz\"", string(msg.Payload)) + close(messageHandled) + return nil + }) + }), + } + if !testing.Verbose() { + options = append(options, fx.NopLogger) + } + app := fxtest.New(t, options...) + app.RequireStart() + defer func() { + app.RequireStop() + }() + + <-router.Running() + + msg := NewMessage(context.TODO(), "baz") + require.NoError(t, publisher.Publish(tc.topic, msg)) + + select { + case <-messageHandled: + case <-time.After(10 * time.Second): + t.Fatal("timeout waiting message") + } + }) + } +} diff --git a/libs/publish/nats.go b/libs/publish/nats.go new file mode 100644 index 000000000..e62c71e20 --- /dev/null +++ b/libs/publish/nats.go @@ -0,0 +1,146 @@ +package publish + +import ( + "context" + + "github.com/ThreeDotsLabs/watermill" + wNats "github.com/ThreeDotsLabs/watermill-nats/v2/pkg/nats" + "github.com/ThreeDotsLabs/watermill/message" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/nats-io/nats.go" + "github.com/pkg/errors" + "go.uber.org/fx" +) + +func newNatsConn(config wNats.PublisherConfig) (*nats.Conn, error) { + if err := config.Validate(); err != nil { + return nil, err + } + + conn, err := nats.Connect(config.URL, config.NatsOptions...) 
+ if err != nil { + return nil, errors.Wrap(err, "cannot connect to nats-core") + } + + return conn, nil +} + +func newNatsPublisherWithConn(conn *nats.Conn, logger watermill.LoggerAdapter, config wNats.PublisherConfig) (*wNats.Publisher, error) { + return wNats.NewPublisherWithNatsConn(conn, config.GetPublisherPublishConfig(), logger) +} + +func newNatsSubscriberWithConn(conn *nats.Conn, logger watermill.LoggerAdapter, config wNats.SubscriberConfig) (*wNats.Subscriber, error) { + return wNats.NewSubscriberWithNatsConn(conn, config.GetSubscriberSubscriptionConfig(), logger) +} + +func NatsModule(clientID, url, serviceName string) fx.Option { + jetStreamConfig := wNats.JetStreamConfig{ + AutoProvision: true, + DurablePrefix: serviceName, + } + natsOptions := []nats.Option{ + nats.Name(clientID), + } + return fx.Options( + fx.Provide(newNatsConn), + fx.Provide(newNatsDefaultCallbacks), + fx.Provide(newNatsPublisherWithConn), + fx.Provide(newNatsSubscriberWithConn), + fx.Provide(func(natsCallbacks NATSCallbacks) wNats.PublisherConfig { + natsOptions = append(natsOptions, + nats.ConnectHandler(natsCallbacks.ConnectedCB), + nats.DisconnectErrHandler(natsCallbacks.DisconnectedErrCB), + nats.DiscoveredServersHandler(natsCallbacks.DiscoveredServersCB), + nats.ErrorHandler(natsCallbacks.AsyncErrorCB), + nats.ReconnectHandler(natsCallbacks.ReconnectedCB), + nats.DisconnectHandler(natsCallbacks.DisconnectedCB), + nats.ClosedHandler(natsCallbacks.ClosedCB), + ) + + return wNats.PublisherConfig{ + NatsOptions: natsOptions, + URL: url, + Marshaler: &wNats.NATSMarshaler{}, + JetStream: jetStreamConfig, + SubjectCalculator: wNats.DefaultSubjectCalculator, + } + }), + fx.Provide(func(natsCallbacks NATSCallbacks) wNats.SubscriberConfig { + natsOptions = append(natsOptions, + nats.ConnectHandler(natsCallbacks.ConnectedCB), + nats.DisconnectErrHandler(natsCallbacks.DisconnectedErrCB), + nats.DiscoveredServersHandler(natsCallbacks.DiscoveredServersCB), + 
nats.ErrorHandler(natsCallbacks.AsyncErrorCB), + nats.ReconnectHandler(natsCallbacks.ReconnectedCB), + nats.DisconnectHandler(natsCallbacks.DisconnectedCB), + nats.ClosedHandler(natsCallbacks.ClosedCB), + ) + + return wNats.SubscriberConfig{ + NatsOptions: natsOptions, + Unmarshaler: &wNats.NATSMarshaler{}, + URL: url, + QueueGroupPrefix: serviceName, + JetStream: jetStreamConfig, + SubjectCalculator: wNats.DefaultSubjectCalculator, + } + }), + fx.Provide(func(publisher *wNats.Publisher) message.Publisher { + return publisher + }), + fx.Provide(func(subscriber *wNats.Subscriber, lc fx.Lifecycle) message.Subscriber { + lc.Append(fx.Hook{ + OnStop: func(ctx context.Context) error { + return subscriber.Close() + }, + }) + return subscriber + }), + ) +} + +type NATSCallbacks interface { + ClosedCB(nc *nats.Conn) + DisconnectedCB(nc *nats.Conn) + DiscoveredServersCB(nc *nats.Conn) + ReconnectedCB(nc *nats.Conn) + DisconnectedErrCB(nc *nats.Conn, err error) + ConnectedCB(nc *nats.Conn) + AsyncErrorCB(nc *nats.Conn, sub *nats.Subscription, err error) +} + +type natsDefaultCallbacks struct { + logger logging.Logger +} + +func newNatsDefaultCallbacks(logger logging.Logger) NATSCallbacks { + return &natsDefaultCallbacks{logger: logger} +} + +func (c *natsDefaultCallbacks) ClosedCB(nc *nats.Conn) { + c.logger.Infof("nats connection closed: %s", nc.Opts.Name) +} + +func (c *natsDefaultCallbacks) DisconnectedCB(nc *nats.Conn) { + c.logger.Infof("nats connection disconnected: %s", nc.Opts.Name) +} + +func (c *natsDefaultCallbacks) DiscoveredServersCB(nc *nats.Conn) { + c.logger.Infof("nats server discovered: %s", nc.Opts.Name) +} + +func (c *natsDefaultCallbacks) ReconnectedCB(nc *nats.Conn) { + c.logger.Infof("nats connection reconnected: %s", nc.Opts.Name) +} + +func (c *natsDefaultCallbacks) DisconnectedErrCB(nc *nats.Conn, err error) { + c.logger.Errorf("nats connection disconnected error for %s: %v", nc.Opts.Name, err) +} + +func (c *natsDefaultCallbacks) ConnectedCB(nc 
*nats.Conn) { + c.logger.Infof("nats connection done: %s", nc.Opts.Name) +} + +func (c *natsDefaultCallbacks) AsyncErrorCB(nc *nats.Conn, sub *nats.Subscription, err error) { + c.logger.Errorf("nats async error for %s with subject %s: %v", nc.Opts.Name, sub.Subject, err) +} diff --git a/libs/service/app.go b/libs/service/app.go new file mode 100644 index 000000000..f1ce5831d --- /dev/null +++ b/libs/service/app.go @@ -0,0 +1,54 @@ +package service + +import ( + "context" + "io" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/spf13/viper" + "go.uber.org/fx" +) + +const DebugFlag = "debug" +const JsonFormattingLoggerFlag = "json-formatting-logger" + +type App struct { + options []fx.Option + output io.Writer +} + +func (a *App) Run(ctx context.Context) error { + logger := GetDefaultLogger(a.output, viper.GetBool(DebugFlag), viper.GetBool(JsonFormattingLoggerFlag)) + app := a.newFxApp(logger) + if err := app.Start(logging.ContextWithLogger(ctx, logger)); err != nil { + return err + } + + select { + case <-ctx.Done(): + case <-app.Done(): + // <-app.Done() is a signals channel, it means we have to call the + // app.Stop in order to gracefully shutdown the app + } + + return app.Stop(logging.ContextWithLogger(context.Background(), logger)) +} + +func (a *App) Start(ctx context.Context) error { + logger := GetDefaultLogger(a.output, viper.GetBool(DebugFlag), viper.GetBool(JsonFormattingLoggerFlag)) + return a.newFxApp(logger).Start(ctx) +} + +func (a *App) newFxApp(logger logging.Logger) *fx.App { + return fx.New(append(a.options, + fx.NopLogger, + fx.Supply(fx.Annotate(logger, fx.As(new(logging.Logger)))), + )...) 
+} + +func New(output io.Writer, options ...fx.Option) *App { + return &App{ + options: options, + output: output, + } +} diff --git a/libs/service/logging.go b/libs/service/logging.go new file mode 100644 index 000000000..5b20fd01b --- /dev/null +++ b/libs/service/logging.go @@ -0,0 +1,43 @@ +package service + +import ( + "io" + + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/otlp/otlptraces" + "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "github.com/uptrace/opentelemetry-go-extra/otellogrus" +) + +func GetDefaultLogger(w io.Writer, debug, jsonFormattingLog bool) logging.Logger { + l := logrus.New() + l.SetOutput(w) + if debug { + l.Level = logrus.DebugLevel + } + + var formatter logrus.Formatter + if jsonFormattingLog { + jsonFormatter := &logrus.JSONFormatter{} + jsonFormatter.TimestampFormat = "15-01-2018 15:04:05.000000" + formatter = jsonFormatter + } else { + textFormatter := new(logrus.TextFormatter) + textFormatter.TimestampFormat = "15-01-2018 15:04:05.000000" + textFormatter.FullTimestamp = true + formatter = textFormatter + } + + l.SetFormatter(formatter) + + if viper.GetBool(otlptraces.OtelTracesFlag) { + l.AddHook(otellogrus.NewHook(otellogrus.WithLevels( + logrus.PanicLevel, + logrus.FatalLevel, + logrus.ErrorLevel, + logrus.WarnLevel, + ))) + } + return logging.NewLogrus(l) +} diff --git a/main.go b/main.go index e5bb9c83f..2b1571661 100644 --- a/main.go +++ b/main.go @@ -1,10 +1,7 @@ -// docker run --rm -w /local -v ${PWD}:/local openapitools/openapi-generator-cli:latest generate -i ./pkg/api/controllers/swagger.yaml -g go -o ./client --git-user-id=formancehq --git-repo-id=ledger -p packageVersion=latest -p isGoSubmodule=true -p packageName=client -// -//go:generate docker run --rm -w /local -v ${PWD}:/local openapitools/openapi-generator-cli:latest validate -i ./pkg/api/controllers/swagger.yaml package main import ( - "github.com/numary/ledger/cmd" + "github.com/formancehq/ledger/cmd" ) 
func main() { diff --git a/moon.yml b/moon.yml new file mode 100644 index 000000000..c0d4c235a --- /dev/null +++ b/moon.yml @@ -0,0 +1,5 @@ +type: 'application' +language: 'go' +dependsOn: + - 'go-libs' + - 'openapi' diff --git a/pkg/api/controllers/swagger.yaml b/openapi.yaml similarity index 57% rename from pkg/api/controllers/swagger.yaml rename to openapi.yaml index 6674f7f59..c43b1a760 100644 --- a/pkg/api/controllers/swagger.yaml +++ b/openapi.yaml @@ -5,9 +5,10 @@ info: version: "LEDGER_VERSION" paths: - /_info: + /v2/_info: get: tags: + - Ledger - Server summary: Show server information operationId: getInfo @@ -25,7 +26,7 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/_info: + /v2/{ledger}/_info: get: summary: Get information about a ledger operationId: getLedgerInfo @@ -53,11 +54,12 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/accounts: + /v2/{ledger}/accounts: head: summary: Count the accounts from a ledger operationId: countAccounts tags: + - Ledger - Accounts parameters: - name: ledger @@ -67,23 +69,20 @@ paths: schema: type: string example: ledger001 - - name: address + - name: pit in: query - description: Filter accounts by address pattern (regular expression placed between ^ and $). + required: false schema: type: string - example: users:.+ - - name: metadata - in: query - description: Filter accounts by metadata key value pairs. Nested objects can be used as seen in the example below. - style: deepObject - explode: true - schema: - type: object - properties: {} - example: metadata[key]=value1&metadata[a.nested.key]=value2 + format: date-time + requestBody: + content: + application/json: + schema: + type: object + additionalProperties: true responses: - "200": + "204": description: OK headers: Count: @@ -103,6 +102,7 @@ paths: description: List accounts from a ledger, sorted by address in descending order. 
operationId: listAccounts tags: + - Ledger - Accounts parameters: - name: ledger @@ -122,67 +122,6 @@ paths: format: int64 minimum: 1 maximum: 1000 - default: 15 - - name: page_size - in: query - description: | - The maximum number of results to return per page. - Deprecated, please use `pageSize` instead. - example: 100 - schema: - type: integer - format: int64 - minimum: 1 - maximum: 1000 - default: 15 - deprecated: true - - name: after - in: query - description: Pagination cursor, will return accounts after given address, in descending order. - schema: - type: string - example: users:003 - - name: address - in: query - description: Filter accounts by address pattern (regular expression placed between ^ and $). - schema: - type: string - example: users:.+ - - name: metadata - in: query - description: Filter accounts by metadata key value pairs. Nested objects can be used as seen in the example below. - style: deepObject - explode: true - schema: - type: object - properties: {} - example: metadata[key]=value1&metadata[a.nested.key]=value2 - - name: balance - in: query - description: Filter accounts by their balance (default operator is gte) - schema: - type: integer - format: int64 - minimum: 0 - example: 2400 - - name: balanceOperator - in: query - description: | - Operator used for the filtering of balances can be greater than/equal, less than/equal, greater than, less than, equal or not. - schema: - type: string - enum: [gte, lte, gt, lt, e, ne] - example: gte - - name: balance_operator - in: query - description: | - Operator used for the filtering of balances can be greater than/equal, less than/equal, greater than, less than, equal or not. - Deprecated, please use `balanceOperator` instead. 
- schema: - type: string - enum: [gte, lte, gt, lt, e, ne] - example: gte - deprecated: true - name: cursor in: query description: | @@ -193,18 +132,24 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - - name: pagination_token + - name: expand in: query - description: | - Parameter used in pagination requests. Maximum page size is set to 15. - Set to the value of next for the next page of results. - Set to the value of previous for the previous page of results. - No other parameters can be set when this parameter is set. - Deprecated, please use `cursor` instead. schema: type: string - example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - deprecated: true + items: + type: string + - name: pit + in: query + required: false + schema: + type: string + format: date-time + requestBody: + content: + application/json: + schema: + type: object + additionalProperties: true responses: "200": description: OK @@ -219,11 +164,12 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/accounts/{address}: + /v2/{ledger}/accounts/{address}: get: summary: Get account by its address operationId: getAccount tags: + - Ledger - Accounts parameters: - name: ledger @@ -244,6 +190,18 @@ paths: schema: type: string example: users:001 + - name: expand + in: query + schema: + type: string + items: + type: string + - name: pit + in: query + required: false + schema: + type: string + format: date-time responses: "200": description: OK @@ -258,11 +216,12 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/accounts/{address}/metadata: + /v2/{ledger}/accounts/{address}/metadata: post: summary: Add metadata to an account operationId: addMetadataToAccount tags: + - Ledger - Accounts parameters: - name: ledger @@ -283,6 +242,17 @@ paths: schema: type: string example: users:001 + - name: dryRun + in: query + description: Set the dry run mode. 
Dry run mode doesn't add the logs to the database or publish a message to the message broker. + schema: + type: boolean + example: true + - name: Idempotency-Key + in: header + description: Use an idempotency key + schema: + type: string requestBody: description: metadata content: @@ -301,12 +271,14 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/mapping: - get: + /v2/{ledger}/accounts/{address}/metadata/{key}: + delete: + description: Delete metadata by key + operationId: deleteAccountMetadata tags: - - Mapping - operationId: getMapping - summary: Get the mapping of a ledger + - Ledger + - Transactions + summary: Delete metadata by key parameters: - name: ledger in: path @@ -315,100 +287,28 @@ paths: schema: type: string example: ledger001 - responses: - "200": - description: OK - content: - application/json: - schema: - $ref: '#/components/schemas/MappingResponse' - default: - description: Error - content: - application/json: - schema: - $ref: '#/components/schemas/ErrorResponse' - - put: - tags: - - Mapping - operationId: updateMapping - summary: Update the mapping of a ledger - parameters: - - name: ledger + - name: address in: path - description: Name of the ledger. + description: Account address required: true schema: type: string - example: ledger001 - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/Mapping' - required: true - responses: - "200": - description: OK - content: - application/json: - schema: - $ref: '#/components/schemas/MappingResponse' - default: - description: Error - content: - application/json: - schema: - $ref: '#/components/schemas/ErrorResponse' - - /{ledger}/script: - post: - deprecated: true - tags: - - Script - operationId: runScript - summary: Execute a Numscript - description: > - This route is deprecated, and has been merged into `POST /{ledger}/transactions`. - parameters: - - name: ledger + - name: key in: path - description: Name of the ledger. 
+ description: The key to remove. required: true schema: type: string - example: ledger001 - - name: preview - in: query - description: Set the preview mode. Preview mode doesn't add the logs to the database or publish a message to the message broker. - schema: - type: boolean - example: true - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/Script' + example: foo responses: - "200": - description: > - On success, it will return a 200 status code, and the resulting transaction under the `transaction` field. - - - On failure, it will also return a 200 status code, and the following fields: - - `details`: contains a URL. When there is an error parsing Numscript, the result can be difficult to read—the provided URL will render the error in an easy-to-read format. - - `errorCode` and `error_code` (deprecated): contains the string code of the error - - `errorMessage` and `error_message` (deprecated): contains a human-readable indication of what went wrong, for example that an account had insufficient funds, or that there was an error in the provided Numscript. - content: - application/json: - schema: - $ref: '#/components/schemas/ScriptResponse' + 2XX: + description: Key deleted + content: {} - /{ledger}/stats: + /v2/{ledger}/stats: get: tags: + - Ledger - Stats operationId: readStats summary: Get statistics from a ledger @@ -436,9 +336,10 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/transactions: + /v2/{ledger}/transactions: head: tags: + - Ledger - Transactions summary: Count the transactions from a ledger operationId: countTransactions @@ -450,78 +351,20 @@ paths: schema: type: string example: ledger001 - - name: reference - in: query - description: Filter transactions by reference field. 
- schema: - type: string - example: ref:001 - - name: account + - name: pit in: query - description: Filter transactions with postings involving given account, either - as source or destination (regular expression placed between ^ and $). - schema: - type: string - example: users:001 - - name: source - in: query - description: Filter transactions with postings involving given account at source (regular expression placed between ^ and $). - schema: - type: string - example: users:001 - - name: destination - in: query - description: Filter transactions with postings involving given account at destination (regular expression placed between ^ and $). - schema: - type: string - example: users:001 - - name: startTime - in: query - description: | - Filter transactions that occurred after this timestamp. - The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). - schema: - type: string - format: date-time - - name: start_time - in: query - description: | - Filter transactions that occurred after this timestamp. - The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). - Deprecated, please use `startTime` instead. - schema: - type: string - format: date-time - deprecated: true - - name: endTime - in: query - description: | - Filter transactions that occurred before this timestamp. - The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). - schema: - type: string - format: date-time - - name: end_time - in: query - description: | - Filter transactions that occurred before this timestamp. - The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). - Deprecated, please use `endTime` instead. 
+ required: false schema: type: string format: date-time - deprecated: true - - name: metadata - in: query - description: Filter transactions by metadata key value pairs. Nested objects can be used as seen in the example below. - style: deepObject - explode: true - schema: - type: object - properties: { } - example: metadata[key]=value1&metadata[a.nested.key]=value2 + requestBody: + content: + application/json: + schema: + type: object + additionalProperties: true responses: - "200": + "204": description: OK headers: Count: @@ -538,9 +381,10 @@ paths: get: tags: + - Ledger - Transactions summary: List transactions from a ledger - description: List transactions from a ledger, sorted by txid in descending order. + description: List transactions from a ledger, sorted by id in descending order. operationId: listTransactions parameters: - name: ledger @@ -560,88 +404,6 @@ paths: format: int64 minimum: 1 maximum: 1000 - default: 15 - - name: page_size - in: query - description: | - The maximum number of results to return per page. - Deprecated, please use `pageSize` instead. - example: 100 - schema: - type: integer - format: int64 - minimum: 1 - maximum: 1000 - default: 15 - deprecated: true - - name: after - in: query - description: Pagination cursor, will return transactions after given txid - (in descending order). - schema: - type: string - example: 1234 - - name: reference - in: query - description: Find transactions by reference field. - schema: - type: string - example: ref:001 - - name: account - in: query - description: Filter transactions with postings involving given account, either - as source or destination (regular expression placed between ^ and $). - schema: - type: string - example: users:001 - - name: source - in: query - description: Filter transactions with postings involving given account at source (regular expression placed between ^ and $). 
- schema: - type: string - example: users:001 - - name: destination - in: query - description: Filter transactions with postings involving given account at destination (regular expression placed between ^ and $). - schema: - type: string - example: users:001 - - name: startTime - in: query - description: | - Filter transactions that occurred after this timestamp. - The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). - schema: - type: string - format: date-time - - name: start_time - in: query - description: | - Filter transactions that occurred after this timestamp. - The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). - Deprecated, please use `startTime` instead. - schema: - type: string - format: date-time - deprecated: true - - name: endTime - in: query - description: | - Filter transactions that occurred before this timestamp. - The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). - schema: - type: string - format: date-time - - name: end_time - in: query - description: | - Filter transactions that occurred before this timestamp. - The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). - Deprecated, please use `endTime` instead. - schema: - type: string - format: date-time - deprecated: true - name: cursor in: query description: | @@ -652,27 +414,24 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - - name: pagination_token + - name: expand in: query - description: | - Parameter used in pagination requests. Maximum page size is set to 15. - Set to the value of next for the next page of results. - Set to the value of previous for the previous page of results. - No other parameters can be set when this parameter is set. - Deprecated, please use `cursor` instead. 
schema: type: string - example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - deprecated: true - - name: metadata + items: + type: string + - name: pit in: query - description: Filter transactions by metadata key value pairs. Nested objects can be used as seen in the example below. - style: deepObject - explode: true + required: false schema: - type: object - properties: { } - example: metadata[key]=value1&metadata[a.nested.key]=value2 + type: string + format: date-time + requestBody: + content: + application/json: + schema: + type: object + additionalProperties: true responses: "200": description: OK @@ -689,6 +448,7 @@ paths: post: tags: + - Ledger - Transactions summary: Create a new transaction to a ledger operationId: createTransaction @@ -700,12 +460,17 @@ paths: schema: type: string example: ledger001 - - name: preview + - name: dryRun in: query - description: Set the preview mode. Preview mode doesn't add the logs to the database or publish a message to the message broker. + description: Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker. schema: type: boolean example: true + - name: Idempotency-Key + in: header + description: Use an idempotency key + schema: + type: string requestBody: required: true description: > @@ -722,7 +487,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TransactionsResponse' + $ref: '#/components/schemas/CreateTransactionResponse' default: description: Error content: @@ -730,9 +495,10 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/transactions/{txid}: + /v2/{ledger}/transactions/{id}: get: tags: + - Ledger - Transactions summary: Get transaction from a ledger by its ID operationId: getTransaction @@ -744,7 +510,7 @@ paths: schema: type: string example: ledger001 - - name: txid + - name: id in: path description: Transaction ID. 
required: true @@ -753,13 +519,25 @@ paths: format: int64 minimum: 0 example: 1234 + - name: expand + in: query + schema: + type: string + items: + type: string + - name: pit + in: query + required: false + schema: + type: string + format: date-time responses: "200": description: OK content: application/json: schema: - $ref: '#/components/schemas/TransactionResponse' + $ref: '#/components/schemas/GetTransactionResponse' default: description: Error content: @@ -767,9 +545,10 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/transactions/{txid}/metadata: + /v2/{ledger}/transactions/{id}/metadata: post: tags: + - Ledger - Transactions summary: Set the metadata of a transaction by its ID operationId: addMetadataOnTransaction @@ -781,7 +560,7 @@ paths: schema: type: string example: ledger001 - - name: txid + - name: id in: path description: Transaction ID. required: true @@ -790,6 +569,17 @@ paths: format: int64 minimum: 0 example: 1234 + - name: dryRun + in: query + description: Set the dryRun mode. Dry run mode doesn't add the logs to the database or publish a message to the message broker. + schema: + type: boolean + example: true + - name: Idempotency-Key + in: header + description: Use an idempotency key + schema: + type: string requestBody: description: metadata content: @@ -807,12 +597,14 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/transactions/{txid}/revert: - post: + /v2/{ledger}/transactions/{id}/metadata/{key}: + delete: + description: Delete metadata by key + operationId: deleteTransactionMetadata + summary: Delete metadata by key tags: + - Ledger - Transactions - operationId: revertTransaction - summary: Revert a ledger transaction by its ID parameters: - name: ledger in: path @@ -821,7 +613,7 @@ paths: schema: type: string example: ledger001 - - name: txid + - name: id in: path description: Transaction ID. 
required: true @@ -830,60 +622,25 @@ paths: format: int64 minimum: 0 example: 1234 - responses: - "200": - description: OK - content: - application/json: - schema: - $ref: '#/components/schemas/TransactionResponse' - default: - description: Error - content: - application/json: - schema: - $ref: '#/components/schemas/ErrorResponse' - - /{ledger}/transactions/batch: - post: - tags: - - Transactions - summary: Create a new batch of transactions to a ledger - operationId: CreateTransactions - parameters: - - name: ledger + - name: key in: path - description: Name of the ledger. required: true + description: The key to remove. schema: type: string - example: ledger001 - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/Transactions' - required: true + example: foo responses: - "200": - description: OK - content: - application/json: - schema: - $ref: '#/components/schemas/TransactionsResponse' - default: - description: Error - content: - application/json: - schema: - $ref: '#/components/schemas/ErrorResponse' + 2XX: + description: Key deleted + content: { } - /{ledger}/balances: - get: + /v2/{ledger}/transactions/{id}/revert: + post: tags: - - Balances - summary: Get the balances from a ledger's account - operationId: getBalances + - Ledger + - Transactions + operationId: revertTransaction + summary: Revert a ledger transaction by its ID parameters: - name: ledger in: path @@ -892,57 +649,32 @@ paths: schema: type: string example: ledger001 - - name: address - in: query - description: Filter balances involving given account, either - as source or destination. - schema: - type: string - example: users:001 - - name: after - in: query - description: Pagination cursor, will return accounts after given address, in descending order. - schema: - type: string - example: users:003 - - name: cursor - in: query - description: | - Parameter used in pagination requests. Maximum page size is set to 15. 
- Set to the value of next for the next page of results. - Set to the value of previous for the previous page of results. - No other parameters can be set when this parameter is set. - schema: - type: string - example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - - name: pagination_token - in: query - description: |- - Parameter used in pagination requests. - Set to the value of next for the next page of results. - Set to the value of previous for the previous page of results. - Deprecated, please use `cursor` instead. + - name: id + in: path + description: Transaction ID. + required: true schema: - type: string - example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - deprecated: true + type: integer + format: int64 + minimum: 0 + example: 1234 responses: - "200": + "201": description: OK content: application/json: schema: - $ref: '#/components/schemas/BalancesCursorResponse' + $ref: '#/components/schemas/RevertTransactionResponse' default: description: Error content: application/json: schema: $ref: '#/components/schemas/ErrorResponse' - - /{ledger}/aggregate/balances: + /v2/{ledger}/aggregate/balances: get: tags: + - Ledger - Balances summary: Get the aggregated balances from selected accounts operationId: getBalancesAggregated @@ -954,13 +686,18 @@ paths: schema: type: string example: ledger001 - - name: address + - name: pit in: query - description: Filter balances involving given account, either - as source or destination. + required: false schema: type: string - example: users:001 + format: date-time + requestBody: + content: + application/json: + schema: + type: object + additionalProperties: true responses: "200": description: OK @@ -975,9 +712,10 @@ paths: schema: $ref: '#/components/schemas/ErrorResponse' - /{ledger}/logs: + /v2/{ledger}/logs: get: tags: + - Ledger - Logs summary: List the logs from a ledger description: List the logs from a ledger, sorted by ID in descending order. 
@@ -1000,63 +738,6 @@ paths: format: int64 minimum: 1 maximum: 1000 - default: 15 - - name: page_size - in: query - description: | - The maximum number of results to return per page. - Deprecated, please use `pageSize` instead. - example: 100 - schema: - type: integer - format: int64 - minimum: 1 - maximum: 1000 - default: 15 - deprecated: true - - name: after - in: query - description: Pagination cursor, will return the logs after a given ID. - (in descending order). - schema: - type: string - example: 1234 - - name: startTime - in: query - description: | - Filter transactions that occurred after this timestamp. - The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). - schema: - type: string - format: date-time - - name: start_time - in: query - description: | - Filter transactions that occurred after this timestamp. - The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). - Deprecated, please use `startTime` instead. - schema: - type: string - format: date-time - deprecated: true - - name: endTime - in: query - description: | - Filter transactions that occurred before this timestamp. - The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). - schema: - type: string - format: date-time - - name: end_time - in: query - description: | - Filter transactions that occurred before this timestamp. - The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). - Deprecated, please use `endTime` instead. - schema: - type: string - format: date-time - deprecated: true - name: cursor in: query description: | @@ -1067,18 +748,18 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - - name: pagination_token + - name: pit in: query - description: | - Parameter used in pagination requests. 
Maximum page size is set to 15. - Set to the value of next for the next page of results. - Set to the value of previous for the previous page of results. - No other parameters can be set when this parameter is set. - Deprecated, please use `cursor` instead. + required: false schema: type: string - example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== - deprecated: true + format: date-time + requestBody: + content: + application/json: + schema: + type: object + additionalProperties: true responses: "200": description: OK @@ -1189,7 +870,7 @@ components: data: type: array items: - $ref: '#/components/schemas/Transaction' + $ref: '#/components/schemas/ExpandedTransaction' LogsCursorResponse: type: object @@ -1262,9 +943,9 @@ components: Metadata: type: object - nullable: true - additionalProperties: {} - example: { admin: true, a: { nested: { key: value } } } + additionalProperties: + type: string + example: { admin: "true" } ConfigInfo: type: object @@ -1280,40 +961,27 @@ components: - server - version - ScriptResponse: - type: object - properties: - errorCode: - $ref: '#/components/schemas/ErrorsEnum' - errorMessage: - type: string - example: "account had insufficient funds" - details: - type: string - example: "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" - transaction: - $ref: '#/components/schemas/Transaction' - Account: type: object required: - address + - metadata properties: address: type: string example: users:001 - type: - type: string - example: virtual metadata: type: object properties: {} - example: { admin: true, a: { nested: { key: value}} } + additionalProperties: + type: string + example: { admin: "true" } AccountWithVolumesAndBalances: type: object required: - address + - metadata properties: address: type: string @@ -1324,24 +992,18 @@ components: metadata: type: object properties: {} - example: { admin: true, a: { nested: { key: value}} } + additionalProperties: + type: string + example: { admin: 
"true" } volumes: type: object additionalProperties: type: object additionalProperties: type: integer - format: int64 + format: bigint minimum: 0 example: { COIN: { input: 100, output: 0 } } - balances: - type: object - additionalProperties: - type: integer - format: int64 - minimum: 0 - example: - COIN: 100 AccountsBalances: type: object @@ -1359,41 +1021,17 @@ components: type: object additionalProperties: type: integer - format: int64 - minimum: 0 + format: bigint example: USD: 100 EUR: 12 - Contract: - type: object - properties: - account: - type: string - example: users:001 - expr: - type: object - required: - - accounts - - expr - - Mapping: - type: object - nullable: true - required: - - contracts - properties: - contracts: - type: array - items: - $ref: '#/components/schemas/Contract' - Posting: type: object properties: amount: type: integer - format: int64 + format: bigint minimum: 0 example: 100 asset: @@ -1420,11 +1058,10 @@ components: vars: type: object properties: {} + additionalProperties: true example: { - "vars": { "user": "users:042" } - } reference: type: string example: order_1234 @@ -1449,49 +1086,33 @@ components: example: ref:001 metadata: $ref: '#/components/schemas/Metadata' - txid: + id: type: integer format: int64 minimum: 0 - preCommitVolumes: - $ref: '#/components/schemas/AggregatedVolumes' - postCommitVolumes: - $ref: '#/components/schemas/AggregatedVolumes' - required: - - postings - - timestamp - - txid - - TransactionData: - type: object + reverted: + type: boolean required: - - postings - properties: - postings: - type: array - items: - $ref: '#/components/schemas/Posting' - reference: - type: string - example: ref:001 - metadata: - $ref: '#/components/schemas/Metadata' - timestamp: - type: string - format: date-time + - postings + - timestamp + - id + - metadata + - reverted - Transactions: - required: - - transactions - type: object - properties: - transactions: - type: array - items: - $ref: 
'#/components/schemas/TransactionData' + ExpandedTransaction: + allOf: + - $ref: '#/components/schemas/Transaction' + - type: object + properties: + preCommitVolumes: + $ref: '#/components/schemas/AggregatedVolumes' + postCommitVolumes: + $ref: '#/components/schemas/AggregatedVolumes' PostTransaction: type: object + required: + - metadata properties: timestamp: type: string @@ -1509,11 +1130,10 @@ components: vars: type: object properties: {} + additionalProperties: true example: { - "vars": { "user": "users:042" } - } required: - plain reference: @@ -1550,9 +1170,11 @@ components: enum: - NEW_TRANSACTION - SET_METADATA + - REVERTED_TRANSACTION data: type: object properties: {} + additionalProperties: true hash: type: string example: "9ee060170400f556b7e1575cb13f9db004f150a08355c7431c62bc639166431e" @@ -1566,23 +1188,24 @@ components: - hash - date - TransactionsResponse: - type: object + CreateTransactionResponse: properties: data: - items: - $ref: '#/components/schemas/Transaction' - type: array + $ref: '#/components/schemas/Transaction' + type: object required: - data - TransactionResponse: + RevertTransactionResponse: + $ref: '#/components/schemas/CreateTransactionResponse' + + GetTransactionResponse: properties: data: - $ref: '#/components/schemas/Transaction' + $ref: '#/components/schemas/ExpandedTransaction' type: object required: - - data + - data StatsResponse: properties: @@ -1592,32 +1215,21 @@ components: required: - data - MappingResponse: - properties: - data: - $ref: '#/components/schemas/Mapping' - type: object - ConfigInfoResponse: - properties: - data: - $ref: '#/components/schemas/ConfigInfo' - type: object - required: - - data + $ref: '#/components/schemas/ConfigInfo' Volume: type: object properties: input: type: integer - format: int64 + format: bigint output: type: integer - format: int64 + format: bigint balance: type: integer - format: int64 + format: bigint required: - input - output @@ -1659,13 +1271,14 @@ components: ErrorResponse: type: 
object required: - - error_code + - errorCode + - errorMessage properties: errorCode: $ref: '#/components/schemas/ErrorsEnum' errorMessage: type: string - example: "[INSUFFICIENT_FUND] account had insufficient funds" + example: "[VALIDATION] invalid 'cursor' query param" details: type: string example: "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" @@ -1680,7 +1293,10 @@ components: - NO_SCRIPT - COMPILATION_FAILED - METADATA_OVERRIDE - example: INSUFFICIENT_FUND + - NOT_FOUND + - CONTEXT_CANCELLED + - STORE + example: VALIDATION LedgerInfoResponse: properties: diff --git a/pkg/analytics/segment.go b/pkg/analytics/segment.go deleted file mode 100644 index 32f652489..000000000 --- a/pkg/analytics/segment.go +++ /dev/null @@ -1,215 +0,0 @@ -package analytics - -import ( - "context" - "crypto/sha256" - "encoding/base64" - "runtime" - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/pbnjay/memory" - "github.com/pborman/uuid" - "go.uber.org/fx" - "gopkg.in/segmentio/analytics-go.v3" -) - -const ( - ApplicationStats = "Application stats" - - VersionProperty = "version" - AccountsProperty = "accounts" - TransactionsProperty = "transactions" - LedgersProperty = "ledgers" - OSProperty = "os" - ArchProperty = "arch" - TimeZoneProperty = "tz" - CPUCountProperty = "cpuCount" - TotalMemoryProperty = "totalMemory" -) - -type AppIdProvider interface { - AppID(ctx context.Context) (string, error) -} -type AppIdProviderFn func(ctx context.Context) (string, error) - -func (fn AppIdProviderFn) AppID(ctx context.Context) (string, error) { - return fn(ctx) -} - -func FromStorageAppIdProvider(driver storage.Driver[ledger.Store]) AppIdProvider { - var appId string - return AppIdProviderFn(func(ctx context.Context) (string, error) { - var err error - if appId == "" { - appId, err = driver.GetSystemStore().GetConfiguration(ctx, "appId") - if err != 
nil && err != storage.ErrConfigurationNotFound { - return "", err - } - if err == storage.ErrConfigurationNotFound { - appId = uuid.New() - if err := driver.GetSystemStore().InsertConfiguration(ctx, "appId", appId); err != nil { - return "", err - } - } - } - return appId, nil - }) -} - -type heartbeat struct { - version string - interval time.Duration - client analytics.Client - stopChan chan chan struct{} - appIdProvider AppIdProvider - driver storage.Driver[ledger.Store] -} - -func (m *heartbeat) Run(ctx context.Context) error { - - enqueue := func() { - err := m.enqueue(ctx) - if err != nil { - logging.GetLogger(ctx).WithFields(map[string]interface{}{ - "error": err, - }).Error("enqueuing analytics") - } - } - - enqueue() - for { - select { - case ch := <-m.stopChan: - ch <- struct{}{} - return nil - case <-ctx.Done(): - return ctx.Err() - case <-time.After(m.interval): - enqueue() - } - } -} - -func (m *heartbeat) Stop(ctx context.Context) error { - ch := make(chan struct{}) - m.stopChan <- ch - select { - case <-ctx.Done(): - return ctx.Err() - case <-ch: - return nil - } -} - -func (m *heartbeat) enqueue(ctx context.Context) error { - - appId, err := m.appIdProvider.AppID(ctx) - if err != nil { - return err - } - - tz, _ := time.Now().Local().Zone() - - properties := analytics.NewProperties(). - Set(VersionProperty, m.version). - Set(OSProperty, runtime.GOOS). - Set(ArchProperty, runtime.GOARCH). - Set(TimeZoneProperty, tz). - Set(CPUCountProperty, runtime.NumCPU()). 
- Set(TotalMemoryProperty, memory.TotalMemory()/1024/1024) - - ledgers, err := m.driver.GetSystemStore().ListLedgers(ctx) - if err != nil { - return err - } - - ledgersProperty := map[string]any{} - - for _, l := range ledgers { - stats := map[string]any{} - if err := func() error { - store, _, err := m.driver.GetLedgerStore(ctx, l, false) - if err != nil { - return err - } - transactions, err := store.CountTransactions(ctx, ledger.TransactionsQuery{}) - if err != nil { - return err - } - accounts, err := store.CountAccounts(ctx, ledger.AccountsQuery{}) - if err != nil { - return err - } - stats[TransactionsProperty] = transactions - stats[AccountsProperty] = accounts - - return nil - }(); err != nil { - return err - } - - digest := sha256.New() - digest.Write([]byte(l)) - ledgerHash := base64.RawURLEncoding.EncodeToString(digest.Sum(nil)) - - ledgersProperty[ledgerHash] = stats - } - if len(ledgersProperty) > 0 { - properties.Set(LedgersProperty, ledgersProperty) - } - - return m.client.Enqueue(&analytics.Track{ - AnonymousId: appId, - Event: ApplicationStats, - Properties: properties, - }) -} - -func newHeartbeat(appIdProvider AppIdProvider, driver storage.Driver[ledger.Store], client analytics.Client, version string, interval time.Duration) *heartbeat { - return &heartbeat{ - version: version, - interval: interval, - client: client, - driver: driver, - appIdProvider: appIdProvider, - stopChan: make(chan chan struct{}, 1), - } -} - -func NewHeartbeatModule(version, writeKey string, interval time.Duration) fx.Option { - return fx.Options( - fx.Supply(analytics.Config{}), // Provide empty config to be able to replace (use fx.Replace) if necessary - fx.Provide(func(cfg analytics.Config) (analytics.Client, error) { - return analytics.NewWithConfig(writeKey, cfg) - }), - fx.Provide(func(client analytics.Client, provider AppIdProvider, driver storage.Driver[ledger.Store]) *heartbeat { - return newHeartbeat(provider, driver, client, version, interval) - }), - 
fx.Invoke(func(m *heartbeat, lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - go func() { - err := m.Run(context.Background()) - if err != nil { - panic(err) - } - }() - return nil - }, - OnStop: func(ctx context.Context) error { - return m.Stop(ctx) - }, - }) - }), - fx.Invoke(func(lc fx.Lifecycle, client analytics.Client) { - lc.Append(fx.Hook{ - OnStop: func(ctx context.Context) error { - return client.Close() - }, - }) - }), - ) -} diff --git a/pkg/api/api.go b/pkg/api/api.go deleted file mode 100644 index 2002780b9..000000000 --- a/pkg/api/api.go +++ /dev/null @@ -1,49 +0,0 @@ -package api - -import ( - _ "embed" - "net/http" - - "github.com/formancehq/go-libs/health" - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/middlewares" - "github.com/numary/ledger/pkg/api/routes" - "go.uber.org/fx" -) - -type API struct { - handler *gin.Engine -} - -func (a *API) ServeHTTP(w http.ResponseWriter, r *http.Request) { - a.handler.ServeHTTP(w, r) -} - -func NewAPI(routes *routes.Routes) *API { - gin.SetMode(gin.ReleaseMode) - h := &API{ - handler: routes.Engine(), - } - return h -} - -type Config struct { - StorageDriver string - Version string - UseScopes bool -} - -func Module(cfg Config) fx.Option { - return fx.Options( - controllers.ProvideVersion(func() string { - return cfg.Version - }), - middlewares.Module, - routes.Module, - controllers.Module, - fx.Provide(NewAPI), - fx.Supply(routes.UseScopes(cfg.UseScopes)), - health.Module(), - ) -} diff --git a/pkg/api/apierrors/errors.go b/pkg/api/apierrors/errors.go deleted file mode 100644 index a89097bc6..000000000 --- a/pkg/api/apierrors/errors.go +++ /dev/null @@ -1,92 +0,0 @@ -package apierrors - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "strings" - - "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/logging" - "github.com/gin-gonic/gin" - 
"github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/pkg/errors" -) - -const ( - ErrInternal = "INTERNAL" - ErrConflict = "CONFLICT" - ErrInsufficientFund = "INSUFFICIENT_FUND" - ErrValidation = "VALIDATION" - ErrContextCancelled = "CONTEXT_CANCELLED" - ErrStore = "STORE" - ErrNotFound = "NOT_FOUND" - ErrScriptCompilationFailed = "COMPILATION_FAILED" - ErrScriptNoScript = "NO_SCRIPT" - ErrScriptMetadataOverride = "METADATA_OVERRIDE" -) - -func ResponseError(c *gin.Context, err error) { - _ = c.Error(err) - status, code, details := coreErrorToErrorCode(c, err) - - if status < 500 { - c.AbortWithStatusJSON(status, - api.ErrorResponse{ - ErrorCode: code, - ErrorMessage: err.Error(), - Details: details, - - ErrorCodeDeprecated: code, - ErrorMessageDeprecated: err.Error(), - }) - } else { - c.AbortWithStatus(status) - } -} - -func coreErrorToErrorCode(c *gin.Context, err error) (int, string, string) { - switch { - case ledger.IsConflictError(err): - return http.StatusConflict, ErrConflict, "" - case ledger.IsInsufficientFundError(err): - return http.StatusBadRequest, ErrInsufficientFund, "" - case ledger.IsValidationError(err): - return http.StatusBadRequest, ErrValidation, "" - case ledger.IsNotFoundError(err): - return http.StatusNotFound, ErrNotFound, "" - case ledger.IsScriptErrorWithCode(err, ErrScriptNoScript), - ledger.IsScriptErrorWithCode(err, ErrInsufficientFund), - ledger.IsScriptErrorWithCode(err, ErrScriptCompilationFailed), - ledger.IsScriptErrorWithCode(err, ErrScriptMetadataOverride): - scriptErr := err.(*ledger.ScriptError) - return http.StatusBadRequest, scriptErr.Code, EncodeLink(scriptErr.Message) - case errors.Is(err, context.Canceled): - return http.StatusInternalServerError, ErrContextCancelled, "" - case storage.IsError(err): - return http.StatusServiceUnavailable, ErrStore, "" - default: - logging.GetLogger(c.Request.Context()).Errorf( - "unknown API response error: %s", err) - return 
http.StatusInternalServerError, ErrInternal, "" - } -} - -func EncodeLink(errStr string) string { - if errStr == "" { - return "" - } - - errStr = strings.ReplaceAll(errStr, "\n", "\r\n") - payload, err := json.Marshal(gin.H{ - "error": errStr, - }) - if err != nil { - panic(err) - } - payloadB64 := base64.StdEncoding.EncodeToString(payload) - return fmt.Sprintf("https://play.numscript.org/?payload=%v", payloadB64) -} diff --git a/pkg/api/controllers/account_controller.go b/pkg/api/controllers/account_controller.go deleted file mode 100644 index bb75e1e37..000000000 --- a/pkg/api/controllers/account_controller.go +++ /dev/null @@ -1,198 +0,0 @@ -package controllers - -import ( - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "strconv" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage/sqlstorage" -) - -type AccountController struct{} - -func NewAccountController() AccountController { - return AccountController{} -} - -func (ctl *AccountController) CountAccounts(c *gin.Context) { - l, _ := c.Get("ledger") - - accountsQuery := ledger.NewAccountsQuery(). - WithAddressFilter(c.Query("address")). 
- WithMetadataFilter(c.QueryMap("metadata")) - - count, err := l.(*ledger.Ledger).CountAccounts(c.Request.Context(), *accountsQuery) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - c.Header("Count", fmt.Sprint(count)) -} - -func (ctl *AccountController) GetAccounts(c *gin.Context) { - l, _ := c.Get("ledger") - - accountsQuery := ledger.NewAccountsQuery() - - if c.Query(QueryKeyCursor) != "" { - if c.Query("after") != "" || - c.Query("address") != "" || - len(c.QueryMap("metadata")) > 0 || - c.Query("balance") != "" || - c.Query(QueryKeyBalanceOperator) != "" || - c.Query(QueryKeyBalanceOperatorDeprecated) != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursor))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursor)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - token := sqlstorage.AccPaginationToken{} - if err := json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - accountsQuery = accountsQuery. - WithOffset(token.Offset). - WithAfterAddress(token.AfterAddress). - WithAddressFilter(token.AddressRegexpFilter). - WithBalanceFilter(token.BalanceFilter). - WithBalanceOperatorFilter(token.BalanceOperatorFilter). - WithMetadataFilter(token.MetadataFilter). 
- WithPageSize(token.PageSize) - - } else if c.Query(QueryKeyCursorDeprecated) != "" { - if c.Query("after") != "" || - c.Query("address") != "" || - len(c.QueryMap("metadata")) > 0 || - c.Query("balance") != "" || - c.Query(QueryKeyBalanceOperator) != "" || - c.Query(QueryKeyBalanceOperatorDeprecated) != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursorDeprecated))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursorDeprecated)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - token := sqlstorage.AccPaginationToken{} - if err := json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - accountsQuery = accountsQuery. - WithOffset(token.Offset). - WithAfterAddress(token.AfterAddress). - WithAddressFilter(token.AddressRegexpFilter). - WithBalanceFilter(token.BalanceFilter). - WithBalanceOperatorFilter(token.BalanceOperatorFilter). - WithMetadataFilter(token.MetadataFilter). - WithPageSize(token.PageSize) - - } else { - balance := c.Query("balance") - if balance != "" { - if _, err := strconv.ParseInt(balance, 10, 64); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - "invalid parameter 'balance', should be a number")) - return - } - } - - balanceOperator, err := getBalanceOperator(c) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - pageSize, err := getPageSize(c) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - accountsQuery = accountsQuery. - WithAfterAddress(c.Query("after")). - WithAddressFilter(c.Query("address")). - WithBalanceFilter(balance). 
- WithBalanceOperatorFilter(balanceOperator). - WithMetadataFilter(c.QueryMap("metadata")). - WithPageSize(pageSize) - } - - cursor, err := l.(*ledger.Ledger).GetAccounts(c.Request.Context(), *accountsQuery) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithCursor[core.Account](c, http.StatusOK, cursor) -} - -func (ctl *AccountController) GetAccount(c *gin.Context) { - l, _ := c.Get("ledger") - - if !core.ValidateAddress(c.Param("address")) { - apierrors.ResponseError(c, ledger.NewValidationError("invalid account address format")) - return - } - - acc, err := l.(*ledger.Ledger).GetAccount( - c.Request.Context(), - c.Param("address")) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[*core.AccountWithVolumes](c, http.StatusOK, acc) -} - -func (ctl *AccountController) PostAccountMetadata(c *gin.Context) { - l, _ := c.Get("ledger") - - if !core.ValidateAddress(c.Param("address")) { - apierrors.ResponseError(c, ledger.NewValidationError("invalid account address format")) - return - } - - var m core.Metadata - if err := c.ShouldBindJSON(&m); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError("invalid metadata format")) - return - } - - if err := l.(*ledger.Ledger).SaveMeta(c.Request.Context(), - core.MetaTargetTypeAccount, c.Param("address"), m); err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithNoContent(c) -} diff --git a/pkg/api/controllers/account_controller_test.go b/pkg/api/controllers/account_controller_test.go deleted file mode 100644 index ca565c7c1..000000000 --- a/pkg/api/controllers/account_controller_test.go +++ /dev/null @@ -1,588 +0,0 @@ -package controllers_test - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "net/url" - "testing" - "time" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/apierrors" - 
"github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestGetAccounts(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(150), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - - rsp = internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bob", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - meta := core.Metadata{ - "roles": "admin", - "accountId": float64(3), - "enabled": "true", - "a": map[string]any{ - "nested": map[string]any{ - "key": "hello", - }, - }, - } - rsp = internal.PostAccountMetadata(t, api, "bob", meta) - require.Equal(t, http.StatusNoContent, rsp.Result().StatusCode) - - rsp = internal.CountAccounts(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - require.Equal(t, "3", rsp.Header().Get("Count")) - - t.Run("all", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - // 3 accounts: world, bob, alice - assert.Len(t, cursor.Data, 3) - assert.Equal(t, []core.Account{ - {Address: "world", Metadata: core.Metadata{}}, - {Address: "bob", Metadata: meta}, - {Address: "alice", 
Metadata: core.Metadata{}}, - }, cursor.Data) - }) - - t.Run("meta roles", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "metadata[roles]": []string{"admin"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - // 1 accounts: bob - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "bob") - }) - - t.Run("meta accountId", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "metadata[accountId]": []string{"3"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - // 1 accounts: bob - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "bob") - }) - - t.Run("meta enabled", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "metadata[enabled]": []string{"true"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - // 1 accounts: bob - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "bob") - }) - - t.Run("meta nested", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "metadata[a.nested.key]": []string{"hello"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - // 1 accounts: bob - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "bob") - }) - - t.Run("meta unknown", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "metadata[unknown]": []string{"key"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 0) - }) - - t.Run("after", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "after": []string{"bob"}, - }) - assert.Equal(t, http.StatusOK, 
rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - // 1 accounts: alice - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "alice") - }) - - t.Run("address", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "address": []string{"b.b"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - // 1 accounts: bob - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "bob") - }) - - to := sqlstorage.AccPaginationToken{} - raw, err := json.Marshal(to) - require.NoError(t, err) - - t.Run(fmt.Sprintf("valid empty %s", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - }) - - t.Run(fmt.Sprintf("valid empty %s with any other param is forbidden", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - "after": []string{"bob"}, - }) - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("no other query params can be set with '%s'", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("no other query params can be set with '%s'", controllers.QueryKeyCursor), - }, err) - }) - - t.Run(fmt.Sprintf("invalid %s", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - controllers.QueryKeyCursor: []string{"invalid"}, - }) - assert.Equal(t, 
http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - }, err) - }) - - t.Run(fmt.Sprintf("invalid %s not base64", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - controllers.QueryKeyCursor: []string{"\n*@"}, - }) - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - }, err) - }) - - t.Run("filter by balance >= 50 with default operator", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"50"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 2) - assert.Equal(t, cursor.Data[0].Address, "bob") - assert.Equal(t, cursor.Data[1].Address, "alice") - }) - - t.Run("filter by balance >= 120 with default operator", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"120"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "alice") - }) - - t.Run("filter by balance >= 50", 
func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"50"}, - controllers.QueryKeyBalanceOperator: []string{"gte"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 2) - assert.Equal(t, cursor.Data[0].Address, "bob") - assert.Equal(t, cursor.Data[1].Address, "alice") - }) - - t.Run("filter by balance >= 120", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"120"}, - controllers.QueryKeyBalanceOperator: []string{"gte"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "alice") - }) - - t.Run("filter by balance > 120", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"120"}, - controllers.QueryKeyBalanceOperator: []string{"gt"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "alice") - }) - - t.Run("filter by balance < 0", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"0"}, - controllers.QueryKeyBalanceOperator: []string{"lt"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "world") - }) - - t.Run("filter by balance < 100", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"100"}, - controllers.QueryKeyBalanceOperator: []string{"lt"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 1) 
- assert.Equal(t, cursor.Data[0].Address, "world") - }) - - t.Run("filter by balance <= 100", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"100"}, - controllers.QueryKeyBalanceOperator: []string{"lte"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 2) - assert.Equal(t, cursor.Data[0].Address, "world") - assert.Equal(t, cursor.Data[1].Address, "bob") - }) - - t.Run("filter by balance = 100", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"100"}, - controllers.QueryKeyBalanceOperator: []string{"e"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 1) - assert.Equal(t, cursor.Data[0].Address, "bob") - }) - - // test filter by balance != 100 - t.Run("filter by balance != 100", func(t *testing.T) { - rsp = internal.GetAccounts(api, url.Values{ - "balance": []string{"100"}, - controllers.QueryKeyBalanceOperator: []string{"ne"}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, 2) - assert.Equal(t, cursor.Data[0].Address, "world") - assert.Equal(t, cursor.Data[1].Address, "alice") - }) - - t.Run("invalid balance", func(t *testing.T) { - rsp := internal.GetAccounts(api, url.Values{ - "balance": []string{"toto"}, - }) - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid parameter 'balance', should be a number", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid parameter 'balance', should be a number", - }, err) - }) - - 
t.Run("invalid balance operator", func(t *testing.T) { - rsp := internal.GetAccounts(api, url.Values{ - "balance": []string{"100"}, - controllers.QueryKeyBalanceOperator: []string{"toto"}, - }) - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: controllers.ErrInvalidBalanceOperator.Error(), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: controllers.ErrInvalidBalanceOperator.Error(), - }, err) - }) - - return nil - }, - }) - })) -} - -func TestGetAccountsWithPageSize(t *testing.T) { - now := time.Now() - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - store := internal.GetLedgerStore(t, driver, context.Background()) - - for i := 0; i < 3*controllers.MaxPageSize; i++ { - require.NoError(t, store.UpdateAccountMetadata(ctx, fmt.Sprintf("accounts:%06d", i), core.Metadata{ - "foo": []byte("{}"), - }, now)) - } - - t.Run("invalid page size", func(t *testing.T) { - rsp := internal.GetAccounts(api, url.Values{ - controllers.QueryKeyPageSize: []string{"nan"}, - }) - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: controllers.ErrInvalidPageSize.Error(), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: controllers.ErrInvalidPageSize.Error(), - }, err) - }) - t.Run("page size over maximum", func(t *testing.T) { - httpResponse := internal.GetAccounts(api, url.Values{ - controllers.QueryKeyPageSize: []string{fmt.Sprintf("%d", 2*controllers.MaxPageSize)}, - }) - assert.Equal(t, http.StatusOK, 
httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.Account](t, httpResponse.Body) - assert.Len(t, cursor.Data, controllers.MaxPageSize) - assert.Equal(t, cursor.PageSize, controllers.MaxPageSize) - assert.NotEmpty(t, cursor.Next) - assert.True(t, cursor.HasMore) - }) - t.Run("with page size greater than max count", func(t *testing.T) { - httpResponse := internal.GetAccounts(api, url.Values{ - controllers.QueryKeyPageSize: []string{fmt.Sprintf("%d", controllers.MaxPageSize)}, - "after": []string{fmt.Sprintf("accounts:%06d", controllers.MaxPageSize-100)}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.Account](t, httpResponse.Body) - assert.Len(t, cursor.Data, controllers.MaxPageSize-100) - assert.Equal(t, controllers.MaxPageSize, cursor.PageSize) - assert.Empty(t, cursor.Next) - assert.False(t, cursor.HasMore) - }) - t.Run("with page size lower than max count", func(t *testing.T) { - httpResponse := internal.GetAccounts(api, url.Values{ - controllers.QueryKeyPageSize: []string{fmt.Sprintf("%d", controllers.MaxPageSize/10)}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.Account](t, httpResponse.Body) - assert.Len(t, cursor.Data, controllers.MaxPageSize/10) - assert.Equal(t, cursor.PageSize, controllers.MaxPageSize/10) - assert.NotEmpty(t, cursor.Next) - assert.True(t, cursor.HasMore) - }) - - return nil - }, - }) - })) -} - -func TestGetAccount(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, 
false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostAccountMetadata(t, api, "alice", - core.Metadata{ - "foo": json.RawMessage(`"bar"`), - }) - require.Equal(t, http.StatusNoContent, rsp.Result().StatusCode) - - t.Run("valid address", func(t *testing.T) { - rsp = internal.GetAccount(api, "alice") - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - resp, _ := internal.DecodeSingleResponse[core.AccountWithVolumes](t, rsp.Body) - - assert.EqualValues(t, core.AccountWithVolumes{ - Account: core.Account{ - Address: "alice", - Metadata: core.Metadata{ - "foo": "bar", - }, - }, - Balances: core.AssetsBalances{ - "USD": core.NewMonetaryInt(100), - }, - Volumes: core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, resp) - }) - - t.Run("unknown address", func(t *testing.T) { - rsp = internal.GetAccount(api, "bob") - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - resp, _ := internal.DecodeSingleResponse[core.AccountWithVolumes](t, rsp.Body) - assert.EqualValues(t, core.AccountWithVolumes{ - Account: core.Account{ - Address: "bob", - Metadata: core.Metadata{}, - }, - Balances: core.AssetsBalances{}, - Volumes: core.AssetsVolumes{}, - }, resp) - }) - - t.Run("invalid address format", func(t *testing.T) { - rsp = internal.GetAccount(api, "accounts::alice") - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid account address format", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid account address format", - }, err) - }) - - return nil - }, - }) - })) -} - -func TestPostAccountMetadata(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx 
context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - t.Run("valid request", func(t *testing.T) { - rsp = internal.PostAccountMetadata(t, api, "alice", - core.Metadata{ - "foo": json.RawMessage(`"bar"`), - }) - assert.Equal(t, http.StatusNoContent, rsp.Result().StatusCode, rsp.Body.String()) - }) - - t.Run("unknown account should succeed", func(t *testing.T) { - rsp = internal.PostAccountMetadata(t, api, "bob", - core.Metadata{ - "foo": json.RawMessage(`"bar"`), - }) - assert.Equal(t, http.StatusNoContent, rsp.Result().StatusCode, rsp.Body.String()) - }) - - t.Run("invalid address format", func(t *testing.T) { - rsp = internal.PostAccountMetadata(t, api, "accounts::alice", core.Metadata{}) - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid account address format", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid account address format", - }, err) - }) - - t.Run("invalid metadata format", func(t *testing.T) { - rsp = internal.NewRequestOnLedger(t, api, "/accounts/alice/metadata", "invalid") - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - assert.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid metadata format", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid metadata format", - }, err) - }) - - return nil - }, - }) - })) -} diff --git 
a/pkg/api/controllers/balance_controller.go b/pkg/api/controllers/balance_controller.go deleted file mode 100644 index 250da6dee..000000000 --- a/pkg/api/controllers/balance_controller.go +++ /dev/null @@ -1,122 +0,0 @@ -package controllers - -import ( - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage/sqlstorage" -) - -type BalanceController struct{} - -func NewBalanceController() BalanceController { - return BalanceController{} -} - -func (ctl *BalanceController) GetBalancesAggregated(c *gin.Context) { - l, _ := c.Get("ledger") - - balancesQuery := ledger.NewBalancesQuery(). - WithAddressFilter(c.Query("address")) - balances, err := l.(*ledger.Ledger).GetBalancesAggregated( - c.Request.Context(), *balancesQuery) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[core.AssetsBalances](c, http.StatusOK, balances) -} - -func (ctl *BalanceController) GetBalances(c *gin.Context) { - l, _ := c.Get("ledger") - - balancesQuery := ledger.NewBalancesQuery() - - if c.Query(QueryKeyCursor) != "" { - if c.Query("after") != "" || - c.Query("address") != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursor))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursor)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - token := sqlstorage.BalancesPaginationToken{} - if err := json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - 
balancesQuery = balancesQuery. - WithOffset(token.Offset). - WithAfterAddress(token.AfterAddress). - WithAddressFilter(token.AddressRegexpFilter). - WithPageSize(token.PageSize) - - } else if c.Query(QueryKeyCursorDeprecated) != "" { - if c.Query("after") != "" || - c.Query("address") != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursorDeprecated))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursorDeprecated)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - token := sqlstorage.BalancesPaginationToken{} - if err := json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - balancesQuery = balancesQuery. - WithOffset(token.Offset). - WithAfterAddress(token.AfterAddress). - WithAddressFilter(token.AddressRegexpFilter). - WithPageSize(token.PageSize) - - } else { - pageSize, err := getPageSize(c) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - balancesQuery = balancesQuery. - WithAfterAddress(c.Query("after")). - WithAddressFilter(c.Query("address")). 
- WithPageSize(pageSize) - } - - cursor, err := l.(*ledger.Ledger).GetBalances(c.Request.Context(), *balancesQuery) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithCursor[core.AccountsBalances](c, http.StatusOK, cursor) -} diff --git a/pkg/api/controllers/balance_controller_test.go b/pkg/api/controllers/balance_controller_test.go deleted file mode 100644 index f232e479a..000000000 --- a/pkg/api/controllers/balance_controller_test.go +++ /dev/null @@ -1,208 +0,0 @@ -package controllers_test - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "net/url" - "testing" - - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestGetBalancesAggregated(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(150), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bob", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - t.Run("all", func(t *testing.T) { - rsp = internal.GetBalancesAggregated(api, url.Values{}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - resp, ok := internal.DecodeSingleResponse[core.AssetsBalances](t, rsp.Body) - assert.Equal(t, ok, true) - assert.Equal(t, 
core.AssetsBalances{"USD": core.NewMonetaryInt(0)}, resp) - }) - - t.Run("filter by address", func(t *testing.T) { - rsp = internal.GetBalancesAggregated(api, url.Values{"address": []string{"world"}}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - resp, ok := internal.DecodeSingleResponse[core.AssetsBalances](t, rsp.Body) - assert.Equal(t, true, ok) - assert.Equal(t, core.AssetsBalances{"USD": core.NewMonetaryInt(-250)}, resp) - }) - - t.Run("filter by address no result", func(t *testing.T) { - rsp = internal.GetBalancesAggregated(api, url.Values{"address": []string{"XXX"}}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - resp, ok := internal.DecodeSingleResponse[core.AssetsBalances](t, rsp.Body) - assert.Equal(t, ok, true) - assert.Equal(t, core.AssetsBalances{}, resp) - }) - - return nil - }, - }) - })) -} - -func TestGetBalances(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(150), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bob", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(200), - Asset: "CAD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: 
"alice", - Amount: core.NewMonetaryInt(400), - Asset: "EUR", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - to := sqlstorage.BalancesPaginationToken{} - raw, err := json.Marshal(to) - require.NoError(t, err) - - t.Run("valid empty "+controllers.QueryKeyCursor, func(t *testing.T) { - rsp = internal.GetBalances(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - }) - - t.Run(fmt.Sprintf("valid empty %s with any other param is forbidden", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetBalances(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - "after": []string{"bob"}, - }) - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - }) - - t.Run(fmt.Sprintf("invalid %s", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetBalances(api, url.Values{ - controllers.QueryKeyCursor: []string{"invalid"}, - }) - - assert.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - assert.Contains(t, rsp.Body.String(), - fmt.Sprintf(`"invalid '%s' query param"`, controllers.QueryKeyCursor)) - }) - - t.Run("all", func(t *testing.T) { - rsp = internal.GetBalances(api, url.Values{}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - resp := internal.DecodeCursorResponse[core.AccountsBalances](t, rsp.Body) - assert.Equal(t, []core.AccountsBalances{ - {"world": core.AssetsBalances{"USD": core.NewMonetaryInt(-250), "EUR": core.NewMonetaryInt(-400), "CAD": core.NewMonetaryInt(-200)}}, - {"bob": core.AssetsBalances{"USD": core.NewMonetaryInt(100)}}, - {"alice": core.AssetsBalances{"USD": core.NewMonetaryInt(150), "EUR": core.NewMonetaryInt(400), "CAD": core.NewMonetaryInt(200)}}, - }, resp.Data) - }) - - t.Run("after address", func(t *testing.T) { - rsp = 
internal.GetBalances(api, url.Values{"after": []string{"bob"}}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - resp := internal.DecodeCursorResponse[core.AccountsBalances](t, rsp.Body) - assert.Equal(t, []core.AccountsBalances{ - {"alice": core.AssetsBalances{"USD": core.NewMonetaryInt(150), "EUR": core.NewMonetaryInt(400), "CAD": core.NewMonetaryInt(200)}}, - }, resp.Data) - }) - - t.Run("filter by address", func(t *testing.T) { - rsp = internal.GetBalances(api, url.Values{"address": []string{"world"}}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - resp := internal.DecodeCursorResponse[core.AccountsBalances](t, rsp.Body) - assert.Equal(t, []core.AccountsBalances{ - {"world": core.AssetsBalances{"USD": core.NewMonetaryInt(-250), "EUR": core.NewMonetaryInt(-400), "CAD": core.NewMonetaryInt(-200)}}, - }, resp.Data) - }) - - t.Run("filter by address no results", func(t *testing.T) { - rsp = internal.GetBalances(api, url.Values{"address": []string{"TEST"}}) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - resp := internal.DecodeCursorResponse[core.AccountsBalances](t, rsp.Body) - assert.Equal(t, []core.AccountsBalances{}, resp.Data) - }) - - return nil - }, - }) - })) -} diff --git a/pkg/api/controllers/base_controller.go b/pkg/api/controllers/base_controller.go deleted file mode 100644 index 23d94ffe2..000000000 --- a/pkg/api/controllers/base_controller.go +++ /dev/null @@ -1,24 +0,0 @@ -package controllers - -import ( - "net/http" - - "github.com/formancehq/go-libs/api" - "github.com/gin-gonic/gin" -) - -func respondWithNoContent(c *gin.Context) { - c.Status(http.StatusNoContent) -} - -func respondWithCursor[T any](c *gin.Context, status int, data api.Cursor[T]) { - c.JSON(status, api.BaseResponse[T]{ - Cursor: &data, - }) -} - -func respondWithData[T any](c *gin.Context, status int, data T) { - c.JSON(status, api.BaseResponse[T]{ - Data: &data, - }) -} diff --git a/pkg/api/controllers/config_controller.go 
b/pkg/api/controllers/config_controller.go deleted file mode 100644 index ba6edd2cc..000000000 --- a/pkg/api/controllers/config_controller.go +++ /dev/null @@ -1,86 +0,0 @@ -package controllers - -import ( - "bytes" - _ "embed" - "encoding/json" - "net/http" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "gopkg.in/yaml.v3" -) - -type ConfigInfo struct { - Server string `json:"server"` - Version interface{} `json:"version"` - Config *Config `json:"config"` -} - -type Config struct { - LedgerStorage *LedgerStorage `json:"storage"` -} - -type LedgerStorage struct { - Driver string `json:"driver"` - Ledgers []string `json:"ledgers"` -} - -type ConfigController struct { - Version string - StorageDriver storage.Driver[ledger.Store] -} - -func NewConfigController(version string, storageDriver storage.Driver[ledger.Store]) ConfigController { - return ConfigController{ - Version: version, - StorageDriver: storageDriver, - } -} - -func (ctl *ConfigController) GetInfo(c *gin.Context) { - ledgers, err := ctl.StorageDriver.GetSystemStore().ListLedgers(c.Request.Context()) - if err != nil { - panic(err) - } - respondWithData[ConfigInfo](c, http.StatusOK, ConfigInfo{ - Server: "numary-ledger", - Version: ctl.Version, - Config: &Config{ - LedgerStorage: &LedgerStorage{ - Driver: ctl.StorageDriver.Name(), - Ledgers: ledgers, - }, - }, - }) -} - -//go:embed swagger.yaml -var swagger string - -func parseSwagger(version string) map[string]interface{} { - ret := make(map[string]interface{}) - err := yaml.NewDecoder(bytes.NewBufferString(swagger)).Decode(&ret) - if err != nil { - panic(err) - } - ret["info"].(map[string]interface{})["version"] = version - return ret -} - -func (ctl *ConfigController) GetDocsAsYaml(c *gin.Context) { - err := yaml.NewEncoder(c.Writer).Encode(parseSwagger(ctl.Version)) - if err != nil { - panic(err) - } -} - -func (ctl *ConfigController) GetDocsAsJSON(c *gin.Context) { - enc := 
json.NewEncoder(c.Writer) - enc.SetIndent("", " ") - err := enc.Encode(parseSwagger(ctl.Version)) - if err != nil { - panic(err) - } -} diff --git a/pkg/api/controllers/config_controller_test.go b/pkg/api/controllers/config_controller_test.go deleted file mode 100644 index 0946f1475..000000000 --- a/pkg/api/controllers/config_controller_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package controllers_test - -import ( - "context" - "net/http" - "testing" - - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestGetInfo(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, h *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.GetInfo(h) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - info, ok := internal.DecodeSingleResponse[controllers.ConfigInfo](t, rsp.Body) - require.True(t, ok) - - info.Config.LedgerStorage.Ledgers = []string{} - assert.EqualValues(t, controllers.ConfigInfo{ - Server: "numary-ledger", - Version: "latest", - Config: &controllers.Config{ - LedgerStorage: &controllers.LedgerStorage{ - Driver: driver.Name(), - Ledgers: []string{}, - }, - }, - }, info) - return nil - }, - }) - })) -} diff --git a/pkg/api/controllers/controllers.go b/pkg/api/controllers/controllers.go deleted file mode 100644 index 90da2aa4d..000000000 --- a/pkg/api/controllers/controllers.go +++ /dev/null @@ -1,27 +0,0 @@ -package controllers - -import ( - "go.uber.org/fx" -) - -const ( - versionKey = `name:"_apiVersion"` -) - -func ProvideVersion(provider interface{}) fx.Option { - return fx.Provide( - fx.Annotate(provider, fx.ResultTags(versionKey)), - ) -} - -var Module = fx.Options( - fx.Provide( - 
fx.Annotate(NewConfigController, fx.ParamTags(versionKey)), - ), - fx.Provide(NewLedgerController), - fx.Provide(NewScriptController), - fx.Provide(NewAccountController), - fx.Provide(NewTransactionController), - fx.Provide(NewBalanceController), - fx.Provide(NewMappingController), -) diff --git a/pkg/api/controllers/ledger_controller.go b/pkg/api/controllers/ledger_controller.go deleted file mode 100644 index 0668517cc..000000000 --- a/pkg/api/controllers/ledger_controller.go +++ /dev/null @@ -1,196 +0,0 @@ -package controllers - -import ( - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "strconv" - "time" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage/sqlstorage" -) - -type LedgerController struct{} - -func NewLedgerController() LedgerController { - return LedgerController{} -} - -type Info struct { - Name string `json:"name"` - Storage storageInfo `json:"storage"` -} - -type storageInfo struct { - Migrations []core.MigrationInfo `json:"migrations"` -} - -func (ctl *LedgerController) GetInfo(c *gin.Context) { - l, _ := c.Get("ledger") - - var err error - res := Info{ - Name: c.Param("ledger"), - Storage: storageInfo{}, - } - res.Storage.Migrations, err = l.(*ledger.Ledger).GetMigrationsInfo(c.Request.Context()) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[Info](c, http.StatusOK, res) -} - -func (ctl *LedgerController) GetStats(c *gin.Context) { - l, _ := c.Get("ledger") - - stats, err := l.(*ledger.Ledger).Stats(c.Request.Context()) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[ledger.Stats](c, http.StatusOK, stats) -} - -func (ctl *LedgerController) GetLogs(c *gin.Context) { - l, _ := c.Get("ledger") - - logsQuery := ledger.NewLogsQuery() - - if c.Query(QueryKeyCursor) != "" { - if c.Query("after") != "" || - 
c.Query(QueryKeyStartTime) != "" || - c.Query(QueryKeyStartTimeDeprecated) != "" || - c.Query(QueryKeyEndTime) != "" || - c.Query(QueryKeyEndTimeDeprecated) != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursor))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursor)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - token := sqlstorage.LogsPaginationToken{} - if err := json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - logsQuery = logsQuery. - WithAfterID(token.AfterID). - WithStartTimeFilter(token.StartTime). - WithEndTimeFilter(token.EndTime). - WithPageSize(token.PageSize) - - } else if c.Query(QueryKeyCursorDeprecated) != "" { - if c.Query("after") != "" || - c.Query(QueryKeyStartTime) != "" || - c.Query(QueryKeyStartTimeDeprecated) != "" || - c.Query(QueryKeyEndTime) != "" || - c.Query(QueryKeyEndTimeDeprecated) != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursorDeprecated))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursorDeprecated)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - token := sqlstorage.LogsPaginationToken{} - if err := json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - 
logsQuery = logsQuery. - WithAfterID(token.AfterID). - WithStartTimeFilter(token.StartTime). - WithEndTimeFilter(token.EndTime). - WithPageSize(token.PageSize) - - } else { - var err error - var afterIDParsed uint64 - if c.Query("after") != "" { - afterIDParsed, err = strconv.ParseUint(c.Query("after"), 10, 64) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - "invalid 'after' query param")) - return - } - } - - var startTimeParsed, endTimeParsed time.Time - if c.Query(QueryKeyStartTime) != "" { - startTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyStartTime)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidStartTime) - return - } - } - if c.Query(QueryKeyStartTimeDeprecated) != "" { - startTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyStartTimeDeprecated)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidStartTimeDeprecated) - return - } - } - - if c.Query(QueryKeyEndTime) != "" { - endTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyEndTime)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidEndTime) - return - } - } - if c.Query(QueryKeyEndTimeDeprecated) != "" { - endTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyEndTimeDeprecated)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidEndTimeDeprecated) - return - } - } - - pageSize, err := getPageSize(c) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - logsQuery = logsQuery. - WithAfterID(afterIDParsed). - WithStartTimeFilter(startTimeParsed). - WithEndTimeFilter(endTimeParsed). 
- WithPageSize(pageSize) - } - - cursor, err := l.(*ledger.Ledger).GetLogs(c.Request.Context(), logsQuery) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithCursor[core.Log](c, http.StatusOK, cursor) -} diff --git a/pkg/api/controllers/ledger_controller_test.go b/pkg/api/controllers/ledger_controller_test.go deleted file mode 100644 index c82c92b47..000000000 --- a/pkg/api/controllers/ledger_controller_test.go +++ /dev/null @@ -1,313 +0,0 @@ -package controllers_test - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "net/url" - "testing" - "time" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/google/uuid" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestGetLedgerInfo(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, h *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - availableMigrations, err := sqlstorage.CollectMigrationFiles(sqlstorage.MigrationsFS) - require.NoError(t, err) - - rsp := internal.GetLedgerInfo(h) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - info, ok := internal.DecodeSingleResponse[controllers.Info](t, rsp.Body) - assert.Equal(t, true, ok) - - _, err = uuid.Parse(info.Name) - assert.NoError(t, err) - - assert.Equal(t, len(availableMigrations), len(info.Storage.Migrations)) - - for _, m := range info.Storage.Migrations { - assert.Equal(t, "DONE", m.State) - assert.NotEqual(t, "", m.Name) - assert.NotEqual(t, time.Time{}, m.Date) - } - - return nil - }, - }) - })) 
-} - -func TestGetStats(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, h *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, h, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, false) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostTransaction(t, h, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "boc", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, false) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.GetLedgerStats(h) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - stats, _ := internal.DecodeSingleResponse[ledger.Stats](t, rsp.Body) - - assert.EqualValues(t, ledger.Stats{ - Transactions: 2, - Accounts: 3, - }, stats) - return nil - }, - }) - })) -} - -func TestGetLogs(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - now := time.Now().UTC() - tx1 := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 0, - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Timestamp: now.Add(-3 * time.Hour), - }, - }, - } - tx2 := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 1, - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bob", - Amount: core.NewMonetaryInt(200), - Asset: "USD", - }, - }, - Timestamp: now.Add(-2 * time.Hour), - }, - }, - } - store := internal.GetLedgerStore(t, driver, ctx) - require.NoError(t, store.Commit(context.Background(), tx1, tx2)) - - require.NoError(t, 
store.UpdateTransactionMetadata(context.Background(), - 0, core.Metadata{"key": "value"}, time.Now().UTC())) - - require.NoError(t, store.UpdateAccountMetadata(context.Background(), - "alice", core.Metadata{"key": "value"}, time.Now().UTC())) - - var log0Timestamp, log1Timestamp time.Time - t.Run("all", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Log](t, rsp.Body) - // all logs - require.Len(t, cursor.Data, 4) - require.Equal(t, uint64(3), cursor.Data[0].ID) - require.Equal(t, uint64(2), cursor.Data[1].ID) - require.Equal(t, uint64(1), cursor.Data[2].ID) - require.Equal(t, uint64(0), cursor.Data[3].ID) - - log0Timestamp = cursor.Data[3].Date - log1Timestamp = cursor.Data[2].Date - }) - - t.Run("after", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - "after": []string{"1"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Log](t, rsp.Body) - require.Len(t, cursor.Data, 1) - require.Equal(t, uint64(0), cursor.Data[0].ID) - }) - - t.Run("invalid after", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - "after": []string{"invalid"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid 'after' query param", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid 'after' query param", - }, err) - }) - - t.Run("time range", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyStartTime: []string{log0Timestamp.Format(time.RFC3339)}, - controllers.QueryKeyEndTime: []string{log1Timestamp.Format(time.RFC3339)}, - }) - require.Equal(t, http.StatusOK, 
rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Log](t, rsp.Body) - require.Len(t, cursor.Data, 1) - require.Equal(t, uint64(0), cursor.Data[0].ID) - }) - - t.Run("only start time", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyStartTime: []string{time.Now().Add(time.Second).Format(time.RFC3339)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Log](t, rsp.Body) - require.Len(t, cursor.Data, 0) - }) - - t.Run("only end time", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyEndTime: []string{time.Now().Add(time.Second).Format(time.RFC3339)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.Log](t, rsp.Body) - require.Len(t, cursor.Data, 4) - }) - - t.Run("invalid start time", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyStartTime: []string{"invalid time"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: controllers.ErrInvalidStartTime.Error(), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: controllers.ErrInvalidStartTime.Error(), - }, err) - }) - - t.Run("invalid end time", func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyEndTime: []string{"invalid time"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: controllers.ErrInvalidEndTime.Error(), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: 
controllers.ErrInvalidEndTime.Error(), - }, err) - }) - - to := sqlstorage.LogsPaginationToken{} - raw, err := json.Marshal(to) - require.NoError(t, err) - - t.Run(fmt.Sprintf("valid empty %s", controllers.QueryKeyCursor), func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - }) - - t.Run(fmt.Sprintf("valid empty %s with any other param is forbidden", controllers.QueryKeyCursor), func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - "after": []string{"1"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("no other query params can be set with '%s'", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("no other query params can be set with '%s'", controllers.QueryKeyCursor), - }, err) - }) - - t.Run(fmt.Sprintf("invalid %s", controllers.QueryKeyCursor), func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyCursor: []string{"invalid"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - }, err) - }) - - t.Run(fmt.Sprintf("invalid %s not 
base64", controllers.QueryKeyCursor), func(t *testing.T) { - rsp := internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyCursor: []string{"@!/"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - }, err) - }) - - return nil - }, - }) - })) -} diff --git a/pkg/api/controllers/mapping_controller.go b/pkg/api/controllers/mapping_controller.go deleted file mode 100644 index 99d739a2b..000000000 --- a/pkg/api/controllers/mapping_controller.go +++ /dev/null @@ -1,45 +0,0 @@ -package controllers - -import ( - "net/http" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" -) - -type MappingController struct{} - -func NewMappingController() MappingController { - return MappingController{} -} - -func (ctl *MappingController) PutMapping(c *gin.Context) { - l, _ := c.Get("ledger") - - mapping := &core.Mapping{} - if err := c.ShouldBind(mapping); err != nil { - apierrors.ResponseError(c, err) - return - } - - if err := l.(*ledger.Ledger).SaveMapping(c.Request.Context(), *mapping); err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[*core.Mapping](c, http.StatusOK, mapping) -} - -func (ctl *MappingController) GetMapping(c *gin.Context) { - l, _ := c.Get("ledger") - - mapping, err := l.(*ledger.Ledger).LoadMapping(c.Request.Context()) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[*core.Mapping](c, http.StatusOK, mapping) -} diff --git 
a/pkg/api/controllers/mapping_controller_test.go b/pkg/api/controllers/mapping_controller_test.go deleted file mode 100644 index e75e1f457..000000000 --- a/pkg/api/controllers/mapping_controller_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package controllers_test - -import ( - "context" - "encoding/json" - "net/http" - "testing" - - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/core" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestMapping(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, h *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - m := core.Mapping{ - Contracts: []core.Contract{ - { - Name: "default", - Account: "*", - Expr: &core.ExprGt{ - Op1: core.VariableExpr{ - Name: "balance", - }, - Op2: core.ConstantExpr{ - Value: 0, - }, - }, - }, - }, - } - rsp := internal.SaveMapping(t, h, m) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.LoadMapping(h) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - m2, _ := internal.DecodeSingleResponse[core.Mapping](t, rsp.Body) - - data, err := json.Marshal(m) - require.NoError(t, err) - m1AsMap := make(map[string]any) - require.NoError(t, json.Unmarshal(data, &m1AsMap)) - - data, err = json.Marshal(m2) - require.NoError(t, err) - m2AsMap := make(map[string]any) - require.NoError(t, json.Unmarshal(data, &m2AsMap)) - - assert.EqualValues(t, m1AsMap, m2AsMap) - return nil - }, - }) - })) -} - -func TestLoadEmptyMapping(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, h *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.LoadMapping(h) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - m, _ := internal.DecodeSingleResponse[core.Mapping](t, rsp.Body) - - assert.EqualValues(t, core.Mapping{}, m) - return nil - }, - }) - })) -} diff --git 
a/pkg/api/controllers/pagination_test.go b/pkg/api/controllers/pagination_test.go deleted file mode 100644 index 9721f2f08..000000000 --- a/pkg/api/controllers/pagination_test.go +++ /dev/null @@ -1,616 +0,0 @@ -package controllers_test - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "net/url" - "testing" - "time" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/core" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -// This test makes sense if maxAdditionalTxs < pageSize -const ( - pageSize = 10 - maxTxsPages = 3 - maxAdditionalTxs = 2 -) - -func TestGetPagination(t *testing.T) { - for txsPages := 0; txsPages <= maxTxsPages; txsPages++ { - for additionalTxs := 0; additionalTxs <= maxAdditionalTxs; additionalTxs++ { - t.Run(fmt.Sprintf("%d-pages-%d-additional", txsPages, additionalTxs), func(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: testGetPagination(t, api, txsPages, additionalTxs), - }) - })) - }) - } - } -} - -func testGetPagination(t *testing.T, api *api.API, txsPages, additionalTxs int) func(ctx context.Context) error { - return func(ctx context.Context) error { - numTxs := txsPages*pageSize + additionalTxs - if numTxs > 0 { - txsData := make([]core.TransactionData, numTxs) - for i := 0; i < numTxs; i++ { - txsData[i] = core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: fmt.Sprintf("accounts:%06d", i), - Amount: core.NewMonetaryInt(10), - Asset: "USD", - }, - }, - Reference: fmt.Sprintf("ref:%06d", i), - } - } - rsp := internal.PostTransactionBatch(t, api, core.Transactions{Transactions: txsData}) - require.Equal(t, http.StatusOK, rsp.Code, rsp.Body.String()) - } - - rsp := 
internal.CountTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - require.Equal(t, fmt.Sprintf("%d", numTxs), rsp.Header().Get("Count")) - - numAcc := 0 - if numTxs > 0 { - numAcc = numTxs + 1 // + world account - } - rsp = internal.CountAccounts(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - require.Equal(t, fmt.Sprintf("%d", numAcc), rsp.Header().Get("Count")) - - accPages := numAcc / pageSize - additionalAccs := numAcc % pageSize - - t.Run("transactions", func(t *testing.T) { - var paginationToken string - cursor := &sharedapi.Cursor[core.ExpandedTransaction]{} - - // MOVING FORWARD - for i := 0; i < txsPages; i++ { - - values := url.Values{} - if paginationToken == "" { - values.Set(controllers.QueryKeyPageSize, fmt.Sprintf("%d", pageSize)) - } else { - values.Set(controllers.QueryKeyCursor, paginationToken) - } - - rsp = internal.GetTransactions(api, values) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First txid of the page - assert.Equal(t, - uint64((txsPages-i)*pageSize+additionalTxs-1), cursor.Data[0].ID) - - // Last txid of the page - assert.Equal(t, - uint64((txsPages-i-1)*pageSize+additionalTxs), cursor.Data[len(cursor.Data)-1].ID) - - paginationToken = cursor.Next - } - - if additionalTxs > 0 { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - cursor = internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - assert.Len(t, cursor.Data, additionalTxs) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First txid of the last page - assert.Equal(t, - uint64(additionalTxs-1), cursor.Data[0].ID) - - // Last txid of the last page - 
assert.Equal(t, - uint64(0), cursor.Data[len(cursor.Data)-1].ID) - } - - assert.Empty(t, cursor.Next) - - // MOVING BACKWARD - if txsPages > 0 { - back := 0 - for cursor.Previous != "" { - paginationToken = cursor.Previous - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - back++ - } - if additionalTxs > 0 { - assert.Equal(t, txsPages, back) - } else { - assert.Equal(t, txsPages-1, back) - } - - // First txid of the first page - assert.Equal(t, - uint64(txsPages*pageSize+additionalTxs-1), cursor.Data[0].ID) - - // Last txid of the first page - assert.Equal(t, - uint64((txsPages-1)*pageSize+additionalTxs), cursor.Data[len(cursor.Data)-1].ID) - } - - assert.Empty(t, cursor.Previous) - }) - - t.Run("accounts", func(t *testing.T) { - var paginationToken string - cursor := &sharedapi.Cursor[core.Account]{} - - // MOVING FORWARD - for i := 0; i < accPages; i++ { - - values := url.Values{} - if paginationToken == "" { - values.Set(controllers.QueryKeyPageSize, fmt.Sprintf("%d", pageSize)) - } else { - values.Set(controllers.QueryKeyCursor, paginationToken) - } - - rsp = internal.GetAccounts(api, values) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First account of the page - if i == 0 { - assert.Equal(t, "world", - cursor.Data[0].Address) - } else { - assert.Equal(t, - fmt.Sprintf("accounts:%06d", (accPages-i)*pageSize+additionalAccs-1), - cursor.Data[0].Address) - } - - // Last account of the page - assert.Equal(t, - fmt.Sprintf("accounts:%06d", (accPages-i-1)*pageSize+additionalAccs), - 
cursor.Data[len(cursor.Data)-1].Address) - - paginationToken = cursor.Next - } - - if additionalAccs > 0 { - rsp = internal.GetAccounts(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - cursor = internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, additionalAccs) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First account of the last page - if accPages == 0 { - assert.Equal(t, "world", - cursor.Data[0].Address) - } else { - assert.Equal(t, - fmt.Sprintf("accounts:%06d", additionalAccs-1), - cursor.Data[0].Address) - } - - // Last account of the last page - assert.Equal(t, - fmt.Sprintf("accounts:%06d", 0), - cursor.Data[len(cursor.Data)-1].Address) - } - - assert.Empty(t, cursor.Next) - - // MOVING BACKWARD - if accPages > 0 { - back := 0 - for cursor.Previous != "" { - paginationToken = cursor.Previous - rsp = internal.GetAccounts(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - cursor = internal.DecodeCursorResponse[core.Account](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - back++ - } - if additionalAccs > 0 { - assert.Equal(t, accPages, back) - } else { - assert.Equal(t, accPages-1, back) - } - - // First account of the first page - assert.Equal(t, "world", - cursor.Data[0].Address) - - // Last account of the first page - assert.Equal(t, - fmt.Sprintf("accounts:%06d", (txsPages-1)*pageSize+additionalTxs+1), - cursor.Data[len(cursor.Data)-1].Address) - } - - assert.Empty(t, cursor.Previous) - }) - - t.Run("balances", func(t *testing.T) { - var paginationToken string - cursor := &sharedapi.Cursor[core.AccountsBalances]{} - - // MOVING FORWARD - for i := 0; i < accPages; i++ { - - values := url.Values{} - if paginationToken == "" { - 
values.Set(controllers.QueryKeyPageSize, fmt.Sprintf("%d", pageSize)) - } else { - values.Set(controllers.QueryKeyCursor, paginationToken) - } - - rsp = internal.GetBalances(api, values) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[core.AccountsBalances](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First account balances of the page - if i == 0 { - _, ok := cursor.Data[0]["world"] - assert.True(t, ok) - } else { - _, ok := cursor.Data[0][fmt.Sprintf( - "accounts:%06d", (accPages-i)*pageSize+additionalAccs-1)] - assert.True(t, ok) - } - - // Last account balances of the page - _, ok := cursor.Data[len(cursor.Data)-1][fmt.Sprintf( - "accounts:%06d", (accPages-i-1)*pageSize+additionalAccs)] - assert.True(t, ok) - - paginationToken = cursor.Next - } - - if additionalAccs > 0 { - rsp = internal.GetBalances(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - cursor = internal.DecodeCursorResponse[core.AccountsBalances](t, rsp.Body) - assert.Len(t, cursor.Data, additionalAccs) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First account balances of the last page - if accPages == 0 { - _, ok := cursor.Data[0]["world"] - assert.True(t, ok) - } else { - _, ok := cursor.Data[0][fmt.Sprintf( - "accounts:%06d", additionalAccs-1)] - assert.True(t, ok) - } - - // Last account balances of the last page - _, ok := cursor.Data[len(cursor.Data)-1][fmt.Sprintf( - "accounts:%06d", 0)] - assert.True(t, ok) - } - - // MOVING BACKWARD - if accPages > 0 { - back := 0 - for cursor.Previous != "" { - paginationToken = cursor.Previous - rsp = internal.GetBalances(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - cursor = 
internal.DecodeCursorResponse[core.AccountsBalances](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - back++ - } - if additionalAccs > 0 { - assert.Equal(t, accPages, back) - } else { - assert.Equal(t, accPages-1, back) - } - - // First account balances of the first page - _, ok := cursor.Data[0]["world"] - assert.True(t, ok) - - // Last account balances of the first page - _, ok = cursor.Data[len(cursor.Data)-1][fmt.Sprintf( - "accounts:%06d", (txsPages-1)*pageSize+additionalTxs+1)] - assert.True(t, ok) - } - }) - - t.Run("logs", func(t *testing.T) { - var paginationToken string - cursor := &sharedapi.Cursor[core.Log]{} - - // MOVING FORWARD - for i := 0; i < txsPages; i++ { - - values := url.Values{} - if paginationToken == "" { - values.Set(controllers.QueryKeyPageSize, fmt.Sprintf("%d", pageSize)) - } else { - values.Set(controllers.QueryKeyCursor, paginationToken) - } - - rsp = internal.GetLedgerLogs(api, values) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[core.Log](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First ID of the page - assert.Equal(t, - uint64((txsPages-i)*pageSize+additionalTxs-1), cursor.Data[0].ID) - - // Last ID of the page - assert.Equal(t, - uint64((txsPages-i-1)*pageSize+additionalTxs), cursor.Data[len(cursor.Data)-1].ID) - - paginationToken = cursor.Next - } - - if additionalTxs > 0 { - rsp = internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - cursor = internal.DecodeCursorResponse[core.Log](t, rsp.Body) - assert.Len(t, cursor.Data, additionalTxs) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - - // First ID of the last page - assert.Equal(t, - uint64(additionalTxs-1), cursor.Data[0].ID) - - // Last ID of the last page - 
assert.Equal(t, - uint64(0), cursor.Data[len(cursor.Data)-1].ID) - } - - assert.Empty(t, cursor.Next) - - // MOVING BACKWARD - if txsPages > 0 { - back := 0 - for cursor.Previous != "" { - paginationToken = cursor.Previous - rsp = internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyCursor: []string{paginationToken}, - }) - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[core.Log](t, rsp.Body) - assert.Len(t, cursor.Data, pageSize) - assert.Equal(t, cursor.Next != "", cursor.HasMore) - back++ - } - if additionalTxs > 0 { - assert.Equal(t, txsPages, back) - } else { - assert.Equal(t, txsPages-1, back) - } - - // First ID of the first page - assert.Equal(t, - uint64(txsPages*pageSize+additionalTxs-1), cursor.Data[0].ID) - - // Last ID of the first page - assert.Equal(t, - uint64((txsPages-1)*pageSize+additionalTxs), cursor.Data[len(cursor.Data)-1].ID) - } - - assert.Empty(t, cursor.Previous) - }) - - return nil - } -} - -func TestCursor(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - timestamp, err := time.Parse(time.RFC3339, "2023-01-01T00:00:00Z") - require.NoError(t, err) - for i := 0; i < 30; i++ { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: fmt.Sprintf("accounts:%02d", i), - Amount: core.NewMonetaryInt(1), - Asset: "USD", - }, - }, - Reference: fmt.Sprintf("ref:%02d", i), - Metadata: core.Metadata{"ref": "abc"}, - Timestamp: timestamp.Add(time.Duration(i) * time.Second), - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - rsp = internal.PostAccountMetadata(t, api, fmt.Sprintf("accounts:%02d", i), - core.Metadata{ - "foo": json.RawMessage(`"bar"`), - }) - require.Equal(t, http.StatusNoContent, rsp.Result().StatusCode) - } - - t.Run("GetAccounts", func(t *testing.T) { - httpResponse := 
internal.GetAccounts(api, url.Values{ - "after": []string{"accounts:15"}, - "address": []string{"acc.*"}, - "metadata[foo]": []string{"bar"}, - "balance": []string{"1"}, - controllers.QueryKeyBalanceOperator: []string{"gte"}, - controllers.QueryKeyPageSize: []string{"3"}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.Account](t, httpResponse.Body) - res, err := base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - `{"pageSize":3,"offset":3,"after":"accounts:15","address":"acc.*","metadata":{"foo":"bar"},"balance":"1","balanceOperator":"gte"}`, - string(res)) - - httpResponse = internal.GetAccounts(api, url.Values{ - controllers.QueryKeyCursor: []string{cursor.Next}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor = internal.DecodeCursorResponse[core.Account](t, httpResponse.Body) - res, err = base64.RawURLEncoding.DecodeString(cursor.Previous) - require.NoError(t, err) - require.Equal(t, - `{"pageSize":3,"offset":0,"after":"accounts:15","address":"acc.*","metadata":{"foo":"bar"},"balance":"1","balanceOperator":"gte"}`, - string(res)) - res, err = base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - `{"pageSize":3,"offset":6,"after":"accounts:15","address":"acc.*","metadata":{"foo":"bar"},"balance":"1","balanceOperator":"gte"}`, - string(res)) - }) - - t.Run("GetTransactions", func(t *testing.T) { - httpResponse := internal.GetTransactions(api, url.Values{ - "after": []string{"15"}, - "account": []string{"acc.*"}, - "source": []string{"world"}, - "destination": []string{"acc.*"}, - controllers.QueryKeyStartTime: []string{timestamp.Add(5 * time.Second).Format(time.RFC3339)}, - controllers.QueryKeyEndTime: []string{timestamp.Add(25 * time.Second).Format(time.RFC3339)}, - "metadata[ref]": []string{"abc"}, - 
controllers.QueryKeyPageSize: []string{"3"}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.Transaction](t, httpResponse.Body) - res, err := base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - `{"after":12,"account":"acc.*","source":"world","destination":"acc.*","startTime":"2023-01-01T00:00:05Z","endTime":"2023-01-01T00:00:25Z","metadata":{"ref":"abc"},"pageSize":3}`, - string(res)) - - httpResponse = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyCursor: []string{cursor.Next}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor = internal.DecodeCursorResponse[core.Transaction](t, httpResponse.Body) - res, err = base64.RawURLEncoding.DecodeString(cursor.Previous) - require.NoError(t, err) - require.Equal(t, - `{"after":15,"account":"acc.*","source":"world","destination":"acc.*","startTime":"2023-01-01T00:00:05Z","endTime":"2023-01-01T00:00:25Z","metadata":{"ref":"abc"},"pageSize":3}`, - string(res)) - res, err = base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - `{"after":9,"account":"acc.*","source":"world","destination":"acc.*","startTime":"2023-01-01T00:00:05Z","endTime":"2023-01-01T00:00:25Z","metadata":{"ref":"abc"},"pageSize":3}`, - string(res)) - }) - - t.Run("GetBalances", func(t *testing.T) { - httpResponse := internal.GetBalances(api, url.Values{ - "after": []string{"accounts:15"}, - "address": []string{"acc.*"}, - controllers.QueryKeyPageSize: []string{"3"}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.AccountsBalances](t, httpResponse.Body) - res, err := base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - 
`{"pageSize":3,"offset":3,"after":"accounts:15","address":"acc.*"}`, - string(res)) - - httpResponse = internal.GetBalances(api, url.Values{ - controllers.QueryKeyCursor: []string{cursor.Next}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor = internal.DecodeCursorResponse[core.AccountsBalances](t, httpResponse.Body) - res, err = base64.RawURLEncoding.DecodeString(cursor.Previous) - require.NoError(t, err) - require.Equal(t, - `{"pageSize":3,"offset":0,"after":"accounts:15","address":"acc.*"}`, - string(res)) - res, err = base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - `{"pageSize":3,"offset":6,"after":"accounts:15","address":"acc.*"}`, - string(res)) - }) - - t.Run("GetLogs", func(t *testing.T) { - httpResponse := internal.GetLedgerLogs(api, url.Values{ - "after": []string{"30"}, - controllers.QueryKeyStartTime: []string{timestamp.Add(5 * time.Second).Format(time.RFC3339)}, - controllers.QueryKeyEndTime: []string{timestamp.Add(25 * time.Second).Format(time.RFC3339)}, - controllers.QueryKeyPageSize: []string{"2"}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.Log](t, httpResponse.Body) - res, err := base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - `{"after":26,"pageSize":2,"startTime":"2023-01-01T00:00:05Z","endTime":"2023-01-01T00:00:25Z"}`, - string(res)) - - httpResponse = internal.GetLedgerLogs(api, url.Values{ - controllers.QueryKeyCursor: []string{cursor.Next}, - }) - assert.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor = internal.DecodeCursorResponse[core.Log](t, httpResponse.Body) - res, err = base64.RawURLEncoding.DecodeString(cursor.Previous) - require.NoError(t, err) - require.Equal(t, - 
`{"after":28,"pageSize":2,"startTime":"2023-01-01T00:00:05Z","endTime":"2023-01-01T00:00:25Z"}`, - string(res)) - res, err = base64.RawURLEncoding.DecodeString(cursor.Next) - require.NoError(t, err) - require.Equal(t, - `{"after":22,"pageSize":2,"startTime":"2023-01-01T00:00:05Z","endTime":"2023-01-01T00:00:25Z"}`, - string(res)) - }) - - return nil - }, - }) - })) -} diff --git a/pkg/api/controllers/query.go b/pkg/api/controllers/query.go deleted file mode 100644 index 40d15530b..000000000 --- a/pkg/api/controllers/query.go +++ /dev/null @@ -1,100 +0,0 @@ -package controllers - -import ( - "strconv" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/ledger" -) - -const ( - MaxPageSize = 1000 - DefaultPageSize = ledger.QueryDefaultPageSize - - QueryKeyCursor = "cursor" - // Deprecated - QueryKeyCursorDeprecated = "pagination_token" - - QueryKeyPageSize = "pageSize" - // Deprecated - QueryKeyPageSizeDeprecated = "page_size" - - QueryKeyBalanceOperator = "balanceOperator" - // Deprecated - QueryKeyBalanceOperatorDeprecated = "balance_operator" - - QueryKeyStartTime = "startTime" - // Deprecated - QueryKeyStartTimeDeprecated = "start_time" - - QueryKeyEndTime = "endTime" - // Deprecated - QueryKeyEndTimeDeprecated = "end_time" -) - -var ( - ErrInvalidPageSize = ledger.NewValidationError("invalid 'pageSize' query param") - // Deprecated - ErrInvalidPageSizeDeprecated = ledger.NewValidationError("invalid 'page_size' query param") - - ErrInvalidBalanceOperator = ledger.NewValidationError( - "invalid parameter 'balanceOperator', should be one of 'e, ne, gt, gte, lt, lte'") - // Deprecated - ErrInvalidBalanceOperatorDeprecated = ledger.NewValidationError( - "invalid parameter 'balance_operator', should be one of 'e, ne, gt, gte, lt, lte'") - - ErrInvalidStartTime = ledger.NewValidationError("invalid 'startTime' query param") - // Deprecated - ErrInvalidStartTimeDeprecated = ledger.NewValidationError("invalid 'start_time' query param") - - ErrInvalidEndTime = 
ledger.NewValidationError("invalid 'endTime' query param") - // Deprecated - ErrInvalidEndTimeDeprecated = ledger.NewValidationError("invalid 'end_time' query param") -) - -func getPageSize(c *gin.Context) (uint, error) { - pageSizeParam := c.Query(QueryKeyPageSize) - pageSizeParamDeprecated := c.Query(QueryKeyPageSizeDeprecated) - if pageSizeParam == "" && pageSizeParamDeprecated == "" { - return DefaultPageSize, nil - } - - var pageSize uint64 - var err error - if pageSizeParam != "" { - pageSize, err = strconv.ParseUint(pageSizeParam, 10, 32) - if err != nil { - return 0, ErrInvalidPageSize - } - } else if pageSizeParamDeprecated != "" { - pageSize, err = strconv.ParseUint(pageSizeParamDeprecated, 10, 32) - if err != nil { - return 0, ErrInvalidPageSizeDeprecated - } - } - - if pageSize > MaxPageSize { - return MaxPageSize, nil - } - - return uint(pageSize), nil -} - -func getBalanceOperator(c *gin.Context) (ledger.BalanceOperator, error) { - balanceOperator := ledger.DefaultBalanceOperator - balanceOperatorStr := c.Query(QueryKeyBalanceOperator) - balanceOperatorStrDeprecated := c.Query(QueryKeyBalanceOperatorDeprecated) - if balanceOperatorStr != "" { - var ok bool - if balanceOperator, ok = ledger.NewBalanceOperator(balanceOperatorStr); !ok { - return "", ErrInvalidBalanceOperator - } - } else if balanceOperatorStrDeprecated != "" { - var ok bool - if balanceOperator, ok = ledger.NewBalanceOperator(balanceOperatorStrDeprecated); !ok { - return "", ErrInvalidBalanceOperatorDeprecated - } - } - - return balanceOperator, nil -} diff --git a/pkg/api/controllers/script_controller.go b/pkg/api/controllers/script_controller.go deleted file mode 100644 index 687cc7c74..000000000 --- a/pkg/api/controllers/script_controller.go +++ /dev/null @@ -1,68 +0,0 @@ -package controllers - -import ( - "net/http" - "strings" - - "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/logging" - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" 
- "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" -) - -type ScriptResponse struct { - api.ErrorResponse - Transaction *core.ExpandedTransaction `json:"transaction,omitempty"` -} - -type ScriptController struct{} - -func NewScriptController() ScriptController { - return ScriptController{} -} - -func (ctl *ScriptController) PostScript(c *gin.Context) { - l, _ := c.Get("ledger") - - var script core.ScriptData - if err := c.ShouldBindJSON(&script); err != nil { - panic(err) - } - - value, ok := c.GetQuery("preview") - preview := ok && (strings.ToUpper(value) == "YES" || strings.ToUpper(value) == "TRUE" || value == "1") - - res := ScriptResponse{} - execRes, err := l.(*ledger.Ledger).ExecuteScript(c.Request.Context(), preview, script) - if err != nil { - var ( - code = apierrors.ErrInternal - message string - ) - switch e := err.(type) { - case *ledger.ScriptError: - code = e.Code - message = e.Message - case *ledger.ConflictError: - code = apierrors.ErrConflict - message = e.Error() - default: - logging.GetLogger(c.Request.Context()).Errorf( - "internal errors executing script: %s", err) - } - res.ErrorResponse = api.ErrorResponse{ - ErrorCode: code, - ErrorMessage: message, - ErrorCodeDeprecated: code, - ErrorMessageDeprecated: message, - } - if message != "" { - res.Details = apierrors.EncodeLink(message) - } - } - res.Transaction = &execRes - - c.JSON(http.StatusOK, res) -} diff --git a/pkg/api/controllers/script_controller_test.go b/pkg/api/controllers/script_controller_test.go deleted file mode 100644 index f2b33fe41..000000000 --- a/pkg/api/controllers/script_controller_test.go +++ /dev/null @@ -1,289 +0,0 @@ -package controllers_test - -import ( - "context" - "encoding/json" - "net/http" - "net/url" - "testing" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/api/controllers" - 
"github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestPostScript(t *testing.T) { - type testCase struct { - name string - script core.ScriptData - expectedResponse controllers.ScriptResponse - } - - testCases := []testCase{ - { - name: "nominal", - script: core.ScriptData{ - Script: core.Script{ - Plain: ` - send [COIN 100] ( - source = @world - destination = @centralbank - ) - send [COIN 100] ( - source = @centralbank - destination = @users:001 - )`, - }, - }, - }, - { - name: "failure with insufficient funds", - script: core.ScriptData{ - Script: core.Script{ - Plain: ` - send [COIN 100] ( - source = @centralbank - destination = @users:001 - )`, - }, - }, - expectedResponse: controllers.ScriptResponse{ - ErrorResponse: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrInsufficientFund, - ErrorMessage: "account had insufficient funds", - Details: apierrors.EncodeLink("account had insufficient funds"), - ErrorCodeDeprecated: apierrors.ErrInsufficientFund, - ErrorMessageDeprecated: "account had insufficient funds", - }, - }, - }, - { - name: "failure with metadata override", - script: core.ScriptData{ - Script: core.Script{ - Plain: ` - set_tx_meta("priority", "low") - - send [USD/2 99] ( - source=@world - destination=@user:001 - )`, - }, - Metadata: core.Metadata{ - "priority": json.RawMessage(`"high"`), - }, - }, - expectedResponse: controllers.ScriptResponse{ - ErrorResponse: sharedapi.ErrorResponse{ - ErrorCode: ledger.ScriptErrorMetadataOverride, - ErrorMessage: "cannot override metadata from script", - Details: apierrors.EncodeLink("cannot override metadata from script"), - ErrorCodeDeprecated: ledger.ScriptErrorMetadataOverride, - ErrorMessageDeprecated: "cannot override metadata from script", - }, - }, - }, - } - - internal.RunTest(t, 
fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - rsp := internal.PostScript(t, api, tc.script, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - res := controllers.ScriptResponse{} - require.NoError(t, json.Unmarshal(rsp.Body.Bytes(), &res)) - - res.Transaction = nil - require.EqualValues(t, tc.expectedResponse, res) - }) - } - - return nil - }, - }) - })) -} - -func TestPostScriptPreview(t *testing.T) { - script := ` - send [COIN 100] ( - source = @world - destination = @centralbank - )` - - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - store := internal.GetLedgerStore(t, driver, ctx) - - t.Run("true", func(t *testing.T) { - values := url.Values{} - values.Set("preview", "true") - - rsp := internal.PostScript(t, api, core.ScriptData{ - Script: core.Script{Plain: script}, - }, values) - - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - res := controllers.ScriptResponse{} - internal.Decode(t, rsp.Body, &res) - - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 0) - }) - - t.Run("false", func(t *testing.T) { - values := url.Values{} - values.Set("preview", "false") - - rsp := internal.PostScript(t, api, core.ScriptData{ - Script: core.Script{Plain: script}, - }, values) - - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - res := controllers.ScriptResponse{} - internal.Decode(t, rsp.Body, &res) - - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 1) - }) - - return nil - }, - }) - })) -} - -func TestPostScriptWithReference(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api 
*api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - reference := "order_1234" - rsp := internal.PostScript(t, api, core.ScriptData{ - Script: core.Script{ - Plain: ` - send [COIN 100] ( - source = @world - destination = @centralbank - )`}, - Reference: reference, - }, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - res := controllers.ScriptResponse{} - require.NoError(t, json.Unmarshal(rsp.Body.Bytes(), &res)) - require.Equal(t, reference, res.Transaction.Reference) - - store := internal.GetLedgerStore(t, driver, ctx) - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 1) - require.Equal(t, reference, cursor.Data[0].Reference) - - return nil - }, - }) - })) -} - -func TestPostScriptConflict(t *testing.T) { - script := ` - send [COIN 100] ( - source = @world - destination = @centralbank - )` - - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - t.Run("first should succeed", func(t *testing.T) { - rsp := internal.PostScript(t, api, core.ScriptData{ - Script: core.Script{ - Plain: script, - }, - Reference: "1234", - }, url.Values{}) - - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - res := controllers.ScriptResponse{} - internal.Decode(t, rsp.Body, &res) - assert.Equal(t, "", res.ErrorCode) - assert.Equal(t, "", res.ErrorMessage) - assert.Equal(t, "", res.ErrorCodeDeprecated) - assert.Equal(t, "", res.ErrorMessageDeprecated) - assert.NotNil(t, res.Transaction) - }) - - t.Run("second should fail", func(t *testing.T) { - rsp := internal.PostScript(t, api, core.ScriptData{ - Script: core.Script{ - Plain: script, - }, - Reference: "1234", - }, url.Values{}) - - assert.Equal(t, http.StatusOK, rsp.Result().StatusCode) - res := controllers.ScriptResponse{} - 
internal.Decode(t, rsp.Body, &res) - assert.Equal(t, apierrors.ErrConflict, res.ErrorCode) - assert.Equal(t, "conflict error on reference", res.ErrorMessage) - assert.Equal(t, apierrors.ErrConflict, res.ErrorCodeDeprecated) - assert.Equal(t, "conflict error on reference", res.ErrorMessageDeprecated) - }) - - return nil - }, - }) - })) -} - -func TestPostScriptWithSetAccountMeta(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostScript(t, api, core.ScriptData{ - Script: core.Script{ - Plain: ` - send [COIN 100] ( - source = @world - destination = @centralbank - ) - set_account_meta(@centralbank, "fees", "15 percent")`}, - }, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - res := controllers.ScriptResponse{} - require.NoError(t, json.Unmarshal(rsp.Body.Bytes(), &res)) - require.Equal(t, core.Metadata{}, res.Transaction.Metadata) - - store := internal.GetLedgerStore(t, driver, ctx) - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 1) - - acc, err := store.GetAccount(ctx, "centralbank") - require.NoError(t, err) - require.Equal(t, core.Metadata{ - "fees": map[string]any{ - "type": "string", - "value": "15 percent", - }}, acc.Metadata) - - return nil - }, - }) - })) -} diff --git a/pkg/api/controllers/transaction_controller.go b/pkg/api/controllers/transaction_controller.go deleted file mode 100644 index 44e2d565a..000000000 --- a/pkg/api/controllers/transaction_controller.go +++ /dev/null @@ -1,401 +0,0 @@ -package controllers - -import ( - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "strconv" - "strings" - "time" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - 
"github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pkg/errors" -) - -type TransactionController struct{} - -func NewTransactionController() TransactionController { - return TransactionController{} -} - -func (ctl *TransactionController) CountTransactions(c *gin.Context) { - l, _ := c.Get("ledger") - - var startTimeParsed, endTimeParsed time.Time - var err error - if c.Query(QueryKeyStartTime) != "" { - startTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyStartTime)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidStartTime) - return - } - } - if c.Query(QueryKeyStartTimeDeprecated) != "" { - startTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyStartTimeDeprecated)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidStartTimeDeprecated) - return - } - } - - if c.Query(QueryKeyEndTime) != "" { - endTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyEndTime)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidEndTime) - return - } - } - if c.Query(QueryKeyEndTimeDeprecated) != "" { - endTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyEndTimeDeprecated)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidEndTimeDeprecated) - return - } - } - - txQuery := ledger.NewTransactionsQuery(). - WithReferenceFilter(c.Query("reference")). - WithAccountFilter(c.Query("account")). - WithSourceFilter(c.Query("source")). - WithDestinationFilter(c.Query("destination")). - WithStartTimeFilter(startTimeParsed). 
- WithEndTimeFilter(endTimeParsed) - - count, err := l.(*ledger.Ledger).CountTransactions(c.Request.Context(), *txQuery) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - c.Header("Count", fmt.Sprint(count)) -} - -func (ctl *TransactionController) GetTransactions(c *gin.Context) { - l, _ := c.Get("ledger") - - txQuery := ledger.NewTransactionsQuery() - - if c.Query(QueryKeyCursor) != "" { - if c.Query("after") != "" || - c.Query("reference") != "" || - c.Query("account") != "" || - c.Query("source") != "" || - c.Query("destination") != "" || - c.Query(QueryKeyStartTime) != "" || - c.Query(QueryKeyStartTimeDeprecated) != "" || - c.Query(QueryKeyEndTime) != "" || - c.Query(QueryKeyEndTimeDeprecated) != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursor))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursor)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - token := sqlstorage.TxsPaginationToken{} - if err = json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursor))) - return - } - - txQuery = txQuery. - WithAfterTxID(token.AfterTxID). - WithReferenceFilter(token.ReferenceFilter). - WithAccountFilter(token.AccountFilter). - WithSourceFilter(token.SourceFilter). - WithDestinationFilter(token.DestinationFilter). - WithStartTimeFilter(token.StartTime). - WithEndTimeFilter(token.EndTime). - WithMetadataFilter(token.MetadataFilter). 
- WithPageSize(token.PageSize) - - } else if c.Query(QueryKeyCursorDeprecated) != "" { - if c.Query("after") != "" || - c.Query("reference") != "" || - c.Query("account") != "" || - c.Query("source") != "" || - c.Query("destination") != "" || - c.Query(QueryKeyStartTime) != "" || - c.Query(QueryKeyStartTimeDeprecated) != "" || - c.Query(QueryKeyEndTime) != "" || - c.Query(QueryKeyEndTimeDeprecated) != "" || - c.Query(QueryKeyPageSize) != "" || - c.Query(QueryKeyPageSizeDeprecated) != "" { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("no other query params can be set with '%s'", QueryKeyCursorDeprecated))) - return - } - - res, err := base64.RawURLEncoding.DecodeString(c.Query(QueryKeyCursorDeprecated)) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - token := sqlstorage.TxsPaginationToken{} - if err = json.Unmarshal(res, &token); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - fmt.Sprintf("invalid '%s' query param", QueryKeyCursorDeprecated))) - return - } - - txQuery = txQuery. - WithAfterTxID(token.AfterTxID). - WithReferenceFilter(token.ReferenceFilter). - WithAccountFilter(token.AccountFilter). - WithSourceFilter(token.SourceFilter). - WithDestinationFilter(token.DestinationFilter). - WithStartTimeFilter(token.StartTime). - WithEndTimeFilter(token.EndTime). - WithMetadataFilter(token.MetadataFilter). 
- WithPageSize(token.PageSize) - - } else { - var err error - var afterTxIDParsed uint64 - if c.Query("after") != "" { - afterTxIDParsed, err = strconv.ParseUint(c.Query("after"), 10, 64) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError( - "invalid 'after' query param")) - return - } - } - - var startTimeParsed, endTimeParsed time.Time - if c.Query(QueryKeyStartTime) != "" { - startTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyStartTime)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidStartTime) - return - } - } - if c.Query(QueryKeyStartTimeDeprecated) != "" { - startTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyStartTimeDeprecated)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidStartTimeDeprecated) - return - } - } - - if c.Query(QueryKeyEndTime) != "" { - endTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyEndTime)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidEndTime) - return - } - } - if c.Query(QueryKeyEndTimeDeprecated) != "" { - endTimeParsed, err = time.Parse(time.RFC3339, c.Query(QueryKeyEndTimeDeprecated)) - if err != nil { - apierrors.ResponseError(c, ErrInvalidEndTimeDeprecated) - return - } - } - - pageSize, err := getPageSize(c) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - txQuery = txQuery. - WithAfterTxID(afterTxIDParsed). - WithReferenceFilter(c.Query("reference")). - WithAccountFilter(c.Query("account")). - WithSourceFilter(c.Query("source")). - WithDestinationFilter(c.Query("destination")). - WithStartTimeFilter(startTimeParsed). - WithEndTimeFilter(endTimeParsed). - WithMetadataFilter(c.QueryMap("metadata")). 
- WithPageSize(pageSize) - } - - cursor, err := l.(*ledger.Ledger).GetTransactions(c.Request.Context(), *txQuery) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithCursor[core.ExpandedTransaction](c, http.StatusOK, cursor) -} - -type PostTransaction struct { - Postings core.Postings `json:"postings"` - Script core.Script `json:"script"` - Timestamp time.Time `json:"timestamp"` - Reference string `json:"reference"` - Metadata core.Metadata `json:"metadata" swaggertype:"object"` -} - -func (ctl *TransactionController) PostTransaction(c *gin.Context) { - l, _ := c.Get("ledger") - - value, ok := c.GetQuery("preview") - preview := ok && - (strings.ToUpper(value) == "YES" || strings.ToUpper(value) == "TRUE" || value == "1") - - payload := PostTransaction{} - if err := c.ShouldBindJSON(&payload); err != nil { - apierrors.ResponseError(c, - ledger.NewValidationError("invalid transaction format")) - return - } - - if len(payload.Postings) > 0 && payload.Script.Plain != "" || - len(payload.Postings) == 0 && payload.Script.Plain == "" { - apierrors.ResponseError(c, ledger.NewValidationError( - "invalid payload: should contain either postings or script")) - return - } else if len(payload.Postings) > 0 { - if i, err := payload.Postings.Validate(); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError(errors.Wrap(err, - fmt.Sprintf("invalid posting %d", i)).Error())) - return - } - txData := core.TransactionData{ - Postings: payload.Postings, - Timestamp: payload.Timestamp, - Reference: payload.Reference, - Metadata: payload.Metadata, - } - res, err := l.(*ledger.Ledger).ExecuteTxsData(c.Request.Context(), preview, txData) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[[]core.ExpandedTransaction](c, http.StatusOK, res) - return - } - - script := core.ScriptData{ - Script: payload.Script, - Timestamp: payload.Timestamp, - Reference: payload.Reference, - Metadata: payload.Metadata, - } - res, err := 
l.(*ledger.Ledger).ExecuteScript(c.Request.Context(), preview, script) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[[]core.ExpandedTransaction](c, http.StatusOK, []core.ExpandedTransaction{res}) -} - -func (ctl *TransactionController) GetTransaction(c *gin.Context) { - l, _ := c.Get("ledger") - - txId, err := strconv.ParseUint(c.Param("txid"), 10, 64) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError("invalid transaction ID")) - return - } - - tx, err := l.(*ledger.Ledger).GetTransaction(c.Request.Context(), txId) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[*core.ExpandedTransaction](c, http.StatusOK, tx) -} - -func (ctl *TransactionController) RevertTransaction(c *gin.Context) { - l, _ := c.Get("ledger") - - txId, err := strconv.ParseUint(c.Param("txid"), 10, 64) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError("invalid transaction ID")) - return - } - - tx, err := l.(*ledger.Ledger).RevertTransaction(c.Request.Context(), txId) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[*core.ExpandedTransaction](c, http.StatusOK, tx) -} - -func (ctl *TransactionController) PostTransactionMetadata(c *gin.Context) { - l, _ := c.Get("ledger") - - var m core.Metadata - if err := c.ShouldBindJSON(&m); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError("invalid metadata format")) - return - } - - txId, err := strconv.ParseUint(c.Param("txid"), 10, 64) - if err != nil { - apierrors.ResponseError(c, ledger.NewValidationError("invalid transaction ID")) - return - } - - _, err = l.(*ledger.Ledger).GetTransaction(c.Request.Context(), txId) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - if err := l.(*ledger.Ledger).SaveMeta(c.Request.Context(), - core.MetaTargetTypeTransaction, txId, m); err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithNoContent(c) -} - -func 
(ctl *TransactionController) PostTransactionsBatch(c *gin.Context) { - l, _ := c.Get("ledger") - - var txs core.Transactions - if err := c.ShouldBindJSON(&txs); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError("invalid transactions format")) - return - } - - if len(txs.Transactions) == 0 { - apierrors.ResponseError(c, ledger.NewValidationError("no transaction to insert")) - return - } - - for i, tx := range txs.Transactions { - if len(tx.Postings) == 0 { - apierrors.ResponseError(c, ledger.NewValidationError(errors.New(fmt.Sprintf( - "invalid transaction %d: no postings", i)).Error())) - return - } - if j, err := tx.Postings.Validate(); err != nil { - apierrors.ResponseError(c, ledger.NewValidationError(errors.Wrap(err, - fmt.Sprintf("invalid transaction %d: posting %d", i, j)).Error())) - return - } - } - - res, err := l.(*ledger.Ledger).ExecuteTxsData(c.Request.Context(), false, txs.Transactions...) - if err != nil { - apierrors.ResponseError(c, err) - return - } - - respondWithData[[]core.ExpandedTransaction](c, http.StatusOK, res) -} diff --git a/pkg/api/controllers/transaction_controller_test.go b/pkg/api/controllers/transaction_controller_test.go deleted file mode 100644 index ecbe9b9da..000000000 --- a/pkg/api/controllers/transaction_controller_test.go +++ /dev/null @@ -1,2348 +0,0 @@ -package controllers_test - -import ( - "context" - "database/sql" - "encoding/base64" - "encoding/json" - "fmt" - "net/http" - "net/url" - "os" - "strconv" - "testing" - "time" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/numary/ledger/internal/pgtesting" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage" - 
"github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestPostTransactions(t *testing.T) { - type testCase struct { - name string - payload []controllers.PostTransaction - expectedStatusCode int - expectedRes sharedapi.BaseResponse[[]core.ExpandedTransaction] - expectedErr sharedapi.ErrorResponse - } - - var timestamp1 = time.Now().Add(1 * time.Minute).Truncate(time.Second) - var timestamp2 = time.Now().Add(2 * time.Minute).Truncate(time.Second) - var timestamp3 = time.Now().Add(3 * time.Minute).Truncate(time.Second) - - testCases := []testCase{ - { - name: "postings nominal", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "USB", - }, - }, - }, - }, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "USB", - }, - }, - }, - }, - }}, - }, - }, - { - name: "postings asset with digit", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "US1234D", - }, - }, - }, - }, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "US1234D", - }, - }, - }, - }, - }}, - }, - }, - { - name: "script nominal", - payload: []controllers.PostTransaction{{ - Script: 
core.Script{ - Plain: ` - vars { - account $acc - } - send [COIN 100] ( - source = @world - destination = @centralbank - ) - send [COIN 100] ( - source = @centralbank - destination = $acc - )`, - Vars: map[string]json.RawMessage{ - "acc": json.RawMessage(`"users:001"`), - }, - }, - }}, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "centralbank", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - { - Source: "centralbank", - Destination: "users:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - }, - }, - }}, - }, - }, - { - name: "script with set_account_meta", - payload: []controllers.PostTransaction{{ - Script: core.Script{ - Plain: ` - send [TOK 1000] ( - source = @world - destination = @bar - ) - set_account_meta(@bar, "foo", "bar") - `, - }, - }}, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - }, - }, - }}, - }, - }, - { - name: "no postings or script", - payload: []controllers.PostTransaction{ - {}, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid payload: should contain either postings or script", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid payload: should contain either postings or script", - }, - }, - { - name: "postings negative amount", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: 
"central_bank", - Amount: core.NewMonetaryInt(-1000), - Asset: "USB", - }, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid posting 0: negative amount", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid posting 0: negative amount", - }, - }, - { - name: "postings wrong asset with symbol", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "@TOK", - }, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid posting 0: invalid asset", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid posting 0: invalid asset", - }, - }, - { - name: "postings wrong asset with digit as first char", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "1TOK", - }, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid posting 0: invalid asset", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid posting 0: invalid asset", - }, - }, - { - name: "postings bad address", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "#fake", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid posting 0: invalid destination address", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid posting 
0: invalid destination address", - }, - }, - { - name: "postings insufficient funds", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "foo", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrInsufficientFund, - ErrorMessage: "balance.insufficient.TOK", - ErrorCodeDeprecated: apierrors.ErrInsufficientFund, - ErrorMessageDeprecated: "balance.insufficient.TOK", - }, - }, - { - name: "postings reference conflict", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - Reference: "ref", - }, - { - Postings: core.Postings{ - { - Source: "world", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - Reference: "ref", - }, - }, - expectedStatusCode: http.StatusConflict, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrConflict, - ErrorMessage: "conflict error on reference", - ErrorCodeDeprecated: apierrors.ErrConflict, - ErrorMessageDeprecated: "conflict error on reference", - }, - }, - { - name: "script failure with insufficient funds", - payload: []controllers.PostTransaction{{ - Script: core.Script{ - Plain: ` - send [COIN 100] ( - source = @centralbank - destination = @users:001 - )`, - }, - }}, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrInsufficientFund, - ErrorMessage: "[INSUFFICIENT_FUND] account had insufficient funds", - Details: apierrors.EncodeLink("account had insufficient funds"), - ErrorCodeDeprecated: apierrors.ErrInsufficientFund, - ErrorMessageDeprecated: "[INSUFFICIENT_FUND] account had insufficient funds", - }, - }, - { - name: "script failure with metadata override", - payload: []controllers.PostTransaction{{ - Script: 
core.Script{ - Plain: ` - set_tx_meta("priority", "low") - - send [USD/2 99] ( - source=@world - destination=@user:001 - )`, - }, - Metadata: core.Metadata{ - "priority": json.RawMessage(`"high"`), - }, - }}, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrScriptMetadataOverride, - ErrorMessage: "[METADATA_OVERRIDE] cannot override metadata from script", - Details: apierrors.EncodeLink("cannot override metadata from script"), - ErrorCodeDeprecated: apierrors.ErrScriptMetadataOverride, - ErrorMessageDeprecated: "[METADATA_OVERRIDE] cannot override metadata from script", - }, - }, - { - name: "script failure with no postings", - payload: []controllers.PostTransaction{{ - Script: core.Script{ - Plain: ` - set_account_meta(@bar, "foo", "bar") - `, - }, - }}, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "transaction has no postings", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "transaction has no postings", - }, - }, - { - name: "script failure with invalid account variable", - payload: []controllers.PostTransaction{{ - Script: core.Script{ - Plain: ` - vars { - account $acc - } - send [USD/2 99] ( - source = @world - destination = $acc - ) - `, - Vars: map[string]json.RawMessage{ - "acc": json.RawMessage(`"invalid-acc"`), - }, - }, - }}, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrScriptCompilationFailed, - ErrorMessage: "[COMPILATION_FAILED] could not set variables: invalid JSON value for variable $acc of type account: value invalid-acc: accounts should respect pattern ^[a-zA-Z_]+[a-zA-Z0-9_:]*$", - Details: apierrors.EncodeLink("could not set variables: invalid JSON value for variable $acc of type account: value invalid-acc: accounts should respect pattern ^[a-zA-Z_]+[a-zA-Z0-9_:]*$"), - ErrorCodeDeprecated: 
apierrors.ErrScriptCompilationFailed, - ErrorMessageDeprecated: "[COMPILATION_FAILED] could not set variables: invalid JSON value for variable $acc of type account: value invalid-acc: accounts should respect pattern ^[a-zA-Z_]+[a-zA-Z0-9_:]*$", - }, - }, - { - name: "script failure with invalid monetary variable", - payload: []controllers.PostTransaction{{ - Script: core.Script{ - Plain: ` - vars { - monetary $mon - } - send $mon ( - source = @world - destination = @alice - ) - `, - Vars: map[string]json.RawMessage{ - "mon": json.RawMessage(`{"asset": "COIN","amount":-1}`), - }, - }, - }}, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrScriptCompilationFailed, - ErrorMessage: "[COMPILATION_FAILED] could not set variables: invalid JSON value for variable $mon of type monetary: value [COIN -1]: negative amount", - Details: apierrors.EncodeLink("could not set variables: invalid JSON value for variable $mon of type monetary: value [COIN -1]: negative amount"), - ErrorCodeDeprecated: apierrors.ErrScriptCompilationFailed, - ErrorMessageDeprecated: "[COMPILATION_FAILED] could not set variables: invalid JSON value for variable $mon of type monetary: value [COIN -1]: negative amount", - }, - }, - { - name: "postings and script", - payload: []controllers.PostTransaction{{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - Script: core.Script{ - Plain: ` - send [COIN 100] ( - source = @world - destination = @bob - )`, - }}, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid payload: should contain either postings or script", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid payload: should contain either postings or script", - }, - }, - { - name: "postings with specified timestamp", - payload: 
[]controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - Timestamp: timestamp2, - }, - }, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - }, - }, - }}, - }, - }, - { - name: "script with specified timestamp", - payload: []controllers.PostTransaction{{ - Script: core.Script{ - Plain: ` - send [TOK 1000] ( - source = @world - destination = @bar - ) - `, - }, - Timestamp: timestamp3, - }}, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - }, - }, - }}, - }, - }, - { - name: "postings with specified timestamp prior to last tx", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "bar", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - Timestamp: timestamp1, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "cannot pass a timestamp prior to the last transaction:", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "cannot pass a timestamp prior to the last transaction:", - }, - }, - { - name: "script with specified timestamp prior to last tx", - payload: []controllers.PostTransaction{ - { - Script: core.Script{ - Plain: ` - send [COIN 100] ( - source = @world - 
destination = @bob - )`, - }, - Timestamp: timestamp1, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErr: sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "cannot pass a timestamp prior to the last transaction:", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "cannot pass a timestamp prior to the last transaction:", - }, - }, - { - name: "mapping with postings", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "negativebalances:bar", - Destination: "world", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - Timestamp: timestamp3, - }, - }, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "negativebalances:bar", - Destination: "world", - Amount: core.NewMonetaryInt(1000), - Asset: "TOK", - }, - }, - Timestamp: timestamp3, - }, - }, - }}, - }, - }, - { - name: "short asset", - payload: []controllers.PostTransaction{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "bank", - Amount: core.NewMonetaryInt(1000), - Asset: "F/9", - }, - }, - Timestamp: timestamp3, - }, - }, - expectedStatusCode: http.StatusOK, - expectedRes: sharedapi.BaseResponse[[]core.ExpandedTransaction]{ - Data: &[]core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bank", - Amount: core.NewMonetaryInt(1000), - Asset: "F/9", - }, - }, - Timestamp: timestamp3, - }, - }, - }}, - }, - }, - } - - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - internal.SaveMapping(t, api, core.Mapping{ - Contracts: []core.Contract{{ - Name: "negative balances", - Account: 
"negativebalances:*", - Expr: core.ExprOr{ - &core.ExprGte{ - Op1: core.VariableExpr{Name: "balance"}, - Op2: core.ConstantExpr{Value: 0}, - }, - &core.ExprLte{ - Op1: core.VariableExpr{Name: "balance"}, - Op2: core.ConstantExpr{Value: 0}, - }, - }, - }}, - }) - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - for i := 0; i < len(tc.payload)-1; i++ { - rsp := internal.PostTransaction(t, api, tc.payload[i], false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - if !tc.payload[i].Timestamp.IsZero() { - require.Equal(t, tc.payload[i].Timestamp.UTC(), txs[0].Timestamp) - } - } - tcIndex := 0 - if len(tc.payload) > 0 { - tcIndex = len(tc.payload) - 1 - } - rsp := internal.PostTransaction(t, api, tc.payload[tcIndex], false) - require.Equal(t, tc.expectedStatusCode, rsp.Result().StatusCode, rsp.Body.String()) - - if tc.expectedStatusCode != http.StatusOK { - actualErr := sharedapi.ErrorResponse{} - if internal.Decode(t, rsp.Body, &actualErr) { - require.Equal(t, tc.expectedErr.ErrorCode, actualErr.ErrorCode, actualErr.ErrorMessage) - require.Contains(t, actualErr.ErrorMessage, tc.expectedErr.ErrorMessage) - require.Equal(t, tc.expectedErr.ErrorCodeDeprecated, actualErr.ErrorCodeDeprecated, actualErr.ErrorMessageDeprecated) - require.Contains(t, actualErr.ErrorMessageDeprecated, tc.expectedErr.ErrorMessageDeprecated) - require.Equal(t, tc.expectedErr.Details, actualErr.Details) - } - } else { - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - require.Equal(t, (*tc.expectedRes.Data)[0].Postings, txs[0].Postings) - require.Equal(t, len((*tc.expectedRes.Data)[0].Metadata), len(txs[0].Metadata)) - if !tc.payload[tcIndex].Timestamp.IsZero() { - require.Equal(t, tc.payload[tcIndex].Timestamp.UTC(), txs[0].Timestamp) - } - } - }) - } - 
- return nil - }, - }) - })) -} - -func TestPostTransactionsPreview(t *testing.T) { - script := ` - send [COIN 100] ( - source = @world - destination = @centralbank - )` - - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - store := internal.GetLedgerStore(t, driver, ctx) - - t.Run("postings true", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, - }, true) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 0) - }) - - t.Run("script true", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Script: core.Script{ - Plain: script, - }, - }, true) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 0) - }) - - t.Run("postings false", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, - Reference: "refPostings", - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 
1) - - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 1) - require.Equal(t, "refPostings", cursor.Data[0].Reference) - }) - - t.Run("script false", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Script: core.Script{ - Plain: script, - }, - Reference: "refScript", - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - - cursor, err := store.GetTransactions(ctx, *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 2) - require.Equal(t, "refScript", cursor.Data[0].Reference) - }) - - return nil - }, - }) - })) -} - -func TestPostTransactionsOverdraft(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - t.Run("simple", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Script: core.Script{ - Plain: ` - send [USD/2 100] ( - source = @users:42 allowing unbounded overdraft - destination = @users:43 - ) - `, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - }) - - t.Run("complex", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Script: core.Script{ - Plain: ` - send [USD/2 100] ( - source = @world - destination = @users:42:main - ) - - send [USD/2 500] ( - source = { - @users:42:main - @users:42:overdraft allowing overdraft up to [USD/2 200] - @users:42:credit allowing overdraft up to [USD/2 1000] - } - destination = @users:100 - ) - `, - }, - }, false) - require.Equal(t, http.StatusOK, 
rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - }) - - return nil - }, - }) - })) -} - -func TestPostTransactionInvalidBody(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - t.Run("no JSON", func(t *testing.T) { - rsp := internal.NewPostOnLedger(t, api, "/transactions", "invalid") - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid transaction format", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid transaction format", - }, err) - }) - - t.Run("JSON without postings", func(t *testing.T) { - rsp := internal.NewPostOnLedger(t, api, "/transactions", core.Account{Address: "addr"}) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid payload: should contain either postings or script", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid payload: should contain either postings or script", - }, err) - }) - - return nil - }, - }) - })) -} - -func TestPostTransactionMetadata(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, - }, false) - require.Equal(t, 
http.StatusOK, rsp.Result().StatusCode) - - t.Run("valid", func(t *testing.T) { - rsp = internal.PostTransactionMetadata(t, api, 0, core.Metadata{ - "foo": json.RawMessage(`"bar"`), - }) - require.Equal(t, http.StatusNoContent, rsp.Result().StatusCode) - - rsp = internal.GetTransaction(api, 0) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - ret, _ := internal.DecodeSingleResponse[core.ExpandedTransaction](t, rsp.Body) - require.EqualValues(t, core.Metadata{ - "foo": "bar", - }, ret.Metadata) - }) - - t.Run("different metadata on same key should replace it", func(t *testing.T) { - rsp = internal.PostTransactionMetadata(t, api, 0, core.Metadata{ - "foo": "baz", - }) - require.Equal(t, http.StatusNoContent, rsp.Result().StatusCode) - - rsp = internal.GetTransaction(api, 0) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - ret, _ := internal.DecodeSingleResponse[core.ExpandedTransaction](t, rsp.Body) - require.EqualValues(t, core.Metadata{ - "foo": "baz", - }, ret.Metadata) - }) - - t.Run("transaction not found", func(t *testing.T) { - rsp = internal.PostTransactionMetadata(t, api, 42, core.Metadata{ - "foo": "baz", - }) - require.Equal(t, http.StatusNotFound, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrNotFound, - ErrorMessage: "transaction not found", - ErrorCodeDeprecated: apierrors.ErrNotFound, - ErrorMessageDeprecated: "transaction not found", - }, err) - }) - - t.Run("no JSON", func(t *testing.T) { - rsp = internal.NewPostOnLedger(t, api, "/transactions/0/metadata", "invalid") - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid metadata format", - ErrorCodeDeprecated: apierrors.ErrValidation, - 
ErrorMessageDeprecated: "invalid metadata format", - }, err) - }) - - t.Run("invalid txid", func(t *testing.T) { - rsp = internal.NewPostOnLedger(t, api, "/transactions/invalid/metadata", core.Metadata{ - "foo": json.RawMessage(`"bar"`), - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid transaction ID", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid transaction ID", - }, err) - }) - - return nil - }, - }) - })) -} - -func TestGetTransaction(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, - Reference: "ref", - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - t.Run("valid txid", func(t *testing.T) { - rsp = internal.GetTransaction(api, 0) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - ret, _ := internal.DecodeSingleResponse[core.ExpandedTransaction](t, rsp.Body) - require.EqualValues(t, core.Postings{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, ret.Postings) - require.EqualValues(t, 0, ret.ID) - require.EqualValues(t, core.Metadata{}, ret.Metadata) - require.EqualValues(t, "ref", ret.Reference) - require.NotEmpty(t, ret.Timestamp) - require.EqualValues(t, core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "central_bank": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: 
core.NewMonetaryInt(0), - }, - }, - }, ret.PreCommitVolumes) - require.EqualValues(t, core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(1000), - }, - }, - "central_bank": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(1000), - Output: core.NewMonetaryInt(0), - }, - }, - }, ret.PostCommitVolumes) - }) - - t.Run("unknown txid", func(t *testing.T) { - rsp = internal.GetTransaction(api, 42) - require.Equal(t, http.StatusNotFound, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrNotFound, - ErrorMessage: "transaction not found", - ErrorCodeDeprecated: apierrors.ErrNotFound, - ErrorMessageDeprecated: "transaction not found", - }, err) - }) - - t.Run("invalid txid", func(t *testing.T) { - rsp = internal.NewGetOnLedger(api, "/transactions/invalid") - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid transaction ID", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid transaction ID", - }, err) - }) - - return nil - }, - }) - })) -} - -func TestGetTransactions(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - now := time.Now().UTC() - tx1 := core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank1", - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, - Reference: "ref:001", - Timestamp: now.Add(-3 * time.Hour), - }, - }, - } - tx2 := 
core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 1, - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "central_bank2", - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, - Metadata: core.Metadata{ - "foo": "bar", - }, - Reference: "ref:002", - Timestamp: now.Add(-2 * time.Hour), - }, - }, - } - tx3 := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 2, - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "central_bank1", - Destination: "alice", - Amount: core.NewMonetaryInt(10), - Asset: "USD", - }, - }, - Reference: "ref:003", - Metadata: core.Metadata{ - "priority": "high", - }, - Timestamp: now.Add(-1 * time.Hour), - }, - }, - } - store := internal.GetLedgerStore(t, driver, ctx) - err := store.Commit(context.Background(), tx1, tx2, tx3) - require.NoError(t, err) - - rsp := internal.CountTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - require.Equal(t, "3", rsp.Header().Get("Count")) - - var tx1Timestamp, tx2Timestamp time.Time - t.Run("all", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // all transactions - require.Len(t, cursor.Data, 3) - require.Equal(t, cursor.Data[0].ID, uint64(2)) - require.Equal(t, cursor.Data[1].ID, uint64(1)) - require.Equal(t, cursor.Data[2].ID, uint64(0)) - - tx1Timestamp = cursor.Data[1].Timestamp - tx2Timestamp = cursor.Data[0].Timestamp - }) - - t.Run("metadata", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "metadata[priority]": []string{"high"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - - require.Len(t, cursor.Data, 1) - require.Equal(t, cursor.Data[0].ID, tx3.ID) 
- }) - - t.Run("after", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "after": []string{"1"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // 1 transaction: txid 0 - require.Len(t, cursor.Data, 1) - require.Equal(t, cursor.Data[0].ID, uint64(0)) - }) - - t.Run("invalid after", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "after": []string{"invalid"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid 'after' query param", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid 'after' query param", - }, err) - }) - - t.Run("reference", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "reference": []string{"ref:001"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // 1 transaction: txid 0 - require.Len(t, cursor.Data, 1) - require.Equal(t, cursor.Data[0].ID, uint64(0)) - }) - - t.Run("destination", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "destination": []string{"central_bank1"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // 1 transaction: txid 0 - require.Len(t, cursor.Data, 1) - require.Equal(t, cursor.Data[0].ID, uint64(0)) - }) - - t.Run("source", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "source": []string{"world"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // 2 transactions: txid 0 and 
txid 1 - require.Len(t, cursor.Data, 2) - require.Equal(t, cursor.Data[0].ID, uint64(1)) - require.Equal(t, cursor.Data[1].ID, uint64(0)) - }) - - t.Run("account", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "account": []string{"world"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // 2 transactions: txid 0 and txid 1 - require.Len(t, cursor.Data, 2) - require.Equal(t, cursor.Data[0].ID, uint64(1)) - require.Equal(t, cursor.Data[1].ID, uint64(0)) - }) - - t.Run("account no result", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "account": []string{"central"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - require.Len(t, cursor.Data, 0) - }) - - t.Run("account regex expr", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - "account": []string{"central.*"}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - require.Len(t, cursor.Data, 3) - }) - - t.Run("time range", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyStartTime: []string{tx1Timestamp.Format(time.RFC3339)}, - controllers.QueryKeyEndTime: []string{tx2Timestamp.Format(time.RFC3339)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // 1 transaction: txid 1 - require.Len(t, cursor.Data, 1) - - rsp = internal.CountTransactions(api, url.Values{ - controllers.QueryKeyStartTime: []string{tx1Timestamp.Format(time.RFC3339)}, - controllers.QueryKeyEndTime: []string{tx2Timestamp.Format(time.RFC3339)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - require.Equal(t, "1", 
rsp.Header().Get("Count")) - }) - - t.Run("only start time", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyStartTime: []string{time.Now().Add(time.Second).Format(time.RFC3339)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // no transaction - require.Len(t, cursor.Data, 0) - }) - - t.Run("only end time", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyEndTime: []string{time.Now().Add(time.Second).Format(time.RFC3339)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - // all transactions - require.Len(t, cursor.Data, 3) - }) - - t.Run("invalid start time", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyStartTime: []string{"invalid time"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: controllers.ErrInvalidStartTime.Error(), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: controllers.ErrInvalidStartTime.Error(), - }, err) - }) - - t.Run("invalid end time", func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyEndTime: []string{"invalid time"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: controllers.ErrInvalidEndTime.Error(), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: controllers.ErrInvalidEndTime.Error(), - }, err) - }) - - to := 
sqlstorage.TxsPaginationToken{} - raw, err := json.Marshal(to) - require.NoError(t, err) - - t.Run(fmt.Sprintf("valid empty %s", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode, rsp.Body.String()) - }) - - t.Run(fmt.Sprintf("valid empty %s with any other param is forbidden", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyCursor: []string{base64.RawURLEncoding.EncodeToString(raw)}, - "after": []string{"1"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("no other query params can be set with '%s'", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("no other query params can be set with '%s'", controllers.QueryKeyCursor), - }, err) - }) - - t.Run(fmt.Sprintf("invalid %s", controllers.QueryKeyCursor), func(t *testing.T) { - rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyCursor: []string{"invalid"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - }, err) - }) - - t.Run(fmt.Sprintf("invalid %s not base64", controllers.QueryKeyCursor), func(t *testing.T) { - 
rsp = internal.GetTransactions(api, url.Values{ - controllers.QueryKeyCursor: []string{"@!/"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("invalid '%s' query param", controllers.QueryKeyCursor), - }, err) - }) - - return nil - }, - }) - })) -} - -func TestGetTransactionsWithPageSize(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - now := time.Now().UTC() - store := internal.GetLedgerStore(t, driver, context.Background()) - - for i := 0; i < 3*controllers.MaxPageSize; i++ { - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: uint64(i), - TransactionData: core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: fmt.Sprintf("account:%d", i), - Amount: core.NewMonetaryInt(1000), - Asset: "USD", - }, - }, - Timestamp: now, - }, - }, - } - require.NoError(t, store.Commit(ctx, tx)) - } - - t.Run("invalid page size", func(t *testing.T) { - rsp := internal.GetTransactions(api, url.Values{ - controllers.QueryKeyPageSize: []string{"nan"}, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: controllers.ErrInvalidPageSize.Error(), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: controllers.ErrInvalidPageSize.Error(), - }, err) - }) - t.Run("page size over maximum", func(t 
*testing.T) { - httpResponse := internal.GetTransactions(api, url.Values{ - controllers.QueryKeyPageSize: []string{fmt.Sprintf("%d", 2*controllers.MaxPageSize)}, - }) - require.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, httpResponse.Body) - require.Len(t, cursor.Data, controllers.MaxPageSize) - require.Equal(t, cursor.PageSize, controllers.MaxPageSize) - require.NotEmpty(t, cursor.Next) - require.True(t, cursor.HasMore) - }) - t.Run("with page size greater than max count", func(t *testing.T) { - httpResponse := internal.GetTransactions(api, url.Values{ - controllers.QueryKeyPageSize: []string{fmt.Sprintf("%d", controllers.MaxPageSize)}, - "after": []string{fmt.Sprintf("%d", controllers.MaxPageSize-100)}, - }) - require.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, httpResponse.Body) - require.Len(t, cursor.Data, controllers.MaxPageSize-100) - require.Equal(t, cursor.PageSize, controllers.MaxPageSize) - require.Empty(t, cursor.Next) - require.False(t, cursor.HasMore) - }) - t.Run("with page size lower than max count", func(t *testing.T) { - httpResponse := internal.GetTransactions(api, url.Values{ - controllers.QueryKeyPageSize: []string{fmt.Sprintf("%d", controllers.MaxPageSize/10)}, - }) - require.Equal(t, http.StatusOK, httpResponse.Result().StatusCode, httpResponse.Body.String()) - - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, httpResponse.Body) - require.Len(t, cursor.Data, controllers.MaxPageSize/10) - require.Equal(t, cursor.PageSize, controllers.MaxPageSize/10) - require.NotEmpty(t, cursor.Next) - require.True(t, cursor.HasMore) - }) - - return nil - }, - }) - })) -} - -type transaction struct { - core.ExpandedTransaction - PreCommitVolumes accountsVolumes `json:"preCommitVolumes,omitempty"` - 
PostCommitVolumes accountsVolumes `json:"postCommitVolumes,omitempty"` -} -type accountsVolumes map[string]assetsVolumes -type assetsVolumes map[string]core.VolumesWithBalance - -func TestTransactionsVolumes(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - - // Single posting - single asset - worldAliceUSD := core.NewMonetaryInt(100) - - rsp := internal.PostTransaction(t, api, - controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: worldAliceUSD, - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]transaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - - expPreVolumes := accountsVolumes{ - "alice": assetsVolumes{ - "USD": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - Balance: core.NewMonetaryInt(0), - }, - }, - "world": assetsVolumes{ - "USD": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - Balance: core.NewMonetaryInt(0), - }, - }, - } - - expPostVolumes := accountsVolumes{ - "alice": assetsVolumes{ - "USD": core.VolumesWithBalance{ - Input: worldAliceUSD, - Output: core.NewMonetaryInt(0), - Balance: worldAliceUSD, - }, - }, - "world": assetsVolumes{ - "USD": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: worldAliceUSD, - Balance: worldAliceUSD.Neg(), - }, - }, - } - - require.Equal(t, expPreVolumes, txs[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, txs[0].PostCommitVolumes) - - rsp = internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[transaction](t, rsp.Body) - require.Len(t, cursor.Data, 1) - - require.Equal(t, expPreVolumes, 
cursor.Data[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, cursor.Data[0].PostCommitVolumes) - - prevVolAliceUSD := expPostVolumes["alice"]["USD"] - - // Single posting - single asset - - aliceBobUSD := core.NewMonetaryInt(93) - - rsp = internal.PostTransaction(t, api, - controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "alice", - Destination: "bob", - Amount: aliceBobUSD, - Asset: "USD", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok = internal.DecodeSingleResponse[[]transaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - - expPreVolumes = accountsVolumes{ - "alice": assetsVolumes{ - "USD": prevVolAliceUSD, - }, - "bob": assetsVolumes{ - "USD": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - Balance: core.NewMonetaryInt(0), - }, - }, - } - - expPostVolumes = accountsVolumes{ - "alice": assetsVolumes{ - "USD": core.VolumesWithBalance{ - Input: prevVolAliceUSD.Input, - Output: prevVolAliceUSD.Output.Add(aliceBobUSD), - Balance: prevVolAliceUSD.Input.Sub(prevVolAliceUSD.Output).Sub(aliceBobUSD), - }, - }, - "bob": assetsVolumes{ - "USD": core.VolumesWithBalance{ - Input: aliceBobUSD, - Output: core.NewMonetaryInt(0), - Balance: aliceBobUSD, - }, - }, - } - - require.Equal(t, expPreVolumes, txs[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, txs[0].PostCommitVolumes) - - rsp = internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[transaction](t, rsp.Body) - require.Len(t, cursor.Data, 2) - - require.Equal(t, expPreVolumes, cursor.Data[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, cursor.Data[0].PostCommitVolumes) - - prevVolAliceUSD = expPostVolumes["alice"]["USD"] - prevVolBobUSD := expPostVolumes["bob"]["USD"] - - // Multi posting - single asset - - worldBobEUR := core.NewMonetaryInt(156) - bobAliceEUR := 
core.NewMonetaryInt(3) - - rsp = internal.PostTransaction(t, api, - controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bob", - Amount: worldBobEUR, - Asset: "EUR", - }, - { - Source: "bob", - Destination: "alice", - Amount: bobAliceEUR, - Asset: "EUR", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok = internal.DecodeSingleResponse[[]transaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - - expPreVolumes = accountsVolumes{ - "alice": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - Balance: core.NewMonetaryInt(0), - }, - }, - "bob": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - Balance: core.NewMonetaryInt(0), - }, - }, - "world": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - Balance: core.NewMonetaryInt(0), - }, - }, - } - - expPostVolumes = accountsVolumes{ - "alice": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: bobAliceEUR, - Output: core.NewMonetaryInt(0), - Balance: bobAliceEUR, - }, - }, - "bob": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: worldBobEUR, - Output: bobAliceEUR, - Balance: worldBobEUR.Sub(bobAliceEUR), - }, - }, - "world": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: core.NewMonetaryInt(0), - Output: worldBobEUR, - Balance: worldBobEUR.Neg(), - }, - }, - } - - require.Equal(t, expPreVolumes, txs[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, txs[0].PostCommitVolumes) - - rsp = internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = internal.DecodeCursorResponse[transaction](t, rsp.Body) - require.Len(t, cursor.Data, 3) - - require.Equal(t, expPreVolumes, cursor.Data[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, 
cursor.Data[0].PostCommitVolumes) - - prevVolAliceEUR := expPostVolumes["alice"]["EUR"] - prevVolBobEUR := expPostVolumes["bob"]["EUR"] - - // Multi postings - multi assets - - bobAliceUSD := core.NewMonetaryInt(1) - aliceBobEUR := core.NewMonetaryInt(2) - - rsp = internal.PostTransaction(t, api, - controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "bob", - Destination: "alice", - Amount: bobAliceUSD, - Asset: "USD", - }, - { - Source: "alice", - Destination: "bob", - Amount: aliceBobEUR, - Asset: "EUR", - }, - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok = internal.DecodeSingleResponse[[]transaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - - expPreVolumes = accountsVolumes{ - "alice": assetsVolumes{ - "EUR": prevVolAliceEUR, - "USD": prevVolAliceUSD, - }, - "bob": assetsVolumes{ - "EUR": prevVolBobEUR, - "USD": prevVolBobUSD, - }, - } - - expPostVolumes = accountsVolumes{ - "alice": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: prevVolAliceEUR.Input, - Output: prevVolAliceEUR.Output.Add(aliceBobEUR), - Balance: prevVolAliceEUR.Balance.Sub(aliceBobEUR), - }, - "USD": core.VolumesWithBalance{ - Input: prevVolAliceUSD.Input.Add(bobAliceUSD), - Output: prevVolAliceUSD.Output, - Balance: prevVolAliceUSD.Balance.Add(bobAliceUSD), - }, - }, - "bob": assetsVolumes{ - "EUR": core.VolumesWithBalance{ - Input: prevVolBobEUR.Input.Add(aliceBobEUR), - Output: prevVolBobEUR.Output, - Balance: prevVolBobEUR.Balance.Add(aliceBobEUR), - }, - "USD": core.VolumesWithBalance{ - Input: prevVolBobUSD.Input, - Output: prevVolBobUSD.Output.Add(bobAliceUSD), - Balance: prevVolBobUSD.Balance.Sub(bobAliceUSD), - }, - }, - } - - require.Equal(t, expPreVolumes, txs[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, txs[0].PostCommitVolumes) - - rsp = internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor = 
internal.DecodeCursorResponse[transaction](t, rsp.Body) - require.Len(t, cursor.Data, 4) - - require.Equal(t, expPreVolumes, cursor.Data[0].PreCommitVolumes) - require.Equal(t, expPostVolumes, cursor.Data[0].PostCommitVolumes) - - return nil - }, - }) - })) -} - -func TestTooManyClient(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - if ledgertesting.StorageDriverName() != "postgres" { - return nil - } - if os.Getenv("NUMARY_STORAGE_POSTGRES_CONN_STRING") != "" { // Use of external server, ignore this test - return nil - } - - store, _, err := driver.GetLedgerStore(context.Background(), "quickstart", true) - require.NoError(t, err) - - // Grab all potential connections - for i := 0; i < pgtesting.MaxConnections; i++ { - tx, err := store.(*sqlstorage.Store).Schema().BeginTx(context.Background(), &sql.TxOptions{}) - require.NoError(t, err) - defer func(tx *sql.Tx) { - _ = tx.Rollback() - }(tx) - } - - rsp := internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusServiceUnavailable, rsp.Result().StatusCode) - return nil - }, - }) - })) -} - -func TestRevertTransaction(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Reference: "ref:23434656", - Metadata: core.Metadata{ - "foo1": "bar1", - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "world", - Destination: "bob", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - 
Reference: "ref:534646", - Metadata: core.Metadata{ - "foo2": "bar2", - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.PostTransaction(t, api, controllers.PostTransaction{ - Postings: core.Postings{ - { - Source: "alice", - Destination: "bob", - Amount: core.NewMonetaryInt(3), - Asset: "USD", - }, - }, - Reference: "ref:578632", - Metadata: core.Metadata{ - "foo3": "bar3", - }, - }, false) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - - rsp = internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - require.Len(t, cursor.Data, 3) - require.Equal(t, uint64(2), cursor.Data[0].ID) - - revertedTxID := cursor.Data[0].ID - - t.Run("first revert should succeed", func(t *testing.T) { - rsp := internal.RevertTransaction(api, revertedTxID) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - res, _ := internal.DecodeSingleResponse[core.ExpandedTransaction](t, rsp.Body) - require.Equal(t, revertedTxID+1, res.ID) - require.Equal(t, core.Metadata{ - core.RevertMetadataSpecKey(): fmt.Sprintf("%d", revertedTxID), - }, res.Metadata) - - revertedByTxID := res.ID - - rsp = internal.GetTransactions(api, url.Values{}) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - cursor := internal.DecodeCursorResponse[core.ExpandedTransaction](t, rsp.Body) - require.Len(t, cursor.Data, 4) - require.Equal(t, revertedByTxID, cursor.Data[0].ID) - require.Equal(t, revertedTxID, cursor.Data[1].ID) - - require.Equal(t, core.Metadata{ - "foo3": "bar3", - core.RevertedMetadataSpecKey(): map[string]any{ - "by": strconv.FormatUint(revertedByTxID, 10), - }, - }, cursor.Data[1].Metadata) - }) - - t.Run("transaction not found", func(t *testing.T) { - rsp := internal.RevertTransaction(api, uint64(42)) - require.Equal(t, http.StatusNotFound, rsp.Result().StatusCode, rsp.Body.String()) - err := 
sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrNotFound, - ErrorMessage: "transaction 42 not found", - ErrorCodeDeprecated: apierrors.ErrNotFound, - ErrorMessageDeprecated: "transaction 42 not found", - }, err) - }) - - t.Run("second revert should fail", func(t *testing.T) { - rsp := internal.RevertTransaction(api, revertedTxID) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: fmt.Sprintf("transaction %d already reverted", revertedTxID), - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: fmt.Sprintf("transaction %d already reverted", revertedTxID), - }, err) - }) - - t.Run("invalid transaction ID format", func(t *testing.T) { - rsp = internal.NewPostOnLedger(t, api, "/transactions/invalid/revert", nil) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid transaction ID", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid transaction ID", - }, err) - }) - - return nil - }, - }) - })) -} - -func TestPostTransactionsBatch(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - t.Run("valid", func(t *testing.T) { - txs := []core.TransactionData{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: "world", - Destination: "bob", - 
Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, - } - - rsp := internal.PostTransactionBatch(t, api, core.Transactions{ - Transactions: txs, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - res, _ := internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.Len(t, res, 2) - require.Equal(t, txs[0].Postings, res[0].Postings) - require.Equal(t, txs[1].Postings, res[1].Postings) - }) - - t.Run("no postings in second tx", func(t *testing.T) { - rsp := internal.PostTransactionBatch(t, api, core.Transactions{ - Transactions: []core.TransactionData{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, - { - Postings: core.Postings{}, - }, - }, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid transaction 1: no postings", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid transaction 1: no postings", - }, err) - }) - - t.Run("insufficient fund", func(t *testing.T) { - batch := []core.TransactionData{ - { - Postings: []core.Posting{ - { - Source: "empty_wallet", - Destination: "world", - Amount: core.NewMonetaryInt(1), - Asset: "COIN", - }, - }, - }, - } - - rsp := internal.PostTransactionBatch(t, api, core.Transactions{ - Transactions: batch, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrInsufficientFund, - ErrorMessage: "balance.insufficient.COIN", - ErrorCodeDeprecated: apierrors.ErrInsufficientFund, - ErrorMessageDeprecated: "balance.insufficient.COIN", - }, err) - }) - - t.Run("insufficient fund middle of batch", 
func(t *testing.T) { - batch := []core.TransactionData{ - { - Postings: []core.Posting{ - { - Source: "world", - Destination: "player2", - Asset: "GEM", - Amount: core.NewMonetaryInt(100), - }, - }, - }, - { - Postings: []core.Posting{ - { - Source: "player", - Destination: "game", - Asset: "GEM", - Amount: core.NewMonetaryInt(100), - }, - }, - }, - { - Postings: []core.Posting{ - { - Source: "world", - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(100), - }, - }, - }, - } - - rsp := internal.PostTransactionBatch(t, api, core.Transactions{ - Transactions: batch, - }) - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrInsufficientFund, - ErrorMessage: "balance.insufficient.GEM", - ErrorCodeDeprecated: apierrors.ErrInsufficientFund, - ErrorMessageDeprecated: "balance.insufficient.GEM", - }, err) - }) - - t.Run("invalid transactions format", func(t *testing.T) { - rsp := internal.NewPostOnLedger(t, api, "/transactions/batch", "invalid") - require.Equal(t, http.StatusBadRequest, rsp.Result().StatusCode, rsp.Body.String()) - - err := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &err) - require.EqualValues(t, sharedapi.ErrorResponse{ - ErrorCode: apierrors.ErrValidation, - ErrorMessage: "invalid transactions format", - ErrorCodeDeprecated: apierrors.ErrValidation, - ErrorMessageDeprecated: "invalid transactions format", - }, err) - }) - - return nil - }, - }) - })) -} - -func TestPostTransactionsBatchComplex(t *testing.T) { - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - - txs := []core.TransactionData{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: "payins:001", - Amount: core.NewMonetaryInt(10000), - Asset: "EUR/2", - }, - }, 
- }, - { - Postings: core.Postings{ - { - Source: "payins:001", - Destination: "users:001:wallet", - Amount: core.NewMonetaryInt(10000), - Asset: "EUR/2", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: "world", - Destination: "teller", - Amount: core.NewMonetaryInt(350000), - Asset: "RBLX/6", - }, - { - Source: "world", - Destination: "teller", - Amount: core.NewMonetaryInt(1840000), - Asset: "SNAP/6", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: "users:001:wallet", - Destination: "trades:001", - Amount: core.NewMonetaryInt(1500), - Asset: "EUR/2", - }, - { - Source: "trades:001", - Destination: "fiat:holdings", - Amount: core.NewMonetaryInt(1500), - Asset: "EUR/2", - }, - { - Source: "teller", - Destination: "trades:001", - Amount: core.NewMonetaryInt(350000), - Asset: "RBLX/6", - }, - { - Source: "trades:001", - Destination: "users:001:wallet", - Amount: core.NewMonetaryInt(350000), - Asset: "RBLX/6", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: "users:001:wallet", - Destination: "trades:001", - Amount: core.NewMonetaryInt(4230), - Asset: "EUR/2", - }, - { - Source: "trades:001", - Destination: "fiat:holdings", - Amount: core.NewMonetaryInt(4230), - Asset: "EUR/2", - }, - { - Source: "teller", - Destination: "trades:001", - Amount: core.NewMonetaryInt(1840000), - Asset: "SNAP/6", - }, - { - Source: "trades:001", - Destination: "users:001:wallet", - Amount: core.NewMonetaryInt(1840000), - Asset: "SNAP/6", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: "users:001:wallet", - Destination: "users:001:withdrawals", - Amount: core.NewMonetaryInt(2270), - Asset: "EUR/2", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: "users:001:withdrawals", - Destination: "payouts:001", - Amount: core.NewMonetaryInt(2270), - Asset: "EUR/2", - }, - }, - }, - } - - rsp := internal.PostTransactionBatch(t, api, core.Transactions{ - Transactions: txs, - }) - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - res, _ 
:= internal.DecodeSingleResponse[[]core.ExpandedTransaction](t, rsp.Body) - require.Len(t, res, 7) - require.Equal(t, txs[0].Postings, res[0].Postings) - require.Equal(t, txs[1].Postings, res[1].Postings) - require.Equal(t, txs[2].Postings, res[2].Postings) - require.Equal(t, txs[3].Postings, res[3].Postings) - require.Equal(t, txs[4].Postings, res[4].Postings) - require.Equal(t, txs[5].Postings, res[5].Postings) - require.Equal(t, txs[6].Postings, res[6].Postings) - - return nil - }}) - })) -} - -func TestPostTransactionsScriptConflict(t *testing.T) { - script := ` - send [COIN 100] ( - source = @world - destination = @centralbank - )` - - internal.RunTest(t, fx.Invoke(func(lc fx.Lifecycle, api *api.API, driver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - t.Run("first should succeed", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Script: core.Script{ - Plain: script, - }, - Reference: "1234", - }, false) - - require.Equal(t, http.StatusOK, rsp.Result().StatusCode) - txs, ok := internal.DecodeSingleResponse[[]transaction](t, rsp.Body) - require.True(t, ok) - require.Len(t, txs, 1) - }) - - t.Run("second should fail", func(t *testing.T) { - rsp := internal.PostTransaction(t, api, controllers.PostTransaction{ - Script: core.Script{ - Plain: script, - }, - Reference: "1234", - }, false) - - assert.Equal(t, http.StatusConflict, rsp.Result().StatusCode) - actualErr := sharedapi.ErrorResponse{} - internal.Decode(t, rsp.Body, &actualErr) - assert.Equal(t, apierrors.ErrConflict, actualErr.ErrorCode) - assert.Equal(t, "conflict error on reference", actualErr.ErrorMessage) - assert.Equal(t, apierrors.ErrConflict, actualErr.ErrorCodeDeprecated) - assert.Equal(t, "conflict error on reference", actualErr.ErrorMessageDeprecated) - }) - - return nil - }, - }) - })) -} diff --git a/pkg/api/idempotency/hash.go b/pkg/api/idempotency/hash.go deleted file mode 100644 index 
138dbcf2b..000000000 --- a/pkg/api/idempotency/hash.go +++ /dev/null @@ -1,14 +0,0 @@ -package idempotency - -import ( - "crypto/sha256" - "encoding/base64" -) - -func hashRequest(url, data string) string { - sh := sha256.New() - sh.Write([]byte(url)) - sh.Write([]byte(data)) - - return base64.RawURLEncoding.EncodeToString(sh.Sum(nil)) -} diff --git a/pkg/api/idempotency/middleware.go b/pkg/api/idempotency/middleware.go deleted file mode 100644 index 4060c8b8e..000000000 --- a/pkg/api/idempotency/middleware.go +++ /dev/null @@ -1,84 +0,0 @@ -package idempotency - -import ( - "bytes" - "io" - "net/http" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/storage" - "github.com/pkg/errors" -) - -const ( - HeaderIdempotency = "Idempotency-Key" - HeaderIdempotencyHit = "Idempotency-Hit" -) - -func Middleware(driver storage.LedgerStoreProvider[Store]) func(c *gin.Context) { - return func(c *gin.Context) { - - ik := c.Request.Header.Get(HeaderIdempotency) - if ik == "" { - return - } - - // Do not create the store if it doesn't exist - store, _, err := driver.GetLedgerStore(c.Request.Context(), c.Param("ledger"), false) - if err != nil && err != storage.ErrLedgerStoreNotFound { - apierrors.ResponseError(c, err) - return - } - - data, err := io.ReadAll(c.Request.Body) - if err != nil { - apierrors.ResponseError(c, err) - return - } - c.Request.Body = io.NopCloser(bytes.NewReader(data)) - - // Store created - if store != nil { - response, err := store.ReadIK(c.Request.Context(), ik) - if err != nil && err != ErrIKNotFound { - apierrors.ResponseError(c, err) - return - } - if err == nil { - if hashRequest(c.Request.URL.String(), string(data)) != response.RequestHash { - c.AbortWithStatus(http.StatusBadRequest) - return - } - - c.Abort() - c.Writer.Header().Set(HeaderIdempotencyHit, "true") - response.write(c) - return - } - } - - rw := newResponseWriter(c.Writer) - c.Writer = rw - - c.Next() - if c.Writer.Status() >= 
200 && c.Writer.Status() < 300 { - if store == nil { - store, _, err = driver.GetLedgerStore(c.Request.Context(), c.Param("ledger"), true) - if err != nil { - _ = c.Error(errors.Wrap(err, "retrieving ledger store to save IK")) - return - } - } - if err := store.CreateIK(c.Request.Context(), ik, Response{ - RequestHash: hashRequest(c.Request.URL.String(), string(data)), - StatusCode: c.Writer.Status(), - Header: c.Writer.Header(), - // TODO: Check if PG accept big documents - Body: string(rw.Bytes()), - }); err != nil { - _ = c.Error(errors.Wrap(err, "persisting IK to database")) - } - } - } -} diff --git a/pkg/api/idempotency/middleware_test.go b/pkg/api/idempotency/middleware_test.go deleted file mode 100644 index 351994cdd..000000000 --- a/pkg/api/idempotency/middleware_test.go +++ /dev/null @@ -1,186 +0,0 @@ -package idempotency - -import ( - "context" - "io" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/storage" - "github.com/stretchr/testify/require" -) - -func init() { - gin.SetMode(gin.ReleaseMode) -} - -func TestIdempotency(t *testing.T) { - - var newReqRec = func(ik string) (*http.Request, *httptest.ResponseRecorder) { - rec := httptest.NewRecorder() - req := httptest.NewRequest(http.MethodGet, "/", nil) - req.Header.Set(HeaderIdempotency, ik) - return req, rec - } - - var drainBody = func(rec *httptest.ResponseRecorder) string { - data, err := io.ReadAll(rec.Body) - require.NoError(t, err) - return string(data) - } - - t.Run("With existing store", func(t *testing.T) { - store := NewInMemoryStore() - storeProvider := storage.LedgerStoreProviderFn[Store](func(ctx context.Context, name string, create bool) (Store, bool, error) { - return store, false, nil - }) - - called := false - - body := "hello world!" 
- - handler := gin.New() - handler.GET("/", Middleware(storeProvider), func(c *gin.Context) { - if called { - c.Writer.WriteHeader(http.StatusServiceUnavailable) - return - } - called = true - c.Writer.WriteHeader(http.StatusAccepted) - _, _ = c.Writer.Write([]byte(body)) - }) - - ik := "foo" - - req, rec := newReqRec(ik) - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusAccepted, rec.Result().StatusCode) - require.Equal(t, body, drainBody(rec)) - - req, rec = newReqRec(ik) - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusAccepted, rec.Result().StatusCode) - require.Equal(t, body, drainBody(rec)) - }) - - t.Run("With non existing store", func(t *testing.T) { - var store *inMemoryStore - storeProvider := storage.LedgerStoreProviderFn[Store](func(ctx context.Context, name string, create bool) (Store, bool, error) { - if store == nil { - return nil, false, storage.ErrLedgerStoreNotFound - } - return store, false, nil - }) - - called := false - body := "Hello world!" 
- - handler := gin.New() - handler.GET("/", Middleware(storeProvider), func(c *gin.Context) { - if called { - // Simulate the store creation by a service - c.Writer.WriteHeader(http.StatusServiceUnavailable) - return - } - called = true - store = NewInMemoryStore() - c.Writer.WriteHeader(http.StatusAccepted) - _, _ = c.Writer.Write([]byte(body)) - }) - - ik := "foo" - - req, rec := newReqRec(ik) - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusAccepted, rec.Result().StatusCode) - require.Equal(t, body, drainBody(rec)) - - req, rec = newReqRec(ik) - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusAccepted, rec.Result().StatusCode) - require.Equal(t, body, drainBody(rec)) - }) - t.Run("With error on inner handler", func(t *testing.T) { - store := NewInMemoryStore() - storeProvider := storage.LedgerStoreProviderFn[Store](func(ctx context.Context, name string, create bool) (Store, bool, error) { - return store, false, nil - }) - - handler := gin.New() - handler.GET("/", Middleware(storeProvider), func(c *gin.Context) { - c.Writer.WriteHeader(http.StatusServiceUnavailable) - }) - - req, rec := newReqRec("foo") - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusServiceUnavailable, rec.Result().StatusCode) - require.Empty(t, store.iks) - }) - t.Run("With same IK for two requests", func(t *testing.T) { - store := NewInMemoryStore() - storeProvider := storage.LedgerStoreProviderFn[Store](func(ctx context.Context, name string, create bool) (Store, bool, error) { - return store, false, nil - }) - - handler := gin.New() - handler.GET("/path1", Middleware(storeProvider), func(c *gin.Context) { - c.Writer.WriteHeader(http.StatusAccepted) - }) - handler.GET("/path2", Middleware(storeProvider), func(c *gin.Context) { - c.Writer.WriteHeader(http.StatusAccepted) - }) - - ik := "foo" - - req, rec := newReqRec(ik) - req.URL.Path = "/path1" - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusAccepted, rec.Result().StatusCode) - - req, rec = 
newReqRec(ik) - req.URL.Path = "/path2" - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusBadRequest, rec.Result().StatusCode) - }) - t.Run("With request body", func(t *testing.T) { - store := NewInMemoryStore() - storeProvider := storage.LedgerStoreProviderFn[Store](func(ctx context.Context, name string, create bool) (Store, bool, error) { - return store, false, nil - }) - - requestBody := "Hello world!" - - handler := gin.New() - handler.GET("/", Middleware(storeProvider), func(c *gin.Context) { - data, err := io.ReadAll(c.Request.Body) - require.NoError(t, err) - require.Equal(t, requestBody, string(data)) - c.Writer.WriteHeader(http.StatusNoContent) - }) - - ik := "foo" - - req, rec := newReqRec(ik) - req.Body = io.NopCloser(strings.NewReader(requestBody)) - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusNoContent, rec.Result().StatusCode) - - req, rec = newReqRec(ik) - req.Body = io.NopCloser(strings.NewReader(requestBody)) - - handler.ServeHTTP(rec, req) - require.Equal(t, http.StatusNoContent, rec.Result().StatusCode) - require.Equal(t, rec.Result().Header.Get(HeaderIdempotencyHit), "true") - }) -} diff --git a/pkg/api/idempotency/response.go b/pkg/api/idempotency/response.go deleted file mode 100644 index 7e260c6d5..000000000 --- a/pkg/api/idempotency/response.go +++ /dev/null @@ -1,27 +0,0 @@ -package idempotency - -import ( - "net/http" - - "github.com/formancehq/go-libs/logging" - "github.com/gin-gonic/gin" -) - -type Response struct { - RequestHash string - StatusCode int - Header http.Header - Body string -} - -func (r Response) write(c *gin.Context) { - for k, v := range r.Header { - for _, vv := range v { - c.Writer.Header().Add(k, vv) - } - } - c.Writer.WriteHeader(r.StatusCode) - if _, err := c.Writer.WriteString(r.Body); err != nil { - logging.GetLogger(c.Request.Context()).Errorf("Error writing stored response: %s", err) - } -} diff --git a/pkg/api/idempotency/response_writer.go b/pkg/api/idempotency/response_writer.go 
deleted file mode 100644 index 45173b6e7..000000000 --- a/pkg/api/idempotency/response_writer.go +++ /dev/null @@ -1,34 +0,0 @@ -package idempotency - -import ( - "bytes" - "io" - "net/http" - - "github.com/gin-gonic/gin" -) - -type responseWriter struct { - gin.ResponseWriter - buf *bytes.Buffer - writer io.Writer -} - -func (r *responseWriter) Write(i []byte) (int, error) { - return r.writer.Write(i) -} - -func (r *responseWriter) Bytes() []byte { - return r.buf.Bytes() -} - -var _ http.ResponseWriter = &responseWriter{} - -func newResponseWriter(underlying gin.ResponseWriter) *responseWriter { - buf := bytes.NewBuffer(make([]byte, 0)) - return &responseWriter{ - ResponseWriter: underlying, - buf: buf, - writer: io.MultiWriter(underlying, buf), - } -} diff --git a/pkg/api/idempotency/store.go b/pkg/api/idempotency/store.go deleted file mode 100644 index 47dfb7453..000000000 --- a/pkg/api/idempotency/store.go +++ /dev/null @@ -1,40 +0,0 @@ -package idempotency - -import ( - "context" - "errors" -) - -var ( - ErrIKNotFound = errors.New("not found") -) - -type Store interface { - CreateIK(ctx context.Context, key string, response Response) error - ReadIK(ctx context.Context, key string) (*Response, error) -} - -type inMemoryStore struct { - iks map[string]Response -} - -func (i *inMemoryStore) CreateIK(ctx context.Context, key string, response Response) error { - i.iks[key] = response - return nil -} - -func (i *inMemoryStore) ReadIK(ctx context.Context, key string) (*Response, error) { - response, ok := i.iks[key] - if !ok { - return nil, ErrIKNotFound - } - return &response, nil -} - -var _ Store = &inMemoryStore{} - -func NewInMemoryStore() *inMemoryStore { - return &inMemoryStore{ - iks: map[string]Response{}, - } -} diff --git a/pkg/api/internal/testing.go b/pkg/api/internal/testing.go deleted file mode 100644 index 6cfb68253..000000000 --- a/pkg/api/internal/testing.go +++ /dev/null @@ -1,322 +0,0 @@ -package internal - -import ( - "bytes" - "context" - 
"encoding/json" - "fmt" - "io" - "net/http" - "net/http/httptest" - "net/url" - "strings" - "testing" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/logging/logginglogrus" - "github.com/gin-gonic/gin" - "github.com/golang-jwt/jwt" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/routes" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage" - "github.com/pborman/uuid" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -var testingLedger string - -func Encode(t *testing.T, v interface{}) []byte { - data, err := json.Marshal(v) - assert.NoError(t, err) - return data -} - -func Buffer(t *testing.T, v interface{}) *bytes.Buffer { - return bytes.NewBuffer(Encode(t, v)) -} - -func Decode(t *testing.T, reader io.Reader, v interface{}) bool { - err := json.NewDecoder(reader).Decode(v) - return assert.NoError(t, err) -} - -func DecodeSingleResponse[T any](t *testing.T, reader io.Reader) (T, bool) { - res := sharedapi.BaseResponse[T]{} - if !Decode(t, reader, &res) { - var zero T - return zero, false - } - return *res.Data, true -} - -func DecodeCursorResponse[T any](t *testing.T, reader io.Reader) *sharedapi.Cursor[T] { - res := sharedapi.BaseResponse[T]{} - Decode(t, reader, &res) - return res.Cursor -} - -func NewRequest(method, path string, body io.Reader) (*http.Request, *httptest.ResponseRecorder) { - rec := httptest.NewRecorder() - req := httptest.NewRequest(method, path, body) - req.Header.Set("Content-Type", "application/json") - - token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{ - "scope": strings.Join(routes.AllScopes, " "), - }) - signed, err := 
token.SignedString([]byte("0000000000000000")) - if err != nil { - panic(err) - } - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", signed)) - return req, rec -} - -func PostTransaction(t *testing.T, handler http.Handler, payload controllers.PostTransaction, preview bool) *httptest.ResponseRecorder { - path := fmt.Sprintf("/%s/transactions", testingLedger) - if preview { - path += "?preview=true" - } - req, rec := NewRequest(http.MethodPost, path, Buffer(t, payload)) - handler.ServeHTTP(rec, req) - return rec -} - -func PostTransactionBatch(t *testing.T, handler http.Handler, txs core.Transactions) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPost, "/"+testingLedger+"/transactions/batch", Buffer(t, txs)) - handler.ServeHTTP(rec, req) - return rec -} - -func PostTransactionMetadata(t *testing.T, handler http.Handler, id uint64, m core.Metadata) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPost, fmt.Sprintf("/%s/transactions/%d/metadata", testingLedger, id), Buffer(t, m)) - handler.ServeHTTP(rec, req) - return rec -} - -func CountTransactions(handler http.Handler, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodHead, fmt.Sprintf("/%s/transactions", testingLedger), nil) - req.URL.RawQuery = query.Encode() - handler.ServeHTTP(rec, req) - return rec -} - -func GetTransactions(handler http.Handler, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/transactions", testingLedger), nil) - req.URL.RawQuery = query.Encode() - handler.ServeHTTP(rec, req) - return rec -} - -func GetTransaction(handler http.Handler, id uint64) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/transactions/%d", testingLedger, id), nil) - handler.ServeHTTP(rec, req) - return rec -} - -func RevertTransaction(handler http.Handler, id uint64) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPost, 
fmt.Sprintf("/"+testingLedger+"/transactions/%d/revert", id), nil) - handler.ServeHTTP(rec, req) - return rec -} - -func CountAccounts(handler http.Handler, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodHead, fmt.Sprintf("/%s/accounts", testingLedger), nil) - req.URL.RawQuery = query.Encode() - handler.ServeHTTP(rec, req) - return rec -} - -func GetAccounts(handler http.Handler, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/accounts", testingLedger), nil) - req.URL.RawQuery = query.Encode() - handler.ServeHTTP(rec, req) - return rec -} - -func GetBalances(handler http.Handler, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/balances", testingLedger), nil) - req.URL.RawQuery = query.Encode() - handler.ServeHTTP(rec, req) - return rec -} - -func GetBalancesAggregated(handler http.Handler, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/aggregate/balances", testingLedger), nil) - req.URL.RawQuery = query.Encode() - handler.ServeHTTP(rec, req) - return rec -} - -func GetAccount(handler http.Handler, addr string) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/accounts/%s", testingLedger, addr), nil) - handler.ServeHTTP(rec, req) - return rec -} - -func PostAccountMetadata(t *testing.T, handler http.Handler, addr string, m core.Metadata) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPost, fmt.Sprintf("/%s/accounts/%s/metadata", testingLedger, addr), Buffer(t, m)) - handler.ServeHTTP(rec, req) - return rec -} - -func NewRequestOnLedger(t *testing.T, handler http.Handler, path string, body any) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPost, fmt.Sprintf("/%s%s", testingLedger, path), Buffer(t, body)) - handler.ServeHTTP(rec, req) - return rec -} - -func NewGetOnLedger(handler 
http.Handler, path string) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s%s", testingLedger, path), nil) - handler.ServeHTTP(rec, req) - return rec -} - -func NewPostOnLedger(t *testing.T, handler http.Handler, path string, body any) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPost, fmt.Sprintf("/%s%s", testingLedger, path), Buffer(t, body)) - handler.ServeHTTP(rec, req) - return rec -} - -func GetLedgerInfo(handler http.Handler) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/_info", testingLedger), nil) - handler.ServeHTTP(rec, req) - return rec -} - -func GetLedgerStats(handler http.Handler) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/stats", testingLedger), nil) - handler.ServeHTTP(rec, req) - return rec -} - -func GetLedgerLogs(handler http.Handler, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/logs", testingLedger), nil) - req.URL.RawQuery = query.Encode() - handler.ServeHTTP(rec, req) - return rec -} - -func LoadMapping(handler http.Handler) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, fmt.Sprintf("/%s/mapping", testingLedger), nil) - handler.ServeHTTP(rec, req) - return rec -} - -func SaveMapping(t *testing.T, handler http.Handler, m core.Mapping) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPut, fmt.Sprintf("/%s/mapping", testingLedger), Buffer(t, m)) - handler.ServeHTTP(rec, req) - return rec -} - -func GetInfo(handler http.Handler) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodGet, "/_info", nil) - handler.ServeHTTP(rec, req) - return rec -} - -func PostScript(t *testing.T, handler http.Handler, s core.ScriptData, query url.Values) *httptest.ResponseRecorder { - req, rec := NewRequest(http.MethodPost, fmt.Sprintf("/%s/script", testingLedger), Buffer(t, s)) - req.URL.RawQuery = query.Encode() - 
handler.ServeHTTP(rec, req) - return rec -} - -func GetLedgerStore(t *testing.T, driver storage.Driver[ledger.Store], ctx context.Context) ledger.Store { - store, _, err := driver.GetLedgerStore(ctx, testingLedger, true) - require.NoError(t, err) - return store -} - -func RunTest(t *testing.T, options ...fx.Option) { - l := logrus.New() - if testing.Verbose() { - l.Level = logrus.DebugLevel - } - logging.SetFactory(logging.StaticLoggerFactory(logginglogrus.New(l))) - - testingLedger = uuid.New() - ch := make(chan struct{}) - - options = append([]fx.Option{ - api.Module(api.Config{StorageDriver: "sqlite", Version: "latest", UseScopes: true}), - // 100 000 000 bytes is 100 MB - ledger.ResolveModule(100000000, 100), - ledgertesting.ProvideLedgerStorageDriver(), - fx.Invoke(func(driver storage.Driver[ledger.Store], lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - store, _, err := driver.GetLedgerStore(ctx, testingLedger, true) - if err != nil { - return err - } - defer func(store ledger.Store, ctx context.Context) { - require.NoError(t, store.Close(ctx)) - }(store, ctx) - - _, err = store.Initialize(ctx) - return err - }, - }) - }), - fx.NopLogger, - }, options...) 
- - options = append(options, fx.Provide( - fx.Annotate(func() []ledger.LedgerOption { - ledgerOptions := []ledger.LedgerOption{} - - return ledgerOptions - }, fx.ResultTags(ledger.ResolverLedgerOptionsKey)), - )) - - options = append(options, routes.ProvidePerLedgerMiddleware(func() []gin.HandlerFunc { - return []gin.HandlerFunc{ - func(c *gin.Context) { - handled := false - auth.Middleware(auth.NewHttpBearerMethod( - auth.NoOpValidator, - ))(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - handled = true - // The middleware replace the context of the request to include the agent - // We have to forward it to gin - c.Request = r - c.Next() - })).ServeHTTP(c.Writer, c.Request) - if !handled { - c.Abort() - } - }, - } - }, fx.ParamTags(`optional:"true"`))) - - options = append(options, - fx.Invoke(func(lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStop: func(ctx context.Context) error { - close(ch) - return nil - }, - }) - })) - - app := fx.New(options...) - - assert.NoError(t, app.Start(context.Background())) - - select { - case <-ch: - default: - if app.Err() != nil { - assert.Fail(t, app.Err().Error()) - } - } -} diff --git a/pkg/api/middlewares/ledger_middleware.go b/pkg/api/middlewares/ledger_middleware.go deleted file mode 100644 index c9a9a98c0..000000000 --- a/pkg/api/middlewares/ledger_middleware.go +++ /dev/null @@ -1,47 +0,0 @@ -package middlewares - -import ( - "context" - "net/http" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/contextlogger" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/opentelemetry" -) - -type LedgerMiddleware struct { - resolver *ledger.Resolver -} - -func NewLedgerMiddleware(resolver *ledger.Resolver) LedgerMiddleware { - return LedgerMiddleware{ - resolver: resolver, - } -} - -func (m *LedgerMiddleware) LedgerMiddleware() gin.HandlerFunc { - return func(c *gin.Context) { - name := c.Param("ledger") - if name == "" { - 
c.AbortWithStatus(http.StatusNotFound) - return - } - - span := opentelemetry.WrapGinContext(c, "Ledger access") - defer span.End() - - contextlogger.WrapGinRequest(c) - - l, err := m.resolver.GetLedger(c.Request.Context(), name) - if err != nil { - apierrors.ResponseError(c, err) - return - } - defer l.Close(context.Background()) - - c.Set("ledger", l) - c.Next() - } -} diff --git a/pkg/api/middlewares/lock.go b/pkg/api/middlewares/lock.go deleted file mode 100644 index 24e333da2..000000000 --- a/pkg/api/middlewares/lock.go +++ /dev/null @@ -1,54 +0,0 @@ -package middlewares - -import ( - "context" - "sync" -) - -type Unlock func(ctx context.Context) - -type Locker interface { - Lock(ctx context.Context, name string) (Unlock, error) -} -type LockerFn func(ctx context.Context, name string) (Unlock, error) - -func (fn LockerFn) Lock(ctx context.Context, name string) (Unlock, error) { - return fn(ctx, name) -} - -var NoOpLocker = LockerFn(func(ctx context.Context, name string) (Unlock, error) { - return func(ctx context.Context) {}, nil -}) - -type InMemoryLocker struct { - globalLock sync.RWMutex - locks map[string]*sync.Mutex -} - -func (d *InMemoryLocker) Lock(ctx context.Context, ledger string) (Unlock, error) { - d.globalLock.RLock() - lock, ok := d.locks[ledger] - d.globalLock.RUnlock() - if ok { - goto ret - } - - d.globalLock.Lock() - lock, ok = d.locks[ledger] // Double check, the lock can have been acquired by another go routing between RUnlock and Lock - if !ok { - lock = &sync.Mutex{} - d.locks[ledger] = lock - } - d.globalLock.Unlock() -ret: - lock.Lock() - return func(ctx context.Context) { - lock.Unlock() - }, nil -} - -func NewInMemoryLocker() *InMemoryLocker { - return &InMemoryLocker{ - locks: map[string]*sync.Mutex{}, - } -} diff --git a/pkg/api/middlewares/log_middleware.go b/pkg/api/middlewares/log_middleware.go deleted file mode 100644 index 7429a6e8c..000000000 --- a/pkg/api/middlewares/log_middleware.go +++ /dev/null @@ -1,24 +0,0 @@ -package 
middlewares - -import ( - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/gin-gonic/gin" -) - -func Log() gin.HandlerFunc { - return func(c *gin.Context) { - start := time.Now() - c.Next() - latency := time.Since(start) - logging.GetLogger(c.Request.Context()).WithFields(map[string]interface{}{ - "status": c.Writer.Status(), - "method": c.Request.Method, - "path": c.Request.URL.Path, - "ip": c.ClientIP(), - "latency": latency, - "user_agent": c.Request.UserAgent(), - }).Info("Request") - } -} diff --git a/pkg/api/middlewares/module.go b/pkg/api/middlewares/module.go deleted file mode 100644 index 770aa0dde..000000000 --- a/pkg/api/middlewares/module.go +++ /dev/null @@ -1,12 +0,0 @@ -package middlewares - -import ( - "go.uber.org/fx" -) - -var Module = fx.Options( - fx.Provide(NewLedgerMiddleware), - fx.Provide(func() Locker { - return NewInMemoryLocker() - }), -) diff --git a/pkg/api/middlewares/module_test.go b/pkg/api/middlewares/module_test.go deleted file mode 100644 index c08447799..000000000 --- a/pkg/api/middlewares/module_test.go +++ /dev/null @@ -1,34 +0,0 @@ -package middlewares_test - -import ( - "errors" - "net/http" - "net/http/httptest" - "testing" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api" - "github.com/numary/ledger/pkg/api/internal" - "github.com/numary/ledger/pkg/api/routes" - "github.com/stretchr/testify/assert" - "go.uber.org/fx" -) - -func TestAdditionalGlobalMiddleware(t *testing.T) { - internal.RunTest(t, - routes.ProvideMiddlewares(func() []gin.HandlerFunc { - return []gin.HandlerFunc{ - func(context *gin.Context) { - _ = context.AbortWithError(418, errors.New("")) - }, - } - }), - fx.Invoke(func(api *api.API) { - rec := httptest.NewRecorder() - req := httptest.NewRequest(http.MethodGet, "/_info", nil) - - api.ServeHTTP(rec, req) - assert.Equal(t, 418, rec.Code) - }), - ) -} diff --git a/pkg/api/middlewares/transaction.go b/pkg/api/middlewares/transaction.go deleted file mode 100644 index 
70e973504..000000000 --- a/pkg/api/middlewares/transaction.go +++ /dev/null @@ -1,93 +0,0 @@ -package middlewares - -import ( - "bytes" - "context" - "io" - - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/opentelemetry" - "github.com/numary/ledger/pkg/storage" -) - -type bufferedResponseWriter struct { - gin.ResponseWriter - buf io.ReadWriter - statusCode int -} - -func (r *bufferedResponseWriter) WriteString(s string) (int, error) { - return r.Write([]byte(s)) -} - -func (r *bufferedResponseWriter) WriteHeaderNow() {} - -func (r *bufferedResponseWriter) Write(data []byte) (int, error) { - return r.buf.Write(data) -} - -func (r *bufferedResponseWriter) WriteHeader(statusCode int) { - r.statusCode = statusCode -} - -func (r *bufferedResponseWriter) Status() int { - return r.statusCode -} - -func (r *bufferedResponseWriter) WriteResponse() error { - r.ResponseWriter.WriteHeader(r.statusCode) - _, err := io.Copy(r.ResponseWriter, r.buf) - return err -} - -func newBufferedWriter(rw gin.ResponseWriter) *bufferedResponseWriter { - buf := bytes.NewBuffer(make([]byte, 0)) - return &bufferedResponseWriter{ - ResponseWriter: rw, - buf: buf, - } -} - -func Transaction(locker Locker) func(c *gin.Context) { - return func(c *gin.Context) { - - ctx, span := opentelemetry.Start(c.Request.Context(), "Wait ledger lock") - defer span.End() - - c.Request = c.Request.WithContext(ctx) - - bufferedWriter := newBufferedWriter(c.Writer) - c.Writer = bufferedWriter - - func() { - unlock, err := locker.Lock(c.Request.Context(), c.Param("ledger")) - if err != nil { - panic(err) - } - defer unlock(context.Background()) // Use a background context instead of the request one as it could have been cancelled - - ctx, span = opentelemetry.Start(c.Request.Context(), "Ledger locked") - defer span.End() - c.Request = c.Request.WithContext(ctx) - c.Request = c.Request.WithContext(storage.TransactionalContext(c.Request.Context())) - defer 
func() { - _ = storage.RollbackTransaction(c.Request.Context()) - }() - - c.Next() - - if c.Writer.Status() >= 200 && c.Writer.Status() < 300 && - storage.IsTransactionRegistered(c.Request.Context()) { - if err := storage.CommitTransaction(c.Request.Context()); err != nil { - apierrors.ResponseError(c, err) - return - } - } - }() - - if err := bufferedWriter.WriteResponse(); err != nil { - _ = c.Error(err) - } - } -} diff --git a/pkg/api/routes/routes.go b/pkg/api/routes/routes.go deleted file mode 100644 index ab4fb4662..000000000 --- a/pkg/api/routes/routes.go +++ /dev/null @@ -1,210 +0,0 @@ -package routes - -import ( - "net/http" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/health" - "github.com/gin-gonic/gin" - "github.com/numary/ledger/pkg/api/controllers" - "github.com/numary/ledger/pkg/api/idempotency" - "github.com/numary/ledger/pkg/api/middlewares" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin" - "go.uber.org/fx" -) - -const GlobalMiddlewaresKey = `name:"_routesGlobalMiddlewares" optional:"true"` -const PerLedgerMiddlewaresKey = `name:"_perLedgerMiddlewares" optional:"true"` - -var Module = fx.Options( - fx.Provide( - fx.Annotate(NewRoutes, fx.ParamTags(GlobalMiddlewaresKey, PerLedgerMiddlewaresKey)), - ), -) - -func ProvideMiddlewares(provider interface{}, additionalAnnotations ...fx.Annotation) fx.Option { - opts := []fx.Annotation{fx.ResultTags(GlobalMiddlewaresKey)} - return fx.Provide( - fx.Annotate(provider, append(opts, additionalAnnotations...)...), - ) -} - -func ProvidePerLedgerMiddleware(provider interface{}, additionalAnnotations ...fx.Annotation) fx.Option { - opts := []fx.Annotation{fx.ResultTags(PerLedgerMiddlewaresKey)} - return fx.Provide( - fx.Annotate(provider, append(opts, additionalAnnotations...)...), - ) -} - -type UseScopes bool - -const ( - ScopeTransactionsRead = "transactions:read" - 
ScopeTransactionsWrite = "transactions:write" - ScopeAccountsRead = "accounts:read" - ScopeAccountsWrite = "accounts:write" - ScopeMappingRead = "mapping:read" - ScopeMappingWrite = "mapping:write" - ScopesInfoRead = "info" - ScopesStatsRead = "stats" - ScopesLogsRead = "logs" -) - -var AllScopes = []string{ - ScopeTransactionsRead, - ScopeAccountsWrite, - ScopeTransactionsWrite, - ScopeAccountsRead, - ScopeMappingRead, - ScopeMappingWrite, - ScopesInfoRead, - ScopesStatsRead, - ScopesLogsRead, -} - -type Routes struct { - resolver *ledger.Resolver - ledgerMiddleware middlewares.LedgerMiddleware - healthController *health.HealthController - configController controllers.ConfigController - ledgerController controllers.LedgerController - scriptController controllers.ScriptController - accountController controllers.AccountController - balanceController controllers.BalanceController - transactionController controllers.TransactionController - mappingController controllers.MappingController - globalMiddlewares []gin.HandlerFunc - perLedgerMiddlewares []gin.HandlerFunc - useScopes UseScopes - idempotencyStore storage.Driver[idempotency.Store] - locker middlewares.Locker -} - -func NewRoutes( - globalMiddlewares []gin.HandlerFunc, - perLedgerMiddlewares []gin.HandlerFunc, - resolver *ledger.Resolver, - ledgerMiddleware middlewares.LedgerMiddleware, - configController controllers.ConfigController, - ledgerController controllers.LedgerController, - scriptController controllers.ScriptController, - accountController controllers.AccountController, - balanceController controllers.BalanceController, - transactionController controllers.TransactionController, - mappingController controllers.MappingController, - healthController *health.HealthController, - useScopes UseScopes, - idempotencyStore storage.Driver[idempotency.Store], - locker middlewares.Locker, -) *Routes { - return &Routes{ - globalMiddlewares: globalMiddlewares, - perLedgerMiddlewares: perLedgerMiddlewares, - 
resolver: resolver, - ledgerMiddleware: ledgerMiddleware, - configController: configController, - ledgerController: ledgerController, - scriptController: scriptController, - accountController: accountController, - balanceController: balanceController, - transactionController: transactionController, - mappingController: mappingController, - healthController: healthController, - useScopes: useScopes, - idempotencyStore: idempotencyStore, - locker: locker, - } -} - -func (r *Routes) wrapWithScopes(handler gin.HandlerFunc, scopes ...string) gin.HandlerFunc { - if !r.useScopes { - return handler - } - return func(context *gin.Context) { - ok := false - auth.NeedOneOfScopes(scopes...)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - context.Request = r - ok = true - handler(context) - })).ServeHTTP(context.Writer, context.Request) - if !ok { - context.AbortWithStatus(http.StatusForbidden) - } - } -} - -func (r *Routes) Engine() *gin.Engine { - engine := gin.New() - - engine.Use(r.globalMiddlewares...) - - // Deprecated - engine.GET("/_health", func(context *gin.Context) { - r.healthController.Check(context.Writer, context.Request) - }) - engine.GET("/_healthcheck", func(context *gin.Context) { - r.healthController.Check(context.Writer, context.Request) - }) - engine.GET("/swagger.yaml", r.configController.GetDocsAsYaml) - engine.GET("/swagger.json", r.configController.GetDocsAsJSON) - - engineWithOtel := engine.Group("/") - engineWithOtel.Use(otelgin.Middleware("ledger")) - engineWithOtel.GET("/_info", r.configController.GetInfo) - - dedicatedLedgerRouter := engineWithOtel.Group("/:ledger") - dedicatedLedgerRouter.Use(append(r.perLedgerMiddlewares, r.ledgerMiddleware.LedgerMiddleware())...) 
- - // LedgerController - dedicatedLedgerRouter.GET("/_info", r.wrapWithScopes(r.ledgerController.GetInfo, ScopesInfoRead)) - dedicatedLedgerRouter.GET("/stats", r.wrapWithScopes(r.ledgerController.GetStats, ScopesStatsRead)) - dedicatedLedgerRouter.GET("/logs", r.wrapWithScopes(r.ledgerController.GetLogs, ScopesLogsRead)) - - // AccountController - dedicatedLedgerRouter.GET("/accounts", r.wrapWithScopes(r.accountController.GetAccounts, ScopeAccountsRead, ScopeAccountsWrite)) - dedicatedLedgerRouter.HEAD("/accounts", r.wrapWithScopes(r.accountController.CountAccounts, ScopeAccountsRead, ScopeAccountsWrite)) - dedicatedLedgerRouter.GET("/accounts/:address", r.wrapWithScopes(r.accountController.GetAccount, ScopeAccountsRead, ScopeAccountsWrite)) - dedicatedLedgerRouter.POST("/accounts/:address/metadata", - middlewares.Transaction(r.locker), - idempotency.Middleware(r.idempotencyStore), - r.wrapWithScopes(r.accountController.PostAccountMetadata, ScopeAccountsWrite)) - - // TransactionController - dedicatedLedgerRouter.GET("/transactions", r.wrapWithScopes(r.transactionController.GetTransactions, ScopeTransactionsRead, ScopeTransactionsWrite)) - dedicatedLedgerRouter.HEAD("/transactions", r.wrapWithScopes(r.transactionController.CountTransactions, ScopeTransactionsRead, ScopeTransactionsWrite)) - dedicatedLedgerRouter.POST("/transactions", - middlewares.Transaction(r.locker), - idempotency.Middleware(r.idempotencyStore), - r.wrapWithScopes(r.transactionController.PostTransaction, ScopeTransactionsWrite)).Use() - dedicatedLedgerRouter.POST("/transactions/batch", - middlewares.Transaction(r.locker), - idempotency.Middleware(r.idempotencyStore), - r.wrapWithScopes(r.transactionController.PostTransactionsBatch, ScopeTransactionsWrite)) - dedicatedLedgerRouter.GET("/transactions/:txid", r.wrapWithScopes(r.transactionController.GetTransaction, ScopeTransactionsRead, ScopeTransactionsWrite)) - dedicatedLedgerRouter.POST("/transactions/:txid/revert", - 
middlewares.Transaction(r.locker), - idempotency.Middleware(r.idempotencyStore), - r.wrapWithScopes(r.transactionController.RevertTransaction, ScopeTransactionsWrite)) - dedicatedLedgerRouter.POST("/transactions/:txid/metadata", - middlewares.Transaction(r.locker), - idempotency.Middleware(r.idempotencyStore), - r.wrapWithScopes(r.transactionController.PostTransactionMetadata, ScopeTransactionsWrite)) - - // BalanceController - dedicatedLedgerRouter.GET("/balances", r.wrapWithScopes(r.balanceController.GetBalances, ScopeAccountsRead)) - dedicatedLedgerRouter.GET("/aggregate/balances", r.wrapWithScopes(r.balanceController.GetBalancesAggregated, ScopeAccountsRead)) - - // MappingController - dedicatedLedgerRouter.GET("/mapping", r.wrapWithScopes(r.mappingController.GetMapping, ScopeMappingRead, ScopeMappingWrite)) - dedicatedLedgerRouter.PUT("/mapping", r.wrapWithScopes(r.mappingController.PutMapping, ScopeMappingWrite)) - - // ScriptController - dedicatedLedgerRouter.POST("/script", - middlewares.Transaction(r.locker), - idempotency.Middleware(r.idempotencyStore), - r.wrapWithScopes(r.scriptController.PostScript, ScopeTransactionsWrite)) - - return engine -} diff --git a/pkg/bus/message.go b/pkg/bus/message.go deleted file mode 100644 index c49004d96..000000000 --- a/pkg/bus/message.go +++ /dev/null @@ -1,98 +0,0 @@ -package bus - -import ( - "time" - - "github.com/numary/ledger/pkg/core" -) - -const ( - EventVersion = "v1" - EventApp = "ledger" - - EventTypeCommittedTransactions = "COMMITTED_TRANSACTIONS" - EventTypeSavedMetadata = "SAVED_METADATA" - EventTypeUpdatedMapping = "UPDATED_MAPPING" - EventTypeRevertedTransaction = "REVERTED_TRANSACTION" -) - -type EventMessage struct { - Date time.Time `json:"date"` - App string `json:"app"` - Version string `json:"version"` - Type string `json:"type"` - Payload any `json:"payload"` - // TODO: deprecated in future version - Ledger string `json:"ledger"` -} - -type CommittedTransactions struct { - Ledger string 
`json:"ledger"` - Transactions []core.ExpandedTransaction `json:"transactions"` - // Deprecated (use postCommitVolumes) - Volumes core.AccountsAssetsVolumes `json:"volumes"` - PostCommitVolumes core.AccountsAssetsVolumes `json:"postCommitVolumes"` - PreCommitVolumes core.AccountsAssetsVolumes `json:"preCommitVolumes"` -} - -func newEventCommittedTransactions(txs CommittedTransactions) EventMessage { - return EventMessage{ - Date: time.Now().UTC(), - App: EventApp, - Version: EventVersion, - Type: EventTypeCommittedTransactions, - Payload: txs, - Ledger: txs.Ledger, - } -} - -type SavedMetadata struct { - Ledger string `json:"ledger"` - TargetType string `json:"targetType"` - TargetID string `json:"targetId"` - Metadata core.Metadata `json:"metadata"` -} - -func newEventSavedMetadata(metadata SavedMetadata) EventMessage { - return EventMessage{ - Date: time.Now().UTC(), - App: EventApp, - Version: EventVersion, - Type: EventTypeSavedMetadata, - Payload: metadata, - Ledger: metadata.Ledger, - } -} - -type UpdatedMapping struct { - Ledger string `json:"ledger"` - Mapping core.Mapping `json:"mapping"` -} - -func newEventUpdatedMapping(mapping UpdatedMapping) EventMessage { - return EventMessage{ - Date: time.Now().UTC(), - App: EventApp, - Version: EventVersion, - Type: EventTypeUpdatedMapping, - Payload: mapping, - Ledger: mapping.Ledger, - } -} - -type RevertedTransaction struct { - Ledger string `json:"ledger"` - RevertedTransaction core.ExpandedTransaction `json:"revertedTransaction"` - RevertTransaction core.ExpandedTransaction `json:"revertTransaction"` -} - -func newEventRevertedTransaction(tx RevertedTransaction) EventMessage { - return EventMessage{ - Date: time.Now().UTC(), - App: EventApp, - Version: EventVersion, - Type: EventTypeRevertedTransaction, - Payload: tx, - Ledger: tx.Ledger, - } -} diff --git a/pkg/bus/monitor.go b/pkg/bus/monitor.go deleted file mode 100644 index 4faef2ff5..000000000 --- a/pkg/bus/monitor.go +++ /dev/null @@ -1,84 +0,0 @@ 
-package bus - -import ( - "context" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/publish" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "go.uber.org/fx" -) - -type ledgerMonitor struct { - publisher *publish.TopicMapperPublisher -} - -var _ ledger.Monitor = &ledgerMonitor{} - -func newLedgerMonitor(publisher *publish.TopicMapperPublisher) *ledgerMonitor { - m := &ledgerMonitor{ - publisher: publisher, - } - return m -} - -func LedgerMonitorModule() fx.Option { - return fx.Options( - fx.Provide( - fx.Annotate( - newLedgerMonitor, - fx.ParamTags(``, `group:"monitorOptions"`), - ), - ), - ledger.ProvideResolverOption(func(monitor *ledgerMonitor) ledger.ResolveOptionFn { - return ledger.WithMonitor(monitor) - }), - ) -} - -func (l *ledgerMonitor) CommittedTransactions(ctx context.Context, ledger string, txs ...core.ExpandedTransaction) { - postCommitVolumes := core.AggregatePostCommitVolumes(txs...) - l.publish(ctx, EventTypeCommittedTransactions, - newEventCommittedTransactions(CommittedTransactions{ - Ledger: ledger, - Transactions: txs, - Volumes: postCommitVolumes, - PostCommitVolumes: postCommitVolumes, - PreCommitVolumes: core.AggregatePreCommitVolumes(txs...), - })) -} - -func (l *ledgerMonitor) SavedMetadata(ctx context.Context, ledger, targetType, targetID string, metadata core.Metadata) { - l.publish(ctx, EventTypeSavedMetadata, - newEventSavedMetadata(SavedMetadata{ - Ledger: ledger, - TargetType: targetType, - TargetID: targetID, - Metadata: metadata, - })) -} - -func (l *ledgerMonitor) UpdatedMapping(ctx context.Context, ledger string, mapping core.Mapping) { - l.publish(ctx, EventTypeUpdatedMapping, - newEventUpdatedMapping(UpdatedMapping{ - Ledger: ledger, - Mapping: mapping, - })) -} - -func (l *ledgerMonitor) RevertedTransaction(ctx context.Context, ledger string, reverted, revert *core.ExpandedTransaction) { - l.publish(ctx, EventTypeRevertedTransaction, - 
newEventRevertedTransaction(RevertedTransaction{ - Ledger: ledger, - RevertedTransaction: *reverted, - RevertTransaction: *revert, - })) -} - -func (l *ledgerMonitor) publish(ctx context.Context, topic string, ev EventMessage) { - if err := l.publisher.Publish(ctx, topic, ev); err != nil { - logging.GetLogger(ctx).Errorf("publishing message: %s", err) - return - } -} diff --git a/pkg/contextlogger/contextlogger.go b/pkg/contextlogger/contextlogger.go deleted file mode 100644 index 3b2fed6e9..000000000 --- a/pkg/contextlogger/contextlogger.go +++ /dev/null @@ -1,51 +0,0 @@ -package contextlogger - -import ( - "context" - - "github.com/formancehq/go-libs/logging" - "github.com/gin-gonic/gin" - "github.com/google/uuid" - "go.opentelemetry.io/otel/trace" -) - -type contextKey string - -var loggerContextKey contextKey = "logger" - -type Factory struct { - underlying logging.LoggerFactory -} - -func (c *Factory) Get(ctx context.Context) logging.Logger { - v := ctx.Value(loggerContextKey) - if v == nil { - return c.underlying.Get(ctx) - } - return v.(logging.Logger) -} - -func NewFactory(underlyingFactory logging.LoggerFactory) *Factory { - return &Factory{ - underlying: underlyingFactory, - } -} - -var _ logging.LoggerFactory = &Factory{} - -func ContextWithLogger(ctx context.Context, logger logging.Logger) context.Context { - return context.WithValue(ctx, loggerContextKey, logger) -} - -func WrapGinRequest(c *gin.Context) { - span := trace.SpanFromContext(c.Request.Context()) - contextKeyID := uuid.NewString() - if span.SpanContext().SpanID().IsValid() { - contextKeyID = span.SpanContext().SpanID().String() - } - c.Request = c.Request.WithContext( - ContextWithLogger(c.Request.Context(), logging.GetLogger(c.Request.Context()).WithFields(map[string]any{ - "contextID": contextKeyID, - })), - ) -} diff --git a/pkg/core/account.go b/pkg/core/account.go deleted file mode 100644 index d3d060891..000000000 --- a/pkg/core/account.go +++ /dev/null @@ -1,16 +0,0 @@ -package core 
- -const ( - WORLD = "world" -) - -type Account struct { - Address string `json:"address" example:"users:001"` - Metadata Metadata `json:"metadata" swaggertype:"object"` -} - -type AccountWithVolumes struct { - Account - Volumes AssetsVolumes `json:"volumes"` - Balances AssetsBalances `json:"balances" example:"COIN:100"` -} diff --git a/pkg/core/asset.go b/pkg/core/asset.go deleted file mode 100644 index e28f03aa1..000000000 --- a/pkg/core/asset.go +++ /dev/null @@ -1,11 +0,0 @@ -package core - -import ( - "regexp" -) - -var assetRegexp = regexp.MustCompile(`^[A-Z][A-Z0-9]{0,16}(\/\d{1,6})?$`) - -func AssetIsValid(v string) bool { - return assetRegexp.Match([]byte(v)) -} diff --git a/pkg/core/contract.go b/pkg/core/contract.go deleted file mode 100644 index ca97d2b20..000000000 --- a/pkg/core/contract.go +++ /dev/null @@ -1,41 +0,0 @@ -package core - -import ( - "encoding/json" - "regexp" - "strings" -) - -type Contract struct { - Name string `json:"name"` - Account string `json:"account"` - Expr Expr `json:"expr"` -} - -func (c *Contract) UnmarshalJSON(data []byte) error { - type AuxContract Contract - type Aux struct { - AuxContract - Expr map[string]interface{} `json:"expr"` - } - aux := Aux{} - err := json.Unmarshal(data, &aux) - if err != nil { - return err - } - expr, err := ParseRuleExpr(aux.Expr) - if err != nil { - return err - } - *c = Contract{ - Expr: expr, - Account: aux.Account, - Name: aux.Name, - } - return nil -} - -func (c Contract) Match(addr string) bool { - r := strings.ReplaceAll(c.Account, "*", ".*") - return regexp.MustCompile(r).Match([]byte(addr)) -} diff --git a/pkg/core/contract_test.go b/pkg/core/contract_test.go deleted file mode 100644 index dc53d4be3..000000000 --- a/pkg/core/contract_test.go +++ /dev/null @@ -1,15 +0,0 @@ -package core - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestContract_UnmarshalJSON(t *testing.T) { - contract := &Contract{} - data := `{"id": "foo", "account": 
"order:*", "expr": { "$gte": ["$balance", 0] }}` - err := json.Unmarshal([]byte(data), contract) - assert.NoError(t, err) -} diff --git a/pkg/core/expr.go b/pkg/core/expr.go deleted file mode 100644 index 9e8a34b24..000000000 --- a/pkg/core/expr.go +++ /dev/null @@ -1,291 +0,0 @@ -package core - -import ( - "encoding/json" - "errors" - "fmt" - "math" - "reflect" - "strings" -) - -type EvalContext struct { - Variables map[string]interface{} - Metadata Metadata - Asset string -} - -type Expr interface { - Eval(EvalContext) bool -} - -type Value interface { - eval(ctx EvalContext) interface{} -} - -type ExprOr []Expr - -func (o ExprOr) Eval(ctx EvalContext) bool { - for _, e := range o { - if e.Eval(ctx) { - return true - } - } - return false -} - -func (e ExprOr) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$or": []Expr(e), - }) -} - -type ExprAnd []Expr - -func (o ExprAnd) Eval(ctx EvalContext) bool { - for _, e := range o { - if !e.Eval(ctx) { - return false - } - } - return true -} - -func (e ExprAnd) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$and": []Expr(e), - }) -} - -type ExprEq struct { - Op1 Value - Op2 Value -} - -func (o *ExprEq) Eval(ctx EvalContext) bool { - return reflect.DeepEqual(o.Op1.eval(ctx), o.Op2.eval(ctx)) -} - -func (e ExprEq) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$eq": []interface{}{e.Op1, e.Op2}, - }) -} - -type ExprGt struct { - Op1 Value - Op2 Value -} - -func (o *ExprGt) Eval(ctx EvalContext) bool { - return o.Op1.eval(ctx).(*MonetaryInt).Gt(o.Op2.eval(ctx).(*MonetaryInt)) -} - -func (e ExprGt) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$gt": []interface{}{e.Op1, e.Op2}, - }) -} - -type ExprLt struct { - Op1 Value - Op2 Value -} - -func (o *ExprLt) Eval(ctx EvalContext) bool { - return o.Op1.eval(ctx).(*MonetaryInt).Lt(o.Op2.eval(ctx).(*MonetaryInt)) -} - -func (e ExprLt) MarshalJSON() 
([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$lt": []interface{}{e.Op1, e.Op2}, - }) -} - -type ExprGte struct { - Op1 Value - Op2 Value -} - -func (o *ExprGte) Eval(ctx EvalContext) bool { - return o.Op1.eval(ctx).(*MonetaryInt).Gte(o.Op2.eval(ctx).(*MonetaryInt)) -} - -func (e ExprGte) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$gte": []interface{}{e.Op1, e.Op2}, - }) -} - -type ExprLte struct { - Op1 Value - Op2 Value -} - -func (o *ExprLte) Eval(ctx EvalContext) bool { - return o.Op1.eval(ctx).(*MonetaryInt).Lte(o.Op2.eval(ctx).(*MonetaryInt)) -} - -func (e ExprLte) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$lte": []interface{}{e.Op1, e.Op2}, - }) -} - -type ConstantExpr struct { - Value interface{} -} - -func (e ConstantExpr) eval(ctx EvalContext) interface{} { - return e.Value -} - -func (e ConstantExpr) MarshalJSON() ([]byte, error) { - return json.Marshal(e.Value) -} - -type VariableExpr struct { - Name string -} - -func (e VariableExpr) eval(ctx EvalContext) interface{} { - return ctx.Variables[e.Name] -} - -func (e VariableExpr) MarshalJSON() ([]byte, error) { - return []byte(fmt.Sprintf(`"$%s"`, e.Name)), nil -} - -type MetaExpr struct { - Name string -} - -func (e MetaExpr) eval(ctx EvalContext) interface{} { - return ctx.Metadata[e.Name] -} - -func (e MetaExpr) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ - "$meta": e.Name, - }) -} - -func parse(v interface{}) (expr interface{}, err error) { - switch vv := v.(type) { - case map[string]interface{}: - if len(vv) != 1 { - return nil, errors.New("malformed expression") - } - for key, vvv := range vv { - switch { - case strings.HasPrefix(key, "$"): - switch key { - case "$meta": - value, ok := vvv.(string) - if !ok { - return nil, errors.New("$meta operator invalid") - } - return &MetaExpr{Name: value}, nil - case "$or", "$and": - slice, ok := vvv.([]interface{}) - if !ok { - 
return nil, errors.New("Expected slice for operator " + key) - } - exprs := make([]Expr, 0) - for _, item := range slice { - r, err := parse(item) - if err != nil { - return nil, err - } - expr, ok := r.(Expr) - if !ok { - return nil, errors.New("unexpected value when parsing " + key) - } - exprs = append(exprs, expr) - } - switch key { - case "$and": - expr = ExprAnd(exprs) - case "$or": - expr = ExprOr(exprs) - } - case "$eq", "$gt", "$gte", "$lt", "$lte": - vv, ok := vvv.([]interface{}) - if !ok { - return nil, errors.New("expected array when using $eq") - } - if len(vv) != 2 { - return nil, errors.New("expected 2 items when using $eq") - } - op1, err := parse(vv[0]) - if err != nil { - return nil, err - } - op1Value, ok := op1.(Value) - if !ok { - return nil, errors.New("op1 must be valuable") - } - op2, err := parse(vv[1]) - if err != nil { - return nil, err - } - op2Value, ok := op2.(Value) - if !ok { - return nil, errors.New("op2 must be valuable") - } - switch key { - case "$eq": - expr = &ExprEq{ - Op1: op1Value, - Op2: op2Value, - } - case "$gt": - expr = &ExprGt{ - Op1: op1Value, - Op2: op2Value, - } - case "$gte": - expr = &ExprGte{ - Op1: op1Value, - Op2: op2Value, - } - case "$lt": - expr = &ExprLt{ - Op1: op1Value, - Op2: op2Value, - } - case "$lte": - expr = &ExprLte{ - Op1: op1Value, - Op2: op2Value, - } - } - default: - return nil, errors.New("unknown operator '" + key + "'") - } - } - } - case string: - if !strings.HasPrefix(vv, "$") { - return ConstantExpr{v}, nil - } - return VariableExpr{vv[1:]}, nil - case float64: - if math.Round(vv) != vv { - return nil, errors.New("only integer supported") - } - return ConstantExpr{NewMonetaryInt(int64(vv))}, nil - default: - return ConstantExpr{v}, nil - } - - return expr, nil -} - -func ParseRuleExpr(v map[string]interface{}) (Expr, error) { - ret, err := parse(v) - if err != nil { - return nil, err - } - return ret.(Expr), nil -} diff --git a/pkg/core/expr_test.go b/pkg/core/expr_test.go deleted file 
mode 100644 index e2024426b..000000000 --- a/pkg/core/expr_test.go +++ /dev/null @@ -1,136 +0,0 @@ -package core - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestRules(t *testing.T) { - - type testCase struct { - rule map[string]interface{} - context EvalContext - shouldBeAccepted bool - } - - var tests = []testCase{ - { - rule: map[string]interface{}{ - "$or": []interface{}{ - map[string]interface{}{ - "$gt": []interface{}{ - "$balance", float64(0), - }, - }, - map[string]interface{}{ - "$eq": []interface{}{ - map[string]interface{}{ - "$meta": "approved", - }, - "yes", - }, - }, - }, - }, - context: EvalContext{ - Variables: map[string]interface{}{ - "balance": NewMonetaryInt(-10), - }, - Metadata: map[string]any{ - "approved": "yes", - }, - }, - shouldBeAccepted: true, - }, - { - rule: map[string]interface{}{ - "$or": []interface{}{ - map[string]interface{}{ - "$gte": []interface{}{ - "$balance", float64(0), - }, - }, - map[string]interface{}{ - "$lte": []interface{}{ - "$balance", float64(0), - }, - }, - }, - }, - context: EvalContext{ - Variables: map[string]interface{}{ - "balance": NewMonetaryInt(-100), - }, - Metadata: map[string]any{}, - }, - shouldBeAccepted: true, - }, - { - rule: map[string]interface{}{ - "$lt": []interface{}{ - "$balance", float64(0), - }, - }, - context: EvalContext{ - Variables: map[string]interface{}{ - "balance": NewMonetaryInt(100), - }, - Metadata: map[string]any{}, - }, - shouldBeAccepted: false, - }, - { - rule: map[string]interface{}{ - "$lte": []interface{}{ - "$balance", float64(0), - }, - }, - context: EvalContext{ - Variables: map[string]interface{}{ - "balance": NewMonetaryInt(0), - }, - Metadata: map[string]any{}, - }, - shouldBeAccepted: true, - }, - { - rule: map[string]interface{}{ - "$and": []interface{}{ - map[string]interface{}{ - "$gt": []interface{}{ - "$balance", float64(0), - }, - }, - map[string]interface{}{ - "$eq": []interface{}{ - map[string]interface{}{ - "$meta": 
"approved", - }, - "yes", - }, - }, - }, - }, - context: EvalContext{ - Variables: map[string]interface{}{ - "balance": NewMonetaryInt(10), - }, - Metadata: map[string]any{ - "approved": "no", - }, - }, - shouldBeAccepted: false, - }, - } - - for i, test := range tests { - t.Run(fmt.Sprintf("test%d", i), func(t *testing.T) { - r, err := ParseRuleExpr(test.rule) - assert.NoError(t, err) - assert.Equal(t, test.shouldBeAccepted, r.Eval(test.context)) - }) - } - -} diff --git a/pkg/core/log.go b/pkg/core/log.go deleted file mode 100644 index 65d6ad66d..000000000 --- a/pkg/core/log.go +++ /dev/null @@ -1,180 +0,0 @@ -package core - -import ( - "encoding/json" - "strconv" - "strings" - "time" -) - -const SetMetadataType = "SET_METADATA" -const NewTransactionType = "NEW_TRANSACTION" - -type Log struct { - ID uint64 `json:"id"` - Type string `json:"type"` - Data interface{} `json:"data"` - Hash string `json:"hash"` - Date time.Time `json:"date"` -} - -func NewTransactionLogWithDate(previousLog *Log, tx Transaction, time time.Time) Log { - id := uint64(0) - if previousLog != nil { - id = previousLog.ID + 1 - } - l := Log{ - ID: id, - Type: NewTransactionType, - Date: time, - Data: tx, - } - l.Hash = Hash(previousLog, &l) - return l -} - -func NewTransactionLog(previousLog *Log, tx Transaction) Log { - return NewTransactionLogWithDate(previousLog, tx, tx.Timestamp) -} - -type SetMetadata struct { - TargetType string `json:"targetType"` - TargetID interface{} `json:"targetId"` - Metadata Metadata `json:"metadata"` -} - -func (s *SetMetadata) UnmarshalJSON(data []byte) error { - type X struct { - TargetType string `json:"targetType"` - TargetID json.RawMessage `json:"targetId"` - Metadata Metadata `json:"metadata"` - } - x := X{} - err := json.Unmarshal(data, &x) - if err != nil { - return err - } - var id interface{} - switch strings.ToUpper(x.TargetType) { - case strings.ToUpper(MetaTargetTypeAccount): - id = "" - err = json.Unmarshal(x.TargetID, &id) - case 
strings.ToUpper(MetaTargetTypeTransaction): - id, err = strconv.ParseUint(string(x.TargetID), 10, 64) - default: - panic("unknown type") - } - if err != nil { - return err - } - - *s = SetMetadata{ - TargetType: x.TargetType, - TargetID: id, - Metadata: x.Metadata, - } - return nil -} - -func NewSetMetadataLog(previousLog *Log, at time.Time, metadata SetMetadata) Log { - id := uint64(0) - if previousLog != nil { - id = previousLog.ID + 1 - } - l := Log{ - ID: id, - Type: SetMetadataType, - Date: at, - Data: metadata, - } - l.Hash = Hash(previousLog, &l) - return l -} - -func HydrateLog(_type string, data string) (interface{}, error) { - switch _type { - case NewTransactionType: - tx := Transaction{} - err := json.Unmarshal([]byte(data), &tx) - if err != nil { - return nil, err - } - - return tx, nil - case SetMetadataType: - sm := SetMetadata{} - err := json.Unmarshal([]byte(data), &sm) - if err != nil { - return nil, err - } - return sm, nil - default: - panic("unknown type " + _type) - } -} - -type Accounts map[string]Account - -func (a Accounts) ensureExists(accounts ...string) { - for _, account := range accounts { - _, ok := a[account] - if !ok { - a[account] = Account{ - Address: account, - Metadata: Metadata{}, - } - } - } -} - -type LogProcessor struct { - Transactions []*ExpandedTransaction - Accounts Accounts - Volumes AccountsAssetsVolumes -} - -func (m *LogProcessor) ProcessNextLog(logs ...Log) { - for _, log := range logs { - switch log.Type { - case NewTransactionType: - tx := ExpandedTransaction{ - Transaction: log.Data.(Transaction), - PreCommitVolumes: AccountsAssetsVolumes{}, - PostCommitVolumes: AccountsAssetsVolumes{}, - } - m.Transactions = append(m.Transactions, &tx) - for _, posting := range tx.Postings { - tx.PreCommitVolumes.SetVolumes(posting.Source, posting.Asset, m.Volumes.GetVolumes(posting.Source, posting.Asset)) - tx.PreCommitVolumes.SetVolumes(posting.Destination, posting.Asset, m.Volumes.GetVolumes(posting.Destination, 
posting.Asset)) - } - for _, posting := range tx.Postings { - m.Accounts.ensureExists(posting.Source, posting.Destination) - m.Volumes.AddOutput(posting.Source, posting.Asset, posting.Amount) - m.Volumes.AddInput(posting.Destination, posting.Asset, posting.Amount) - } - for _, posting := range tx.Postings { - tx.PostCommitVolumes.SetVolumes(posting.Source, posting.Asset, m.Volumes.GetVolumes(posting.Source, posting.Asset)) - tx.PostCommitVolumes.SetVolumes(posting.Destination, posting.Asset, m.Volumes.GetVolumes(posting.Destination, posting.Asset)) - } - case SetMetadataType: - setMetadata := log.Data.(SetMetadata) - switch setMetadata.TargetType { - case MetaTargetTypeAccount: - account := setMetadata.TargetID.(string) - m.Accounts.ensureExists(account) - m.Accounts[account].Metadata.Merge(setMetadata.Metadata) - case MetaTargetTypeTransaction: - id := setMetadata.TargetID.(int) - m.Transactions[id].Metadata.Merge(setMetadata.Metadata) - } - } - } -} - -func NewLogProcessor() *LogProcessor { - return &LogProcessor{ - Transactions: make([]*ExpandedTransaction, 0), - Accounts: Accounts{}, - Volumes: AccountsAssetsVolumes{}, - } -} diff --git a/pkg/core/log_test.go b/pkg/core/log_test.go deleted file mode 100644 index 69d5c3865..000000000 --- a/pkg/core/log_test.go +++ /dev/null @@ -1,255 +0,0 @@ -package core - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestLog(t *testing.T) { - - d := time.Unix(1648542028, 0).UTC() - - log1 := NewTransactionLogWithDate(nil, Transaction{ - TransactionData: TransactionData{ - Metadata: Metadata{}, - }, - }, d) - log2 := NewTransactionLogWithDate(&log1, Transaction{ - TransactionData: TransactionData{ - Metadata: Metadata{}, - }, - }, d) - if !assert.Equal(t, "9ee060170400f556b7e1575cb13f9db004f150a08355c7431c62bc639166431e", log2.Hash) { - return - } -} - -func TestLogProcessor(t *testing.T) { - - inputs := []any{ - Transaction{ - 
TransactionData: TransactionData{ - Postings: []Posting{ - { - Source: "world", - Destination: "orders:1234", - Amount: NewMonetaryInt(100), - Asset: "USD", - }, - }, - Metadata: Metadata{}, - }, - ID: 0, - }, - Transaction{ - TransactionData: TransactionData{ - Postings: []Posting{ - { - Source: "orders:1234", - Destination: "merchant:1234", - Amount: NewMonetaryInt(90), - Asset: "USD", - }, - { - Source: "orders:1234", - Destination: "fees", - Amount: NewMonetaryInt(10), - Asset: "USD", - }, - }, - Metadata: Metadata{}, - }, - ID: 1, - }, - SetMetadata{ - TargetType: MetaTargetTypeTransaction, - TargetID: 0, - Metadata: Metadata{ - "psp-ref": json.RawMessage(`"#ABCDEF"`), - }, - }, - SetMetadata{ - TargetType: MetaTargetTypeAccount, - TargetID: "orders:1234", - Metadata: Metadata{ - "booking-online": json.RawMessage(`true`), - }, - }, - } - - p := NewLogProcessor() - var previousLog *Log - for _, input := range inputs { - var log Log - switch ob := input.(type) { - case Transaction: - log = NewTransactionLog(previousLog, ob) - case SetMetadata: - log = NewSetMetadataLog(previousLog, time.Now().Truncate(time.Second).UTC(), ob) - } - p.ProcessNextLog(log) - previousLog = &log - } - - require.Equal(t, []*ExpandedTransaction{ - { - Transaction: Transaction{ - TransactionData: TransactionData{ - Postings: []Posting{ - { - Source: "world", - Destination: "orders:1234", - Amount: NewMonetaryInt(100), - Asset: "USD", - }, - }, - Metadata: Metadata{ - "psp-ref": json.RawMessage(`"#ABCDEF"`), - }, - }, - ID: 0, - }, - PreCommitVolumes: AccountsAssetsVolumes{ - "world": { - "USD": { - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(0), - }, - }, - "orders:1234": { - "USD": { - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(0), - }, - }, - }, - PostCommitVolumes: AccountsAssetsVolumes{ - "world": { - "USD": { - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(100), - }, - }, - "orders:1234": { - "USD": { - Input: NewMonetaryInt(100), - Output: NewMonetaryInt(0), - 
}, - }, - }, - }, - { - Transaction: Transaction{ - TransactionData: TransactionData{ - Postings: []Posting{ - { - Source: "orders:1234", - Destination: "merchant:1234", - Amount: NewMonetaryInt(90), - Asset: "USD", - }, - { - Source: "orders:1234", - Destination: "fees", - Amount: NewMonetaryInt(10), - Asset: "USD", - }, - }, - Metadata: Metadata{}, - }, - ID: 1, - }, - PreCommitVolumes: AccountsAssetsVolumes{ - "orders:1234": { - "USD": { - Input: NewMonetaryInt(100), - Output: NewMonetaryInt(0), - }, - }, - "merchant:1234": { - "USD": { - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(0), - }, - }, - "fees": { - "USD": { - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(0), - }, - }, - }, - PostCommitVolumes: AccountsAssetsVolumes{ - "orders:1234": { - "USD": { - Input: NewMonetaryInt(100), - Output: NewMonetaryInt(100), - }, - }, - "merchant:1234": { - "USD": { - Input: NewMonetaryInt(90), - Output: NewMonetaryInt(0), - }, - }, - "fees": { - "USD": { - Input: NewMonetaryInt(10), - Output: NewMonetaryInt(0), - }, - }, - }, - }, - }, p.Transactions) - require.Equal(t, AccountsAssetsVolumes{ - "world": { - "USD": { - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(100), - }, - }, - "orders:1234": { - "USD": { - Input: NewMonetaryInt(100), - Output: NewMonetaryInt(100), - }, - }, - "merchant:1234": { - "USD": { - Input: NewMonetaryInt(90), - Output: NewMonetaryInt(0), - }, - }, - "fees": { - "USD": { - Input: NewMonetaryInt(10), - Output: NewMonetaryInt(0), - }, - }, - }, p.Volumes) - require.EqualValues(t, Accounts{ - "world": { - Address: "world", - Metadata: Metadata{}, - }, - "orders:1234": { - Address: "orders:1234", - Metadata: Metadata{ - "booking-online": json.RawMessage(`true`), - }, - }, - "merchant:1234": { - Address: "merchant:1234", - Metadata: Metadata{}, - }, - "fees": { - Address: "fees", - Metadata: Metadata{}, - }, - }, p.Accounts) - -} diff --git a/pkg/core/mapping.go b/pkg/core/mapping.go deleted file mode 100644 index 
b2599e7b9..000000000 --- a/pkg/core/mapping.go +++ /dev/null @@ -1,5 +0,0 @@ -package core - -type Mapping struct { - Contracts []Contract `json:"contracts"` -} diff --git a/pkg/core/metadata.go b/pkg/core/metadata.go deleted file mode 100644 index 4d6418c6f..000000000 --- a/pkg/core/metadata.go +++ /dev/null @@ -1,95 +0,0 @@ -package core - -import ( - "database/sql/driver" - "encoding/json" - "fmt" - "reflect" -) - -const ( - numaryNamespace = "com.numary.spec/" - revertKey = "state/reverts" - revertedKey = "state/reverted" - MetaTargetTypeAccount = "ACCOUNT" - MetaTargetTypeTransaction = "TRANSACTION" -) - -func SpecMetadata(name string) string { - return numaryNamespace + name -} - -type Metadata map[string]any - -// IsEquivalentTo allow to compare to metadata object. -func (m1 Metadata) IsEquivalentTo(m2 Metadata) bool { - return reflect.DeepEqual(m1, m2) -} - -func (m1 Metadata) Merge(m2 Metadata) Metadata { - for k, v := range m2 { - m1[k] = v - } - return m1 -} - -func (m Metadata) MarkReverts(txID uint64) { - m.Merge(RevertMetadata(txID)) -} - -func (m Metadata) IsReverted() bool { - return m[SpecMetadata(revertedKey)].(string) == "\"reverted\"" -} - -// Scan - Implement the database/sql scanner interface -func (m *Metadata) Scan(value interface{}) error { - if value == nil { - return nil - } - v, err := driver.String.ConvertValue(value) - if err != nil { - return err - } - - *m = Metadata{} - switch vv := v.(type) { - case []uint8: - return json.Unmarshal(vv, m) - case string: - return json.Unmarshal([]byte(vv), m) - default: - panic("not handled type") - } -} - -func (m Metadata) ConvertValue(v interface{}) (driver.Value, error) { - return json.Marshal(v) -} - -type RevertedMetadataSpecValue struct { - By string `json:"by"` -} - -func RevertedMetadataSpecKey() string { - return SpecMetadata(revertedKey) -} - -func RevertMetadataSpecKey() string { - return SpecMetadata(revertKey) -} - -func ComputeMetadata(key string, value interface{}) Metadata { - 
return Metadata{ - key: value, - } -} - -func RevertedMetadata(by uint64) Metadata { - return ComputeMetadata(RevertedMetadataSpecKey(), RevertedMetadataSpecValue{ - By: fmt.Sprint(by), - }) -} - -func RevertMetadata(tx uint64) Metadata { - return ComputeMetadata(RevertMetadataSpecKey(), fmt.Sprint(tx)) -} diff --git a/pkg/core/migrations.go b/pkg/core/migrations.go deleted file mode 100644 index 0cdb29a34..000000000 --- a/pkg/core/migrations.go +++ /dev/null @@ -1,10 +0,0 @@ -package core - -import "time" - -type MigrationInfo struct { - Version string `json:"version"` - Name string `json:"name"` - State string `json:"state,omitempty"` - Date time.Time `json:"date,omitempty"` -} diff --git a/pkg/core/operations.go b/pkg/core/operations.go deleted file mode 100644 index dca1945d9..000000000 --- a/pkg/core/operations.go +++ /dev/null @@ -1,7 +0,0 @@ -package core - -type AdditionalOperations struct { - SetAccountMeta AccountsMeta `json:"set_account_meta,omitempty"` -} - -type AccountsMeta map[string]Metadata diff --git a/pkg/core/script.go b/pkg/core/script.go deleted file mode 100644 index 298e52202..000000000 --- a/pkg/core/script.go +++ /dev/null @@ -1,18 +0,0 @@ -package core - -import ( - "encoding/json" - "time" -) - -type ScriptData struct { - Script - Timestamp time.Time `json:"timestamp"` - Reference string `json:"reference"` - Metadata Metadata `json:"metadata"` -} - -type Script struct { - Plain string `json:"plain"` - Vars map[string]json.RawMessage `json:"vars" swaggertype:"object"` -} diff --git a/pkg/core/transaction.go b/pkg/core/transaction.go deleted file mode 100644 index 288df911e..000000000 --- a/pkg/core/transaction.go +++ /dev/null @@ -1,93 +0,0 @@ -package core - -import ( - "crypto/sha256" - "encoding/json" - "fmt" - "time" -) - -type Transactions struct { - Transactions []TransactionData `json:"transactions" binding:"required,dive"` -} - -type TransactionData struct { - Postings Postings `json:"postings"` - Reference string 
`json:"reference"` - Metadata Metadata `json:"metadata" swaggertype:"object"` - Timestamp time.Time `json:"timestamp"` -} - -func (t *TransactionData) Reverse() TransactionData { - postings := make(Postings, len(t.Postings)) - copy(postings, t.Postings) - postings.Reverse() - - ret := TransactionData{ - Postings: postings, - } - if t.Reference != "" { - ret.Reference = "revert_" + t.Reference - } - return ret -} - -var _ json.Marshaler = ExpandedTransaction{} - -type Transaction struct { - TransactionData - ID uint64 `json:"txid"` -} - -type ExpandedTransaction struct { - Transaction - PreCommitVolumes AccountsAssetsVolumes `json:"preCommitVolumes,omitempty"` - PostCommitVolumes AccountsAssetsVolumes `json:"postCommitVolumes,omitempty"` -} - -func (t ExpandedTransaction) MarshalJSON() ([]byte, error) { - type transaction ExpandedTransaction - return json.Marshal(struct { - transaction - Timestamp string `json:"timestamp"` - }{ - transaction: transaction(t), - // The std lib format time as RFC3339Nano, use a custom encoding to ensure backward compatibility - Timestamp: t.Timestamp.Format(time.RFC3339), - }) -} - -func (t *ExpandedTransaction) AppendPosting(p Posting) { - t.Postings = append(t.Postings, p) -} - -func (t *ExpandedTransaction) IsReverted() bool { - if _, ok := t.Metadata[RevertedMetadataSpecKey()]; ok { - return true - } - return false -} - -func Hash(t1, t2 interface{}) string { - b1, err := json.Marshal(t1) - if err != nil { - panic(err) - } - - b2, err := json.Marshal(t2) - if err != nil { - panic(err) - } - - h := sha256.New() - _, err = h.Write(b1) - if err != nil { - panic(err) - } - _, err = h.Write(b2) - if err != nil { - panic(err) - } - - return fmt.Sprintf("%x", h.Sum(nil)) -} diff --git a/pkg/core/transaction_test.go b/pkg/core/transaction_test.go deleted file mode 100644 index 7cc935e85..000000000 --- a/pkg/core/transaction_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package core - -import ( - "testing" - - "github.com/google/go-cmp/cmp" -) - 
-func TestReverseTransaction(t *testing.T) { - tx := &ExpandedTransaction{ - Transaction: Transaction{ - TransactionData: TransactionData{ - Postings: Postings{ - { - Source: "world", - Destination: "users:001", - Amount: NewMonetaryInt(100), - Asset: "COIN", - }, - { - Source: "users:001", - Destination: "payments:001", - Amount: NewMonetaryInt(100), - Asset: "COIN", - }, - }, - Reference: "foo", - }, - }, - } - - expected := TransactionData{ - Postings: Postings{ - { - Source: "payments:001", - Destination: "users:001", - Amount: NewMonetaryInt(100), - Asset: "COIN", - }, - { - Source: "users:001", - Destination: "world", - Amount: NewMonetaryInt(100), - Asset: "COIN", - }, - }, - Reference: "revert_foo", - } - - if diff := cmp.Diff(expected, tx.Reverse()); diff != "" { - t.Errorf("Reverse() mismatch (-want +got):\n%s", diff) - } -} diff --git a/pkg/core/volumes.go b/pkg/core/volumes.go deleted file mode 100644 index eea5e5f15..000000000 --- a/pkg/core/volumes.go +++ /dev/null @@ -1,197 +0,0 @@ -package core - -import ( - "database/sql/driver" - "encoding/json" -) - -type Volumes struct { - Input *MonetaryInt `json:"input"` - Output *MonetaryInt `json:"output"` -} - -type VolumesWithBalance struct { - Input *MonetaryInt `json:"input"` - Output *MonetaryInt `json:"output"` - Balance *MonetaryInt `json:"balance"` -} - -func (v Volumes) MarshalJSON() ([]byte, error) { - return json.Marshal(VolumesWithBalance{ - Input: v.Input, - Output: v.Output, - Balance: v.Input.Sub(v.Output), - }) -} - -func (v Volumes) Balance() *MonetaryInt { - return v.Input.Sub(v.Output) -} - -type AssetsBalances map[string]*MonetaryInt -type AssetsVolumes map[string]Volumes - -type AccountsBalances map[string]AssetsBalances - -func (v AssetsVolumes) Balances() AssetsBalances { - balances := AssetsBalances{} - for asset, vv := range v { - balances[asset] = vv.Input.Sub(vv.Output) - } - return balances -} - -type AccountsAssetsVolumes map[string]AssetsVolumes - -func (a AccountsAssetsVolumes) 
GetVolumes(account, asset string) Volumes { - if a == nil { - return Volumes{ - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(0), - } - } - if assetsVolumes, ok := a[account]; !ok { - return Volumes{ - Input: NewMonetaryInt(0), - Output: NewMonetaryInt(0), - } - } else { - return Volumes{ - Input: assetsVolumes[asset].Input.OrZero(), - Output: assetsVolumes[asset].Output.OrZero(), - } - } -} - -func (a *AccountsAssetsVolumes) SetVolumes(account, asset string, volumes Volumes) { - if *a == nil { - *a = AccountsAssetsVolumes{} - } - if assetsVolumes, ok := (*a)[account]; !ok { - (*a)[account] = map[string]Volumes{ - asset: { - Input: volumes.Input.OrZero(), - Output: volumes.Output.OrZero(), - }, - } - } else { - assetsVolumes[asset] = Volumes{ - Input: volumes.Input.OrZero(), - Output: volumes.Output.OrZero(), - } - } -} - -func (a *AccountsAssetsVolumes) AddInput(account, asset string, input *MonetaryInt) { - if *a == nil { - *a = AccountsAssetsVolumes{} - } - if assetsVolumes, ok := (*a)[account]; !ok { - (*a)[account] = map[string]Volumes{ - asset: { - Input: input.OrZero(), - Output: NewMonetaryInt(0), - }, - } - } else { - volumes := assetsVolumes[asset] - volumes.Input = volumes.Input.Add(input) - assetsVolumes[asset] = volumes - } -} - -func (a *AccountsAssetsVolumes) AddOutput(account, asset string, output *MonetaryInt) { - if *a == nil { - *a = AccountsAssetsVolumes{} - } - if assetsVolumes, ok := (*a)[account]; !ok { - (*a)[account] = map[string]Volumes{ - asset: { - Output: output.OrZero(), - Input: NewMonetaryInt(0), - }, - } - } else { - volumes := assetsVolumes[asset] - volumes.Output = volumes.Output.Add(output) - assetsVolumes[asset] = volumes - } -} - -func (a AccountsAssetsVolumes) HasAccount(account string) bool { - if a == nil { - return false - } - _, ok := a[account] - return ok -} - -func (a AccountsAssetsVolumes) HasAccountAndAsset(account, asset string) bool { - if a == nil { - return false - } - volumesByAsset, ok := a[account] - if 
!ok { - return false - } - _, ok = volumesByAsset[asset] - return ok -} - -// Scan - Implement the database/sql scanner interface -func (a *AccountsAssetsVolumes) Scan(value interface{}) error { - if value == nil { - return nil - } - - val, err := driver.String.ConvertValue(value) - if err != nil { - return err - } - - *a = AccountsAssetsVolumes{} - switch val := val.(type) { - case []uint8: - return json.Unmarshal(val, a) - case string: - return json.Unmarshal([]byte(val), a) - default: - panic("not handled type") - } -} - -func AggregatePreCommitVolumes(txs ...ExpandedTransaction) AccountsAssetsVolumes { - ret := AccountsAssetsVolumes{} - for i := 0; i < len(txs); i++ { - tx := txs[i] - for _, posting := range tx.Postings { - if !ret.HasAccountAndAsset(posting.Source, posting.Asset) { - ret.SetVolumes(posting.Source, posting.Asset, - tx.PreCommitVolumes.GetVolumes(posting.Source, posting.Asset)) - } - if !ret.HasAccountAndAsset(posting.Destination, posting.Asset) { - ret.SetVolumes(posting.Destination, posting.Asset, - tx.PreCommitVolumes.GetVolumes(posting.Destination, posting.Asset)) - } - } - } - return ret -} - -func AggregatePostCommitVolumes(txs ...ExpandedTransaction) AccountsAssetsVolumes { - ret := AccountsAssetsVolumes{} - for i := len(txs) - 1; i >= 0; i-- { - tx := txs[i] - for _, posting := range tx.Postings { - if !ret.HasAccountAndAsset(posting.Source, posting.Asset) { - ret.SetVolumes(posting.Source, posting.Asset, - tx.PostCommitVolumes.GetVolumes(posting.Source, posting.Asset)) - } - if !ret.HasAccountAndAsset(posting.Destination, posting.Asset) { - ret.SetVolumes(posting.Destination, posting.Asset, - tx.PostCommitVolumes.GetVolumes(posting.Destination, posting.Asset)) - } - } - } - return ret -} diff --git a/pkg/events/events.go b/pkg/events/events.go new file mode 100644 index 000000000..f6d6f55c6 --- /dev/null +++ b/pkg/events/events.go @@ -0,0 +1,11 @@ +package events + +const ( + EventVersion = "v2" + EventApp = "ledger" + + 
EventTypeCommittedTransactions = "COMMITTED_TRANSACTIONS" + EventTypeSavedMetadata = "SAVED_METADATA" + EventTypeRevertedTransaction = "REVERTED_TRANSACTION" + EventTypeDeletedMetadata = "DELETED_METADATA" +) diff --git a/pkg/ledger/benchmarks_test.go b/pkg/ledger/benchmarks_test.go deleted file mode 100644 index 33d31e756..000000000 --- a/pkg/ledger/benchmarks_test.go +++ /dev/null @@ -1,279 +0,0 @@ -package ledger_test - -import ( - "context" - "fmt" - "strconv" - "testing" - - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/stretchr/testify/require" -) - -const nbPostings = 1000 - -func BenchmarkLedger_PostTransactions_Scripts_Single_FixedAccounts(b *testing.B) { - var execResScript core.ExpandedTransaction - - txData := core.TransactionData{} - for i := 0; i < nbPostings; i++ { - txData.Postings = append(txData.Postings, core.Posting{ - Source: "world", - Destination: "benchmarks:" + strconv.Itoa(i), - Asset: "COIN", - Amount: core.NewMonetaryInt(10), - }) - } - _, err := txData.Postings.Validate() - require.NoError(b, err) - - runOnLedger(func(l *ledger.Ledger) { - b.ResetTimer() - - res := core.ExpandedTransaction{} - - for n := 0; n < b.N; n++ { - b.StopTimer() - script := txToScriptData(txData) - b.StartTimer() - res, err = l.ExecuteScript(context.Background(), true, script) - require.NoError(b, err) - require.Len(b, res.Postings, nbPostings) - } - - execResScript = res - require.Len(b, execResScript.Postings, nbPostings) - }) -} - -func BenchmarkLedger_PostTransactions_Postings_Single_FixedAccounts(b *testing.B) { - var execRes []core.ExpandedTransaction - - runOnLedger(func(l *ledger.Ledger) { - txData := core.TransactionData{} - for i := 0; i < nbPostings; i++ { - txData.Postings = append(txData.Postings, core.Posting{ - Source: "world", - Destination: "benchmarks:" + strconv.Itoa(i), - Asset: "COIN", - Amount: core.NewMonetaryInt(10), - }) - } - - b.ResetTimer() - - res := []core.ExpandedTransaction{} - - for n 
:= 0; n < b.N; n++ { - _, err := txData.Postings.Validate() - require.NoError(b, err) - res, err = l.ExecuteTxsData(context.Background(), true, txData) - require.NoError(b, err) - require.Len(b, res, 1) - require.Len(b, res[0].Postings, nbPostings) - } - - execRes = res - require.Len(b, execRes, 1) - require.Len(b, execRes[0].Postings, nbPostings) - }) -} - -func BenchmarkLedger_PostTransactions_Postings_Batch_FixedAccounts(b *testing.B) { - var execRes []core.ExpandedTransaction - - txsData := newTxsData(1) - - runOnLedger(func(l *ledger.Ledger) { - b.ResetTimer() - - res := []core.ExpandedTransaction{} - - for n := 0; n < b.N; n++ { - var err error - for _, txData := range txsData { - _, err := txData.Postings.Validate() - require.NoError(b, err) - } - res, err = l.ExecuteTxsData(context.Background(), true, txsData...) - require.NoError(b, err) - require.Len(b, res, 7) - require.Len(b, res[0].Postings, 1) - require.Len(b, res[1].Postings, 1) - require.Len(b, res[2].Postings, 2) - require.Len(b, res[3].Postings, 4) - require.Len(b, res[4].Postings, 4) - require.Len(b, res[5].Postings, 1) - require.Len(b, res[6].Postings, 1) - } - - execRes = res - require.Len(b, execRes, 7) - require.Len(b, execRes[0].Postings, 1) - require.Len(b, execRes[1].Postings, 1) - require.Len(b, execRes[2].Postings, 2) - require.Len(b, execRes[3].Postings, 4) - require.Len(b, execRes[4].Postings, 4) - require.Len(b, execRes[5].Postings, 1) - require.Len(b, execRes[6].Postings, 1) - }) -} - -func BenchmarkLedger_PostTransactions_Postings_Batch_VaryingAccounts(b *testing.B) { - var execRes []core.ExpandedTransaction - - runOnLedger(func(l *ledger.Ledger) { - b.ResetTimer() - - res := []core.ExpandedTransaction{} - - for n := 0; n < b.N; n++ { - b.StopTimer() - txsData := newTxsData(n) - b.StartTimer() - var err error - for _, txData := range txsData { - _, err := txData.Postings.Validate() - require.NoError(b, err) - } - res, err = l.ExecuteTxsData(context.Background(), true, txsData...) 
- require.NoError(b, err) - require.Len(b, res, 7) - require.Len(b, res[0].Postings, 1) - require.Len(b, res[1].Postings, 1) - require.Len(b, res[2].Postings, 2) - require.Len(b, res[3].Postings, 4) - require.Len(b, res[4].Postings, 4) - require.Len(b, res[5].Postings, 1) - require.Len(b, res[6].Postings, 1) - } - - execRes = res - require.Len(b, execRes, 7) - require.Len(b, execRes[0].Postings, 1) - require.Len(b, execRes[1].Postings, 1) - require.Len(b, execRes[2].Postings, 2) - require.Len(b, execRes[3].Postings, 4) - require.Len(b, execRes[4].Postings, 4) - require.Len(b, execRes[5].Postings, 1) - require.Len(b, execRes[6].Postings, 1) - }) -} - -func newTxsData(i int) []core.TransactionData { - return []core.TransactionData{ - { - Postings: core.Postings{ - { - Source: "world", - Destination: fmt.Sprintf("payins:%d", i), - Amount: core.NewMonetaryInt(10000), - Asset: "EUR/2", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: fmt.Sprintf("payins:%d", i), - Destination: fmt.Sprintf("users:%d:wallet", i), - Amount: core.NewMonetaryInt(10000), - Asset: "EUR/2", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: "world", - Destination: fmt.Sprintf("teller:%d", i), - Amount: core.NewMonetaryInt(350000), - Asset: "RBLX/6", - }, - { - Source: "world", - Destination: fmt.Sprintf("teller:%d", i), - Amount: core.NewMonetaryInt(1840000), - Asset: "SNAP/6", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: fmt.Sprintf("users:%d:wallet", i), - Destination: fmt.Sprintf("trades:%d", i), - Amount: core.NewMonetaryInt(1500), - Asset: "EUR/2", - }, - { - Source: fmt.Sprintf("trades:%d", i), - Destination: fmt.Sprintf("fiat:holdings:%d", i), - Amount: core.NewMonetaryInt(1500), - Asset: "EUR/2", - }, - { - Source: fmt.Sprintf("teller:%d", i), - Destination: fmt.Sprintf("trades:%d", i), - Amount: core.NewMonetaryInt(350000), - Asset: "RBLX/6", - }, - { - Source: fmt.Sprintf("trades:%d", i), - Destination: fmt.Sprintf("users:%d:wallet", i), - Amount: 
core.NewMonetaryInt(350000), - Asset: "RBLX/6", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: fmt.Sprintf("users:%d:wallet", i), - Destination: fmt.Sprintf("trades:%d", i), - Amount: core.NewMonetaryInt(4230), - Asset: "EUR/2", - }, - { - Source: fmt.Sprintf("trades:%d", i), - Destination: fmt.Sprintf("fiat:holdings:%d", i), - Amount: core.NewMonetaryInt(4230), - Asset: "EUR/2", - }, - { - Source: fmt.Sprintf("teller:%d", i), - Destination: fmt.Sprintf("trades:%d", i), - Amount: core.NewMonetaryInt(1840000), - Asset: "SNAP/6", - }, - { - Source: fmt.Sprintf("trades:%d", i), - Destination: fmt.Sprintf("users:%d:wallet", i), - Amount: core.NewMonetaryInt(1840000), - Asset: "SNAP/6", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: fmt.Sprintf("users:%d:wallet", i), - Destination: fmt.Sprintf("users:%d:withdrawals", i), - Amount: core.NewMonetaryInt(2270), - Asset: "EUR/2", - }, - }, - }, - { - Postings: core.Postings{ - { - Source: fmt.Sprintf("users:%d:withdrawals", i), - Destination: fmt.Sprintf("payouts:%d", i), - Amount: core.NewMonetaryInt(2270), - Asset: "EUR/2", - }, - }, - }, - } -} diff --git a/pkg/ledger/cache.go b/pkg/ledger/cache.go deleted file mode 100644 index 41cabc0bf..000000000 --- a/pkg/ledger/cache.go +++ /dev/null @@ -1,19 +0,0 @@ -package ledger - -import ( - "github.com/dgraph-io/ristretto" - "github.com/pkg/errors" -) - -func NewCache(capacityBytes, maxNumKeys int64, metrics bool) *ristretto.Cache { - cache, err := ristretto.NewCache(&ristretto.Config{ - NumCounters: maxNumKeys * 10, - MaxCost: capacityBytes, - BufferItems: 64, - Metrics: metrics, - }) - if err != nil { - panic(errors.Wrap(err, "creating cache")) - } - return cache -} diff --git a/pkg/ledger/error.go b/pkg/ledger/error.go deleted file mode 100644 index af633e822..000000000 --- a/pkg/ledger/error.go +++ /dev/null @@ -1,184 +0,0 @@ -package ledger - -import ( - "fmt" - - "github.com/pkg/errors" -) - -type TransactionCommitError struct { - TXIndex int 
`json:"index"` - Err error `json:"error"` -} - -func (e TransactionCommitError) Unwrap() error { - return e.Err -} - -func (e TransactionCommitError) Error() string { - return errors.Wrapf(e.Err, "processing tx %d", e.TXIndex).Error() -} - -func (e TransactionCommitError) Is(err error) bool { - _, ok := err.(*TransactionCommitError) - return ok -} - -func NewTransactionCommitError(txIndex int, err error) *TransactionCommitError { - return &TransactionCommitError{ - TXIndex: txIndex, - Err: err, - } -} - -func IsTransactionCommitError(err error) bool { - return errors.Is(err, &TransactionCommitError{}) -} - -type InsufficientFundError struct { - Asset string -} - -func (e InsufficientFundError) Error() string { - return fmt.Sprintf("balance.insufficient.%s", e.Asset) -} - -func (e InsufficientFundError) Is(err error) bool { - _, ok := err.(*InsufficientFundError) - return ok -} - -func NewInsufficientFundError(asset string) *InsufficientFundError { - return &InsufficientFundError{ - Asset: asset, - } -} - -func IsInsufficientFundError(err error) bool { - return errors.Is(err, &InsufficientFundError{}) -} - -type ValidationError struct { - Msg string -} - -func (v ValidationError) Error() string { - return v.Msg -} - -func (v ValidationError) Is(err error) bool { - _, ok := err.(*ValidationError) - return ok -} - -func NewValidationError(msg string) *ValidationError { - return &ValidationError{ - Msg: msg, - } -} - -func IsValidationError(err error) bool { - return errors.Is(err, &ValidationError{}) -} - -type ConflictError struct{} - -func (e ConflictError) Error() string { - return "conflict error on reference" -} - -func (e ConflictError) Is(err error) bool { - _, ok := err.(*ConflictError) - return ok -} - -func NewConflictError() *ConflictError { - return &ConflictError{} -} - -func IsConflictError(err error) bool { - return errors.Is(err, &ConflictError{}) -} - -const ( - ScriptErrorInsufficientFund = "INSUFFICIENT_FUND" - ScriptErrorCompilationFailed = 
"COMPILATION_FAILED" - ScriptErrorNoScript = "NO_SCRIPT" - ScriptErrorMetadataOverride = "METADATA_OVERRIDE" -) - -type ScriptError struct { - Code string - Message string -} - -func (e ScriptError) Error() string { - return fmt.Sprintf("[%s] %s", e.Code, e.Message) -} - -func (e ScriptError) Is(err error) bool { - eerr, ok := err.(*ScriptError) - if !ok { - return false - } - return e.Code == eerr.Code -} - -func IsScriptErrorWithCode(err error, code string) bool { - return errors.Is(err, &ScriptError{ - Code: code, - }) -} - -func NewScriptError(code string, message string) *ScriptError { - return &ScriptError{ - Code: code, - Message: message, - } -} - -type LockError struct { - Err error -} - -func (e LockError) Error() string { - return e.Err.Error() -} - -func (e LockError) Is(err error) bool { - _, ok := err.(*LockError) - return ok -} - -func IsLockError(err error) bool { - return errors.Is(err, &LockError{}) -} - -func NewLockError(err error) *LockError { - return &LockError{ - Err: err, - } -} - -type NotFoundError struct { - Msg string -} - -func (v NotFoundError) Error() string { - return v.Msg -} - -func (v NotFoundError) Is(err error) bool { - _, ok := err.(*NotFoundError) - return ok -} - -func NewNotFoundError(msg string) *NotFoundError { - return &NotFoundError{ - Msg: msg, - } -} - -func IsNotFoundError(err error) bool { - return errors.Is(err, &NotFoundError{}) -} diff --git a/pkg/ledger/execute_script.go b/pkg/ledger/execute_script.go deleted file mode 100644 index beb335c93..000000000 --- a/pkg/ledger/execute_script.go +++ /dev/null @@ -1,329 +0,0 @@ -package ledger - -import ( - "context" - "crypto/sha256" - "encoding/json" - "fmt" - "time" - - "github.com/DmitriyVTitov/size" - "github.com/dgraph-io/ristretto" - machine "github.com/formancehq/machine/core" - "github.com/formancehq/machine/script/compiler" - "github.com/formancehq/machine/vm" - "github.com/formancehq/machine/vm/program" - "github.com/numary/ledger/pkg/core" - 
"github.com/numary/ledger/pkg/opentelemetry" - "github.com/numary/ledger/pkg/storage" - "github.com/pkg/errors" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/trace" -) - -func (l *Ledger) ExecuteScript(ctx context.Context, preview bool, script core.ScriptData) (core.ExpandedTransaction, error) { - ctx, span := opentelemetry.Start(ctx, "ExecuteScript") - defer span.End() - - addOps := new(core.AdditionalOperations) - - lastTx, err := l.store.GetLastTransaction(ctx) - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - "could not get last transaction") - } - - vAggr := NewVolumeAggregator(l) - var nextTxId uint64 - if lastTx != nil { - nextTxId = lastTx.ID + 1 - } - - accs := map[string]*core.AccountWithVolumes{} - // Until v1.5.0, dates was stored as string using rfc3339 format - // So round the date to the second to keep the same behaviour - if script.Timestamp.IsZero() { - script.Timestamp = time.Now().UTC().Truncate(time.Second) - } else { - script.Timestamp = script.Timestamp.UTC() - } - - past := false - if lastTx != nil && script.Timestamp.Before(lastTx.Timestamp) { - past = true - } - if past && !l.allowPastTimestamps { - return core.ExpandedTransaction{}, NewValidationError(fmt.Sprintf( - "cannot pass a timestamp prior to the last transaction: %s (passed) is %s before %s (last)", - script.Timestamp.Format(time.RFC3339Nano), - lastTx.Timestamp.Sub(script.Timestamp), - lastTx.Timestamp.Format(time.RFC3339Nano))) - } - - if script.Reference != "" { - txs, err := l.GetTransactions(ctx, *NewTransactionsQuery(). 
- WithReferenceFilter(script.Reference)) - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - "get transactions with reference") - } - if len(txs.Data) > 0 { - return core.ExpandedTransaction{}, NewConflictError() - } - } - - if script.Plain == "" { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorNoScript, - "no script to execute") - } - - m, err := NewMachineFromScript(script.Plain, l.cache, span) - if err != nil { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorCompilationFailed, - err.Error()) - } - - if err := m.SetVarsFromJSON(script.Vars); err != nil { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorCompilationFailed, - errors.Wrap(err, "could not set variables").Error()) - } - - resourcesChan, err := m.ResolveResources() - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - "could not resolve program resources") - } - for req := range resourcesChan { - if req.Error != nil { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorCompilationFailed, - errors.Wrap(req.Error, "could not resolve program resources").Error()) - } - if _, ok := accs[req.Account]; !ok { - accs[req.Account], err = l.GetAccount(ctx, req.Account) - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - fmt.Sprintf("could not get account %q", req.Account)) - } - } - if req.Key != "" { - entry, ok := accs[req.Account].Metadata[req.Key] - if !ok { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorCompilationFailed, - fmt.Sprintf("missing key %v in metadata for account %v", req.Key, req.Account)) - } - data, err := json.Marshal(entry) - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, "marshaling metadata") - } - value, err := machine.NewValueFromTypedJSON(data) - if err != nil { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorCompilationFailed, - errors.Wrap(err, fmt.Sprintf( - "invalid format for metadata at key %v for account %v", 
- req.Key, req.Account)).Error()) - } - req.Response <- *value - } else if req.Asset != "" { - amt := accs[req.Account].Balances[req.Asset].OrZero() - resp := machine.MonetaryInt(*amt) - req.Response <- &resp - } else { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorCompilationFailed, - errors.Wrap(err, fmt.Sprintf("invalid ResourceRequest: %+v", req)).Error()) - } - } - - balanceCh, err := m.ResolveBalances() - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - "could not resolve balances") - } - for req := range balanceCh { - if req.Error != nil { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorCompilationFailed, - errors.Wrap(req.Error, "could not resolve program balances").Error()) - } - var amt *core.MonetaryInt - if _, ok := accs[req.Account]; !ok { - accs[req.Account], err = l.GetAccount(ctx, req.Account) - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - fmt.Sprintf("could not get account %q", req.Account)) - } - } - amt = accs[req.Account].Balances[req.Asset].OrZero() - resp := machine.MonetaryInt(*amt) - req.Response <- &resp - } - - exitCode, err := m.Execute() - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - "script execution failed") - } - - if exitCode != vm.EXIT_OK { - switch exitCode { - case vm.EXIT_FAIL: - return core.ExpandedTransaction{}, errors.New( - "script exited with error code EXIT_FAIL") - case vm.EXIT_FAIL_INVALID: - return core.ExpandedTransaction{}, errors.New( - "internal error: compiled script was invalid") - case vm.EXIT_FAIL_INSUFFICIENT_FUNDS: - // TODO: If the machine can provide the asset which is failing - // we should be able to use InsufficientFundError{} instead of error code - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorInsufficientFund, - "account had insufficient funds") - default: - return core.ExpandedTransaction{}, errors.New( - "script execution failed") - } - } - - if len(m.Postings) == 0 { - return 
core.ExpandedTransaction{}, - NewValidationError("transaction has no postings") - } - - txVolumeAggr := vAggr.NextTx() - postings := make([]core.Posting, len(m.Postings)) - for j, posting := range m.Postings { - amt := core.MonetaryInt(*posting.Amount) - if err := txVolumeAggr.Transfer(ctx, - posting.Source, posting.Destination, posting.Asset, &amt, accs); err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, "transferring volumes") - } - postings[j] = core.Posting{ - Source: posting.Source, - Destination: posting.Destination, - Amount: &amt, - Asset: posting.Asset, - } - } - - for account, volumes := range txVolumeAggr.PostCommitVolumes { - if _, ok := accs[account]; !ok { - accs[account], err = l.GetAccount(ctx, account) - if err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, fmt.Sprintf("get account '%s'", account)) - } - } - for asset, vol := range volumes { - accs[account].Volumes[asset] = vol - } - accs[account].Balances = accs[account].Volumes.Balances() - } - - metadata := m.GetTxMetaJSON() - for k, v := range metadata { - asMapAny := make(map[string]any) - if err := json.Unmarshal(v.([]byte), &asMapAny); err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, "unmarshaling transaction metadata") - } - metadata[k] = asMapAny - } - for k, v := range script.Metadata { - _, ok := metadata[k] - if ok { - return core.ExpandedTransaction{}, NewScriptError(ScriptErrorMetadataOverride, - "cannot override metadata from script") - } - metadata[k] = v - } - - for account, meta := range m.GetAccountsMetaJSON() { - meta := meta.(map[string][]byte) - for k, v := range meta { - asMapAny := make(map[string]any) - if err := json.Unmarshal(v, &asMapAny); err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, "unmarshaling account metadata") - } - if account[0] == '@' { - account = account[1:] - } - if addOps.SetAccountMeta == nil { - addOps.SetAccountMeta = core.AccountsMeta{} - } - if _, ok := 
addOps.SetAccountMeta[account]; !ok { - addOps.SetAccountMeta[account] = core.Metadata{} - } - addOps.SetAccountMeta[account][k] = asMapAny - } - } - - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: postings, - Reference: script.Reference, - Metadata: core.Metadata(metadata), - Timestamp: script.Timestamp, - }, - ID: nextTxId, - }, - PreCommitVolumes: txVolumeAggr.PreCommitVolumes, - PostCommitVolumes: txVolumeAggr.PostCommitVolumes, - } - - if preview { - return tx, nil - } - - if err := l.store.Commit(ctx, tx); err != nil { - switch { - case storage.IsErrorCode(err, storage.ConstraintFailed): - return core.ExpandedTransaction{}, NewConflictError() - default: - return core.ExpandedTransaction{}, errors.Wrap(err, - "committing transactions") - } - } - - if addOps != nil && addOps.SetAccountMeta != nil { - for addr, m := range addOps.SetAccountMeta { - if err := l.store.UpdateAccountMetadata(ctx, - addr, m, time.Now().Round(time.Second).UTC()); err != nil { - return core.ExpandedTransaction{}, errors.Wrap(err, - "updating account metadata") - } - } - } - - l.monitor.CommittedTransactions(ctx, l.store.Name(), tx) - if addOps != nil && addOps.SetAccountMeta != nil { - for addr, m := range addOps.SetAccountMeta { - l.monitor.SavedMetadata(ctx, - l.store.Name(), core.MetaTargetTypeAccount, addr, m) - } - } - - return tx, nil -} - -func NewMachineFromScript(script string, cache *ristretto.Cache, span trace.Span) (*vm.Machine, error) { - h := sha256.New() - if _, err := h.Write([]byte(script)); err != nil { - return nil, errors.Wrap(err, "hashing script") - } - curr := h.Sum(nil) - - if cachedProgram, found := cache.Get(curr); found { - span.SetAttributes(attribute.Bool("numscript-cache-hit", true)) - return vm.NewMachine(cachedProgram.(program.Program)), nil - } - - span.SetAttributes(attribute.Bool("numscript-cache-hit", false)) - prog, err := compiler.Compile(script) - if err != nil { - return nil, 
err - } - - progSizeBytes := size.Of(*prog) - if progSizeBytes == -1 { - return nil, fmt.Errorf("error while calculating the size in bytes of the program") - } - cache.Set(curr, *prog, int64(progSizeBytes)) - - return vm.NewMachine(*prog), nil -} diff --git a/pkg/ledger/execute_script_test.go b/pkg/ledger/execute_script_test.go deleted file mode 100644 index 349cf4922..000000000 --- a/pkg/ledger/execute_script_test.go +++ /dev/null @@ -1,946 +0,0 @@ -package ledger_test - -import ( - "context" - "crypto/sha256" - "encoding/json" - "fmt" - "sort" - "strconv" - "strings" - "testing" - - "github.com/DmitriyVTitov/size" - "github.com/formancehq/machine/script/compiler" - "github.com/numary/ledger/pkg/api/apierrors" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/opentelemetry" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNoScript(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - script := core.ScriptData{} - - _, err := l.ExecuteScript(context.Background(), false, script) - assert.IsType(t, &ledger.ScriptError{}, err) - assert.Equal(t, ledger.ScriptErrorNoScript, err.(*ledger.ScriptError).Code) - }) -} - -func TestCompilationError(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - script := core.ScriptData{ - Script: core.Script{Plain: "willnotcompile"}, - } - - _, err := l.ExecuteScript(context.Background(), false, script) - assert.IsType(t, &ledger.ScriptError{}, err) - assert.Equal(t, ledger.ScriptErrorCompilationFailed, err.(*ledger.ScriptError).Code) - }) -} - -func TestSend(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - t.Run("nominal", func(t *testing.T) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 99] ( - source = @world - destination = @user:001 - )`, - }, - } - _, err := l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - - assertBalance(t, l, "user:001", - "USD/2", 
core.NewMonetaryInt(99)) - }) - - t.Run("one send with zero amount should fail", func(t *testing.T) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 0] ( - source = @world - destination = @user:001 - )`, - }, - } - _, err := l.ExecuteScript(context.Background(), false, script) - require.Error(t, err) - require.True(t, ledger.IsValidationError(err)) - require.ErrorContains(t, err, "transaction has no postings") - }) - - t.Run("one send with monetary all should fail", func(t *testing.T) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 *] ( - source = @alice - destination = @user:001 - )`, - }, - } - _, err := l.ExecuteScript(context.Background(), false, script) - require.Error(t, err) - require.True(t, ledger.IsValidationError(err)) - require.ErrorContains(t, err, "transaction has no postings") - }) - - t.Run("one send with zero amount and another with positive amount should succeed", func(t *testing.T) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 0] ( - source = @world - destination = @user:001 - ) - send [USD/2 1] ( - source = @world - destination = @user:001 - )`, - }, - } - res, err := l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - require.Equal(t, 1, len(res.Postings)) - - assertBalance(t, l, "user:001", - "USD/2", core.NewMonetaryInt(100)) - }) - - t.Run("one send with monetary all and another with positive amount should succeed", func(t *testing.T) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 *] ( - source = @alice - destination = @user:001 - ) - send [USD/2 1] ( - source = @world - destination = @user:001 - )`, - }, - } - res, err := l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - require.Equal(t, 1, len(res.Postings)) - - assertBalance(t, l, "user:001", - "USD/2", core.NewMonetaryInt(101)) - }) - }) -} - -func TestNoVariables(t *testing.T) { - runOnLedger(func(l 
*ledger.Ledger) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - account $dest - } - - send [CAD/2 42] ( - source = @world - destination = $dest - )`, - Vars: map[string]json.RawMessage{}, - }, - } - - _, err := l.ExecuteScript(context.Background(), false, script) - assert.Error(t, err) - }) -} - -func TestVariables(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - account $dest - } - - send [CAD/2 42] ( - source = @world - destination = $dest - )`, - Vars: map[string]json.RawMessage{ - "dest": json.RawMessage(`"user:042"`), - }, - }, - } - - _, err := l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - - assertBalance(t, l, "user:042", - "CAD/2", core.NewMonetaryInt(42)) - }) -} - -func TestVariablesEmptyAccount(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - send [EUR 1] ( - source = @world - destination = @bob - )`, - }, - } - _, err := l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - - script = core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - account $acc - } - - send [EUR 1] ( - source = { - @bob - $acc - } - destination = @alice - )`, - Vars: map[string]json.RawMessage{ - "acc": json.RawMessage(`""`), - }, - }, - } - _, err = l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - - assertBalance(t, l, "alice", "EUR", core.NewMonetaryInt(1)) - assertBalance(t, l, "bob", "EUR", core.NewMonetaryInt(0)) - }) -} - -func TestEnoughFunds(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "user:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - script := core.ScriptData{ - Script: 
core.Script{ - Plain: ` - send [COIN 95] ( - source = @user:001 - destination = @world - )`, - }, - } - - _, err = l.ExecuteScript(context.Background(), false, script) - assert.NoError(t, err) - }) -} - -func TestNotEnoughFunds(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "user:002", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - send [COIN 105] ( - source = @user:002 - destination = @world - )`, - }, - } - - _, err = l.ExecuteScript(context.Background(), false, script) - assert.True(t, ledger.IsScriptErrorWithCode(err, apierrors.ErrInsufficientFund)) - }) -} - -func TestMissingMetadata(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - plain := ` - vars { - account $sale - account $seller = meta($sale, "seller") - } - - send [COIN *] ( - source = $sale - destination = $seller - )` - script := core.ScriptData{ - Script: core.Script{ - Plain: plain, - Vars: map[string]json.RawMessage{ - "sale": json.RawMessage(`"sales:042"`), - }, - }, - } - - _, err := l.ExecuteScript(context.Background(), false, script) - assert.True(t, ledger.IsScriptErrorWithCode(err, ledger.ScriptErrorCompilationFailed)) - }) -} - -func TestMetadata(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "sales:042", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - err = l.SaveMeta(context.Background(), core.MetaTargetTypeAccount, - "sales:042", - core.Metadata{ - "seller": json.RawMessage(`{ - "type": "account", - "value": "users:053" - }`), - }) - require.NoError(t, err) - - err = 
l.SaveMeta(context.Background(), core.MetaTargetTypeAccount, - "users:053", - core.Metadata{ - "commission": json.RawMessage(`{ - "type": "portion", - "value": "15.5%" - }`), - }) - require.NoError(t, err) - - plain := ` - vars { - account $sale - account $seller = meta($sale, "seller") - portion $commission = meta($seller, "commission") - } - - send [COIN *] ( - source = $sale - destination = { - remaining to $seller - $commission to @platform - } - ) - ` - require.NoError(t, err) - - script := core.ScriptData{ - Script: core.Script{ - Plain: plain, - Vars: map[string]json.RawMessage{ - "sale": json.RawMessage(`"sales:042"`), - }, - }, - } - - _, err = l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - - assertBalance(t, l, "sales:042", "COIN", core.NewMonetaryInt(0)) - assertBalance(t, l, "users:053", "COIN", core.NewMonetaryInt(85)) - assertBalance(t, l, "platform", "COIN", core.NewMonetaryInt(15)) - }) -} - -func TestSetTxMeta(t *testing.T) { - type testCase struct { - name string - script core.ScriptData - expectedMetadata core.Metadata - expectedErrorCode string - } - for _, tc := range []testCase{ - { - name: "nominal", - script: core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 99] ( - source=@world - destination=@user:001 - )`, - }, - Metadata: core.Metadata{ - "priority": "low", - }, - }, - expectedMetadata: core.Metadata{ - "priority": "low", - }, - }, - { - name: "define metadata on script", - script: core.ScriptData{ - Script: core.Script{ - Plain: ` - set_tx_meta("priority", "low") - - send [COIN 10] ( - source = @world - destination = @user:001 - )`, - }, - }, - expectedMetadata: core.Metadata{ - "priority": map[string]any{"type": "string", "value": "low"}, - }, - }, - { - name: "override metadata of script", - script: core.ScriptData{ - Script: core.Script{ - Plain: ` - set_tx_meta("priority", "low") - - send [USD/2 99] ( - source=@world - destination=@user:001 - )`, - }, - Metadata: core.Metadata{ - 
"priority": "high", - }, - }, - expectedErrorCode: ledger.ScriptErrorMetadataOverride, - }, - } { - t.Run(tc.name, func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - _, err := l.ExecuteScript(context.Background(), false, tc.script) - - if tc.expectedErrorCode != "" { - require.Error(t, err) - require.True(t, ledger.IsScriptErrorWithCode(err, tc.expectedErrorCode)) - } else { - require.NoError(t, err) - last, err := l.GetLedgerStore().GetLastTransaction(context.Background()) - require.NoError(t, err) - assert.True(t, last.Metadata.IsEquivalentTo(tc.expectedMetadata)) - } - }) - }) - } -} - -func TestScriptSetReference(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - plain := ` - send [USD/2 99] ( - source=@world - destination=@user:001 - )` - - script := core.ScriptData{ - Script: core.Script{ - Plain: plain, - Vars: map[string]json.RawMessage{}, - }, - Reference: "tx_ref", - } - - _, err := l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - - last, err := l.GetLedgerStore().GetLastTransaction(context.Background()) - require.NoError(t, err) - - assert.Equal(t, script.Reference, last.Reference) - }) -} - -func TestScriptReferenceConflict(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - _, err := l.ExecuteScript(context.Background(), false, - core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 99] ( - source=@world - destination=@user:001 - )`, - Vars: map[string]json.RawMessage{}, - }, - Reference: "tx_ref", - }) - require.NoError(t, err) - - _, err = l.ExecuteScript(context.Background(), false, - core.ScriptData{ - Script: core.Script{ - Plain: ` - send [USD/2 99] ( - source=@unexists - destination=@user:001 - )`, - Vars: map[string]json.RawMessage{}, - }, - Reference: "tx_ref", - }) - require.Error(t, err) - require.True(t, ledger.IsConflictError(err)) - }) -} - -func TestSetAccountMeta(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - t.Run("valid", func(t *testing.T) { - _, err := 
l.ExecuteScript(context.Background(), false, - core.ScriptData{ - Script: core.Script{Plain: ` - send [USD/2 99] ( - source = @world - destination = @user:001 - ) - set_account_meta(@alice, "aaa", "string meta") - set_account_meta(@alice, "bbb", 42) - set_account_meta(@alice, "ccc", COIN) - set_account_meta(@alice, "ddd", [COIN 30]) - set_account_meta(@alice, "eee", @bob) - `}, - }) - require.NoError(t, err) - - acc, err := l.GetAccount(context.Background(), "alice") - require.NoError(t, err) - require.Equal(t, core.Metadata{ - "aaa": map[string]any{"type": "string", "value": "string meta"}, - "bbb": map[string]any{"type": "number", "value": 42.}, - "ccc": map[string]any{"type": "asset", "value": "COIN"}, - "ddd": map[string]any{"type": "monetary", - "value": map[string]any{"asset": "COIN", "amount": 30.}}, - "eee": map[string]any{"type": "account", "value": "bob"}, - }, acc.Metadata) - }) - - t.Run("invalid syntax", func(t *testing.T) { - _, err := l.ExecuteScript(context.Background(), false, - core.ScriptData{ - Script: core.Script{Plain: ` - send [USD/2 99] ( - source = @world - destination = @user:001 - ) - set_account_meta(@bob, "is") - `}, - }) - require.True(t, ledger.IsScriptErrorWithCode(err, - ledger.ScriptErrorCompilationFailed)) - }) - }) -} - -func TestMonetaryVariableBalance(t *testing.T) { - t.Run("simple", func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "users:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - monetary $bal = balance(@users:001, COIN) - } - send $bal ( - source = @users:001 - destination = @world - )`, - }, - } - - _, err = l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - assertBalance(t, l, "world", "COIN", 
core.NewMonetaryInt(0)) - assertBalance(t, l, "users:001", "COIN", core.NewMonetaryInt(0)) - }) - }) - - t.Run("complex", func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "A", - Amount: core.NewMonetaryInt(40), - Asset: "USD/2", - }, - { - Source: "world", - Destination: "C", - Amount: core.NewMonetaryInt(90), - Asset: "USD/2", - }, - }, - } - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - monetary $initial = balance(@A, USD/2) - } - send [USD/2 100] ( - source = { - @A - @C - } - destination = { - max $initial to @B - remaining to @D - } - )`, - }, - } - - _, err = l.ExecuteScript(context.Background(), false, script) - require.NoError(t, err) - assertBalance(t, l, "B", "USD/2", core.NewMonetaryInt(40)) - assertBalance(t, l, "D", "USD/2", core.NewMonetaryInt(60)) - }) - }) - - t.Run("error insufficient funds", func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "users:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - monetary $bal = balance(@users:001, COIN) - } - send $bal ( - source = @users:001 - destination = @world - ) - send $bal ( - source = @users:001 - destination = @world - )`, - }, - } - _, err = l.ExecuteScript(context.Background(), false, script) - assert.True(t, ledger.IsScriptErrorWithCode(err, apierrors.ErrInsufficientFund)) - }) - }) - - t.Run("error negative balance", func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "users:001", - Amount: 
core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - monetary $bal = balance(@world, COIN) - } - send $bal ( - source = @users:001 - destination = @world - )`, - }, - } - - _, err = l.ExecuteScript(context.Background(), false, script) - assert.True(t, ledger.IsScriptErrorWithCode(err, ledger.ScriptErrorCompilationFailed)) - assert.ErrorContains(t, err, "must be non-negative") - }) - }) - - t.Run("error variable type", func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - script := core.ScriptData{ - Script: core.Script{ - Plain: ` - vars { - account $bal = balance(@users:001, COIN) - } - send $bal ( - source = @users:001 - destination = @world - )`, - }, - } - _, err := l.ExecuteScript(context.Background(), false, script) - assert.True(t, ledger.IsScriptErrorWithCode(err, apierrors.ErrScriptCompilationFailed)) - }) - }) -} - -func assertBalance(t *testing.T, l *ledger.Ledger, account, asset string, amount *core.MonetaryInt) { - user, err := l.GetAccount(context.Background(), account) - require.NoError(t, err) - - b := user.Balances[asset] - assert.Equalf(t, amount.String(), b.String(), - "wrong %v balance for account %v, expected: %s got: %s", - asset, account, - amount, b, - ) -} - -func TestNewMachineFromScript(t *testing.T) { - _, span := opentelemetry.Start(context.Background(), "TestNewMachineFromScript") - defer span.End() - - txData := core.TransactionData{} - for i := 0; i < nbPostings; i++ { - txData.Postings = append(txData.Postings, core.Posting{ - Source: "world", - Destination: "benchmarks:" + strconv.Itoa(i), - Asset: "COIN", - Amount: core.NewMonetaryInt(10), - }) - } - _, err := txData.Postings.Validate() - require.NoError(t, err) - script := txToScriptData(txData) - - h := sha256.New() - _, err = h.Write([]byte(script.Plain)) - require.NoError(t, err) - key := h.Sum(nil) - 
keySizeBytes := size.Of(key) - require.NotEqual(t, -1, keySizeBytes) - - prog, err := compiler.Compile(script.Plain) - require.NoError(t, err) - progSizeBytes := size.Of(*prog) - require.NotEqual(t, -1, progSizeBytes) - - t.Run("exact size", func(t *testing.T) { - capacityBytes := int64(keySizeBytes + progSizeBytes) - - cache := ledger.NewCache(capacityBytes, 1, true) - - m, err := ledger.NewMachineFromScript(script.Plain, cache, span) - require.NoError(t, err) - require.NotNil(t, m) - cache.Wait() - require.Equal(t, uint64(0), cache.Metrics.Hits()) - require.Equal(t, uint64(1), cache.Metrics.Misses()) - require.Equal(t, uint64(1), cache.Metrics.KeysAdded()) - - m, err = ledger.NewMachineFromScript(script.Plain, cache, span) - require.NoError(t, err) - require.NotNil(t, m) - cache.Wait() - require.Equal(t, uint64(1), cache.Metrics.Hits()) - require.Equal(t, uint64(1), cache.Metrics.Misses()) - require.Equal(t, uint64(1), cache.Metrics.KeysAdded()) - }) - - t.Run("one byte too small", func(t *testing.T) { - capacityBytes := int64(keySizeBytes+progSizeBytes) - 1 - - cache := ledger.NewCache(capacityBytes, 1, true) - - m, err := ledger.NewMachineFromScript(script.Plain, cache, span) - require.NoError(t, err) - require.NotNil(t, m) - cache.Wait() - require.Equal(t, uint64(0), cache.Metrics.Hits()) - require.Equal(t, uint64(1), cache.Metrics.Misses()) - require.Equal(t, uint64(0), cache.Metrics.KeysAdded()) - - m, err = ledger.NewMachineFromScript(script.Plain, cache, span) - require.NoError(t, err) - require.NotNil(t, m) - cache.Wait() - require.Equal(t, uint64(0), cache.Metrics.Hits()) - require.Equal(t, uint64(2), cache.Metrics.Misses()) - require.Equal(t, uint64(0), cache.Metrics.KeysAdded()) - }) -} - -type variable struct { - name string - jsonVal json.RawMessage -} - -func txToScriptData(txData core.TransactionData) core.ScriptData { - if len(txData.Postings) == 0 { - return core.ScriptData{} - } - - sb := strings.Builder{} - monetaryToVars := 
map[string]variable{} - accountsToVars := map[string]variable{} - i := 0 - j := 0 - for _, p := range txData.Postings { - if _, ok := accountsToVars[p.Source]; !ok { - if p.Source != core.WORLD { - accountsToVars[p.Source] = variable{ - name: fmt.Sprintf("va%d", i), - jsonVal: json.RawMessage(`"` + p.Source + `"`), - } - i++ - } - } - if _, ok := accountsToVars[p.Destination]; !ok { - if p.Destination != core.WORLD { - accountsToVars[p.Destination] = variable{ - name: fmt.Sprintf("va%d", i), - jsonVal: json.RawMessage(`"` + p.Destination + `"`), - } - i++ - } - } - mon := fmt.Sprintf("[%s %s]", p.Amount.String(), p.Asset) - if _, ok := monetaryToVars[mon]; !ok { - monetaryToVars[mon] = variable{ - name: fmt.Sprintf("vm%d", j), - jsonVal: json.RawMessage( - `{"asset":"` + p.Asset + `","amount":` + p.Amount.String() + `}`), - } - j++ - } - } - - sb.WriteString("vars {\n") - accVars := make([]string, 0) - for _, v := range accountsToVars { - accVars = append(accVars, v.name) - } - sort.Strings(accVars) - for _, v := range accVars { - sb.WriteString(fmt.Sprintf("\taccount $%s\n", v)) - } - monVars := make([]string, 0) - for _, v := range monetaryToVars { - monVars = append(monVars, v.name) - } - sort.Strings(monVars) - for _, v := range monVars { - sb.WriteString(fmt.Sprintf("\tmonetary $%s\n", v)) - } - sb.WriteString("}\n") - - for _, p := range txData.Postings { - m := fmt.Sprintf("[%s %s]", p.Amount.String(), p.Asset) - mon, ok := monetaryToVars[m] - if !ok { - panic(fmt.Sprintf("monetary %s not found", m)) - } - sb.WriteString(fmt.Sprintf("send $%s (\n", mon.name)) - if p.Source == core.WORLD { - sb.WriteString("\tsource = @world\n") - } else { - src, ok := accountsToVars[p.Source] - if !ok { - panic(fmt.Sprintf("source %s not found", p.Source)) - } - sb.WriteString(fmt.Sprintf("\tsource = $%s allowing unbounded overdraft\n", src.name)) - } - if p.Destination == core.WORLD { - sb.WriteString("\tdestination = @world\n") - } else { - dest, ok := 
accountsToVars[p.Destination] - if !ok { - panic(fmt.Sprintf("destination %s not found", p.Destination)) - } - sb.WriteString(fmt.Sprintf("\tdestination = $%s\n", dest.name)) - } - sb.WriteString(")\n") - } - - vars := map[string]json.RawMessage{} - for _, v := range accountsToVars { - vars[v.name] = v.jsonVal - } - for _, v := range monetaryToVars { - vars[v.name] = v.jsonVal - } - - return core.ScriptData{ - Script: core.Script{ - Plain: sb.String(), - Vars: vars, - }, - Timestamp: txData.Timestamp, - Reference: txData.Reference, - Metadata: txData.Metadata, - } -} diff --git a/pkg/ledger/execute_txsdata.go b/pkg/ledger/execute_txsdata.go deleted file mode 100644 index 0c20f5b20..000000000 --- a/pkg/ledger/execute_txsdata.go +++ /dev/null @@ -1,168 +0,0 @@ -package ledger - -import ( - "context" - "fmt" - "time" - - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/opentelemetry" - "github.com/numary/ledger/pkg/storage" - "github.com/pkg/errors" -) - -func (l *Ledger) ExecuteTxsData(ctx context.Context, preview bool, txsData ...core.TransactionData) ([]core.ExpandedTransaction, error) { - ctx, span := opentelemetry.Start(ctx, "ExecuteTxsData") - defer span.End() - - if len(txsData) == 0 { - return []core.ExpandedTransaction{}, errors.New("no transaction data to execute") - } - - lastTx, err := l.store.GetLastTransaction(ctx) - if err != nil { - return []core.ExpandedTransaction{}, errors.Wrap(err, - "could not get last transaction") - } - - vAggr := NewVolumeAggregator(l) - txs := make([]core.ExpandedTransaction, 0) - var nextTxId uint64 - var lastTxTimestamp time.Time - if lastTx != nil { - nextTxId = lastTx.ID + 1 - lastTxTimestamp = lastTx.Timestamp - } - - contracts := make([]core.Contract, 0) - mapping, err := l.store.LoadMapping(ctx) - if err != nil { - return []core.ExpandedTransaction{}, errors.Wrap(err, - "loading mapping") - } - if mapping != nil { - contracts = append(contracts, mapping.Contracts...) 
- } - contracts = append(contracts, DefaultContracts...) - - usedReferences := make(map[string]struct{}) - accs := map[string]*core.AccountWithVolumes{} - for i, txData := range txsData { - if len(txData.Postings) == 0 { - return []core.ExpandedTransaction{}, NewValidationError( - fmt.Sprintf("executing transaction data %d: no postings", i)) - } - // Until v1.5.0, dates was stored as string using rfc3339 format - // So round the date to the second to keep the same behaviour - if txData.Timestamp.IsZero() { - txData.Timestamp = time.Now().UTC().Truncate(time.Second) - } else { - txData.Timestamp = txData.Timestamp.UTC() - } - - past := false - if lastTx != nil && txData.Timestamp.Before(lastTxTimestamp) { - past = true - } - if past && !l.allowPastTimestamps { - return []core.ExpandedTransaction{}, NewValidationError(fmt.Sprintf( - "cannot pass a timestamp prior to the last transaction: %s (passed) is %s before %s (last)", - txData.Timestamp.Format(time.RFC3339Nano), - lastTxTimestamp.Sub(txData.Timestamp), - lastTxTimestamp.Format(time.RFC3339Nano))) - } - lastTxTimestamp = txData.Timestamp - - if txData.Reference != "" { - if _, ok := usedReferences[txData.Reference]; ok { - return []core.ExpandedTransaction{}, NewConflictError() - } - usedReferences[txData.Reference] = struct{}{} - - txs, err := l.GetTransactions(ctx, *NewTransactionsQuery(). 
- WithReferenceFilter(txData.Reference)) - if err != nil { - return []core.ExpandedTransaction{}, errors.Wrap(err, - "get transactions with reference") - } - if len(txs.Data) > 0 { - return []core.ExpandedTransaction{}, NewConflictError() - } - } - - txVolumeAggr := vAggr.NextTx() - for _, posting := range txData.Postings { - if err := txVolumeAggr.Transfer(ctx, - posting.Source, posting.Destination, posting.Asset, posting.Amount, accs); err != nil { - return []core.ExpandedTransaction{}, NewTransactionCommitError(i, err) - } - } - - for account, volumes := range txVolumeAggr.PostCommitVolumes { - if _, ok := accs[account]; !ok { - accs[account], err = l.GetAccount(ctx, account) - if err != nil { - return []core.ExpandedTransaction{}, NewTransactionCommitError(i, - errors.Wrap(err, fmt.Sprintf("get account '%s'", account))) - } - } - for asset, vol := range volumes { - accs[account].Volumes[asset] = vol - } - accs[account].Balances = accs[account].Volumes.Balances() - for asset, volume := range volumes { - if account == core.WORLD { - continue - } - - for _, contract := range contracts { - if contract.Match(account) { - if ok := contract.Expr.Eval(core.EvalContext{ - Variables: map[string]interface{}{ - "balance": volume.Balance(), - }, - Metadata: accs[account].Metadata, - Asset: asset, - }); !ok { - return []core.ExpandedTransaction{}, NewInsufficientFundError(asset) - } - break - } - } - } - } - - if txData.Metadata == nil { - txData.Metadata = core.Metadata{} - } - - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: txData, - ID: nextTxId, - }, - PreCommitVolumes: txVolumeAggr.PreCommitVolumes, - PostCommitVolumes: txVolumeAggr.PostCommitVolumes, - } - lastTx = &tx - txs = append(txs, tx) - nextTxId++ - } - - if preview { - return txs, nil - } - - if err := l.store.Commit(ctx, txs...); err != nil { - switch { - case storage.IsErrorCode(err, storage.ConstraintFailed): - return []core.ExpandedTransaction{}, NewConflictError() - 
default: - return []core.ExpandedTransaction{}, errors.Wrap(err, - "committing transactions") - } - } - - l.monitor.CommittedTransactions(ctx, l.store.Name(), txs...) - return txs, nil -} diff --git a/pkg/ledger/execute_txsdata_test.go b/pkg/ledger/execute_txsdata_test.go deleted file mode 100644 index 9057e4a44..000000000 --- a/pkg/ledger/execute_txsdata_test.go +++ /dev/null @@ -1,357 +0,0 @@ -package ledger_test - -import ( - "context" - "testing" - "time" - - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestLedger_ExecuteTxsData(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - t.Run("multi assets", func(t *testing.T) { - worldTotoUSD := core.NewMonetaryInt(43) - worldAliceUSD := core.NewMonetaryInt(98) - aliceTotoUSD := core.NewMonetaryInt(45) - worldTotoEUR := core.NewMonetaryInt(15) - worldAliceEUR := core.NewMonetaryInt(10) - totoAliceEUR := core.NewMonetaryInt(5) - - postings := []core.Posting{ - { - Source: "world", - Destination: "toto", - Amount: worldTotoUSD, - Asset: "USD", - }, - { - Source: "world", - Destination: "alice", - Amount: worldAliceUSD, - Asset: "USD", - }, - { - Source: "alice", - Destination: "toto", - Amount: aliceTotoUSD, - Asset: "USD", - }, - { - Source: "world", - Destination: "toto", - Amount: worldTotoEUR, - Asset: "EUR", - }, - { - Source: "world", - Destination: "alice", - Amount: worldAliceEUR, - Asset: "EUR", - }, - { - Source: "toto", - Destination: "alice", - Amount: totoAliceEUR, - Asset: "EUR", - }, - } - - expectedPreCommitVol := core.AccountsAssetsVolumes{ - "alice": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - "EUR": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "toto": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - "EUR": { - Input: 
core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "world": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - "EUR": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - } - - expectedPostCommitVol := core.AccountsAssetsVolumes{ - "alice": core.AssetsVolumes{ - "USD": { - Input: worldAliceUSD, - Output: aliceTotoUSD, - }, - "EUR": { - Input: worldAliceEUR.Add(totoAliceEUR), - Output: core.NewMonetaryInt(0), - }, - }, - "toto": core.AssetsVolumes{ - "USD": { - Input: worldTotoUSD.Add(aliceTotoUSD), - Output: core.NewMonetaryInt(0), - }, - "EUR": { - Input: worldTotoEUR, - Output: totoAliceEUR, - }, - }, - "world": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: worldTotoUSD.Add(worldAliceUSD), - }, - "EUR": { - Input: core.NewMonetaryInt(0), - Output: worldTotoEUR.Add(worldAliceEUR), - }, - }, - } - - t.Run("single transaction multi postings", func(t *testing.T) { - txsData := []core.TransactionData{ - { - Postings: postings, - Timestamp: time.Now().UTC().Round(time.Second), - Metadata: core.Metadata{}, - }, - } - - res, err := l.ExecuteTxsData(context.Background(), true, txsData...) - assert.NoError(t, err) - - assert.Equal(t, len(txsData), len(res)) - - expectedTxs := []core.ExpandedTransaction{{ - Transaction: core.Transaction{ - TransactionData: txsData[0], - ID: 0, - }, - PreCommitVolumes: expectedPreCommitVol, - PostCommitVolumes: expectedPostCommitVol, - }} - assert.Equal(t, expectedTxs, res) - - preCommitVolumes := core.AggregatePreCommitVolumes(res...) - postCommitVolumes := core.AggregatePostCommitVolumes(res...) 
- assert.Equal(t, expectedPreCommitVol, preCommitVolumes) - assert.Equal(t, expectedPostCommitVol, postCommitVolumes) - }) - - t.Run("multi transactions single postings", func(t *testing.T) { - now := time.Now().Round(time.Second) - txsData := []core.TransactionData{ - { - Postings: []core.Posting{postings[0]}, - Timestamp: now, - }, - { - Postings: []core.Posting{postings[1]}, - Timestamp: now.Add(time.Second), - }, - { - Postings: []core.Posting{postings[2]}, - Timestamp: now.Add(2 * time.Second), - }, - { - Postings: []core.Posting{postings[3]}, - Timestamp: now.Add(3 * time.Second), - }, - { - Postings: []core.Posting{postings[4]}, - Timestamp: now.Add(4 * time.Second), - }, - { - Postings: []core.Posting{postings[5]}, - Timestamp: now.Add(5 * time.Second), - }, - } - - res, err := l.ExecuteTxsData(context.Background(), true, txsData...) - require.NoError(t, err) - require.Equal(t, len(txsData), len(res)) - - expectedTxs := []core.ExpandedTransaction{ - { - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Timestamp: now.UTC(), - Postings: core.Postings{postings[0]}, - Metadata: core.Metadata{}, - }, - ID: 0, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "toto": core.AssetsVolumes{"USD": core.Volumes{Input: core.NewMonetaryInt(0), Output: core.NewMonetaryInt(0)}}, - "world": core.AssetsVolumes{"USD": core.Volumes{Input: core.NewMonetaryInt(0), Output: core.NewMonetaryInt(0)}}}, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "toto": core.AssetsVolumes{"USD": core.Volumes{Input: worldTotoUSD, Output: core.NewMonetaryInt(0)}}, - "world": core.AssetsVolumes{"USD": core.Volumes{Input: core.NewMonetaryInt(0), Output: worldTotoUSD}}}, - }, - { - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{postings[1]}, - Timestamp: now.UTC().Add(time.Second), - Metadata: core.Metadata{}, - }, - ID: 1, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{"USD": 
core.Volumes{Input: core.NewMonetaryInt(0), Output: worldTotoUSD}}, - "alice": core.AssetsVolumes{"USD": core.Volumes{Input: core.NewMonetaryInt(0), Output: core.NewMonetaryInt(0)}}, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{"USD": core.Volumes{Input: core.NewMonetaryInt(0), Output: worldTotoUSD.Add(worldAliceUSD)}}, - "alice": core.AssetsVolumes{"USD": core.Volumes{Input: worldAliceUSD, Output: core.NewMonetaryInt(0)}}, - }, - }, - { - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Timestamp: now.UTC().Add(2 * time.Second), - Postings: core.Postings{postings[2]}, - Metadata: core.Metadata{}, - }, - ID: 2, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "alice": core.AssetsVolumes{"USD": core.Volumes{Input: worldAliceUSD, Output: core.NewMonetaryInt(0)}}, - "toto": core.AssetsVolumes{"USD": core.Volumes{Input: worldTotoUSD, Output: core.NewMonetaryInt(0)}}, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "alice": core.AssetsVolumes{"USD": core.Volumes{Input: worldAliceUSD, Output: aliceTotoUSD}}, - "toto": core.AssetsVolumes{"USD": core.Volumes{Input: worldTotoUSD.Add(aliceTotoUSD), Output: core.NewMonetaryInt(0)}}, - }, - }, - { - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Timestamp: now.UTC().Add(3 * time.Second), - Postings: core.Postings{postings[3]}, - Metadata: core.Metadata{}, - }, - ID: 3, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{"EUR": core.Volumes{Input: core.NewMonetaryInt(0), Output: core.NewMonetaryInt(0)}}, - "toto": core.AssetsVolumes{"EUR": core.Volumes{Input: core.NewMonetaryInt(0), Output: core.NewMonetaryInt(0)}}, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{"EUR": core.Volumes{Input: core.NewMonetaryInt(0), Output: worldTotoEUR}}, - "toto": core.AssetsVolumes{"EUR": core.Volumes{Input: worldTotoEUR, Output: core.NewMonetaryInt(0)}}, - }, - }, - { - 
Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Timestamp: now.UTC().Add(4 * time.Second), - Postings: core.Postings{postings[4]}, - Metadata: core.Metadata{}, - }, - ID: 4, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{"EUR": core.Volumes{Input: core.NewMonetaryInt(0), Output: worldTotoEUR}}, - "alice": core.AssetsVolumes{"EUR": core.Volumes{Input: core.NewMonetaryInt(0), Output: core.NewMonetaryInt(0)}}, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{"EUR": core.Volumes{Input: core.NewMonetaryInt(0), Output: worldTotoEUR.Add(worldAliceEUR)}}, - "alice": core.AssetsVolumes{"EUR": core.Volumes{Input: worldAliceEUR, Output: core.NewMonetaryInt(0)}}, - }, - }, - { - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Timestamp: now.UTC().Add(5 * time.Second), - Postings: core.Postings{postings[5]}, - Metadata: core.Metadata{}, - }, - ID: 5, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "toto": core.AssetsVolumes{"EUR": core.Volumes{Input: worldTotoEUR, Output: core.NewMonetaryInt(0)}}, - "alice": core.AssetsVolumes{"EUR": core.Volumes{Input: worldAliceEUR, Output: core.NewMonetaryInt(0)}}, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "toto": core.AssetsVolumes{"EUR": core.Volumes{Input: worldTotoEUR, Output: totoAliceEUR}}, - "alice": core.AssetsVolumes{"EUR": core.Volumes{Input: worldAliceEUR.Add(totoAliceEUR), Output: core.NewMonetaryInt(0)}}, - }, - }, - } - assert.Equal(t, expectedTxs, res) - - preCommitVolumes := core.AggregatePreCommitVolumes(res...) - postCommitVolumes := core.AggregatePostCommitVolumes(res...) 
- assert.Equal(t, expectedPreCommitVol, preCommitVolumes) - assert.Equal(t, expectedPostCommitVol, postCommitVolumes) - }) - }) - - t.Run("empty", func(t *testing.T) { - _, err := l.ExecuteTxsData(context.Background(), true, core.TransactionData{}) - assert.Error(t, err) - assert.ErrorContains(t, err, "executing transaction data 0: no postings") - }) - - t.Run("amount zero", func(t *testing.T) { - res, err := l.ExecuteTxsData(context.Background(), true, core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Destination: "alice", - Amount: core.NewMonetaryInt(0), - Asset: "USD", - }, - }, - }) - assert.NoError(t, err) - assert.Equal(t, 1, len(res)) - }) - }) - - runOnLedger(func(l *ledger.Ledger) { - t.Run("date in the past (allowed by policy)", func(t *testing.T) { - now := time.Now() - require.NoError(t, l.GetLedgerStore().Commit(context.Background(), core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Timestamp: now.UTC(), - Postings: []core.Posting{{}}, - }, - ID: 0, - }, - })) - - _, err := l.ExecuteTxsData(context.Background(), true, - core.TransactionData{ - Postings: []core.Posting{{ - Source: "world", - Destination: "bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }}, - Timestamp: now.UTC().Add(-time.Second), - }) - assert.NoError(t, err) - }) - }, ledger.WithPastTimestamps) -} diff --git a/pkg/ledger/info.go b/pkg/ledger/info.go deleted file mode 100644 index 7d73facf6..000000000 --- a/pkg/ledger/info.go +++ /dev/null @@ -1,50 +0,0 @@ -package ledger - -import ( - "context" - "time" - - "github.com/numary/ledger/pkg/core" - "github.com/pkg/errors" -) - -func (l *Ledger) GetMigrationsInfo(ctx context.Context) ([]core.MigrationInfo, error) { - migrationsAvailable, err := l.store.GetMigrationsAvailable() - if err != nil { - return []core.MigrationInfo{}, errors.Wrap(err, "getting migrations available") - } - - migrationsDone, err := l.store.GetMigrationsDone(ctx) - if err != nil 
{ - return []core.MigrationInfo{}, errors.Wrap(err, "getting migrations done") - } - - res := make([]core.MigrationInfo, 0) - for _, mAvailable := range migrationsAvailable { - timestamp := time.Time{} - done := false - for _, mDone := range migrationsDone { - if mDone.Version == mAvailable.Version { - done = true - timestamp = mDone.Date - break - } - } - if done { - res = append(res, core.MigrationInfo{ - Version: mAvailable.Version, - Name: mAvailable.Name, - Date: timestamp, - State: "DONE", - }) - } else { - res = append(res, core.MigrationInfo{ - Version: mAvailable.Version, - Name: mAvailable.Name, - State: "TO DO", - }) - } - } - - return res, nil -} diff --git a/pkg/ledger/ledger.go b/pkg/ledger/ledger.go deleted file mode 100644 index aad3381d4..000000000 --- a/pkg/ledger/ledger.go +++ /dev/null @@ -1,193 +0,0 @@ -package ledger - -import ( - "context" - "fmt" - "time" - - "github.com/dgraph-io/ristretto" - "github.com/formancehq/go-libs/api" - "github.com/numary/ledger/pkg/core" - "github.com/pkg/errors" -) - -var DefaultContracts = []core.Contract{ - { - Name: "default", - Account: "*", // world still an exception - Expr: &core.ExprGte{ - Op1: core.VariableExpr{ - Name: "balance", - }, - Op2: core.ConstantExpr{ - Value: core.NewMonetaryInt(0), - }, - }, - }, -} - -type Ledger struct { - store Store - monitor Monitor - allowPastTimestamps bool - cache *ristretto.Cache -} - -type LedgerOption = func(*Ledger) - -func WithPastTimestamps(l *Ledger) { - l.allowPastTimestamps = true -} - -func NewLedger(store Store, monitor Monitor, cache *ristretto.Cache, options ...LedgerOption) (*Ledger, error) { - l := &Ledger{ - store: store, - monitor: monitor, - cache: cache, - } - - for _, option := range options { - option(l) - } - - return l, nil -} - -func (l *Ledger) Close(ctx context.Context) error { - if err := l.store.Close(ctx); err != nil { - return errors.Wrap(err, "closing store") - } - return nil -} - -func (l *Ledger) GetLedgerStore() Store { - return 
l.store -} - -func (l *Ledger) GetTransactions(ctx context.Context, q TransactionsQuery) (api.Cursor[core.ExpandedTransaction], error) { - return l.store.GetTransactions(ctx, q) -} - -func (l *Ledger) CountTransactions(ctx context.Context, q TransactionsQuery) (uint64, error) { - return l.store.CountTransactions(ctx, q) -} - -func (l *Ledger) GetTransaction(ctx context.Context, id uint64) (*core.ExpandedTransaction, error) { - tx, err := l.store.GetTransaction(ctx, id) - if err != nil { - return nil, err - } - if tx == nil { - return nil, NewNotFoundError("transaction not found") - } - - return tx, nil -} - -func (l *Ledger) SaveMapping(ctx context.Context, mapping core.Mapping) error { - if err := l.store.SaveMapping(ctx, mapping); err != nil { - return err - } - - l.monitor.UpdatedMapping(ctx, l.store.Name(), mapping) - return nil -} - -func (l *Ledger) LoadMapping(ctx context.Context) (*core.Mapping, error) { - return l.store.LoadMapping(ctx) -} - -func (l *Ledger) RevertTransaction(ctx context.Context, id uint64) (*core.ExpandedTransaction, error) { - revertedTx, err := l.store.GetTransaction(ctx, id) - if err != nil { - return nil, errors.Wrap(err, fmt.Sprintf("getting transaction %d", id)) - } - if revertedTx == nil { - return nil, NewNotFoundError(fmt.Sprintf("transaction %d not found", id)) - } - if revertedTx.IsReverted() { - return nil, NewValidationError(fmt.Sprintf("transaction %d already reverted", id)) - } - - rt := revertedTx.Reverse() - rt.Metadata = core.Metadata{} - rt.Metadata.MarkReverts(revertedTx.ID) - - txData := core.TransactionData{ - Postings: rt.Postings, - Timestamp: rt.Timestamp, - Reference: rt.Reference, - Metadata: rt.Metadata, - } - res, err := l.ExecuteTxsData(ctx, false, txData) - if err != nil { - return nil, errors.Wrap(err, fmt.Sprintf( - "executing revert script for transaction %d", id)) - } - revertTx := res[0] - - if err := l.store.UpdateTransactionMetadata(ctx, - revertedTx.ID, core.RevertedMetadata(revertTx.ID), 
revertTx.Timestamp); err != nil { - return nil, errors.Wrap(err, fmt.Sprintf( - "updating transaction %d metadata while reverting", id)) - } - - if revertedTx.Metadata == nil { - revertedTx.Metadata = core.Metadata{} - } - revertedTx.Metadata.Merge(core.RevertedMetadata(revertTx.ID)) - - l.monitor.RevertedTransaction(ctx, l.store.Name(), revertedTx, &revertTx) - return &revertTx, nil -} - -func (l *Ledger) CountAccounts(ctx context.Context, a AccountsQuery) (uint64, error) { - return l.store.CountAccounts(ctx, a) -} - -func (l *Ledger) GetAccounts(ctx context.Context, a AccountsQuery) (api.Cursor[core.Account], error) { - return l.store.GetAccounts(ctx, a) -} - -func (l *Ledger) GetAccount(ctx context.Context, address string) (*core.AccountWithVolumes, error) { - return l.store.GetAccountWithVolumes(ctx, address) -} - -func (l *Ledger) GetBalances(ctx context.Context, q BalancesQuery) (api.Cursor[core.AccountsBalances], error) { - return l.store.GetBalances(ctx, q) -} - -func (l *Ledger) GetBalancesAggregated(ctx context.Context, q BalancesQuery) (core.AssetsBalances, error) { - return l.store.GetBalancesAggregated(ctx, q) -} - -func (l *Ledger) SaveMeta(ctx context.Context, targetType string, targetID interface{}, m core.Metadata) error { - - if targetType == "" { - return NewValidationError("empty target type") - } - - if targetID == "" { - return NewValidationError("empty target id") - } - - var err error - switch targetType { - case core.MetaTargetTypeTransaction: - err = l.store.UpdateTransactionMetadata(ctx, targetID.(uint64), m, time.Now().Round(time.Second).UTC()) - case core.MetaTargetTypeAccount: - err = l.store.UpdateAccountMetadata(ctx, targetID.(string), m, time.Now().Round(time.Second).UTC()) - default: - return NewValidationError(fmt.Sprintf("unknown target type '%s'", targetType)) - } - if err != nil { - return err - } - - l.monitor.SavedMetadata(ctx, l.store.Name(), targetType, fmt.Sprint(targetID), m) - return nil -} - -func (l *Ledger) 
GetLogs(ctx context.Context, q *LogsQuery) (api.Cursor[core.Log], error) { - return l.store.GetLogs(ctx, q) -} diff --git a/pkg/ledger/ledger_test.go b/pkg/ledger/ledger_test.go deleted file mode 100644 index 285a4c3f4..000000000 --- a/pkg/ledger/ledger_test.go +++ /dev/null @@ -1,735 +0,0 @@ -package ledger_test - -import ( - "context" - "flag" - "fmt" - "math/rand" - "os" - "reflect" - "testing" - "time" - - "github.com/mitchellh/mapstructure" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage" - "github.com/pborman/uuid" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func withContainer(options ...fx.Option) { - done := make(chan struct{}) - opts := append([]fx.Option{ - fx.NopLogger, - ledgertesting.ProvideLedgerStorageDriver(), - }, options...) - opts = append(opts, fx.Invoke(func(lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - close(done) - return nil - }, - }) - })) - app := fx.New(opts...) 
- go func() { - if err := app.Start(context.Background()); err != nil { - panic(err) - } - }() - - <-done - if app.Err() != nil { - panic(app.Err()) - } - - ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(5*time.Second)) - defer cancel() - - if err := app.Stop(ctx); err != nil { - panic(err) - } -} - -func runOnLedger(f func(l *ledger.Ledger), ledgerOptions ...ledger.LedgerOption) { - withContainer(fx.Invoke(func(lc fx.Lifecycle, storageDriver storage.Driver[ledger.Store]) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - name := uuid.New() - store, _, err := storageDriver.GetLedgerStore(context.Background(), name, true) - if err != nil { - return err - } - _, err = store.Initialize(context.Background()) - if err != nil { - return err - } - // 100 000 000 is 100MB - cache := ledger.NewCache(100000000, 100, true) - l, err := ledger.NewLedger(store, - ledger.NewNoOpMonitor(), - cache, - ledgerOptions...) - if err != nil { - panic(err) - } - lc.Append(fx.Hook{ - OnStop: func(ctx context.Context) error { - cache.Close() - return l.Close(ctx) - }, - }) - f(l) - return nil - }, - }) - })) -} - -func TestMain(m *testing.M) { - var code int - defer func() { - os.Exit(code) // os.Exit don't care about defer so defer the os.Exit allow us to execute other defer - }() - - flag.Parse() - if testing.Verbose() { - logrus.StandardLogger().Level = logrus.DebugLevel - } - - code = m.Run() -} - -func TestTransaction(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - testsize := 1e4 - total := core.NewMonetaryInt(0) - batch := []core.TransactionData{} - - for i := 1; i <= int(testsize); i++ { - user := fmt.Sprintf("users:%03d", 1+rand.Intn(100)) - amount := core.NewMonetaryInt(100) - total = total.Add(amount) - - batch = append(batch, core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "mint", - Asset: "GEM", - Amount: amount, - }, - { - Source: "mint", - Destination: user, - Asset: "GEM", - Amount: 
amount, - }, - }, - }) - - if i%int(1e3) != 0 { - continue - } - - _, err := l.ExecuteTxsData(context.Background(), false, batch...) - require.NoError(t, err) - - batch = []core.TransactionData{} - } - - world, err := l.GetAccount(context.Background(), "world") - require.NoError(t, err) - - expected := total.Neg() - b := world.Balances["GEM"] - assert.Equalf(t, expected, b, - "wrong GEM balance for account world, expected: %s got: %s", - expected, b) - - require.NoError(t, l.Close(context.Background())) - }) -} - -func TestTransactionBatchWithConflictingReference(t *testing.T) { - t.Run("With conflict reference on transaction set", func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - batch := []core.TransactionData{ - { - Postings: []core.Posting{ - { - Source: "world", - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(100), - }, - }, - Reference: "ref1", - }, - { - Postings: []core.Posting{ - { - Source: "player", - Destination: "game", - Asset: "GEM", - Amount: core.NewMonetaryInt(100), - }, - }, - Reference: "ref2", - }, - { - Postings: []core.Posting{ - { - Source: "player", - Destination: "player2", - Asset: "GEM", - Amount: core.NewMonetaryInt(1000), // Should trigger an insufficient fund error but the conflict error has precedence over it - }, - }, - Reference: "ref1", - }, - } - - _, err := l.ExecuteTxsData(context.Background(), false, batch...) 
- assert.Error(t, err) - assert.IsType(t, new(ledger.ConflictError), err) - }) - }) - t.Run("with conflict reference on database", func(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - txData := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(100), - }, - }, - Reference: "ref1", - } - _, err := l.ExecuteTxsData(context.Background(), false, txData) - require.NoError(t, err) - - _, err = l.ExecuteTxsData(context.Background(), false, txData) - assert.Error(t, err) - assert.IsType(t, new(ledger.ConflictError), err) - }) - }) -} - -func TestTransactionBatchTimestamps(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - timestamp1 := time.Now().UTC().Add(-10 * time.Second) - timestamp2 := time.Now().UTC().Add(-9 * time.Second) - timestamp3 := time.Now().UTC().Add(-8 * time.Second) - timestamp4 := time.Now().UTC().Add(-7 * time.Second) - t.Run("descending order should fail", func(t *testing.T) { - batch := []core.TransactionData{ - { - Postings: []core.Posting{ - { - Source: core.WORLD, - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(1), - }, - }, - Timestamp: timestamp2, - }, - { - Postings: []core.Posting{ - { - Source: core.WORLD, - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(1), - }, - }, - Timestamp: timestamp1, - }, - } - _, err := l.ExecuteTxsData(context.Background(), false, batch...) 
- require.True(t, ledger.IsValidationError(err), err) - require.ErrorContains(t, err, "cannot pass a timestamp prior to the last transaction") - }) - t.Run("ascending order should succeed", func(t *testing.T) { - batch := []core.TransactionData{ - { - Postings: []core.Posting{ - { - Source: core.WORLD, - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(1), - }, - }, - Timestamp: timestamp2, - }, - { - Postings: []core.Posting{ - { - Source: core.WORLD, - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(1), - }, - }, - Timestamp: timestamp3, - }, - } - _, err := l.ExecuteTxsData(context.Background(), false, batch...) - assert.NoError(t, err) - }) - t.Run("ascending order but before last inserted should fail", func(t *testing.T) { - batch := []core.TransactionData{ - { - Postings: []core.Posting{ - { - Source: core.WORLD, - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(1), - }, - }, - Timestamp: timestamp1, - }, - { - Postings: []core.Posting{ - { - Source: core.WORLD, - Destination: "player", - Asset: "GEM", - Amount: core.NewMonetaryInt(1), - }, - }, - Timestamp: timestamp4, - }, - } - _, err := l.ExecuteTxsData(context.Background(), false, batch...) 
- require.True(t, ledger.IsValidationError(err)) - require.ErrorContains(t, err, "cannot pass a timestamp prior to the last transaction") - }) - }) -} - -func TestTransactionExpectedVolumes(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - txsData := []core.TransactionData{ - { - Postings: []core.Posting{ - { - Source: "world", - Destination: "player", - Asset: "USD", - Amount: core.NewMonetaryInt(100), - }, - }, - }, - { - Postings: []core.Posting{ - { - Source: "world", - Destination: "player", - Asset: "EUR", - Amount: core.NewMonetaryInt(100), - }, - }, - }, - { - Postings: []core.Posting{ - { - Source: "world", - Destination: "player2", - Asset: "EUR", - Amount: core.NewMonetaryInt(100), - }, - }, - }, - { - Postings: []core.Posting{ - { - Source: "player", - Destination: "player2", - Asset: "EUR", - Amount: core.NewMonetaryInt(50), - }, - }, - }, - } - - res, err := l.ExecuteTxsData(context.Background(), false, txsData...) - assert.NoError(t, err) - - postCommitVolumes := core.AggregatePostCommitVolumes(res...) 
- assert.Equal(t, 4, len(res)) - assert.EqualValues(t, core.AccountsAssetsVolumes{ - "world": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - "EUR": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(200), - }, - }, - "player": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - "EUR": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(50), - }, - }, - "player2": core.AssetsVolumes{ - "EUR": { - Input: core.NewMonetaryInt(150), - Output: core.NewMonetaryInt(0), - }, - }, - }, postCommitVolumes) - }) -} - -func TestReference(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Reference: "payment_processor_id_01", - Postings: []core.Posting{ - { - Source: "world", - Destination: "payments:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - _, err = l.ExecuteTxsData(context.Background(), false, tx) - assert.Error(t, err) - }) -} - -func TestAccountMetadata(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - - err := l.SaveMeta(context.Background(), core.MetaTargetTypeAccount, "users:001", core.Metadata{ - "a random metadata": "old value", - }) - assert.NoError(t, err) - - err = l.SaveMeta(context.Background(), core.MetaTargetTypeAccount, "users:001", core.Metadata{ - "a random metadata": "new value", - }) - assert.NoError(t, err) - - { - acc, err := l.GetAccount(context.Background(), "users:001") - require.NoError(t, err) - - meta, ok := acc.Metadata["a random metadata"] - require.True(t, ok) - - assert.Equalf(t, meta, "new value", - "metadata entry did not match in get: expected \"new value\", got %v", meta) - } - - { - // We have to create at least one transaction to retrieve an account from GetAccounts store method - _, err := l.ExecuteTxsData(context.Background(), false, 
core.TransactionData{ - Postings: core.Postings{ - { - Source: "world", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - Destination: "users:001", - }, - }, - }) - assert.NoError(t, err) - - acc, err := l.GetAccount(context.Background(), "users:001") - assert.NoError(t, err) - require.True(t, acc.Address == "users:001", "no account returned by get account") - - meta, ok := acc.Metadata["a random metadata"] - assert.True(t, ok) - assert.Equalf(t, meta, "new value", - "metadata entry did not match in find: expected \"new value\", got %v", meta) - } - }) -} - -func TestTransactionMetadata(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - _, err := l.ExecuteTxsData(context.Background(), false, - core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "payments:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - }) - require.NoError(t, err) - - tx, err := l.GetLedgerStore().GetLastTransaction(context.Background()) - require.NoError(t, err) - - err = l.SaveMeta(context.Background(), core.MetaTargetTypeTransaction, tx.ID, core.Metadata{ - "a random metadata": "old value", - }) - require.NoError(t, err) - - err = l.SaveMeta(context.Background(), core.MetaTargetTypeTransaction, tx.ID, core.Metadata{ - "a random metadata": "new value", - }) - require.NoError(t, err) - - tx, err = l.GetLedgerStore().GetLastTransaction(context.Background()) - require.NoError(t, err) - - meta, ok := tx.Metadata["a random metadata"] - require.True(t, ok) - - require.Equalf(t, meta, "new value", - "metadata entry did not match: expected \"new value\", got %v", meta) - }) -} - -func TestSaveTransactionMetadata(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - _, err := l.ExecuteTxsData(context.Background(), false, - core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "payments:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - Metadata: core.Metadata{ - "a metadata": "a 
value", - }, - }) - require.NoError(t, err) - - tx, err := l.GetLedgerStore().GetLastTransaction(context.Background()) - require.NoError(t, err) - - meta, ok := tx.Metadata["a metadata"] - require.True(t, ok) - - assert.Equalf(t, meta, "a value", - "metadata entry did not match: expected \"a value\", got %v", meta) - }) -} - -func TestGetTransaction(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - _, err := l.ExecuteTxsData(context.Background(), false, - core.TransactionData{ - Reference: "bar", - Postings: []core.Posting{ - { - Source: "world", - Destination: "payments:001", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - }) - require.NoError(t, err) - - last, err := l.GetLedgerStore().GetLastTransaction(context.Background()) - require.NoError(t, err) - - tx, err := l.GetTransaction(context.Background(), last.ID) - require.NoError(t, err) - - assert.True(t, reflect.DeepEqual(tx, last)) - }) -} - -func TestGetTransactions(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - tx := core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "test_get_transactions", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, - } - - _, err := l.ExecuteTxsData(context.Background(), false, tx) - require.NoError(t, err) - - res, err := l.GetTransactions(context.Background(), *ledger.NewTransactionsQuery()) - require.NoError(t, err) - - assert.Equal(t, "test_get_transactions", res.Data[0].Postings[0].Destination) - }) -} - -func TestRevertTransaction(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - revertAmt := core.NewMonetaryInt(100) - - res, err := l.ExecuteTxsData(context.Background(), false, - core.TransactionData{ - Reference: "foo", - Postings: []core.Posting{ - { - Source: "world", - Destination: "payments:001", - Amount: revertAmt, - Asset: "COIN", - }, - }, - }) - require.NoError(t, err) - - world, err := l.GetAccount(context.Background(), "world") - require.NoError(t, err) - - originalBal := 
world.Balances["COIN"] - - revertTx, err := l.RevertTransaction(context.Background(), res[0].ID) - require.NoError(t, err) - - require.Equal(t, core.Postings{ - { - Source: "payments:001", - Destination: "world", - Amount: core.NewMonetaryInt(100), - Asset: "COIN", - }, - }, revertTx.TransactionData.Postings) - - require.EqualValues(t, fmt.Sprintf("%d", res[0].ID), - revertTx.Metadata[core.RevertMetadataSpecKey()]) - - tx, err := l.GetTransaction(context.Background(), res[0].ID) - require.NoError(t, err) - - v := core.RevertedMetadataSpecValue{} - require.NoError(t, mapstructure.Decode(tx.Metadata[core.RevertedMetadataSpecKey()], &v)) - require.Equal(t, core.RevertedMetadataSpecValue{ - By: fmt.Sprint(revertTx.ID), - }, v) - - world, err = l.GetAccount(context.Background(), "world") - require.NoError(t, err) - - newBal := world.Balances["COIN"] - expectedBal := originalBal.Add(revertAmt) - require.Equalf(t, expectedBal, newBal, - "COIN world balances expected %d, got %d", expectedBal, newBal) - }) -} - -func TestVeryBigTransaction(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - amount, err := core.ParseMonetaryInt( - "199999999999999999992919191919192929292939847477171818284637291884661818183647392936472918836161728274766266161728493736383838") - require.NoError(t, err) - - res, err := l.ExecuteTxsData(context.Background(), false, - core.TransactionData{ - Postings: []core.Posting{{ - Source: "world", - Destination: "bank", - Asset: "ETH/18", - Amount: amount, - }}, - }) - require.NoError(t, err) - - txFromDB, err := l.GetTransaction(context.Background(), res[0].ID) - require.NoError(t, err) - require.Equal(t, txFromDB.Postings[0].Amount, amount) - }) -} - -func BenchmarkTransaction1(b *testing.B) { - runOnLedger(func(l *ledger.Ledger) { - for n := 0; n < b.N; n++ { - txs := []core.TransactionData{} - - txs = append(txs, core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "benchmark", - Asset: "COIN", - Amount: 
core.NewMonetaryInt(10), - }, - }, - }) - - _, err := l.ExecuteTxsData(context.Background(), false, txs...) - require.NoError(b, err) - } - }) -} - -func BenchmarkTransaction_20_1k(b *testing.B) { - runOnLedger(func(l *ledger.Ledger) { - for n := 0; n < b.N; n++ { - for i := 0; i < 20; i++ { - txs := []core.TransactionData{} - - for j := 0; j < 1e3; j++ { - txs = append(txs, core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "benchmark", - Asset: "COIN", - Amount: core.NewMonetaryInt(10), - }, - }, - }) - } - - _, err := l.ExecuteTxsData(context.Background(), false, txs...) - require.NoError(b, err) - } - } - }) -} - -func BenchmarkGetAccount(b *testing.B) { - runOnLedger(func(l *ledger.Ledger) { - for i := 0; i < b.N; i++ { - _, err := l.GetAccount(context.Background(), "users:013") - require.NoError(b, err) - } - }) -} - -func BenchmarkGetTransactions(b *testing.B) { - runOnLedger(func(l *ledger.Ledger) { - for i := 0; i < b.N; i++ { - _, err := l.GetTransactions(context.Background(), ledger.TransactionsQuery{}) - require.NoError(b, err) - } - }) -} diff --git a/pkg/ledger/monitor.go b/pkg/ledger/monitor.go deleted file mode 100644 index be3c08099..000000000 --- a/pkg/ledger/monitor.go +++ /dev/null @@ -1,30 +0,0 @@ -package ledger - -import ( - "context" - - "github.com/numary/ledger/pkg/core" -) - -type Monitor interface { - CommittedTransactions(ctx context.Context, ledger string, res ...core.ExpandedTransaction) - SavedMetadata(ctx context.Context, ledger, targetType, id string, metadata core.Metadata) - UpdatedMapping(ctx context.Context, ledger string, mapping core.Mapping) - RevertedTransaction(ctx context.Context, ledger string, reverted, revert *core.ExpandedTransaction) -} - -type noOpMonitor struct{} - -func (n noOpMonitor) CommittedTransactions(ctx context.Context, s string, res ...core.ExpandedTransaction) { -} -func (n noOpMonitor) SavedMetadata(ctx context.Context, ledger string, targetType string, id string, 
metadata core.Metadata) { -} -func (n noOpMonitor) UpdatedMapping(ctx context.Context, s string, mapping core.Mapping) {} -func (n noOpMonitor) RevertedTransaction(ctx context.Context, ledger string, reverted, revert *core.ExpandedTransaction) { -} - -var _ Monitor = &noOpMonitor{} - -func NewNoOpMonitor() *noOpMonitor { - return &noOpMonitor{} -} diff --git a/pkg/ledger/resolver.go b/pkg/ledger/resolver.go deleted file mode 100644 index b605ccb96..000000000 --- a/pkg/ledger/resolver.go +++ /dev/null @@ -1,120 +0,0 @@ -package ledger - -import ( - "context" - "sync" - - "github.com/dgraph-io/ristretto" - "github.com/numary/ledger/pkg/storage" - "github.com/pkg/errors" - "go.uber.org/fx" -) - -type ResolverOption interface { - apply(r *Resolver) error -} -type ResolveOptionFn func(r *Resolver) error - -func (fn ResolveOptionFn) apply(r *Resolver) error { - return fn(r) -} - -func WithMonitor(monitor Monitor) ResolveOptionFn { - return func(r *Resolver) error { - r.monitor = monitor - return nil - } -} - -var DefaultResolverOptions = []ResolverOption{ - WithMonitor(&noOpMonitor{}), -} - -type Resolver struct { - storageDriver storage.Driver[Store] - lock sync.RWMutex - initializedStores map[string]struct{} - monitor Monitor - ledgerOptions []LedgerOption - cache *ristretto.Cache -} - -func NewResolver( - storageFactory storage.Driver[Store], - ledgerOptions []LedgerOption, - cacheBytesCapacity, cacheMaxNumKeys int64, - options ...ResolverOption, -) *Resolver { - options = append(DefaultResolverOptions, options...) 
- r := &Resolver{ - storageDriver: storageFactory, - initializedStores: map[string]struct{}{}, - cache: NewCache(cacheBytesCapacity, cacheMaxNumKeys, false), - } - for _, opt := range options { - if err := opt.apply(r); err != nil { - panic(errors.Wrap(err, "applying option on resolver")) - } - } - r.ledgerOptions = ledgerOptions - - return r -} - -func (r *Resolver) GetLedger(ctx context.Context, name string) (*Ledger, error) { - store, _, err := r.storageDriver.GetLedgerStore(ctx, name, true) - if err != nil { - return nil, errors.Wrap(err, "retrieving ledger store") - } - - r.lock.RLock() - _, ok := r.initializedStores[name] - r.lock.RUnlock() - if ok { - return NewLedger(store, r.monitor, r.cache, r.ledgerOptions...) - } - - r.lock.Lock() - defer r.lock.Unlock() - - if _, ok = r.initializedStores[name]; !ok { - _, err = store.Initialize(ctx) - if err != nil { - return nil, errors.Wrap(err, "initializing ledger store") - } - r.initializedStores[name] = struct{}{} - } - - return NewLedger(store, r.monitor, r.cache, r.ledgerOptions...) -} - -func (r *Resolver) Close() { - r.cache.Close() -} - -const ResolverOptionsKey = `group:"_ledgerResolverOptions"` -const ResolverLedgerOptionsKey = `name:"_ledgerResolverLedgerOptions"` - -func ProvideResolverOption(provider interface{}) fx.Option { - return fx.Provide( - fx.Annotate(provider, fx.ResultTags(ResolverOptionsKey), fx.As(new(ResolverOption))), - ) -} - -func ResolveModule(cacheBytesCapacity, cacheMaxNumKeys int64) fx.Option { - return fx.Options( - fx.Provide( - fx.Annotate(func(storageFactory storage.Driver[Store], ledgerOptions []LedgerOption, options ...ResolverOption) *Resolver { - return NewResolver(storageFactory, ledgerOptions, cacheBytesCapacity, cacheMaxNumKeys, options...) 
- }, fx.ParamTags("", ResolverLedgerOptionsKey, ResolverOptionsKey)), - ), - fx.Invoke(func(lc fx.Lifecycle, r *Resolver) { - lc.Append(fx.Hook{ - OnStop: func(ctx context.Context) error { - r.Close() - return nil - }, - }) - }), - ) -} diff --git a/pkg/ledger/stats.go b/pkg/ledger/stats.go deleted file mode 100644 index 174b05090..000000000 --- a/pkg/ledger/stats.go +++ /dev/null @@ -1,29 +0,0 @@ -package ledger - -import ( - "context" -) - -type Stats struct { - Transactions uint64 `json:"transactions"` - Accounts uint64 `json:"accounts"` -} - -func (l *Ledger) Stats(ctx context.Context) (Stats, error) { - var stats Stats - - transactions, err := l.store.CountTransactions(ctx, TransactionsQuery{}) - if err != nil { - return stats, err - } - - accounts, err := l.store.CountAccounts(ctx, AccountsQuery{}) - if err != nil { - return stats, err - } - - return Stats{ - Transactions: transactions, - Accounts: accounts, - }, nil -} diff --git a/pkg/ledger/stats_test.go b/pkg/ledger/stats_test.go deleted file mode 100644 index d3fa08bdc..000000000 --- a/pkg/ledger/stats_test.go +++ /dev/null @@ -1,16 +0,0 @@ -package ledger_test - -import ( - "context" - "testing" - - "github.com/numary/ledger/pkg/ledger" - "github.com/stretchr/testify/assert" -) - -func TestStats(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - _, err := l.Stats(context.Background()) - assert.NoError(t, err) - }) -} diff --git a/pkg/ledger/storage.go b/pkg/ledger/storage.go deleted file mode 100644 index bee74fb8c..000000000 --- a/pkg/ledger/storage.go +++ /dev/null @@ -1,310 +0,0 @@ -package ledger - -import ( - "context" - "time" - - "github.com/formancehq/go-libs/api" - "github.com/numary/ledger/pkg/core" -) - -type Store interface { - GetLastTransaction(ctx context.Context) (*core.ExpandedTransaction, error) - CountTransactions(context.Context, TransactionsQuery) (uint64, error) - GetTransactions(context.Context, TransactionsQuery) (api.Cursor[core.ExpandedTransaction], error) - 
GetTransaction(ctx context.Context, txid uint64) (*core.ExpandedTransaction, error) - GetAccount(ctx context.Context, accountAddress string) (*core.Account, error) - GetAssetsVolumes(ctx context.Context, accountAddress string) (core.AssetsVolumes, error) - GetAccountWithVolumes(ctx context.Context, account string) (*core.AccountWithVolumes, error) - GetVolumes(ctx context.Context, accountAddress, asset string) (core.Volumes, error) - CountAccounts(context.Context, AccountsQuery) (uint64, error) - GetAccounts(context.Context, AccountsQuery) (api.Cursor[core.Account], error) - GetBalances(context.Context, BalancesQuery) (api.Cursor[core.AccountsBalances], error) - GetBalancesAggregated(context.Context, BalancesQuery) (core.AssetsBalances, error) - GetLastLog(context.Context) (*core.Log, error) - GetLogs(context.Context, *LogsQuery) (api.Cursor[core.Log], error) - LoadMapping(context.Context) (*core.Mapping, error) - GetMigrationsAvailable() ([]core.MigrationInfo, error) - GetMigrationsDone(context.Context) ([]core.MigrationInfo, error) - - UpdateTransactionMetadata(ctx context.Context, txid uint64, metadata core.Metadata, at time.Time) error - UpdateAccountMetadata(ctx context.Context, address string, metadata core.Metadata, at time.Time) error - Commit(ctx context.Context, txs ...core.ExpandedTransaction) error - SaveMapping(ctx context.Context, m core.Mapping) error - Name() string - Initialize(context.Context) (bool, error) - Close(context.Context) error -} - -const ( - QueryDefaultPageSize = 15 -) - -type TransactionsQuery struct { - PageSize uint - AfterTxID uint64 - Filters TransactionsQueryFilters -} - -type TransactionsQueryFilters struct { - Reference string - Destination string - Source string - Account string - EndTime time.Time - StartTime time.Time - Metadata map[string]string -} - -func NewTransactionsQuery() *TransactionsQuery { - return &TransactionsQuery{ - PageSize: QueryDefaultPageSize, - } -} - -func (a *TransactionsQuery) WithPageSize(pageSize 
uint) *TransactionsQuery { - if pageSize != 0 { - a.PageSize = pageSize - } - - return a -} - -func (a *TransactionsQuery) WithAfterTxID(after uint64) *TransactionsQuery { - a.AfterTxID = after - - return a -} - -func (a *TransactionsQuery) WithStartTimeFilter(start time.Time) *TransactionsQuery { - if !start.IsZero() { - a.Filters.StartTime = start - } - - return a -} - -func (a *TransactionsQuery) WithEndTimeFilter(end time.Time) *TransactionsQuery { - if !end.IsZero() { - a.Filters.EndTime = end - } - - return a -} - -func (a *TransactionsQuery) WithAccountFilter(account string) *TransactionsQuery { - a.Filters.Account = account - - return a -} - -func (a *TransactionsQuery) WithDestinationFilter(dest string) *TransactionsQuery { - a.Filters.Destination = dest - - return a -} - -func (a *TransactionsQuery) WithReferenceFilter(ref string) *TransactionsQuery { - a.Filters.Reference = ref - - return a -} - -func (a *TransactionsQuery) WithSourceFilter(source string) *TransactionsQuery { - a.Filters.Source = source - - return a -} - -func (a *TransactionsQuery) WithMetadataFilter(metadata map[string]string) *TransactionsQuery { - a.Filters.Metadata = metadata - - return a -} - -type AccountsQuery struct { - PageSize uint - Offset uint - AfterAddress string - Filters AccountsQueryFilters -} - -type AccountsQueryFilters struct { - Address string - Balance string - BalanceOperator BalanceOperator - Metadata map[string]string -} - -type BalanceOperator string - -const ( - BalanceOperatorE BalanceOperator = "e" - BalanceOperatorGt BalanceOperator = "gt" - BalanceOperatorGte BalanceOperator = "gte" - BalanceOperatorLt BalanceOperator = "lt" - BalanceOperatorLte BalanceOperator = "lte" - BalanceOperatorNe BalanceOperator = "ne" - - DefaultBalanceOperator = BalanceOperatorGte -) - -func (b BalanceOperator) IsValid() bool { - switch b { - case BalanceOperatorE, - BalanceOperatorGt, - BalanceOperatorGte, - BalanceOperatorLt, - BalanceOperatorNe, - BalanceOperatorLte: - return 
true - } - - return false -} - -func NewBalanceOperator(s string) (BalanceOperator, bool) { - if !BalanceOperator(s).IsValid() { - return "", false - } - - return BalanceOperator(s), true -} - -func NewAccountsQuery() *AccountsQuery { - return &AccountsQuery{ - PageSize: QueryDefaultPageSize, - } -} - -func (a *AccountsQuery) WithPageSize(pageSize uint) *AccountsQuery { - if pageSize != 0 { - a.PageSize = pageSize - } - - return a -} - -func (a *AccountsQuery) WithOffset(offset uint) *AccountsQuery { - a.Offset = offset - - return a -} - -func (a *AccountsQuery) WithAfterAddress(after string) *AccountsQuery { - a.AfterAddress = after - - return a -} - -func (a *AccountsQuery) WithAddressFilter(address string) *AccountsQuery { - a.Filters.Address = address - - return a -} - -func (a *AccountsQuery) WithBalanceFilter(balance string) *AccountsQuery { - a.Filters.Balance = balance - - return a -} - -func (a *AccountsQuery) WithBalanceOperatorFilter(balanceOperator BalanceOperator) *AccountsQuery { - a.Filters.BalanceOperator = balanceOperator - - return a -} - -func (a *AccountsQuery) WithMetadataFilter(metadata map[string]string) *AccountsQuery { - a.Filters.Metadata = metadata - - return a -} - -type BalancesQuery struct { - PageSize uint - Offset uint - AfterAddress string - Filters BalancesQueryFilters -} - -type BalancesQueryFilters struct { - AddressRegexp string -} - -func NewBalancesQuery() *BalancesQuery { - return &BalancesQuery{ - PageSize: QueryDefaultPageSize, - } -} - -func (b *BalancesQuery) WithAfterAddress(after string) *BalancesQuery { - b.AfterAddress = after - - return b -} - -func (b *BalancesQuery) WithOffset(offset uint) *BalancesQuery { - b.Offset = offset - - return b -} - -func (b *BalancesQuery) WithAddressFilter(address string) *BalancesQuery { - b.Filters.AddressRegexp = address - - return b -} - -func (b *BalancesQuery) WithPageSize(pageSize uint) *BalancesQuery { - b.PageSize = pageSize - return b -} - -type LogsQuery struct { - AfterID 
uint64 - PageSize uint - - Filters LogsQueryFilters -} - -type LogsQueryFilters struct { - EndTime time.Time - StartTime time.Time -} - -func NewLogsQuery() *LogsQuery { - return &LogsQuery{ - PageSize: QueryDefaultPageSize, - } -} - -func (l *LogsQuery) WithAfterID(after uint64) *LogsQuery { - l.AfterID = after - - return l -} - -func (l *LogsQuery) WithPageSize(pageSize uint) *LogsQuery { - if pageSize != 0 { - l.PageSize = pageSize - } - - return l -} - -func (l *LogsQuery) WithStartTimeFilter(start time.Time) *LogsQuery { - if !start.IsZero() { - l.Filters.StartTime = start - } - - return l -} - -func (l *LogsQuery) WithEndTimeFilter(end time.Time) *LogsQuery { - if !end.IsZero() { - l.Filters.EndTime = end - } - - return l -} diff --git a/pkg/ledger/verification.go b/pkg/ledger/verification.go deleted file mode 100644 index 9c61d4b8a..000000000 --- a/pkg/ledger/verification.go +++ /dev/null @@ -1,6 +0,0 @@ -package ledger - -func (l *Ledger) Verify() error { - // TODO: iterate over all transactions - return nil -} diff --git a/pkg/ledger/verification_test.go b/pkg/ledger/verification_test.go deleted file mode 100644 index 4fe060997..000000000 --- a/pkg/ledger/verification_test.go +++ /dev/null @@ -1,17 +0,0 @@ -package ledger_test - -import ( - "testing" - - "github.com/numary/ledger/pkg/ledger" -) - -func TestVerify(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - err := l.Verify() - - if err != nil { - t.Error(err) - } - }) -} diff --git a/pkg/ledger/volume_agg.go b/pkg/ledger/volume_agg.go deleted file mode 100644 index 58e430e86..000000000 --- a/pkg/ledger/volume_agg.go +++ /dev/null @@ -1,94 +0,0 @@ -package ledger - -import ( - "context" - - "github.com/numary/ledger/pkg/core" -) - -type TxVolumeAggregator struct { - agg *VolumeAggregator - previousTx *TxVolumeAggregator - - PreCommitVolumes core.AccountsAssetsVolumes - PostCommitVolumes core.AccountsAssetsVolumes -} - -func (tva *TxVolumeAggregator) FindInPreviousTxs(addr, asset string) 
*core.Volumes { - current := tva.previousTx - for current != nil { - if v, ok := current.PostCommitVolumes[addr][asset]; ok { - return &v - } - current = current.previousTx - } - return nil -} - -func (tva *TxVolumeAggregator) Transfer( - ctx context.Context, - from, to, asset string, - amount *core.MonetaryInt, - accs map[string]*core.AccountWithVolumes, -) error { - for _, addr := range []string{from, to} { - if !tva.PreCommitVolumes.HasAccountAndAsset(addr, asset) { - previousVolumes := tva.FindInPreviousTxs(addr, asset) - if previousVolumes != nil { - tva.PreCommitVolumes.SetVolumes(addr, asset, *previousVolumes) - } else { - var vol core.Volumes - var ok1, ok2 bool - _, ok1 = accs[addr] - if ok1 { - _, ok2 = accs[addr].Volumes[asset] - } - if ok1 && ok2 { - vol = accs[addr].Volumes[asset] - } else { - acc, err := tva.agg.l.GetAccount(ctx, addr) - if err != nil { - return err - } - if accs[addr] == nil { - accs[addr] = acc - } - accs[addr].Volumes[asset] = acc.Volumes[asset] - vol = accs[addr].Volumes[asset] - } - tva.PreCommitVolumes.SetVolumes(addr, asset, vol) - } - } - if !tva.PostCommitVolumes.HasAccountAndAsset(addr, asset) { - tva.PostCommitVolumes.SetVolumes(addr, asset, tva.PreCommitVolumes.GetVolumes(addr, asset)) - } - } - tva.PostCommitVolumes.AddOutput(from, asset, amount) - tva.PostCommitVolumes.AddInput(to, asset, amount) - - return nil -} - -type VolumeAggregator struct { - l *Ledger - txs []*TxVolumeAggregator -} - -func (agg *VolumeAggregator) NextTx() *TxVolumeAggregator { - var previousTx *TxVolumeAggregator - if len(agg.txs) > 0 { - previousTx = agg.txs[len(agg.txs)-1] - } - tva := &TxVolumeAggregator{ - agg: agg, - previousTx: previousTx, - } - agg.txs = append(agg.txs, tva) - return tva -} - -func NewVolumeAggregator(l *Ledger) *VolumeAggregator { - return &VolumeAggregator{ - l: l, - } -} diff --git a/pkg/ledger/volume_agg_test.go b/pkg/ledger/volume_agg_test.go deleted file mode 100644 index 853114c0b..000000000 --- 
a/pkg/ledger/volume_agg_test.go +++ /dev/null @@ -1,199 +0,0 @@ -package ledger_test - -import ( - "context" - "testing" - - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/stretchr/testify/require" -) - -func TestVolumeAggregator(t *testing.T) { - runOnLedger(func(l *ledger.Ledger) { - defer func(l *ledger.Ledger, ctx context.Context) { - require.NoError(t, l.Close(ctx)) - }(l, context.Background()) - - tx1 := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 0, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "bob", - Destination: "zozo", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, - }, - PreCommitVolumes: map[string]core.AssetsVolumes{ - "bob": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "zozo": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, - PostCommitVolumes: map[string]core.AssetsVolumes{ - "bob": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - }, - "zozo": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, - } - - tx2 := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 1, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "zozo", - Destination: "alice", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - }, - }, - PostCommitVolumes: map[string]core.AssetsVolumes{ - "alice": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - "zozo": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(100), - }, - }, - }, - PreCommitVolumes: map[string]core.AssetsVolumes{ - "alice": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "zozo": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: 
core.NewMonetaryInt(0), - }, - }, - }, - } - err := l.GetLedgerStore().Commit(context.Background(), tx1, tx2) - require.NoError(t, err) - - volumeAggregator := ledger.NewVolumeAggregator(l) - firstTx := volumeAggregator.NextTx() - accs := map[string]*core.AccountWithVolumes{} - require.NoError(t, firstTx.Transfer(context.Background(), "bob", "alice", "USD", core.NewMonetaryInt(100), accs)) - require.NoError(t, firstTx.Transfer(context.Background(), "bob", "zoro", "USD", core.NewMonetaryInt(50), accs)) - - require.Equal(t, core.AccountsAssetsVolumes{ - "bob": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(250), - }, - }, - "alice": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(0), - }, - }, - "zoro": { - "USD": { - Input: core.NewMonetaryInt(50), - Output: core.NewMonetaryInt(0), - }, - }, - }, firstTx.PostCommitVolumes) - require.Equal(t, core.AccountsAssetsVolumes{ - "bob": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - }, - "alice": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - "zoro": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, firstTx.PreCommitVolumes) - - secondTx := volumeAggregator.NextTx() - require.NoError(t, secondTx.Transfer(context.Background(), "alice", "fred", "USD", core.NewMonetaryInt(50), accs)) - require.NoError(t, secondTx.Transfer(context.Background(), "bob", "fred", "USD", core.NewMonetaryInt(25), accs)) - require.Equal(t, core.AccountsAssetsVolumes{ - "bob": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(275), - }, - }, - "alice": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(50), - }, - }, - "fred": core.AssetsVolumes{ - "USD": { - Input: 
core.NewMonetaryInt(75), - Output: core.NewMonetaryInt(0), - }, - }, - }, secondTx.PostCommitVolumes) - require.Equal(t, core.AccountsAssetsVolumes{ - "bob": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(250), - }, - }, - "alice": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(0), - }, - }, - "fred": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, secondTx.PreCommitVolumes) - }) -} diff --git a/pkg/ledgertesting/storage.go b/pkg/ledgertesting/storage.go deleted file mode 100644 index f203c9e0d..000000000 --- a/pkg/ledgertesting/storage.go +++ /dev/null @@ -1,76 +0,0 @@ -package ledgertesting - -import ( - "context" - "os" - - "github.com/numary/ledger/internal/pgtesting" - "github.com/numary/ledger/pkg/api/idempotency" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pborman/uuid" - "github.com/pkg/errors" - "go.uber.org/fx" -) - -func StorageDriverName() string { - fromEnv := os.Getenv("NUMARY_STORAGE_DRIVER") - if fromEnv != "" { - return fromEnv - } - return "sqlite" -} - -func StorageDriver() (*sqlstorage.Driver, func(), error) { - switch StorageDriverName() { - case "sqlite": - id := uuid.New() - return sqlstorage.NewDriver("sqlite", sqlstorage.NewSQLiteDB(os.TempDir(), id)), func() {}, nil - case "postgres": - pgServer, err := pgtesting.PostgresServer() - if err != nil { - return nil, nil, err - } - db, err := sqlstorage.OpenSQLDB(sqlstorage.PostgreSQL, pgServer.ConnString()) - if err != nil { - return nil, nil, err - } - return sqlstorage.NewDriver( - "postgres", - sqlstorage.NewPostgresDB(db), - ), func() { - _ = pgServer.Close() - }, nil - } - return nil, nil, errors.New("not found driver") -} - -func ProvideStorageDriver() fx.Option { - return fx.Provide(func(lc fx.Lifecycle) (*sqlstorage.Driver, 
error) { - driver, stopFn, err := StorageDriver() - if err != nil { - return nil, err - } - lc.Append(fx.Hook{ - OnStart: driver.Initialize, - OnStop: func(ctx context.Context) error { - stopFn() - return driver.Close(ctx) - }, - }) - return driver, nil - }) -} - -func ProvideLedgerStorageDriver() fx.Option { - return fx.Options( - ProvideStorageDriver(), - fx.Provide( - fx.Annotate(sqlstorage.NewLedgerStorageDriverFromRawDriver, - fx.As(new(storage.Driver[ledger.Store]))), - fx.Annotate(sqlstorage.NewIdempotencyStorageDriverFromRawDriver, - fx.As(new(storage.Driver[idempotency.Store]))), - ), - ) -} diff --git a/pkg/opentelemetry/tracer.go b/pkg/opentelemetry/tracer.go deleted file mode 100644 index 1c179ff6f..000000000 --- a/pkg/opentelemetry/tracer.go +++ /dev/null @@ -1,21 +0,0 @@ -package opentelemetry - -import ( - "context" - - "github.com/gin-gonic/gin" - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/trace" -) - -var Tracer = otel.Tracer("com.formance.ledger") - -func Start(ctx context.Context, name string, opts ...trace.SpanStartOption) (context.Context, trace.Span) { - return Tracer.Start(ctx, name, opts...) -} - -func WrapGinContext(ginContext *gin.Context, name string, opts ...trace.SpanStartOption) trace.Span { - ctx, span := Start(ginContext.Request.Context(), name, opts...) 
- ginContext.Request = ginContext.Request.WithContext(ctx) - return span -} diff --git a/pkg/redis/lock.go b/pkg/redis/lock.go deleted file mode 100644 index 8747951ba..000000000 --- a/pkg/redis/lock.go +++ /dev/null @@ -1,106 +0,0 @@ -package redis - -import ( - "context" - "crypto/rand" - "encoding/base64" - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/go-redis/redis/v8" - "github.com/numary/ledger/pkg/api/middlewares" - "github.com/pkg/errors" -) - -type BoolCmd = redis.BoolCmd -type StringCmd = redis.StringCmd -type IntCmd = redis.IntCmd - -type Client interface { - SetNX(ctx context.Context, lk string, rv interface{}, duration time.Duration) *BoolCmd - Get(ctx context.Context, lk string) *StringCmd - Del(ctx context.Context, lk ...string) *IntCmd -} - -func lockKey(name string) string { - return "ledger-lock-" + name -} - -var randomString = func() (string, error) { - data := make([]byte, 20) - _, err := rand.Read(data) - if err != nil { - return "", err - } - return base64.StdEncoding.EncodeToString(data), nil -} - -type Lock struct { - redisClient Client - lockDuration time.Duration - retry time.Duration -} - -func (l Lock) tryLock(ctx context.Context, name string) (bool, middlewares.Unlock, error) { - rv, err := randomString() - if err != nil { - return false, nil, errors.Wrap(err, "generating random string") - } - lk := lockKey(name) - cmd := l.redisClient.SetNX(ctx, lk, rv, l.lockDuration) - ok, err := cmd.Result() - if err != nil { - return false, nil, errors.Wrap(err, "setting lock redis side") - } - if !ok { - return false, nil, nil - } - - logger := logging.GetLogger(ctx) - - return true, func(ctx context.Context) { - getCmd := l.redisClient.Get(ctx, lk) - if getCmd.Err() != nil { - logger.Error(ctx, "error retrieving lock: %s", getCmd.Err()) - return - } - value := getCmd.Val() - if value != rv { - logger.Error(ctx, "unable to retrieve lock value, expect %s, got %s", rv, value) - return - } - delCmd := l.redisClient.Del(ctx, lk) - 
if delCmd.Err() != nil { - logger.Error(ctx, "error deleting lock: %s", delCmd.Err()) - return - } - }, nil -} - -func (l Lock) Lock(ctx context.Context, name string) (middlewares.Unlock, error) { - for { - ok, unlock, err := l.tryLock(ctx, name) - if err != nil { - return nil, errors.Wrap(err, "setting lock redis side") - } - if ok { - return unlock, nil - } - select { - case <-time.After(l.retry): - case <-ctx.Done(): - return nil, ctx.Err() - } - - } -} - -var _ middlewares.Locker = &Lock{} - -func NewLock(client Client, lockDuration, retry time.Duration) *Lock { - return &Lock{ - redisClient: client, - lockDuration: lockDuration, - retry: retry, - } -} diff --git a/pkg/redis/lock_test.go b/pkg/redis/lock_test.go deleted file mode 100644 index 40c6550d1..000000000 --- a/pkg/redis/lock_test.go +++ /dev/null @@ -1,51 +0,0 @@ -package redis - -import ( - "context" - "fmt" - "testing" - "time" - - "github.com/go-redis/redismock/v8" - "github.com/stretchr/testify/assert" -) - -func TestLock(t *testing.T) { - randCpt := 0 - randomString = func() (string, error) { - defer func() { - randCpt++ - }() - return fmt.Sprintf("%d", randCpt), nil - } - clusterClient, clusterMock := redismock.NewClientMock() - duration := 5 * time.Second - l := NewLock(clusterClient, duration, 100*time.Millisecond) - - ctx := context.Background() - clusterMock.ExpectSetNX(lockKey("quickstart"), "0", duration).SetVal(true) - ok, unlock, err := l.tryLock(ctx, "quickstart") - assert.True(t, ok) - assert.NoError(t, err) - - clusterMock.ExpectSetNX(lockKey("quickstart"), "1", duration).SetVal(false) - ok, _, err = l.tryLock(ctx, "quickstart") - assert.False(t, ok) - assert.NoError(t, err) - - clusterMock.ExpectSetNX(lockKey("another"), "2", duration).SetVal(true) - ok, _, err = l.tryLock(ctx, "another") - assert.True(t, ok) - assert.NoError(t, err) - - clusterMock.ExpectGet(lockKey("quickstart")).SetVal("0") - clusterMock.ExpectDel(lockKey("quickstart")).SetVal(0) - - unlock(ctx) - - 
clusterMock.ExpectSetNX(lockKey("quickstart"), "3", duration).SetVal(true) - ok, _, err = l.tryLock(ctx, "quickstart") - assert.True(t, ok) - assert.NoError(t, err) - -} diff --git a/pkg/redis/module.go b/pkg/redis/module.go deleted file mode 100644 index 3c05bd693..000000000 --- a/pkg/redis/module.go +++ /dev/null @@ -1,50 +0,0 @@ -package redis - -import ( - "crypto/tls" - "time" - - "github.com/go-redis/redis/v8" - "github.com/numary/ledger/pkg/api/middlewares" - "go.uber.org/fx" -) - -const ( - DefaultLockDuration = time.Minute - DefaultRetryInterval = time.Second -) - -type Options = redis.Options - -type Config struct { - Url string - LockDuration time.Duration - LockRetry time.Duration - TLSConfig *tls.Config -} - -func Module(cfg Config) fx.Option { - if cfg.LockRetry == 0 { - cfg.LockRetry = DefaultRetryInterval - } - if cfg.LockDuration == 0 { - cfg.LockDuration = DefaultLockDuration - } - return fx.Options( - fx.Provide(func() (Client, error) { - options, err := redis.ParseURL(cfg.Url) - if err != nil { - return nil, err - } - options.TLSConfig = cfg.TLSConfig - return redis.NewClient(options), nil - }), - fx.Decorate(func(redisClient Client) middlewares.Locker { - return NewLock( - redisClient, - cfg.LockDuration, - cfg.LockRetry, - ) - }), - ) -} diff --git a/pkg/storage/driver.go b/pkg/storage/driver.go deleted file mode 100644 index 1a7e27993..000000000 --- a/pkg/storage/driver.go +++ /dev/null @@ -1,41 +0,0 @@ -package storage - -import ( - "context" - "errors" -) - -var ( - ErrLedgerStoreNotFound = errors.New("ledger store not found") -) - -type SystemStore interface { - GetConfiguration(ctx context.Context, key string) (string, error) - InsertConfiguration(ctx context.Context, key, value string) error - ListLedgers(ctx context.Context) ([]string, error) - DeleteLedger(ctx context.Context, name string) error -} - -type LedgerStore interface { - Delete(ctx context.Context) error - Initialize(ctx context.Context) (bool, error) - Close(ctx 
context.Context) error -} - -type LedgerStoreProvider[STORE any] interface { - GetLedgerStore(ctx context.Context, name string, create bool) (STORE, bool, error) -} -type LedgerStoreProviderFn[STORE any] func(ctx context.Context, name string, create bool) (STORE, bool, error) - -func (fn LedgerStoreProviderFn[STORE]) GetLedgerStore(ctx context.Context, name string, create bool) (STORE, bool, error) { - return fn(ctx, name, create) -} - -type Driver[STORE any] interface { - LedgerStoreProvider[STORE] - Initialize(ctx context.Context) error - Close(ctx context.Context) error - Name() string - - GetSystemStore() SystemStore -} diff --git a/pkg/storage/errors.go b/pkg/storage/errors.go deleted file mode 100644 index af07dfb74..000000000 --- a/pkg/storage/errors.go +++ /dev/null @@ -1,54 +0,0 @@ -package storage - -import ( - "errors" - "fmt" -) - -var ( - ErrConfigurationNotFound = errors.New("configuration not found") -) - -type Code string - -const ( - ConstraintFailed Code = "CONSTRAINT_FAILED" - TooManyClient Code = "TOO_MANY_CLIENT" -) - -type Error struct { - Code Code - OriginalError error -} - -func (e Error) Is(err error) bool { - storageErr, ok := err.(*Error) - if !ok { - return false - } - if storageErr.Code == "" { - return true - } - return storageErr.Code == e.Code -} - -func (e Error) Error() string { - return fmt.Sprintf("%s [%s]", e.OriginalError, e.Code) -} - -func NewError(code Code, originalError error) *Error { - return &Error{ - Code: code, - OriginalError: originalError, - } -} - -func IsError(err error) bool { - return IsErrorCode(err, "") -} - -func IsErrorCode(err error, code Code) bool { - return errors.Is(err, &Error{ - Code: code, - }) -} diff --git a/pkg/storage/sqlstorage/accounts.go b/pkg/storage/sqlstorage/accounts.go deleted file mode 100644 index 52fd81561..000000000 --- a/pkg/storage/sqlstorage/accounts.go +++ /dev/null @@ -1,268 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "encoding/base64" - 
"encoding/json" - "fmt" - "strconv" - "strings" - "time" - - "github.com/formancehq/go-libs/api" - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/pkg/errors" -) - -func (s *Store) buildAccountsQuery(p ledger.AccountsQuery) (*sqlbuilder.SelectBuilder, AccPaginationToken) { - sb := sqlbuilder.NewSelectBuilder() - t := AccPaginationToken{} - sb.From(s.schema.Table("accounts")) - - var ( - address = p.Filters.Address - metadata = p.Filters.Metadata - balance = p.Filters.Balance - balanceOperator = p.Filters.BalanceOperator - ) - - if address != "" { - arg := sb.Args.Add("^" + address + "$") - switch s.Schema().Flavor() { - case sqlbuilder.PostgreSQL: - sb.Where("address ~* " + arg) - case sqlbuilder.SQLite: - sb.Where("address REGEXP " + arg) - } - t.AddressRegexpFilter = address - } - - for key, value := range metadata { - arg := sb.Args.Add(value) - // TODO: Need to find another way to specify the prefix since Table() methods does not make sense for functions and procedures - sb.Where(s.schema.Table( - fmt.Sprintf("%s(metadata, %s, '%s')", - SQLCustomFuncMetaCompare, arg, strings.ReplaceAll(key, ".", "', '")), - )) - } - t.MetadataFilter = metadata - - if balance != "" { - sb.Join(s.schema.Table("volumes"), "accounts.address = volumes.account") - balanceOperation := "volumes.input - volumes.output" - - balanceValue, err := strconv.ParseInt(balance, 10, 0) - if err != nil { - // parameter is validated in the controller for now - panic(errors.Wrap(err, "invalid balance parameter")) - } - - if balanceOperator != "" { - switch balanceOperator { - case ledger.BalanceOperatorLte: - sb.Where(sb.LessEqualThan(balanceOperation, balanceValue)) - case ledger.BalanceOperatorLt: - sb.Where(sb.LessThan(balanceOperation, balanceValue)) - case ledger.BalanceOperatorGte: - sb.Where(sb.GreaterEqualThan(balanceOperation, balanceValue)) - case ledger.BalanceOperatorGt: - 
sb.Where(sb.GreaterThan(balanceOperation, balanceValue)) - case ledger.BalanceOperatorE: - sb.Where(sb.Equal(balanceOperation, balanceValue)) - case ledger.BalanceOperatorNe: - sb.Where(sb.NotEqual(balanceOperation, balanceValue)) - default: - // parameter is validated in the controller for now - panic("invalid balance operator parameter") - } - } else { // if no operator is given, default to gte - sb.Where(sb.GreaterEqualThan(balanceOperation, balanceValue)) - } - - t.BalanceFilter = balance - t.BalanceOperatorFilter = balanceOperator - } - - return sb, t -} - -func (s *Store) GetAccounts(ctx context.Context, q ledger.AccountsQuery) (api.Cursor[core.Account], error) { - accounts := make([]core.Account, 0) - - if q.PageSize == 0 { - return api.Cursor[core.Account]{Data: accounts}, nil - } - - sb, t := s.buildAccountsQuery(q) - sb.Select("address", "metadata") - sb.OrderBy("address desc") - - if q.AfterAddress != "" { - sb.Where(sb.L("address", q.AfterAddress)) - t.AfterAddress = q.AfterAddress - } - - // We fetch an additional account to know if there is more - sb.Limit(int(q.PageSize + 1)) - t.PageSize = q.PageSize - sb.Offset(int(q.Offset)) - - executor, err := s.executorProvider(ctx) - if err != nil { - return api.Cursor[core.Account]{}, err - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, sqlq, args...) 
- if err != nil { - return api.Cursor[core.Account]{}, s.error(err) - } - defer rows.Close() - - for rows.Next() { - account := core.Account{ - Metadata: core.Metadata{}, - } - if err := rows.Scan(&account.Address, &account.Metadata); err != nil { - return api.Cursor[core.Account]{}, err - } - - accounts = append(accounts, account) - } - if rows.Err() != nil { - return api.Cursor[core.Account]{}, rows.Err() - } - - var previous, next string - if q.Offset > 0 { - offset := int(q.Offset) - int(q.PageSize) - if offset < 0 { - t.Offset = 0 - } else { - t.Offset = uint(offset) - } - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.Account]{}, s.error(err) - } - previous = base64.RawURLEncoding.EncodeToString(raw) - } - - if len(accounts) == int(q.PageSize+1) { - accounts = accounts[:len(accounts)-1] - t.Offset = q.Offset + q.PageSize - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.Account]{}, s.error(err) - } - next = base64.RawURLEncoding.EncodeToString(raw) - } - - hasMore := next != "" - return api.Cursor[core.Account]{ - PageSize: int(q.PageSize), - HasMore: hasMore, - Previous: previous, - Next: next, - Data: accounts, - PageSizeDeprecated: int(q.PageSize), - HasMoreDeprecated: &hasMore, - }, nil -} - -func (s *Store) GetAccount(ctx context.Context, addr string) (*core.Account, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("address", "metadata"). - From(s.schema.Table("accounts")). - Where(sb.Equal("address", addr)) - - account := core.Account{ - Address: addr, - Metadata: core.Metadata{}, - } - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - row := executor.QueryRowContext(ctx, sqlq, args...) 
- if err := row.Err(); err != nil { - return nil, err - } - - if err := row.Scan(&account.Address, &account.Metadata); err != nil { - if err == sql.ErrNoRows { - return &account, nil - } - return nil, err - } - - return &account, nil -} - -func (s *Store) ensureAccountExists(ctx context.Context, account string) error { - - sb := sqlbuilder.NewInsertBuilder() - sqlq, args := sb. - InsertInto(s.schema.Table("accounts")). - Cols("address", "metadata"). - Values(account, "{}"). - SQL("ON CONFLICT DO NOTHING"). - BuildWithFlavor(s.schema.Flavor()) - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - _, err = executor.ExecContext(ctx, sqlq, args...) - return s.error(err) -} - -func (s *Store) UpdateAccountMetadata(ctx context.Context, address string, metadata core.Metadata, at time.Time) error { - ib := sqlbuilder.NewInsertBuilder() - - metadataData, err := json.Marshal(metadata) - if err != nil { - return err - } - placeholder := ib.Var(metadataData) - ib. - InsertInto(s.schema.Table("accounts")). - Cols("address", "metadata"). - Values(address, metadataData) - - switch Flavor(s.schema.Flavor()) { - case PostgreSQL: - ib.SQL(fmt.Sprintf("ON CONFLICT (address) DO UPDATE SET metadata = accounts.metadata || %s", placeholder)) - case SQLite: - ib.SQL(fmt.Sprintf("ON CONFLICT (address) DO UPDATE SET metadata = json_patch(metadata, %s)", placeholder)) - } - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - sqlq, args := ib.BuildWithFlavor(s.schema.Flavor()) - _, err = executor.ExecContext(ctx, sqlq, args...) 
- if err != nil { - return err - } - - lastLog, err := s.GetLastLog(ctx) - if err != nil { - return errors.Wrap(err, "reading last log") - } - - return s.appendLog(ctx, core.NewSetMetadataLog(lastLog, at, core.SetMetadata{ - TargetType: core.MetaTargetTypeAccount, - TargetID: address, - Metadata: metadata, - })) -} diff --git a/pkg/storage/sqlstorage/accounts_test.go b/pkg/storage/sqlstorage/accounts_test.go deleted file mode 100644 index 66b61d985..000000000 --- a/pkg/storage/sqlstorage/accounts_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package sqlstorage - -import ( - "context" - "os" - "testing" - - "github.com/numary/ledger/pkg/ledger" - "github.com/pborman/uuid" - "github.com/stretchr/testify/assert" -) - -func TestAccounts(t *testing.T) { - d := NewDriver("sqlite", &sqliteDB{ - directory: os.TempDir(), - dbName: uuid.New(), - }) - - assert.NoError(t, d.Initialize(context.Background())) - - defer func(d *Driver, ctx context.Context) { - assert.NoError(t, d.Close(ctx)) - }(d, context.Background()) - - store, _, err := d.GetLedgerStore(context.Background(), "foo", true) - assert.NoError(t, err) - - _, err = store.Initialize(context.Background()) - assert.NoError(t, err) - - t.Run("success balance", func(t *testing.T) { - q := ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Balance: "50", - }, - } - - _, err := store.GetAccounts(context.Background(), q) - assert.NoError(t, err, "balance filter should not fail") - }) - - t.Run("panic invalid balance", func(t *testing.T) { - q := ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Balance: "TEST", - }, - } - - assert.PanicsWithError( - t, `invalid balance parameter: strconv.ParseInt: parsing "TEST": invalid syntax`, - - func() { - _, _ = store.GetAccounts(context.Background(), q) - }, "invalid balance in storage should panic") - }) - - t.Run("panic invalid balance operator", func(t *testing.T) { - assert.PanicsWithValue(t, "invalid balance operator 
parameter", func() { - q := ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Balance: "50", - BalanceOperator: "TEST", - }, - } - - _, _ = store.GetAccounts(context.Background(), q) - }, "invalid balance operator in storage should panic") - }) - - t.Run("success balance operator", func(t *testing.T) { - q := ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Balance: "50", - BalanceOperator: ledger.BalanceOperatorGte, - }, - } - - _, err := store.GetAccounts(context.Background(), q) - assert.NoError(t, err, "balance operator filter should not fail") - }) -} diff --git a/pkg/storage/sqlstorage/aggregations.go b/pkg/storage/sqlstorage/aggregations.go deleted file mode 100644 index 3d66fa53d..000000000 --- a/pkg/storage/sqlstorage/aggregations.go +++ /dev/null @@ -1,215 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "fmt" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" -) - -func (s *Store) GetAccountWithVolumes(ctx context.Context, account string) (*core.AccountWithVolumes, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("accounts.metadata", "volumes.asset", "volumes.input", "volumes.output") - sb.From(s.schema.Table("accounts")) - sb.JoinWithOption(sqlbuilder.LeftOuterJoin, - s.schema.Table("volumes"), - "accounts.address = volumes.account") - sb.Where(sb.E("accounts.address", account)) - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - q, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, q, args...) 
- if err != nil { - return nil, s.error(err) - } - defer rows.Close() - - acc := core.Account{ - Address: account, - Metadata: core.Metadata{}, - } - assetsVolumes := core.AssetsVolumes{} - - for rows.Next() { - var asset, inputStr, outputStr sql.NullString - if err := rows.Scan(&acc.Metadata, &asset, &inputStr, &outputStr); err != nil { - return nil, s.error(err) - } - - if asset.Valid { - assetsVolumes[asset.String] = core.Volumes{ - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - } - - if inputStr.Valid { - input, err := core.ParseMonetaryInt(inputStr.String) - if err != nil { - return nil, s.error(err) - } - assetsVolumes[asset.String] = core.Volumes{ - Input: input, - Output: assetsVolumes[asset.String].Output, - } - } - - if outputStr.Valid { - output, err := core.ParseMonetaryInt(outputStr.String) - if err != nil { - return nil, s.error(err) - } - assetsVolumes[asset.String] = core.Volumes{ - Input: assetsVolumes[asset.String].Input, - Output: output, - } - } - } - } - if err := rows.Err(); err != nil { - return nil, s.error(err) - } - - res := &core.AccountWithVolumes{ - Account: acc, - Volumes: assetsVolumes, - } - res.Balances = res.Volumes.Balances() - - return res, nil -} - -func (s *Store) CountTransactions(ctx context.Context, q ledger.TransactionsQuery) (uint64, error) { - var count uint64 - - executor, err := s.executorProvider(ctx) - if err != nil { - return 0, err - } - - sb, _ := s.buildTransactionsQuery(Flavor(s.schema.Flavor()), q) - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - sqlq = fmt.Sprintf(`SELECT count(*) FROM (%s) AS t`, sqlq) - err = executor.QueryRowContext(ctx, sqlq, args...).Scan(&count) - - return count, s.error(err) -} - -func (s *Store) CountAccounts(ctx context.Context, q ledger.AccountsQuery) (uint64, error) { - var count uint64 - - executor, err := s.executorProvider(ctx) - if err != nil { - return 0, err - } - - sb, _ := s.buildAccountsQuery(q) - sqlq, args := 
sb.Select("count(*)").BuildWithFlavor(s.schema.Flavor()) - err = executor.QueryRowContext(ctx, sqlq, args...).Scan(&count) - - return count, s.error(err) -} - -func (s *Store) GetAssetsVolumes(ctx context.Context, accountAddress string) (core.AssetsVolumes, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("asset", "input", "output") - sb.From(s.schema.Table("volumes")) - sb.Where(sb.E("account", accountAddress)) - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - q, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, q, args...) - if err != nil { - return nil, s.error(err) - } - defer rows.Close() - - volumes := core.AssetsVolumes{} - for rows.Next() { - var ( - asset string - inputStr string - outputStr string - ) - err = rows.Scan(&asset, &inputStr, &outputStr) - if err != nil { - return nil, s.error(err) - } - - input, err := core.ParseMonetaryInt(inputStr) - - if err != nil { - return nil, s.error(err) - } - - output, err := core.ParseMonetaryInt(outputStr) - - if err != nil { - return nil, s.error(err) - } - - volumes[asset] = core.Volumes{ - Input: input, - Output: output, - } - } - if err := rows.Err(); err != nil { - return nil, s.error(err) - } - - return volumes, nil -} - -func (s *Store) GetVolumes(ctx context.Context, accountAddress, asset string) (core.Volumes, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("input", "output") - sb.From(s.schema.Table("volumes")) - sb.Where(sb.And(sb.E("account", accountAddress), sb.E("asset", asset))) - - executor, err := s.executorProvider(ctx) - if err != nil { - return core.Volumes{}, err - } - - q, args := sb.BuildWithFlavor(s.schema.Flavor()) - row := executor.QueryRowContext(ctx, q, args...) 
- if row.Err() != nil { - return core.Volumes{}, s.error(row.Err()) - } - - var inputStr, outputStr string - - if err := row.Scan(&inputStr, &outputStr); err != nil { - if err == sql.ErrNoRows { - return core.Volumes{}, nil - } - return core.Volumes{}, s.error(err) - } - - input, err := core.ParseMonetaryInt(inputStr) - - if err != nil { - return core.Volumes{}, s.error(err) - } - - output, err := core.ParseMonetaryInt(outputStr) - - if err != nil { - return core.Volumes{}, s.error(err) - } - - return core.Volumes{ - Input: input, - Output: output, - }, nil -} diff --git a/pkg/storage/sqlstorage/balances.go b/pkg/storage/sqlstorage/balances.go deleted file mode 100644 index f4cfcd367..000000000 --- a/pkg/storage/sqlstorage/balances.go +++ /dev/null @@ -1,190 +0,0 @@ -package sqlstorage - -import ( - "context" - "encoding/base64" - "encoding/json" - "strconv" - "strings" - - "github.com/formancehq/go-libs/api" - "github.com/huandu/go-sqlbuilder" - "github.com/lib/pq" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" -) - -func (s *Store) GetBalancesAggregated(ctx context.Context, q ledger.BalancesQuery) (core.AssetsBalances, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("asset", "sum(input - output)") - sb.From(s.schema.Table("volumes")) - sb.GroupBy("asset") - - if q.Filters.AddressRegexp != "" { - arg := sb.Args.Add("^" + q.Filters.AddressRegexp + "$") - switch s.Schema().Flavor() { - case sqlbuilder.PostgreSQL: - sb.Where("account ~* " + arg) - case sqlbuilder.SQLite: - sb.Where("account REGEXP " + arg) - } - } - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - balanceAggregatedQuery, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, balanceAggregatedQuery, args...) 
- if err != nil { - return nil, s.error(err) - } - defer rows.Close() - - aggregatedBalances := core.AssetsBalances{} - - for rows.Next() { - var ( - asset string - balancesStr string - ) - if err = rows.Scan(&asset, &balancesStr); err != nil { - return nil, s.error(err) - } - - balances, err := core.ParseMonetaryInt(balancesStr) - - if err != nil { - return nil, s.error(err) - } - - aggregatedBalances[asset] = balances - } - if err := rows.Err(); err != nil { - return nil, s.error(err) - } - - return aggregatedBalances, nil -} - -func (s *Store) GetBalances(ctx context.Context, q ledger.BalancesQuery) (api.Cursor[core.AccountsBalances], error) { - executor, err := s.executorProvider(ctx) - if err != nil { - return api.Cursor[core.AccountsBalances]{}, err - } - - sb := sqlbuilder.NewSelectBuilder() - switch s.Schema().Flavor() { - case sqlbuilder.PostgreSQL: - sb.Select("account", "array_agg((asset, input - output))") - case sqlbuilder.SQLite: - // we try to get the same format as array_agg from postgres : {"(USD,-12686)","(EUR,-250)"} - // so don't have to dev a marshal method for each storage - sb.Select("account", `'{"(' || group_concat(asset||','||(input-output), ')","(')|| ')"}' as key_value_pairs`) - } - - sb.From(s.schema.Table("volumes")) - sb.GroupBy("account") - sb.OrderBy("account desc") - - t := BalancesPaginationToken{} - - if q.AfterAddress != "" { - sb.Where(sb.L("account", q.AfterAddress)) - t.AfterAddress = q.AfterAddress - } - - if q.Filters.AddressRegexp != "" { - arg := sb.Args.Add("^" + q.Filters.AddressRegexp + "$") - switch s.Schema().Flavor() { - case sqlbuilder.PostgreSQL: - sb.Where("account ~* " + arg) - case sqlbuilder.SQLite: - sb.Where("account REGEXP " + arg) - } - t.AddressRegexpFilter = q.Filters.AddressRegexp - } - - sb.Limit(int(q.PageSize + 1)) - t.PageSize = q.PageSize - sb.Offset(int(q.Offset)) - - balanceQuery, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, balanceQuery, args...) 
- if err != nil { - return api.Cursor[core.AccountsBalances]{}, s.error(err) - } - defer rows.Close() - - accounts := make([]core.AccountsBalances, 0) - - for rows.Next() { - var currentAccount string - var arrayAgg []string - if err = rows.Scan(¤tAccount, pq.Array(&arrayAgg)); err != nil { - return api.Cursor[core.AccountsBalances]{}, s.error(err) - } - - accountsBalances := core.AccountsBalances{ - currentAccount: core.AssetsBalances{}, - } - - // arrayAgg is in the form: []string{"(USD,-250)","(EUR,1000)"} - for _, agg := range arrayAgg { - // Remove parenthesis - agg = agg[1 : len(agg)-1] - // Split the asset and balances on the comma separator - split := strings.Split(agg, ",") - asset := split[0] - balancesString := split[1] - balances, err := strconv.ParseInt(balancesString, 10, 64) - if err != nil { - return api.Cursor[core.AccountsBalances]{}, s.error(err) - } - accountsBalances[currentAccount][asset] = core.NewMonetaryInt(balances) - } - - accounts = append(accounts, accountsBalances) - } - - if err := rows.Err(); err != nil { - return api.Cursor[core.AccountsBalances]{}, s.error(err) - } - - var previous, next string - if q.Offset > 0 { - offset := int(q.Offset) - int(q.PageSize) - if offset < 0 { - t.Offset = 0 - } else { - t.Offset = uint(offset) - } - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.AccountsBalances]{}, s.error(err) - } - previous = base64.RawURLEncoding.EncodeToString(raw) - } - - if len(accounts) == int(q.PageSize+1) { - accounts = accounts[:len(accounts)-1] - t.Offset = q.Offset + q.PageSize - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.AccountsBalances]{}, s.error(err) - } - next = base64.RawURLEncoding.EncodeToString(raw) - } - - hasMore := next != "" - return api.Cursor[core.AccountsBalances]{ - PageSize: int(q.PageSize), - HasMore: hasMore, - Previous: previous, - Next: next, - Data: accounts, - PageSizeDeprecated: int(q.PageSize), - HasMoreDeprecated: &hasMore, - }, nil -} diff 
--git a/pkg/storage/sqlstorage/balances_test.go b/pkg/storage/sqlstorage/balances_test.go deleted file mode 100644 index 204246519..000000000 --- a/pkg/storage/sqlstorage/balances_test.go +++ /dev/null @@ -1,146 +0,0 @@ -package sqlstorage_test - -import ( - "context" - "testing" - - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func testGetBalances(t *testing.T, store *sqlstorage.Store) { - - err := store.Commit(context.Background(), tx1, tx2, tx3) - require.NoError(t, err) - - t.Run("all accounts", func(t *testing.T) { - cursor, err := store.GetBalances(context.Background(), - ledger.BalancesQuery{ - PageSize: 10, - }) - assert.NoError(t, err) - assert.Equal(t, 10, cursor.PageSize) - assert.Equal(t, false, cursor.HasMore) - assert.Equal(t, "", cursor.Previous) - assert.Equal(t, "", cursor.Next) - assert.Equal(t, []core.AccountsBalances{ - { - "world": core.AssetsBalances{ - "USD": core.NewMonetaryInt(-200), - }, - }, - { - "users:1": core.AssetsBalances{ - "USD": core.NewMonetaryInt(1), - }, - }, - { - "central_bank": core.AssetsBalances{ - "USD": core.NewMonetaryInt(199), - }, - }, - }, cursor.Data) - }) - - t.Run("limit", func(t *testing.T) { - cursor, err := store.GetBalances(context.Background(), - ledger.BalancesQuery{ - PageSize: 1, - }) - assert.NoError(t, err) - assert.Equal(t, 1, cursor.PageSize) - assert.Equal(t, true, cursor.HasMore) - assert.Equal(t, "", cursor.Previous) - assert.NotEqual(t, "", cursor.Next) - assert.Equal(t, []core.AccountsBalances{ - { - "world": core.AssetsBalances{ - "USD": core.NewMonetaryInt(-200), - }, - }, - }, cursor.Data) - }) - - t.Run("limit and offset", func(t *testing.T) { - cursor, err := store.GetBalances(context.Background(), - ledger.BalancesQuery{ - PageSize: 1, - Offset: 1, - }) - assert.NoError(t, err) - assert.Equal(t, 1, cursor.PageSize) - 
assert.Equal(t, true, cursor.HasMore) - assert.NotEqual(t, "", cursor.Previous) - assert.NotEqual(t, "", cursor.Next) - assert.Equal(t, []core.AccountsBalances{ - { - "users:1": core.AssetsBalances{ - "USD": core.NewMonetaryInt(1), - }, - }, - }, cursor.Data) - }) - - t.Run("after", func(t *testing.T) { - cursor, err := store.GetBalances(context.Background(), - ledger.BalancesQuery{ - PageSize: 10, - AfterAddress: "world", - }) - assert.NoError(t, err) - assert.Equal(t, 10, cursor.PageSize) - assert.Equal(t, false, cursor.HasMore) - assert.Equal(t, "", cursor.Previous) - assert.Equal(t, "", cursor.Next) - assert.Equal(t, []core.AccountsBalances{ - { - "users:1": core.AssetsBalances{ - "USD": core.NewMonetaryInt(1), - }, - }, - { - "central_bank": core.AssetsBalances{ - "USD": core.NewMonetaryInt(199), - }, - }, - }, cursor.Data) - }) - - t.Run("after and filter on address", func(t *testing.T) { - cursor, err := store.GetBalances(context.Background(), - ledger.BalancesQuery{ - PageSize: 10, - AfterAddress: "world", - Filters: ledger.BalancesQueryFilters{AddressRegexp: "users.+"}, - }) - assert.NoError(t, err) - assert.Equal(t, 10, cursor.PageSize) - assert.Equal(t, false, cursor.HasMore) - assert.Equal(t, "", cursor.Previous) - assert.Equal(t, "", cursor.Next) - assert.Equal(t, []core.AccountsBalances{ - { - "users:1": core.AssetsBalances{ - "USD": core.NewMonetaryInt(1), - }, - }, - }, cursor.Data) - }) -} - -func testGetBalancesAggregated(t *testing.T, store *sqlstorage.Store) { - err := store.Commit(context.Background(), tx1, tx2, tx3) - assert.NoError(t, err) - - q := ledger.BalancesQuery{ - PageSize: 10, - } - cursor, err := store.GetBalancesAggregated(context.Background(), q) - assert.NoError(t, err) - assert.Equal(t, core.AssetsBalances{ - "USD": core.NewMonetaryInt(0), - }, cursor) -} diff --git a/pkg/storage/sqlstorage/collect.go b/pkg/storage/sqlstorage/collect.go deleted file mode 100644 index aece59367..000000000 --- a/pkg/storage/sqlstorage/collect.go 
+++ /dev/null @@ -1,97 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "embed" - "io/fs" - "path" - "regexp" - "sort" - "strings" - - "github.com/numary/ledger/pkg/core" -) - -//go:embed migrates -var MigrationsFS embed.FS - -func extractMigrationInformation(filename string) (string, string) { - parts := strings.SplitN(filename, "-", 2) - number := parts[0] - name := parts[1] - return number, name -} - -func CollectMigrationFiles(migrationsFS fs.FS) ([]Migration, error) { - directories, err := fs.ReadDir(migrationsFS, "migrates") - if err != nil { - return nil, err - } - - migrations := Migrations{} - for _, directory := range directories { - directoryName := directory.Name() - - version, name := extractMigrationInformation(directoryName) - - migrationDirectoryName := path.Join("migrates", directoryName) - units := make(map[string][]MigrationFunc) - unitsFiles, err := fs.ReadDir(migrationsFS, migrationDirectoryName) - if err != nil { - return nil, err - } - - for _, unit := range unitsFiles { - parts := strings.SplitN(unit.Name(), ".", 2) - extension := parts[1] - engine := parts[0] - switch extension { - case "sql": - content, err := fs.ReadFile(migrationsFS, path.Join(migrationDirectoryName, unit.Name())) - if err != nil { - return nil, err - } - - for _, statement := range strings.Split(string(content), "--statement") { - statement = strings.TrimSpace(statement) - if statement != "" { - units[engine] = append(units[engine], SQLMigrationFunc(statement)) - } - } - - case "go": - for _, registeredGoMigration := range registeredGoMigrations { - if registeredGoMigration.Version == version { - for engine, goMigrationUnits := range registeredGoMigration.Handlers { - units[engine] = append(units[engine], goMigrationUnits...) 
- } - } - } - } - } - - migrations = append(migrations, Migration{ - MigrationInfo: core.MigrationInfo{ - Version: version, - Name: name, - }, - Handlers: units, - }) - } - - sort.Sort(migrations) - - return migrations, nil -} - -func SQLMigrationFunc(content string) MigrationFunc { - return func(ctx context.Context, schema Schema, tx *sql.Tx) error { - plain := strings.ReplaceAll(content, "VAR_LEDGER_NAME", schema.Name()) - r := regexp.MustCompile(`[\n\t\s]+`) - plain = r.ReplaceAllString(plain, " ") - _, err := tx.ExecContext(ctx, plain) - - return err - } -} diff --git a/pkg/storage/sqlstorage/collect_test.go b/pkg/storage/sqlstorage/collect_test.go deleted file mode 100644 index ee0bb4ca1..000000000 --- a/pkg/storage/sqlstorage/collect_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "fmt" - "testing" - - "github.com/psanford/memfs" - "github.com/stretchr/testify/require" -) - -func TestCollectMigrations(t *testing.T) { - - mfs := memfs.New() - require.NoError(t, mfs.MkdirAll("migrates/0-first-migration", 0666)) - require.NoError(t, mfs.WriteFile("migrates/0-first-migration/postgres.sql", []byte(` - --statement - NO SQL; - `), 0666)) - require.NoError(t, mfs.WriteFile("migrates/0-first-migration/sqlite.go", []byte{}, 0666)) - require.NoError(t, mfs.MkdirAll("migrates/1-second-migration", 0666)) - require.NoError(t, mfs.WriteFile("migrates/1-second-migration/any.sql", []byte(` - --statement - NO SQL; - `), 0666)) - - RegisterGoMigrationFromFilename("migrates/0-first-migration/sqlite.go", func(ctx context.Context, schema Schema, tx *sql.Tx) error { - return nil - }) - - migrations, err := CollectMigrationFiles(mfs) - require.NoError(t, err) - require.Len(t, migrations, 2) - - require.Equal(t, "0", migrations[0].Version) - require.Equal(t, "first-migration", migrations[0].Name) - require.Len(t, migrations[0].Handlers, 2) - require.Len(t, migrations[0].Handlers["sqlite"], 1) - require.Len(t, 
migrations[0].Handlers["postgres"], 1) - - require.Equal(t, "1", migrations[1].Version) - require.Equal(t, "second-migration", migrations[1].Name) - require.Len(t, migrations[1].Handlers, 1) - require.Len(t, migrations[1].Handlers["any"], 1) -} - -func TestMigrationsOrders(t *testing.T) { - mfs := memfs.New() - for i := 0; i < 1000; i++ { - dir := fmt.Sprintf("migrates/%d-migration", i) - require.NoError(t, mfs.MkdirAll(dir, 0666)) - require.NoError(t, mfs.WriteFile(fmt.Sprintf("%s/postgres.sql", dir), []byte(` - --statement - NO SQL; - `), 0666)) - } - - migrations, err := CollectMigrationFiles(mfs) - require.NoError(t, err) - for i, m := range migrations { - require.Equal(t, fmt.Sprintf("%d", i), m.Version) - } -} diff --git a/pkg/storage/sqlstorage/commit.go b/pkg/storage/sqlstorage/commit.go deleted file mode 100644 index 35cbead29..000000000 --- a/pkg/storage/sqlstorage/commit.go +++ /dev/null @@ -1,49 +0,0 @@ -package sqlstorage - -import ( - "context" - - "github.com/numary/ledger/pkg/core" - "github.com/pkg/errors" -) - -func (s *Store) commit(ctx context.Context, txs ...core.ExpandedTransaction) ([]core.Log, error) { - if err := s.insertTransactions(ctx, txs...); err != nil { - return nil, errors.Wrap(err, "inserting transactions") - } - - postCommitVolumes := core.AggregatePostCommitVolumes(txs...) 
- - for account := range postCommitVolumes { - err := s.ensureAccountExists(ctx, account) - if err != nil { - return nil, errors.Wrap(err, "ensuring account exists") - } - } - - if err := s.updateVolumes(ctx, postCommitVolumes); err != nil { - return nil, errors.Wrap(err, "updating volumes") - } - - logs := make([]core.Log, 0) - lastLog, err := s.GetLastLog(ctx) - if err != nil { - return nil, err - } - for _, tx := range txs { - newLog := core.NewTransactionLog(lastLog, tx.Transaction) - lastLog = &newLog - logs = append(logs, newLog) - } - - if err := s.appendLog(ctx, logs...); err != nil { - return nil, errors.Wrap(err, "inserting logs") - } - - return logs, nil -} - -func (s *Store) Commit(ctx context.Context, txs ...core.ExpandedTransaction) error { - _, err := s.commit(ctx, txs...) - return err -} diff --git a/pkg/storage/sqlstorage/driver.go b/pkg/storage/sqlstorage/driver.go deleted file mode 100644 index f0c11d9c7..000000000 --- a/pkg/storage/sqlstorage/driver.go +++ /dev/null @@ -1,274 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "database/sql/driver" - "fmt" - - "github.com/formancehq/go-libs/logging" - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/api/idempotency" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/opentelemetry" - "github.com/numary/ledger/pkg/storage" - "github.com/pkg/errors" - "go.nhat.io/otelsql" -) - -const SystemSchema = "_system" - -var sqlDrivers = map[Flavor]struct { - driverName string -}{} - -type otelSQLDriverWithCheckNamedValueDisabled struct { - driver.Driver -} - -func (d otelSQLDriverWithCheckNamedValueDisabled) CheckNamedValue(*driver.NamedValue) error { - return nil -} - -var _ = driver.NamedValueChecker(&otelSQLDriverWithCheckNamedValueDisabled{}) - -func UpdateSQLDriverMapping(flavor Flavor, name string) { - cfg := sqlDrivers[flavor] - cfg.driverName = name - sqlDrivers[flavor] = cfg -} - -func init() { - // Default mapping for app driver/sql driver - 
UpdateSQLDriverMapping(PostgreSQL, "pgx") -} - -func InstrumentalizeSQLDrivers() { - for flavor, config := range sqlDrivers { - // otelsql has a function Register which wrap the underlying driver, but does not mirror driver.NamedValuedChecker interface of the underlying driver - // pgx implements this interface and just return nil - // so, we need to manually wrap the driver to implements this interface and return a nil error - db, err := sql.Open(config.driverName, "") - if err != nil { - panic(err) - } - - dri := db.Driver() - - if err = db.Close(); err != nil { - panic(err) - } - - wrappedDriver := otelsql.Wrap(dri, - otelsql.AllowRoot(), - otelsql.TraceAll(), - ) - - config.driverName = fmt.Sprintf("otel-%s", config.driverName) - sql.Register(config.driverName, otelSQLDriverWithCheckNamedValueDisabled{ - wrappedDriver, - }) - sqlDrivers[flavor] = config - } -} - -// defaultExecutorProvider use the context to register and manage a sql transaction (if the context is mark as transactional) -func defaultExecutorProvider(schema Schema) func(ctx context.Context) (executor, error) { - return func(ctx context.Context) (executor, error) { - if !storage.IsTransactional(ctx) { - return schema, nil - } - - if storage.IsTransactionRegistered(ctx) { - return storage.RegisteredTransaction(ctx).(*sql.Tx), nil - } - - sqlTx, err := schema.BeginTx(ctx, &sql.TxOptions{}) - if err != nil { - return nil, err - } - - storage.RegisterTransaction(ctx, sqlTx, func(ctx context.Context) error { - return sqlTx.Commit() - }, func(ctx context.Context) error { - return sqlTx.Rollback() - }) - return sqlTx, nil - } -} - -type Driver struct { - name string - db DB - systemSchema Schema - registeredLedgers map[string]struct{} -} - -func (d *Driver) GetSystemStore() storage.SystemStore { - return &SystemStore{ - systemSchema: d.systemSchema, - } -} - -func (d *Driver) GetLedgerStore(ctx context.Context, name string, create bool) (*Store, bool, error) { - if name == SystemSchema { - return nil, 
false, errors.New("reserved name") - } - - ctx, span := opentelemetry.Start(ctx, "Load store") - defer span.End() - - var ( - created bool - schema Schema - err error - ) - if _, exists := d.registeredLedgers[name]; !exists { - systemStore := &SystemStore{ - systemSchema: d.systemSchema, - } - exists, err := systemStore.exists(ctx, name) - if err != nil { - return nil, false, errors.Wrap(err, "checking ledger existence") - } - if !exists && !create { - return nil, false, storage.ErrLedgerStoreNotFound - } - - created, err = systemStore.Register(ctx, name) - if err != nil { - return nil, false, errors.Wrap(err, "registering ledger") - } - - schema, err = d.db.Schema(ctx, name) - if err != nil { - return nil, false, errors.Wrap(err, "opening schema") - } - - if err = schema.Initialize(ctx); err != nil { - return nil, false, err - } - d.registeredLedgers[name] = struct{}{} - } else { - schema, err = d.db.Schema(ctx, name) - if err != nil { - return nil, false, errors.Wrap(err, "opening schema") - } - } - - return NewStore(schema, defaultExecutorProvider(schema), func(ctx context.Context) error { - return schema.Close(context.Background()) - }, func(ctx context.Context) error { - return d.GetSystemStore().DeleteLedger(ctx, name) - }), created, nil -} - -func (d *Driver) Name() string { - return d.name -} - -func (d *Driver) Initialize(ctx context.Context) (err error) { - logging.GetLogger(ctx).Debugf("Initialize driver %s", d.name) - - if err = d.db.Initialize(ctx); err != nil { - return - } - - d.systemSchema, err = d.db.Schema(ctx, SystemSchema) - if err != nil { - return - } - - if err = d.systemSchema.Initialize(ctx); err != nil { - return - } - - q, args := sqlbuilder. - CreateTable(d.systemSchema.Table("ledgers")). - Define("ledger varchar(255) primary key, addedAt timestamp"). - IfNotExists(). - BuildWithFlavor(d.systemSchema.Flavor()) - - _, err = d.systemSchema.ExecContext(ctx, q, args...) - if err != nil { - return err - } - - q, args = sqlbuilder. 
- CreateTable(d.systemSchema.Table("configuration")). - Define("key varchar(255) primary key, value text, addedAt timestamp"). - IfNotExists(). - BuildWithFlavor(d.systemSchema.Flavor()) - _, err = d.systemSchema.ExecContext(ctx, q, args...) - if err != nil { - return err - } - - return nil -} - -func (d *Driver) Close(ctx context.Context) error { - err := d.systemSchema.Close(ctx) - if err != nil { - return err - } - return d.db.Close(ctx) -} - -func NewDriver(name string, db DB) *Driver { - return &Driver{ - db: db, - name: name, - registeredLedgers: map[string]struct{}{}, - } -} - -var _ storage.Driver[*Store] = (*Driver)(nil) - -type LedgerStorageDriver struct { - *Driver -} - -func (d *LedgerStorageDriver) GetLedgerStore(ctx context.Context, name string, create bool) (ledger.Store, bool, error) { - return d.Driver.GetLedgerStore(ctx, name, create) -} - -var _ storage.Driver[ledger.Store] = (*LedgerStorageDriver)(nil) - -func NewLedgerStorageDriverFromRawDriver(driver *Driver) storage.Driver[ledger.Store] { - return &LedgerStorageDriver{ - Driver: driver, - } -} - -type DefaultStorageDriver struct { - *Driver -} - -func (d *DefaultStorageDriver) GetLedgerStore(ctx context.Context, name string, create bool) (storage.LedgerStore, bool, error) { - return d.Driver.GetLedgerStore(ctx, name, create) -} - -var _ storage.Driver[storage.LedgerStore] = (*DefaultStorageDriver)(nil) - -func NewDefaultStorageDriverFromRawDriver(driver *Driver) storage.Driver[storage.LedgerStore] { - return &DefaultStorageDriver{ - Driver: driver, - } -} - -type IdempotencyStorageDriver struct { - *Driver -} - -func (d *IdempotencyStorageDriver) GetLedgerStore(ctx context.Context, name string, create bool) (idempotency.Store, bool, error) { - return d.Driver.GetLedgerStore(ctx, name, create) -} - -var _ storage.Driver[idempotency.Store] = (*IdempotencyStorageDriver)(nil) - -func NewIdempotencyStorageDriverFromRawDriver(driver *Driver) storage.Driver[idempotency.Store] { - return 
&IdempotencyStorageDriver{ - Driver: driver, - } -} diff --git a/pkg/storage/sqlstorage/driver_test.go b/pkg/storage/sqlstorage/driver_test.go deleted file mode 100644 index bc29a11d2..000000000 --- a/pkg/storage/sqlstorage/driver_test.go +++ /dev/null @@ -1,49 +0,0 @@ -package sqlstorage - -import ( - "context" - "os" - "testing" - - "github.com/pborman/uuid" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewDriver(t *testing.T) { - d := NewDriver("sqlite", &sqliteDB{ - directory: os.TempDir(), - dbName: uuid.New(), - }) - - assert.NoError(t, d.Initialize(context.Background())) - - defer func(d *Driver, ctx context.Context) { - assert.NoError(t, d.Close(ctx)) - }(d, context.Background()) - - store, _, err := d.GetLedgerStore(context.Background(), "foo", true) - assert.NoError(t, err) - - _, err = store.Initialize(context.Background()) - assert.NoError(t, err) - - assert.NoError(t, store.Close(context.Background())) - - _, err = store.schema.QueryContext(context.Background(), "select * from transactions") - assert.Error(t, err) - assert.Equal(t, "sql: database is closed", err.Error()) -} - -func TestConfiguration(t *testing.T) { - d := NewDriver("sqlite", &sqliteDB{ - directory: os.TempDir(), - dbName: uuid.New(), - }) - require.NoError(t, d.Initialize(context.Background())) - - require.NoError(t, d.GetSystemStore().InsertConfiguration(context.Background(), "foo", "bar")) - bar, err := d.GetSystemStore().GetConfiguration(context.Background(), "foo") - require.NoError(t, err) - require.Equal(t, "bar", bar) -} diff --git a/pkg/storage/sqlstorage/flavor.go b/pkg/storage/sqlstorage/flavor.go deleted file mode 100644 index c019dd786..000000000 --- a/pkg/storage/sqlstorage/flavor.go +++ /dev/null @@ -1,89 +0,0 @@ -package sqlstorage - -import ( - "errors" - - "github.com/huandu/go-sqlbuilder" - "github.com/jackc/pgconn" - "github.com/numary/ledger/pkg/storage" - "go.opentelemetry.io/otel/attribute" - semconv 
"go.opentelemetry.io/otel/semconv/v1.4.0" -) - -type Flavor sqlbuilder.Flavor - -var ( - SQLite = Flavor(sqlbuilder.SQLite) - PostgreSQL = Flavor(sqlbuilder.PostgreSQL) -) - -func (f Flavor) String() string { - switch f { - case SQLite: - return "sqlite" - case PostgreSQL: - return "postgres" - default: - return "unknown" - } -} - -func (f Flavor) AttributeKeyValue() attribute.KeyValue { - switch f { - case SQLite: - return semconv.DBSystemSqlite - case PostgreSQL: - return semconv.DBSystemPostgreSQL - default: - return attribute.KeyValue{} - } -} - -func FlavorFromString(v string) Flavor { - switch v { - case "sqlite": - return SQLite - case "postgres": - return PostgreSQL - default: - return 0 - } -} - -var errorHandlers = map[Flavor]func(error) error{} - -func errorFromFlavor(f Flavor, err error) error { - if err == nil { - return nil - } - h, ok := errorHandlers[f] - if !ok { - return err - } - return h(err) -} - -func init() { - errorHandlers[PostgreSQL] = func(err error) error { - - handleError := func(err error) error { - switch eerr := err.(type) { - case *pgconn.PgError: - switch eerr.Code { - case "23505": - return storage.NewError(storage.ConstraintFailed, err) - case "53300": - return storage.NewError(storage.TooManyClient, err) - } - } - return err - } - - unwrappedError := errors.Unwrap(err) - if unwrappedError != nil { - return handleError(unwrappedError) - } else { - return handleError(err) - } - } -} diff --git a/pkg/storage/sqlstorage/idempotency.go b/pkg/storage/sqlstorage/idempotency.go deleted file mode 100644 index 7fa8d17d2..000000000 --- a/pkg/storage/sqlstorage/idempotency.go +++ /dev/null @@ -1,71 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "encoding/json" - "net/http" - "time" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/api/idempotency" - "github.com/pkg/errors" -) - -func (s *Store) CreateIK(ctx context.Context, key string, response idempotency.Response) error { - data, err := 
json.Marshal(response.Header) - if err != nil { - return err - } - - ib := sqlbuilder.NewInsertBuilder() - q, args := ib. - InsertInto(s.schema.Table("idempotency")). - Cols("key", "date", "status_code", "headers", "body", "request_hash"). - Values(key, time.Now().UTC(), response.StatusCode, string(data), response.Body, response.RequestHash). - BuildWithFlavor(s.schema.Flavor()) - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - _, err = executor.ExecContext(ctx, q, args...) - return errors.Wrap(err, "creating IK") -} - -func (s *Store) ReadIK(ctx context.Context, key string) (*idempotency.Response, error) { - sb := sqlbuilder.NewSelectBuilder() - q, args := sb. - Select("status_code", "headers", "body", "request_hash"). - From(s.schema.Table("idempotency")). - Where(sb.Equal("key", key)). - BuildWithFlavor(s.schema.Flavor()) - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - row := executor.QueryRowContext(ctx, q, args...) 
- if row.Err() != nil { - return nil, s.error(row.Err()) - } - - response := &idempotency.Response{} - headersStringValue := "" - if err := row.Scan(&response.StatusCode, &headersStringValue, &response.Body, &response.RequestHash); err != nil { - if err == sql.ErrNoRows { - return nil, idempotency.ErrIKNotFound - } - return nil, s.error(err) - } - - headers := http.Header{} - if err := json.Unmarshal([]byte(headersStringValue), &headers); err != nil { - return nil, s.error(err) - } - response.Header = headers - - return response, nil -} diff --git a/pkg/storage/sqlstorage/logs.go b/pkg/storage/sqlstorage/logs.go deleted file mode 100644 index bbde79ccd..000000000 --- a/pkg/storage/sqlstorage/logs.go +++ /dev/null @@ -1,211 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "encoding/base64" - "encoding/json" - "fmt" - "time" - - "github.com/formancehq/go-libs/api" - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/pkg/errors" -) - -func (s *Store) appendLog(ctx context.Context, log ...core.Log) error { - var ( - query string - args []interface{} - ) - - switch s.Schema().Flavor() { - case sqlbuilder.SQLite: - ib := sqlbuilder.NewInsertBuilder() - ib.InsertInto(s.schema.Table("log")) - ib.Cols("id", "type", "hash", "date", "data") - for _, l := range log { - data, err := json.Marshal(l.Data) - if err != nil { - panic(err) - } - - ib.Values(l.ID, l.Type, l.Hash, l.Date, string(data)) - } - query, args = ib.BuildWithFlavor(s.schema.Flavor()) - case sqlbuilder.PostgreSQL: - ids := make([]uint64, len(log)) - types := make([]string, len(log)) - hashes := make([]string, len(log)) - dates := make([]time.Time, len(log)) - datas := make([][]byte, len(log)) - - for i, l := range log { - data, err := json.Marshal(l.Data) - if err != nil { - panic(err) - } - ids[i] = l.ID - types[i] = l.Type - hashes[i] = l.Hash - dates[i] = l.Date - datas[i] = data - } - - query = fmt.Sprintf( - 
`INSERT INTO "%s".log (id, type, hash, date, data) (SELECT * FROM unnest($1::int[], $2::varchar[], $3::varchar[], $4::timestamptz[], $5::jsonb[]))`, - s.schema.Name()) - args = []interface{}{ - ids, types, hashes, dates, datas, - } - } - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - _, err = executor.ExecContext(ctx, query, args...) - if err != nil { - return s.error(err) - } - return nil -} - -func (s *Store) GetLastLog(ctx context.Context) (*core.Log, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.From(s.schema.Table("log")) - sb.Select("id", "type", "hash", "date", "data") - sb.OrderBy("id desc") - sb.Limit(1) - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - l := core.Log{} - data := sql.NullString{} - sqlq, _ := sb.BuildWithFlavor(s.schema.Flavor()) - row := executor.QueryRowContext(ctx, sqlq) - if err := row.Scan(&l.ID, &l.Type, &l.Hash, &l.Date, &data); err != nil { - if err == sql.ErrNoRows { - return nil, nil - } - return nil, err - } - l.Date = l.Date.UTC() - - l.Data, err = core.HydrateLog(l.Type, data.String) - if err != nil { - return nil, err - } - l.Date = l.Date.UTC() - - return &l, nil -} - -func (s *Store) GetLogs(ctx context.Context, q *ledger.LogsQuery) (api.Cursor[core.Log], error) { - res := []core.Log{} - - if q.PageSize == 0 { - return api.Cursor[core.Log]{Data: res}, nil - } - - sb, t := s.buildLogsQuery(q) - executor, err := s.executorProvider(ctx) - if err != nil { - return api.Cursor[core.Log]{}, err - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, sqlq, args...) 
- if err != nil { - return api.Cursor[core.Log]{}, s.error(err) - } - defer rows.Close() - - for rows.Next() { - l := core.Log{} - data := sql.NullString{} - if err := rows.Scan(&l.ID, &l.Type, &l.Hash, &l.Date, &data); err != nil { - return api.Cursor[core.Log]{}, err - } - l.Date = l.Date.UTC() - - l.Data, err = core.HydrateLog(l.Type, data.String) - if err != nil { - return api.Cursor[core.Log]{}, errors.Wrap(err, "hydrating log") - } - l.Date = l.Date.UTC() - res = append(res, l) - } - if rows.Err() != nil { - return api.Cursor[core.Log]{}, s.error(rows.Err()) - } - - var previous, next string - - // Page with logs before - if q.AfterID > 0 && len(res) > 1 && res[0].ID == q.AfterID { - t.AfterID = res[0].ID + uint64(q.PageSize) - res = res[1:] - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.Log]{}, s.error(err) - } - previous = base64.RawURLEncoding.EncodeToString(raw) - } - - // Page with logs after - if len(res) > int(q.PageSize) { - res = res[:q.PageSize] - t.AfterID = res[len(res)-1].ID - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.Log]{}, s.error(err) - } - next = base64.RawURLEncoding.EncodeToString(raw) - } - - hasMore := next != "" - return api.Cursor[core.Log]{ - PageSize: int(q.PageSize), - HasMore: hasMore, - Previous: previous, - Next: next, - Data: res, - PageSizeDeprecated: int(q.PageSize), - HasMoreDeprecated: &hasMore, - }, nil -} - -func (s *Store) buildLogsQuery(q *ledger.LogsQuery) (*sqlbuilder.SelectBuilder, LogsPaginationToken) { - sb := sqlbuilder.NewSelectBuilder() - t := LogsPaginationToken{} - - sb.Select("id", "type", "hash", "date", "data") - sb.From(s.schema.Table("log")) - - if !q.Filters.StartTime.IsZero() { - sb.Where(sb.GE("date", q.Filters.StartTime.UTC())) - t.StartTime = q.Filters.StartTime - } - if !q.Filters.EndTime.IsZero() { - sb.Where(sb.L("date", q.Filters.EndTime.UTC())) - t.EndTime = q.Filters.EndTime - } - sb.OrderBy("id").Desc() - - if q.AfterID > 0 { - 
sb.Where(sb.LE("id", q.AfterID)) - } - - // We fetch additional logs to know if there are more before and/or after. - sb.Limit(int(q.PageSize + 2)) - t.PageSize = q.PageSize - - return sb, t -} diff --git a/pkg/storage/sqlstorage/mapping.go b/pkg/storage/sqlstorage/mapping.go deleted file mode 100644 index 8b4329910..000000000 --- a/pkg/storage/sqlstorage/mapping.go +++ /dev/null @@ -1,74 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "encoding/json" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" -) - -// We have only one mapping for a ledger, so hardcode the id -const mappingId = "0000" - -func (s *Store) LoadMapping(ctx context.Context) (*core.Mapping, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("mapping").From(s.schema.Table("mapping")) - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - row := executor.QueryRowContext(ctx, sqlq, args...) 
- - m := core.Mapping{} - var mappingString string - if err := row.Scan(&mappingString); err != nil { - if err == sql.ErrNoRows { - return &m, nil - } - return &m, err - } - - if err := json.Unmarshal([]byte(mappingString), &m); err != nil { - return &m, err - } - - return &m, nil -} - -func (s *Store) SaveMapping(ctx context.Context, mapping core.Mapping) error { - data, err := json.Marshal(mapping) - if err != nil { - return err - } - - ib := sqlbuilder.NewInsertBuilder() - ib.InsertInto(s.schema.Table("mapping")) - ib.Cols("mapping_id", "mapping") - ib.Values(mappingId, string(data)) - - var ( - sqlq string - args []interface{} - ) - switch s.schema.Flavor() { - case sqlbuilder.Flavor(PostgreSQL): - sqlq, args = ib.BuildWithFlavor(s.schema.Flavor()) - sqlq += " ON CONFLICT (mapping_id) DO UPDATE SET mapping = $2" - default: - ib.ReplaceInto(s.schema.Table("mapping")) - sqlq, args = ib.BuildWithFlavor(s.schema.Flavor()) - } - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - _, err = executor.ExecContext(ctx, sqlq, args...) 
- return s.error(err) -} diff --git a/pkg/storage/sqlstorage/migrate.go b/pkg/storage/sqlstorage/migrate.go deleted file mode 100644 index 3ca4edf92..000000000 --- a/pkg/storage/sqlstorage/migrate.go +++ /dev/null @@ -1,182 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "sort" - "strconv" - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/opentelemetry" - "github.com/pkg/errors" -) - -func (s *Store) GetMigrationsDone(ctx context.Context) ([]core.MigrationInfo, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("*") - sb.From(s.schema.Table("migrations")) - - executor, err := s.executorProvider(ctx) - if err != nil { - return []core.MigrationInfo{}, s.error(err) - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, sqlq, args...) - if err != nil { - return []core.MigrationInfo{}, s.error(err) - } - defer rows.Close() - - res := make([]core.MigrationInfo, 0) - for rows.Next() { - var version, date string - if err := rows.Scan(&version, &date); err != nil { - return []core.MigrationInfo{}, s.error(err) - } - t, err := time.Parse(time.RFC3339, date) - if err != nil { - return []core.MigrationInfo{}, - s.error(errors.Wrap(err, "parsing migration date")) - } - res = append(res, core.MigrationInfo{ - Version: version, - Date: t, - }) - } - if rows.Err() != nil { - return []core.MigrationInfo{}, s.error(err) - } - - return res, nil -} - -func (s *Store) GetMigrationsAvailable() ([]core.MigrationInfo, error) { - migrations, err := CollectMigrationFiles(MigrationsFS) - if err != nil { - return []core.MigrationInfo{}, errors.Wrap(err, "collecting migration files") - } - - res := make([]core.MigrationInfo, 0) - for _, m := range migrations { - res = append(res, core.MigrationInfo{ - Version: m.Version, - Name: m.Name, - }) - } - - return res, nil -} - -type HandlersByEngine 
map[string][]MigrationFunc - -type Migration struct { - core.MigrationInfo `json:"inline"` - Handlers HandlersByEngine `json:"-"` -} - -type Migrations []Migration - -func (m Migrations) Len() int { - return len(m) -} - -func (m Migrations) Less(i, j int) bool { - iNumber, err := strconv.ParseInt(m[i].Version, 10, 64) - if err != nil { - panic(err) - } - jNumber, err := strconv.ParseInt(m[j].Version, 10, 64) - if err != nil { - panic(err) - } - return iNumber < jNumber -} - -func (m Migrations) Swap(i, j int) { - m[i], m[j] = m[j], m[i] -} - -var _ sort.Interface = &Migrations{} - -func Migrate(ctx context.Context, schema Schema, migrations ...Migration) (bool, error) { - ctx, span := opentelemetry.Start(ctx, "Migrate") - defer span.End() - - q, args := sqlbuilder. - CreateTable(schema.Table("migrations")). - Define(`version varchar, date varchar, UNIQUE("version")`). - IfNotExists(). - BuildWithFlavor(schema.Flavor()) - - _, err := schema.ExecContext(ctx, q, args...) - if err != nil { - return false, errorFromFlavor(Flavor(schema.Flavor()), err) - } - - tx, err := schema.BeginTx(ctx, &sql.TxOptions{}) - if err != nil { - return false, errorFromFlavor(Flavor(schema.Flavor()), err) - } - defer func(tx *sql.Tx) { - _ = tx.Rollback() - }(tx) - - modified := false - for _, m := range migrations { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("version") - sb.From(schema.Table("migrations")) - sb.Where(sb.E("version", m.Version)) - - // Does not use sql transaction because if the table does not exist, postgres will mark transaction as invalid - sqlq, args := sb.BuildWithFlavor(schema.Flavor()) - row := schema.QueryRowContext(ctx, sqlq, args...) 
- var v string - if err = row.Scan(&v); err != nil { - logging.GetLogger(ctx).Debugf("migration %s: %s", m.Version, err) - } - if v != "" { - logging.GetLogger(ctx).Debugf("migration %s: already up to date", m.Version) - continue - } - modified = true - - logging.GetLogger(ctx).Debugf("running migration %s", m.Version) - - handlersForAnyEngine, ok := m.Handlers["any"] - if ok { - for _, h := range handlersForAnyEngine { - err := h(ctx, schema, tx) - if err != nil { - return false, err - } - } - } - - handlersForCurrentEngine, ok := m.Handlers[Flavor(schema.Flavor()).String()] - if ok { - for _, h := range handlersForCurrentEngine { - err := h(ctx, schema, tx) - if err != nil { - return false, err - } - } - } - - ib := sqlbuilder.NewInsertBuilder() - ib.InsertInto(schema.Table("migrations")) - ib.Cols("version", "date") - ib.Values(m.Version, time.Now().UTC().Format(time.RFC3339)) - sqlq, args = ib.BuildWithFlavor(schema.Flavor()) - if _, err = tx.ExecContext(ctx, sqlq, args...); err != nil { - logging.GetLogger(ctx).Errorf("failed to insert migration version %s: %s", m.Version, err) - return false, errorFromFlavor(Flavor(schema.Flavor()), err) - } - } - - return modified, errorFromFlavor(Flavor(schema.Flavor()), tx.Commit()) -} diff --git a/pkg/storage/sqlstorage/migrate_test.go b/pkg/storage/sqlstorage/migrate_test.go deleted file mode 100644 index 52ce8a6bf..000000000 --- a/pkg/storage/sqlstorage/migrate_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "os" - "testing" - "time" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/pborman/uuid" - "github.com/stretchr/testify/require" -) - -func TestMigrates(t *testing.T) { - - tmpDir := os.TempDir() - db := NewSQLiteDB(tmpDir, uuid.New()) - schema, err := db.Schema(context.Background(), "testing") - require.NoError(t, err) - - migrations := []Migration{ - { - MigrationInfo: core.MigrationInfo{ - Version: "0", - Name: 
"create-schema", - }, - Handlers: HandlersByEngine{ - "any": { - SQLMigrationFunc(`CREATE TABLE IF NOT EXISTS transactions ( - "id" integer, - "reference" varchar, - "hash" varchar, - - UNIQUE("id"), - UNIQUE("reference") - );`), - SQLMigrationFunc(`INSERT INTO transactions VALUES (0, "", "")`), - }, - }, - }, - { - MigrationInfo: core.MigrationInfo{ - Version: "1", - Name: "update-column", - }, - Handlers: HandlersByEngine{ - "sqlite": { - SQLMigrationFunc(` - ALTER TABLE transactions - ADD COLUMN timestamp date;`), - }, - }, - }, - { - MigrationInfo: core.MigrationInfo{ - Version: "2", - Name: "init-timestamp", - }, - Handlers: HandlersByEngine{ - "any": { - func(ctx context.Context, schema Schema, tx *sql.Tx) error { - ub := sqlbuilder.NewUpdateBuilder() - sql, args := ub. - Update(schema.Table("transactions")). - Set(ub.Assign("timestamp", time.Now())). - BuildWithFlavor(schema.Flavor()) - _, err := tx.ExecContext(ctx, sql, args...) - return err - }, - }, - }, - }, - } - - modified, err := Migrate(context.Background(), schema, migrations...) 
- require.NoError(t, err) - require.True(t, modified) - -} diff --git a/pkg/storage/sqlstorage/migrates/0-init-schema/postgres.sql b/pkg/storage/sqlstorage/migrates/0-init-schema/postgres.sql deleted file mode 100644 index 16b697ad6..000000000 --- a/pkg/storage/sqlstorage/migrates/0-init-schema/postgres.sql +++ /dev/null @@ -1,89 +0,0 @@ ---statement -CREATE SCHEMA IF NOT EXISTS "VAR_LEDGER_NAME"; ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".migrations ( - "version" varchar, - "date" varchar, - - UNIQUE("version") -); ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".transactions ( - "id" bigint, - "timestamp" varchar, - "reference" varchar, - "hash" varchar, - - UNIQUE("id"), - UNIQUE("reference") -); ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".postings ( - "id" smallint, - "txid" bigint, - "source" varchar, - "destination" varchar, - "amount" bigint, - "asset" varchar, - - UNIQUE("id", "txid") -); ---statement -CREATE INDEX IF NOT EXISTS p_c0 ON "VAR_LEDGER_NAME".postings ( - "txid" DESC, - "source", - "destination" -); ---statement -CREATE INDEX IF NOT EXISTS posting_txid ON "VAR_LEDGER_NAME".postings ( - "txid" DESC -); ---statement -CREATE INDEX IF NOT EXISTS posting_source ON "VAR_LEDGER_NAME".postings ( - "source" -); ---statement -CREATE INDEX IF NOT EXISTS posting_destination ON "VAR_LEDGER_NAME".postings ( - "destination" -); ---statement -CREATE INDEX IF NOT EXISTS posting_asset ON "VAR_LEDGER_NAME".postings ( - "asset" -); ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".metadata ( - "meta_id" bigint, - "meta_target_type" varchar NOT NULL CHECK (meta_target_type <> ''), - "meta_target_id" varchar NOT NULL CHECK (meta_target_id <> ''), - "meta_key" varchar NOT NULL CHECK (meta_key <> ''), - "meta_value" varchar, - "timestamp" varchar NOT NULL CHECK (timestamp <> ''), - - UNIQUE("meta_id") -); ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".contract ( - "contract_id" integer, - "contract_account" 
varchar, - "contract_expr" varchar, - - UNIQUE("contract_id") -); ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".mapping ( - "mapping_id" varchar, - "mapping" varchar, - - UNIQUE("mapping_id") -); ---statement -CREATE INDEX IF NOT EXISTS m_i0 ON "VAR_LEDGER_NAME".metadata ( - "meta_target_type", - "meta_target_id" -); ---statement -CREATE OR REPLACE VIEW "VAR_LEDGER_NAME".addresses AS SELECT "address" FROM ( - SELECT source as address FROM "VAR_LEDGER_NAME".postings GROUP BY source - UNION - SELECT destination as address FROM "VAR_LEDGER_NAME".postings GROUP BY destination -) addr_agg GROUP BY "address"; diff --git a/pkg/storage/sqlstorage/migrates/0-init-schema/sqlite.sql b/pkg/storage/sqlstorage/migrates/0-init-schema/sqlite.sql deleted file mode 100644 index 059d74322..000000000 --- a/pkg/storage/sqlstorage/migrates/0-init-schema/sqlite.sql +++ /dev/null @@ -1,75 +0,0 @@ ---statement -CREATE TABLE IF NOT EXISTS transactions ( - "id" integer, - "timestamp" varchar, - "reference" varchar, - "hash" varchar, - - UNIQUE("id"), - UNIQUE("reference") -); ---statement -CREATE TABLE IF NOT EXISTS migrations ( - "version" varchar, - "date" varchar, - - UNIQUE ("version") -); ---statement -CREATE TABLE IF NOT EXISTS postings ( - "id" integer, - "txid" integer, - "source" varchar, - "destination" varchar, - "amount" integer, - "asset" varchar, - - UNIQUE("id", "txid") -); ---statement -CREATE INDEX IF NOT EXISTS 'p_c0' ON "postings" ( - "txid" DESC, - "source", - "destination" -); ---statement -CREATE INDEX IF NOT EXISTS 'posting_source' ON "postings" ( - "source" -); ---statement -CREATE INDEX IF NOT EXISTS 'posting_destination' ON "postings" ( - "destination" -); ---statement -CREATE INDEX IF NOT EXISTS 'posting_asset' ON "postings" ( - "asset" -); ---statement -CREATE TABLE IF NOT EXISTS metadata ( - "meta_id" integer, - "meta_target_type" varchar, - "meta_target_id" varchar, - "meta_key" varchar, - "meta_value" varchar, - "timestamp" varchar, - - 
UNIQUE("meta_id") -); ---statement -CREATE TABLE IF NOT EXISTS mapping ( - "mapping_id" varchar, - "mapping" varchar, - - UNIQUE("mapping_id") -); ---statement -CREATE INDEX IF NOT EXISTS 'm_i0' ON "metadata" ( - "meta_target_type", - "meta_target_id" -); ---statement -CREATE VIEW IF NOT EXISTS addresses AS SELECT address FROM ( - SELECT source as address FROM postings GROUP BY source - UNION - SELECT destination as address FROM postings GROUP BY destination -) GROUP BY address; diff --git a/pkg/storage/sqlstorage/migrates/1-add-triggers/postgres.sql b/pkg/storage/sqlstorage/migrates/1-add-triggers/postgres.sql deleted file mode 100644 index ea54d2463..000000000 --- a/pkg/storage/sqlstorage/migrates/1-add-triggers/postgres.sql +++ /dev/null @@ -1,292 +0,0 @@ ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".is_valid_json(p_json text) - RETURNS BOOLEAN -AS -$$ -BEGIN - RETURN (p_json::jsonb IS NOT NULL); -EXCEPTION - WHEN others THEN - RETURN false; -END; -$$ - LANGUAGE plpgsql - IMMUTABLE; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".handle_log_entry() - RETURNS TRIGGER - LANGUAGE PLPGSQL -AS -$$ -BEGIN - if NEW.type = 'NEW_TRANSACTION' THEN - INSERT INTO "VAR_LEDGER_NAME".transactions(id, timestamp, reference, postings, metadata) - VALUES ((NEW.data ->> 'txid')::bigint, - (NEW.data ->> 'timestamp')::varchar, - CASE - WHEN (NEW.data ->> 'reference')::varchar = '' THEN NULL - ELSE (NEW.data ->> 'reference')::varchar END, - (NEW.data ->> 'postings')::jsonb, - CASE WHEN (NEW.data ->> 'metadata')::jsonb IS NULL THEN '{}' ELSE (NEW.data ->> 'metadata')::jsonb END); - END IF; - if NEW.type = 'SET_METADATA' THEN - if NEW.data ->> 'targetType' = 'TRANSACTION' THEN - UPDATE "VAR_LEDGER_NAME".transactions - SET metadata = metadata || (NEW.data ->> 'metadata')::jsonb - WHERE id = (NEW.data ->> 'targetId')::bigint; - END IF; - if NEW.data ->> 'targetType' = 'ACCOUNT' THEN - INSERT INTO "VAR_LEDGER_NAME".accounts (address, metadata) - VALUES ((NEW.data 
->> 'targetId')::varchar, - (NEW.data ->> 'metadata')::jsonb) - ON CONFLICT (address) DO UPDATE SET metadata = accounts.metadata || (NEW.data ->> 'metadata')::jsonb; - END IF; - END IF; - RETURN NEW; -END; -$$; ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".volumes -( - "account" varchar, - "asset" varchar, - "input" bigint, - "output" bigint, - - UNIQUE ("account", "asset") -); ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".accounts -( - "address" varchar NOT NULL, - "metadata" jsonb DEFAULT '{}', - - UNIQUE ("address") -); ---statement -ALTER TABLE "VAR_LEDGER_NAME".transactions -ADD COLUMN "postings" jsonb; ---statement -ALTER TABLE "VAR_LEDGER_NAME".transactions -ADD COLUMN "metadata" jsonb DEFAULT '{}'; ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".log -( - "id" bigint, - "type" varchar, - "hash" varchar, - "date" timestamp with time zone, - "data" jsonb, - - UNIQUE ("id") -); ---statement -CREATE INDEX IF NOT EXISTS volumes_account ON "VAR_LEDGER_NAME".volumes ("account"); ---statement -UPDATE "VAR_LEDGER_NAME".transactions -SET postings = ( - SELECT ('[' || string_agg(v.j, ',') || ']')::json - FROM ( - SELECT '{' || - '"amount":' || amount || ',' || - '"asset":"' || asset || '",' || - '"destination":"' || destination || '",' || - '"source":"' || source || '"' || - '}' as j, - txid - FROM "VAR_LEDGER_NAME".postings - WHERE txid::bigint = transactions.id - ORDER BY txid DESC - ) v -); ---statement -CREATE SEQUENCE "VAR_LEDGER_NAME".log_seq START WITH 0 MINVALUE 0; ---statement -INSERT INTO "VAR_LEDGER_NAME".log(id, type, date, data, hash) -SELECT nextval('"VAR_LEDGER_NAME".log_seq'), v.type, v.timestamp::timestamp with time zone, v.data::json, '' -FROM ( - SELECT id as ord, 'NEW_TRANSACTION' as type, timestamp, '{"metadata":{},"postings":' || postings::varchar || ',"reference":"' || CASE WHEN reference IS NOT NULL THEN reference ELSE '' END || '","timestamp":"' || timestamp || '","txid":' || id || '}' as data - FROM 
"VAR_LEDGER_NAME".transactions - UNION ALL - SELECT 100000000000 + meta_id as ord, 'SET_METADATA' as type, timestamp, '{"metadata":{"' || meta_key || '":' || CASE WHEN "VAR_LEDGER_NAME".is_valid_json(meta_value) THEN meta_value ELSE '"' || meta_value || '"' END || '},"targetId":' || CASE WHEN meta_target_type = 'transaction' THEN meta_target_id ELSE ('"' || meta_target_id || '"') END || ',"targetType":"' || UPPER(meta_target_type) || '"}' as data - FROM "VAR_LEDGER_NAME".metadata - ) v -ORDER BY v.timestamp ASC, v.ord ASC; --- statement -DROP SEQUENCE "VAR_LEDGER_NAME".log_seq; ---statement -UPDATE "VAR_LEDGER_NAME".transactions -SET metadata = ( - SELECT ('{' || COALESCE(STRING_AGG('"' || meta_key || '":' || CASE WHEN "VAR_LEDGER_NAME".is_valid_json(meta_value) THEN meta_value ELSE '"' || meta_value || '"' END, ','), '') || '}')::json - FROM ( - SELECT DISTINCT ON (meta_key) - meta_id, meta_key, meta_value - FROM "VAR_LEDGER_NAME".metadata - WHERE meta_target_type = 'transaction' AND meta_target_id::bigint = transactions.id - ORDER BY meta_key, meta_id DESC - ) v -); ---statement -INSERT INTO "VAR_LEDGER_NAME".accounts(address) SELECT * FROM "VAR_LEDGER_NAME".addresses; ---statement -UPDATE "VAR_LEDGER_NAME".accounts -SET metadata = ( - SELECT ('{' || string_agg('"' || meta_key || '":' || CASE WHEN "VAR_LEDGER_NAME".is_valid_json(meta_value) THEN meta_value ELSE '"' || meta_value || '"' END, ',') || '}')::json - FROM ( - SELECT distinct on (meta_key) - meta_id, meta_key, meta_value - FROM "VAR_LEDGER_NAME".metadata - WHERE meta_target_id = accounts.address - ORDER BY meta_key, meta_id DESC - ) v -); ---statement -DROP TRIGGER IF EXISTS log_entry ON "VAR_LEDGER_NAME".log; ---statement -CREATE TRIGGER log_entry - AFTER INSERT - ON "VAR_LEDGER_NAME".log - FOR EACH ROW -EXECUTE PROCEDURE "VAR_LEDGER_NAME".handle_log_entry(); ---statement -INSERT INTO "VAR_LEDGER_NAME".volumes (account, asset, input, output) -SELECT destination, asset, SUM(amount), 0 -FROM 
"VAR_LEDGER_NAME".postings -GROUP BY asset, destination; ---statement -INSERT INTO "VAR_LEDGER_NAME".volumes (account, asset, input, output) -SELECT source, asset, 0, SUM(amount) -FROM "VAR_LEDGER_NAME".postings -GROUP BY asset, source -ON CONFLICT (account, asset) DO UPDATE SET output = volumes.output + excluded.output; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".compute_volumes() RETURNS trigger LANGUAGE plpgsql AS $$ -DECLARE - p record; -BEGIN - FOR p IN ( - SELECT - t.postings->>'source' as source, - t.postings->>'asset' as asset, - sum ((t.postings->>'amount')::bigint) as amount - FROM ( - SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings - FROM newtable - WHERE newtable.type = 'NEW_TRANSACTION' - ) t - GROUP BY source, asset - ) LOOP - INSERT INTO "VAR_LEDGER_NAME".accounts (address, metadata) - VALUES (p.source, '{}') - ON CONFLICT DO NOTHING; - - INSERT INTO "VAR_LEDGER_NAME".volumes (account, asset, input, output) - VALUES (p.source, p.asset, 0, p.amount::bigint) - ON CONFLICT (account, asset) DO UPDATE SET output = p.amount::bigint + ( - SELECT output - FROM "VAR_LEDGER_NAME".volumes - WHERE account = p.source - AND asset = p.asset - ); - END LOOP; - FOR p IN ( - SELECT - t.postings->>'destination' as destination, - t.postings->>'asset' as asset, - sum ((t.postings->>'amount')::bigint) as amount - FROM ( - SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings - FROM newtable - WHERE newtable.type = 'NEW_TRANSACTION' - ) t - GROUP BY destination, asset - ) LOOP - INSERT INTO "VAR_LEDGER_NAME".accounts (address, metadata) - VALUES (p.destination, '{}') - ON CONFLICT DO NOTHING; - - INSERT INTO "VAR_LEDGER_NAME".volumes (account, asset, input, output) - VALUES (p.destination, p.asset, p.amount::bigint, 0) - ON CONFLICT (account, asset) DO UPDATE SET input = p.amount::bigint + ( - SELECT input - FROM "VAR_LEDGER_NAME".volumes - WHERE account = p.destination - AND asset = 
p.asset - ); - END LOOP; - RETURN NULL; -END -$$; ---statement -CREATE TRIGGER volumes_changed -AFTER INSERT -ON "VAR_LEDGER_NAME".log -REFERENCING NEW TABLE AS newtable -FOR EACH STATEMENT -EXECUTE PROCEDURE "VAR_LEDGER_NAME".compute_volumes(); ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".normaliz(v jsonb) - RETURNS text AS -$BODY$ -DECLARE - r record; - t jsonb; -BEGIN - if jsonb_typeof(v) = 'object' then - return ( - SELECT COALESCE('{' || string_agg(keyValue, ',') || '}', '{}') - FROM ( - SELECT '"' || key || '":' || value as keyValue - FROM ( - SELECT key, (CASE WHEN "VAR_LEDGER_NAME".is_valid_json((select v ->> key)) THEN (select "VAR_LEDGER_NAME".normaliz((select v ->> key)::jsonb)) ELSE '"' || (select v ->> key) || '"' END) as value - FROM ( - SELECT jsonb_object_keys(v) as key - ) t - order by key - ) t - ) t - ); - end if; - if jsonb_typeof(v) = 'array' then - return ( - select COALESCE('[' || string_agg(items, ',') || ']', '[]') - from ( - select "VAR_LEDGER_NAME".normaliz(item) as items from jsonb_array_elements(v) item - ) t - ); - end if; - if jsonb_typeof(v) = 'string' then - return v::text; - end if; - if jsonb_typeof(v) = 'number' then - return v::bigint; - end if; - if jsonb_typeof(v) = 'boolean' then - return v::boolean; - end if; - - return ''; -END -$BODY$ - LANGUAGE plpgsql; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".compute_hashes() - RETURNS void AS -$BODY$ -DECLARE - r record; -BEGIN - /* Create JSON object manually as it needs to be in canonical form */ - FOR r IN (select id, '{"data":' || "VAR_LEDGER_NAME".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"","id":' || id || ',"type":"' || type || '"}' as canonical from "VAR_LEDGER_NAME".log) - LOOP - UPDATE "VAR_LEDGER_NAME".log set hash = (select encode(digest( - COALESCE((select '{"data":' || "VAR_LEDGER_NAME".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 
'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"' || hash || '","id":' || id || ',"type":"' || type || '"}' from "VAR_LEDGER_NAME".log where id = r.id - 1), 'null') || r.canonical, - 'sha256' - ), 'hex')) - WHERE id = r.id; - END LOOP; -END -$BODY$ - LANGUAGE plpgsql; ---statement -SELECT "VAR_LEDGER_NAME".compute_hashes(); diff --git a/pkg/storage/sqlstorage/migrates/1-add-triggers/sqlite.sql b/pkg/storage/sqlstorage/migrates/1-add-triggers/sqlite.sql deleted file mode 100644 index 88f04f78d..000000000 --- a/pkg/storage/sqlstorage/migrates/1-add-triggers/sqlite.sql +++ /dev/null @@ -1,183 +0,0 @@ ---statement -CREATE TABLE IF NOT EXISTS volumes -( - "account" varchar, - "asset" varchar, - "input" integer, - "output" integer, - - UNIQUE ("account", "asset") -); ---statement -CREATE TABLE IF NOT EXISTS accounts -( - "address" varchar NOT NULL, - "metadata" varchar DEFAULT '{}', - - UNIQUE ("address") -); ---statement -CREATE TABLE IF NOT EXISTS log -( - "id" integer primary key autoincrement, - "type" varchar, - "hash" varchar, - "date" date, - "data" varchar -); ---statement -ALTER TABLE transactions -ADD COLUMN postings varchar; ---statement -ALTER TABLE transactions -ADD COLUMN metadata varchar; ---statement -UPDATE transactions -SET postings = ( - SELECT '[' || group_concat(v.j) || ']' - FROM ( - SELECT '{"source": "' || source || '", "destination": "' || destination || '", "asset": "' || asset || '", "amount": ' || amount || '}' as j, txid - FROM postings - WHERE txid = transactions.id - ) v -); ---statement -INSERT INTO log(type, date, data, hash) -SELECT v.type, v.timestamp, v.data, '' -FROM ( - SELECT id as ord, 'NEW_TRANSACTION' as type, timestamp, '{"txid": ' || id || ', "postings": ' || postings || ', "metadata": {}, "timestamp": "' || timestamp || '", "reference": "' || CASE WHEN reference IS NOT NULL THEN reference ELSE '' END || '"}' as data - FROM transactions - UNION ALL - SELECT 10000000000 + meta_id as ord, 'SET_METADATA' as type, timestamp, 
'{"targetType": "' || UPPER(meta_target_type) || '", "targetId": ' || CASE WHEN meta_target_type = 'transaction' THEN meta_target_id ELSE ('"' || meta_target_id || '"') END || ', "metadata": {"' || meta_key || '": ' || CASE WHEN json_valid(meta_value) THEN meta_value ELSE '"' || meta_value || '"' END || '}}' as data - FROM metadata - ) v -ORDER BY v.timestamp ASC, v.ord ASC; ---statement -ALTER TABLE log RENAME TO log2; ---statement -/* Remove autoincrement on log table by renaming to log2, recreating the log table without the autoincrement, copy data from log2 to log, then removing log2 */ -CREATE TABLE log -( - "id" integer primary key, /* without auto increment */ - "type" varchar, - "hash" varchar, - "date" date, - "data" varchar -); ---statement -INSERT INTO log SELECT v.id-1, v.type, v.hash, v.date, v.data FROM log2 v; ---statement -DROP TABLE log2; ---statement -UPDATE transactions -SET metadata = ( - SELECT json('{' || group_concat('"' || meta_key || '": ' || CASE WHEN json_valid(meta_value) THEN json(meta_value) ELSE '"' || meta_value || '"' END) || '}') - FROM ( - SELECT meta_id, meta_key, meta_value - FROM metadata - WHERE meta_target_id = transactions.id - GROUP BY meta_target_id, meta_key - HAVING max(meta_id) - ORDER BY meta_id DESC - ) v -); ---statement -INSERT INTO accounts(address) SELECT * FROM addresses; ---statement -UPDATE accounts -SET metadata = ( - SELECT json('{' || group_concat('"' || meta_key || '":' || CASE WHEN json_valid(meta_value) THEN json(meta_value) ELSE '"' || meta_value || '"' END) || '}') - FROM ( - SELECT meta_id, meta_key, meta_value - FROM metadata - WHERE meta_target_id = accounts.address - GROUP BY meta_target_id, meta_key - HAVING max(meta_id) - ORDER BY meta_id DESC - ) v -); ---statement -CREATE TRIGGER IF NOT EXISTS new_transaction - AFTER INSERT - ON transactions -BEGIN - INSERT OR IGNORE INTO accounts(address, metadata) - SELECT json_extract(p.value, '$.source'), '{}' - FROM json_each(new.postings) p; - - INSERT OR 
IGNORE INTO accounts(address, metadata) - SELECT json_extract(p.value, '$.destination'), '{}' - FROM json_each(new.postings) p; - - INSERT INTO volumes (account, asset, input, output) - SELECT json_extract(p.value, '$.source'), - json_extract(p.value, '$.asset'), - 0, - json_extract(p.value, '$.amount') - FROM json_each(new.postings) p - WHERE true - ON CONFLICT (account, asset) DO UPDATE SET output = output + excluded.output; - - INSERT INTO volumes (account, asset, input, output) - SELECT json_extract(p.value, '$.destination'), - json_extract(p.value, '$.asset'), - json_extract(p.value, '$.amount'), - 0 - FROM json_each(new.postings) p - WHERE true - ON CONFLICT (account, asset) DO UPDATE SET input = input + excluded.input; -END; ---statement -CREATE TRIGGER IF NOT EXISTS new_log_transaction -AFTER INSERT -ON log - WHEN new.type = 'NEW_TRANSACTION' -BEGIN - INSERT INTO transactions (id, reference, timestamp, postings, metadata) - VALUES (json_extract(new.data, '$.txid'), - CASE - WHEN json_extract(new.data, '$.reference') = '' THEN NULL - ELSE json_extract(new.data, '$.reference') END, - json_extract(new.data, '$.timestamp'), - json_extract(new.data, '$.postings'), - CASE - WHEN json_extract(new.data, '$.metadata') IS NULL THEN '{}' - ELSE json_extract(new.data, '$.metadata') END); -END; ---statement -CREATE TRIGGER IF NOT EXISTS new_log_set_metadata_on_transaction -AFTER INSERT -ON log -WHEN new.type = 'SET_METADATA' AND json_extract(new.data, '$.targetType') = 'TRANSACTION' -BEGIN - UPDATE transactions - SET metadata = json_patch(metadata, json_extract(new.data, '$.metadata')) - WHERE id = json_extract(new.data, '$.targetId'); -END; ---statement -CREATE TRIGGER IF NOT EXISTS new_log_set_metadata_on_account -AFTER INSERT -ON log -WHEN new.type = 'SET_METADATA' AND json_extract(new.data, '$.targetType') = 'ACCOUNT' -BEGIN - INSERT INTO accounts(address, metadata) - VALUES (json_extract(new.data, '$.targetId'), json_extract(new.data, '$.metadata')) - ON CONFLICT 
(address) DO UPDATE SET metadata = json_patch(metadata, excluded.metadata); -END; ---statement -INSERT INTO volumes (account, asset, input, output) -SELECT destination, asset, SUM(amount), 0 -FROM postings -GROUP BY asset, destination; ---statement -INSERT INTO volumes (account, asset, input, output) -SELECT source, asset, 0, SUM(amount) -FROM postings -GROUP BY asset, source -ON CONFLICT (account, asset) DO UPDATE SET output = output + excluded.output; ---statement -UPDATE log -SET hash = hash_log( - coalesce((select '{"data":' || l2.data || ',"date":"' || date || '","hash":"' || l2.hash || '","id":' || l2.id || ',"type":"' || type || '"}' as data from log l2 where l2.id = log.id - 1), 'null'), - (select '{"data":' || l2.data || ',"date":"' || date || '","hash":"","id":' || l2.id || ',"type":"' || type || '"}' as data from log l2 where l2.id = log.id) -); diff --git a/pkg/storage/sqlstorage/migrates/10-filter-transactions-using-regex/postgres.sql b/pkg/storage/sqlstorage/migrates/10-filter-transactions-using-regex/postgres.sql deleted file mode 100644 index 3ce46c0a4..000000000 --- a/pkg/storage/sqlstorage/migrates/10-filter-transactions-using-regex/postgres.sql +++ /dev/null @@ -1,24 +0,0 @@ ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".use_account_as_source(postings jsonb, account varchar) - RETURNS BOOLEAN -AS $$ -select bool_or(v.value::bool) from ( - select jsonb_extract_path_text(jsonb_array_elements(postings), 'source') ~ ('^' || account || '$') as value) as v; -$$ LANGUAGE sql; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".use_account_as_destination(postings jsonb, account varchar) - RETURNS BOOLEAN -AS $$ -select bool_or(v.value::bool) from ( - select jsonb_extract_path_text(jsonb_array_elements(postings), 'destination') ~ ('^' || account || '$') as value) as v; -$$ LANGUAGE sql; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".use_account(postings jsonb, account varchar) - RETURNS BOOLEAN -AS -$$ -SELECT 
bool_or(v.value) from ( - SELECT "VAR_LEDGER_NAME".use_account_as_source(postings, account) AS value UNION SELECT "VAR_LEDGER_NAME".use_account_as_destination(postings, account) AS value -) v -$$ -LANGUAGE sql; diff --git a/pkg/storage/sqlstorage/migrates/11-remove-triggers/postgres.sql b/pkg/storage/sqlstorage/migrates/11-remove-triggers/postgres.sql deleted file mode 100644 index b9d540edd..000000000 --- a/pkg/storage/sqlstorage/migrates/11-remove-triggers/postgres.sql +++ /dev/null @@ -1,4 +0,0 @@ ---statement -DROP TRIGGER log_entry ON "VAR_LEDGER_NAME".log; ---statement -DROP TRIGGER volumes_changed ON "VAR_LEDGER_NAME".log; diff --git a/pkg/storage/sqlstorage/migrates/11-remove-triggers/sqlite.sql b/pkg/storage/sqlstorage/migrates/11-remove-triggers/sqlite.sql deleted file mode 100644 index d3d187f65..000000000 --- a/pkg/storage/sqlstorage/migrates/11-remove-triggers/sqlite.sql +++ /dev/null @@ -1,8 +0,0 @@ ---statement -DROP TRIGGER new_transaction; ---statement -DROP TRIGGER new_log_transaction; ---statement -DROP TRIGGER new_log_set_metadata_on_transaction; ---statement -DROP TRIGGER new_log_set_metadata_on_account; diff --git a/pkg/storage/sqlstorage/migrates/12-amounts-numeric/postgres.sql b/pkg/storage/sqlstorage/migrates/12-amounts-numeric/postgres.sql deleted file mode 100644 index a5da171f8..000000000 --- a/pkg/storage/sqlstorage/migrates/12-amounts-numeric/postgres.sql +++ /dev/null @@ -1,4 +0,0 @@ ---statement -ALTER TABLE "VAR_LEDGER_NAME".volumes -ALTER COLUMN input TYPE numeric, -ALTER COLUMN output TYPE numeric; diff --git a/pkg/storage/sqlstorage/migrates/13-clean-logs/any_test.go b/pkg/storage/sqlstorage/migrates/13-clean-logs/any_test.go deleted file mode 100644 index 39b81fecb..000000000 --- a/pkg/storage/sqlstorage/migrates/13-clean-logs/any_test.go +++ /dev/null @@ -1,90 +0,0 @@ -package clean_logs_test - -import ( - "context" - "encoding/json" - "testing" - "time" - - "github.com/huandu/go-sqlbuilder" - 
"github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pborman/uuid" - "github.com/stretchr/testify/require" -) - -func TestMigrate(t *testing.T) { - driver, closeFunc, err := ledgertesting.StorageDriver() - require.NoError(t, err) - defer closeFunc() - - require.NoError(t, driver.Initialize(context.Background())) - store, _, err := driver.GetLedgerStore(context.Background(), uuid.New(), true) - require.NoError(t, err) - - schema := store.Schema() - - migrations, err := sqlstorage.CollectMigrationFiles(sqlstorage.MigrationsFS) - require.NoError(t, err) - - modified, err := sqlstorage.Migrate(context.Background(), schema, migrations[0:13]...) - require.NoError(t, err) - require.True(t, modified) - - sqlq, args := sqlbuilder.NewInsertBuilder(). - InsertInto(schema.Table("log")). - Cols("id", "type", "hash", "date", "data"). - Values("0", core.NewTransactionType, "", time.Now(), `{ - "txid": 0, - "postings": [], - "reference": "tx1" - }`). - Values("1", core.NewTransactionType, "", time.Now(), `{ - "txid": 1, - "postings": [], - "preCommitVolumes": {}, - "postCommitVolumes": {}, - "reference": "tx2" - }`). - BuildWithFlavor(schema.Flavor()) - - _, err = schema.ExecContext(context.Background(), sqlq, args...) - require.NoError(t, err) - - modified, err = sqlstorage.Migrate(context.Background(), schema, migrations[13]) - require.NoError(t, err) - require.True(t, modified) - - sqlq, args = sqlbuilder.NewSelectBuilder(). - Select("data"). - From(schema.Table("log")). - BuildWithFlavor(schema.Flavor()) - - rows, err := schema.QueryContext(context.Background(), sqlq, args...) 
- require.NoError(t, err) - - require.True(t, rows.Next()) - var dataStr string - require.NoError(t, rows.Scan(&dataStr)) - - data := map[string]any{} - require.NoError(t, json.Unmarshal([]byte(dataStr), &data)) - - require.Equal(t, map[string]any{ - "txid": float64(0), - "postings": []interface{}{}, - "reference": "tx1", - }, data) - - require.True(t, rows.Next()) - require.NoError(t, rows.Scan(&dataStr)) - require.NoError(t, json.Unmarshal([]byte(dataStr), &data)) - - require.Equal(t, map[string]any{ - "txid": float64(1), - "postings": []interface{}{}, - "reference": "tx2", - }, data) - -} diff --git a/pkg/storage/sqlstorage/migrates/13-clean-logs/postgres.sql b/pkg/storage/sqlstorage/migrates/13-clean-logs/postgres.sql deleted file mode 100644 index 9bf53b105..000000000 --- a/pkg/storage/sqlstorage/migrates/13-clean-logs/postgres.sql +++ /dev/null @@ -1,2 +0,0 @@ ---statement -update "VAR_LEDGER_NAME".log set data = data - 'preCommitVolumes' - 'postCommitVolumes'; diff --git a/pkg/storage/sqlstorage/migrates/13-clean-logs/sqlite.sql b/pkg/storage/sqlstorage/migrates/13-clean-logs/sqlite.sql deleted file mode 100644 index 2cc4f6b79..000000000 --- a/pkg/storage/sqlstorage/migrates/13-clean-logs/sqlite.sql +++ /dev/null @@ -1,2 +0,0 @@ ---statement -update log set data = json_remove(json_remove(data, '$.preCommitVolumes'), '$.postCommitVolumes'); diff --git a/pkg/storage/sqlstorage/migrates/14-update-timestamp-column-type/postgres.sql b/pkg/storage/sqlstorage/migrates/14-update-timestamp-column-type/postgres.sql deleted file mode 100644 index 6a2c04342..000000000 --- a/pkg/storage/sqlstorage/migrates/14-update-timestamp-column-type/postgres.sql +++ /dev/null @@ -1,8 +0,0 @@ ---statement -ALTER TABLE "VAR_LEDGER_NAME".transactions ADD COLUMN timestamp_holder timestamptz NULL; ---statement -UPDATE "VAR_LEDGER_NAME".transactions SET timestamp_holder = timestamp::TIMESTAMP; ---statement -ALTER TABLE "VAR_LEDGER_NAME".transactions ALTER COLUMN timestamp TYPE timestamptz 
USING timestamp_holder; ---statement -ALTER TABLE "VAR_LEDGER_NAME".transactions DROP COLUMN timestamp_holder; diff --git a/pkg/storage/sqlstorage/migrates/14-update-timestamp-column-type/sqlite.sql b/pkg/storage/sqlstorage/migrates/14-update-timestamp-column-type/sqlite.sql deleted file mode 100644 index 5bbb3daac..000000000 --- a/pkg/storage/sqlstorage/migrates/14-update-timestamp-column-type/sqlite.sql +++ /dev/null @@ -1,22 +0,0 @@ ---statement -create table tmp_transactions as select * from transactions; ---statement -drop table transactions; ---statement -create table transactions ( - "id" integer, - "timestamp" date, - "reference" varchar, - "postings" varchar, - "metadata" varchar, - "pre_commit_volumes" varchar, - "post_commit_volumes" varchar, - - unique("id"), - unique("reference") -); ---statement -insert into transactions(id, timestamp, reference, postings, metadata, pre_commit_volumes, post_commit_volumes) -select id, timestamp, reference, postings, metadata, pre_commit_volumes, post_commit_volumes from tmp_transactions; ---statement -drop table tmp_transactions; diff --git a/pkg/storage/sqlstorage/migrates/15-add-idempotency-table/postgres.sql b/pkg/storage/sqlstorage/migrates/15-add-idempotency-table/postgres.sql deleted file mode 100644 index 88111711c..000000000 --- a/pkg/storage/sqlstorage/migrates/15-add-idempotency-table/postgres.sql +++ /dev/null @@ -1,11 +0,0 @@ ---statement -CREATE TABLE IF NOT EXISTS "VAR_LEDGER_NAME".idempotency ( - "key" varchar, - "date" varchar, - "status_code" int, - "headers" varchar, - "body" varchar, - "request_hash" varchar, - - PRIMARY KEY("key") -); diff --git a/pkg/storage/sqlstorage/migrates/15-add-idempotency-table/sqlite.sql b/pkg/storage/sqlstorage/migrates/15-add-idempotency-table/sqlite.sql deleted file mode 100644 index 0f13811d6..000000000 --- a/pkg/storage/sqlstorage/migrates/15-add-idempotency-table/sqlite.sql +++ /dev/null @@ -1,11 +0,0 @@ ---statement -CREATE TABLE IF NOT EXISTS idempotency ( - "key" 
varchar, - "date" varchar, - "status_code" int, - "headers" varchar, - "body" varchar, - "request_hash" varchar, - - PRIMARY KEY("key") -); diff --git a/pkg/storage/sqlstorage/migrates/16-denormalize-addresses/postgres.sql b/pkg/storage/sqlstorage/migrates/16-denormalize-addresses/postgres.sql deleted file mode 100644 index 4eaa92eaa..000000000 --- a/pkg/storage/sqlstorage/migrates/16-denormalize-addresses/postgres.sql +++ /dev/null @@ -1,21 +0,0 @@ ---statement -alter table "VAR_LEDGER_NAME".transactions add column sources text; ---statement -alter table "VAR_LEDGER_NAME".transactions add column destinations text; ---statement -create index transactions_sources ON "VAR_LEDGER_NAME".transactions USING GIN (sources gin_trgm_ops); ---statement -create index transactions_destinations ON "VAR_LEDGER_NAME".transactions USING GIN (destinations gin_trgm_ops); ---statement -update "VAR_LEDGER_NAME".transactions -set sources = ( - select string_agg(ele->>'source', ';') - from "VAR_LEDGER_NAME".transactions sub - cross join lateral jsonb_array_elements(postings) source(ele) - where transactions.id = sub.id -), destinations = ( - select string_agg(ele->>'destination', ';') - from "VAR_LEDGER_NAME".transactions sub - cross join lateral jsonb_array_elements(postings) source(ele) - where transactions.id = sub.id -); diff --git a/pkg/storage/sqlstorage/migrates/16-denormalize-addresses/sqlite.sql b/pkg/storage/sqlstorage/migrates/16-denormalize-addresses/sqlite.sql deleted file mode 100644 index 90db4a7e8..000000000 --- a/pkg/storage/sqlstorage/migrates/16-denormalize-addresses/sqlite.sql +++ /dev/null @@ -1,15 +0,0 @@ ---statement -alter table transactions add column sources text; ---statement -alter table transactions add column destinations text; ---statement -UPDATE transactions -SET sources = ( - select group_concat(json_extract(json_each.value, '$.source'), ';') - from transactions tx2, json_each(tx2.postings) - where transactions.id = tx2.id -), destinations = ( - select 
group_concat(json_extract(json_each.value, '$.destination'), ';') - from transactions tx2, json_each(tx2.postings) - where transactions.id = tx2.id -); diff --git a/pkg/storage/sqlstorage/migrates/17-optimized-segments/any_test.go b/pkg/storage/sqlstorage/migrates/17-optimized-segments/any_test.go deleted file mode 100644 index e124dcf89..000000000 --- a/pkg/storage/sqlstorage/migrates/17-optimized-segments/any_test.go +++ /dev/null @@ -1,71 +0,0 @@ -package _17_optimized_segments - -import ( - "context" - "testing" - "time" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pborman/uuid" - "github.com/stretchr/testify/require" -) - -func TestMigrate17(t *testing.T) { - if ledgertesting.StorageDriverName() != "postgres" { - t.Skip() - } - - driver, closeFunc, err := ledgertesting.StorageDriver() - require.NoError(t, err) - defer closeFunc() - - require.NoError(t, driver.Initialize(context.Background())) - store, _, err := driver.GetLedgerStore(context.Background(), uuid.New(), true) - require.NoError(t, err) - - schema := store.Schema() - - migrations, err := sqlstorage.CollectMigrationFiles(sqlstorage.MigrationsFS) - require.NoError(t, err) - - modified, err := sqlstorage.Migrate(context.Background(), schema, migrations[0:17]...) - require.NoError(t, err) - require.True(t, modified) - - now := time.Now().UTC().Truncate(time.Second) - - ib := sqlbuilder.NewInsertBuilder() - sqlq, args := ib. - InsertInto(schema.Table("transactions")). - Cols("id", "timestamp", "postings", "metadata"). - Values(0, now.Format(time.RFC3339), `[ - {"source": "world", "destination": "users:001", "asset": "USD", "amount": 100} - ]`, "{}"). - BuildWithFlavor(schema.Flavor()) - _, err = schema.ExecContext(context.Background(), sqlq, args...) 
- require.NoError(t, err) - - modified, err = sqlstorage.Migrate(context.Background(), schema, migrations[17]) - require.NoError(t, err) - require.True(t, modified) - - sqlq, args = sqlbuilder. - Select("txid", "posting_index", "source", "destination"). - From(schema.Table("postings")). - Where("txid = 0"). - BuildWithFlavor(schema.Flavor()) - - row := store.Schema().QueryRowContext(context.Background(), sqlq, args...) - require.NoError(t, row.Err()) - - var txid uint64 - var postingIndex int - var source, destination string - require.NoError(t, err, row.Scan(&txid, &postingIndex, &source, &destination)) - require.Equal(t, uint64(0), txid) - require.Equal(t, 0, postingIndex) - require.Equal(t, `["world"]`, source) - require.Equal(t, `["users", "001"]`, destination) -} diff --git a/pkg/storage/sqlstorage/migrates/17-optimized-segments/postgres.sql b/pkg/storage/sqlstorage/migrates/17-optimized-segments/postgres.sql deleted file mode 100644 index 7e6b1df4a..000000000 --- a/pkg/storage/sqlstorage/migrates/17-optimized-segments/postgres.sql +++ /dev/null @@ -1,36 +0,0 @@ ---statement -drop trigger if exists log_entry on "VAR_LEDGER_NAME".log; ---statement -drop trigger if exists volumes_changed on "VAR_LEDGER_NAME".log; - ---statement -alter table "VAR_LEDGER_NAME".transactions drop column if exists sources; ---statement -alter table "VAR_LEDGER_NAME".transactions drop column if exists destinations; ---statement -drop index if exists transactions_sources; ---statement -drop index if exists transactions_destinations; - ---statement -create table if not exists "VAR_LEDGER_NAME".postings ( - txid bigint, - posting_index integer, - source jsonb, - destination jsonb -); - ---statement -create index if not exists postings_src on "VAR_LEDGER_NAME".postings using GIN(source); ---statement -create index if not exists postings_dest on "VAR_LEDGER_NAME".postings using GIN(destination); ---statement -create index if not exists postings_txid on "VAR_LEDGER_NAME".postings (txid 
asc); - ---statement -insert into "VAR_LEDGER_NAME".postings(txid, posting_index, source, destination) -select txs.id as txid, i - 1 as posting_index, - array_to_json(string_to_array(t.posting->>'source', ':'))::jsonb as source, - array_to_json(string_to_array(t.posting->>'destination', ':'))::jsonb as destination -from "VAR_LEDGER_NAME".transactions txs left join lateral jsonb_array_elements(txs.postings) -with ordinality as t(posting, i) on true; diff --git a/pkg/storage/sqlstorage/migrates/2-add-meta-compare/postgres.sql b/pkg/storage/sqlstorage/migrates/2-add-meta-compare/postgres.sql deleted file mode 100644 index 088754b82..000000000 --- a/pkg/storage/sqlstorage/migrates/2-add-meta-compare/postgres.sql +++ /dev/null @@ -1,47 +0,0 @@ -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".meta_compare(metadata jsonb, value varchar, variadic path TEXT[]) - RETURNS BOOLEAN -AS -$$ -BEGIN - return jsonb_extract_path_text(metadata, variadic path)::varchar = value::varchar; -EXCEPTION - WHEN others THEN - RAISE INFO 'Error Name: %', SQLERRM; - RAISE INFO 'Error State: %', SQLSTATE; - RETURN false; -END -$$ - LANGUAGE plpgsql - IMMUTABLE; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".meta_compare(metadata jsonb, value bool, variadic path TEXT[]) - RETURNS BOOLEAN -AS -$$ -BEGIN - return jsonb_extract_path(metadata, variadic path)::bool = value::bool; -EXCEPTION - WHEN others THEN - RAISE INFO 'Error Name: %', SQLERRM; - RAISE INFO 'Error State: %', SQLSTATE; - RETURN false; -END -$$ - LANGUAGE plpgsql - IMMUTABLE; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".meta_compare(metadata jsonb, value numeric, variadic path TEXT[]) - RETURNS BOOLEAN -AS -$$ -BEGIN - return jsonb_extract_path(metadata, variadic path)::numeric = value::numeric; -EXCEPTION - WHEN others THEN - RAISE INFO 'Error Name: %', SQLERRM; - RAISE INFO 'Error State: %', SQLSTATE; - RETURN false; -END -$$ - LANGUAGE plpgsql - IMMUTABLE; diff --git 
a/pkg/storage/sqlstorage/migrates/2-add-meta-compare/sqlite.sql b/pkg/storage/sqlstorage/migrates/2-add-meta-compare/sqlite.sql deleted file mode 100644 index e8e05badd..000000000 --- a/pkg/storage/sqlstorage/migrates/2-add-meta-compare/sqlite.sql +++ /dev/null @@ -1,29 +0,0 @@ ---statement -ALTER TABLE transactions -ADD COLUMN pre_commit_volumes varchar; ---statement -ALTER TABLE transactions -ADD COLUMN post_commit_volumes varchar; ---statement -DROP TRIGGER new_log_transaction; ---statement -CREATE TRIGGER new_log_transaction -AFTER INSERT -ON log -WHEN new.type = 'NEW_TRANSACTION' -BEGIN -INSERT INTO transactions (id, reference, timestamp, postings, metadata, pre_commit_volumes, post_commit_volumes) -VALUES ( - json_extract(new.data, '$.txid'), - CASE - WHEN json_extract(new.data, '$.reference') = '' THEN NULL - ELSE json_extract(new.data, '$.reference') END, - json_extract(new.data, '$.timestamp'), - json_extract(new.data, '$.postings'), - CASE - WHEN json_extract(new.data, '$.metadata') IS NULL THEN '{}' - ELSE json_extract(new.data, '$.metadata') END, - json_extract(new.data, '$.preCommitVolumes'), - json_extract(new.data, '$.postCommitVolumes') -); -END; diff --git a/pkg/storage/sqlstorage/migrates/3-function-use_account/postgres.sql b/pkg/storage/sqlstorage/migrates/3-function-use_account/postgres.sql deleted file mode 100644 index bb8ef6ad6..000000000 --- a/pkg/storage/sqlstorage/migrates/3-function-use_account/postgres.sql +++ /dev/null @@ -1,23 +0,0 @@ ---statement -CREATE INDEX IF NOT EXISTS account_address ON "VAR_LEDGER_NAME".accounts ("address"); ---statement -CREATE INDEX IF NOT EXISTS log_id ON "VAR_LEDGER_NAME".log ("id"); ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".use_account(postings jsonb, account varchar) - RETURNS BOOLEAN -AS -$$ -DECLARE -p jsonb; -BEGIN -FOR p IN ( - SELECT jsonb_array_elements(postings) - ) LOOP - IF p->>'source' = account THEN RETURN true; END IF; - IF p->>'destination' = account THEN RETURN true; END IF; 
-END LOOP; -RETURN false; -END -$$ -LANGUAGE plpgsql -IMMUTABLE; diff --git a/pkg/storage/sqlstorage/migrates/4-rewrite-function-use_account/postgres.sql b/pkg/storage/sqlstorage/migrates/4-rewrite-function-use_account/postgres.sql deleted file mode 100644 index f10c51d81..000000000 --- a/pkg/storage/sqlstorage/migrates/4-rewrite-function-use_account/postgres.sql +++ /dev/null @@ -1,8 +0,0 @@ ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".use_account(postings jsonb, account varchar) - RETURNS BOOLEAN -AS $$ -SELECT postings @> ('[{"destination": "' || account || '"}]')::jsonb OR postings @> ('[{"source": "' || account || '"}]')::jsonb -$$ LANGUAGE sql; ---statement -CREATE INDEX postings_addresses ON "VAR_LEDGER_NAME".transactions USING GIN (postings); diff --git a/pkg/storage/sqlstorage/migrates/5-drop-unused-index/postgres.sql b/pkg/storage/sqlstorage/migrates/5-drop-unused-index/postgres.sql deleted file mode 100644 index f534b1754..000000000 --- a/pkg/storage/sqlstorage/migrates/5-drop-unused-index/postgres.sql +++ /dev/null @@ -1,12 +0,0 @@ ---statement -DROP INDEX "VAR_LEDGER_NAME".volumes_account; ---statement -DROP INDEX "VAR_LEDGER_NAME".account_address; ---statement -DROP INDEX "VAR_LEDGER_NAME".log_id; ---statement -DROP TABLE "VAR_LEDGER_NAME".metadata; ---statement -DROP TABLE "VAR_LEDGER_NAME".contract; ---statement -DROP TABLE "VAR_LEDGER_NAME".postings CASCADE; diff --git a/pkg/storage/sqlstorage/migrates/6-add-functions-to-filter-on-tx-addresses/postgres.sql b/pkg/storage/sqlstorage/migrates/6-add-functions-to-filter-on-tx-addresses/postgres.sql deleted file mode 100644 index 46eaf3cd3..000000000 --- a/pkg/storage/sqlstorage/migrates/6-add-functions-to-filter-on-tx-addresses/postgres.sql +++ /dev/null @@ -1,12 +0,0 @@ ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".use_account_as_source(postings jsonb, account varchar) - RETURNS BOOLEAN -AS $$ -SELECT postings @> ('[{"source": "' || account || '"}]')::jsonb -$$ LANGUAGE sql; 
---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".use_account_as_destination(postings jsonb, account varchar) - RETURNS BOOLEAN -AS $$ -SELECT postings @> ('[{"destination": "' || account || '"}]')::jsonb -$$ LANGUAGE sql; diff --git a/pkg/storage/sqlstorage/migrates/7-fix-meta_compare/postgres.sql b/pkg/storage/sqlstorage/migrates/7-fix-meta_compare/postgres.sql deleted file mode 100644 index 7ec46e0f5..000000000 --- a/pkg/storage/sqlstorage/migrates/7-fix-meta_compare/postgres.sql +++ /dev/null @@ -1,16 +0,0 @@ ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".meta_compare(metadata jsonb, value bool, variadic path TEXT[]) - RETURNS BOOLEAN -AS -$$ -BEGIN - return jsonb_extract_path(metadata, variadic path)::bool = value::bool; -EXCEPTION - WHEN others THEN - RAISE INFO 'Error Name: %', SQLERRM; - RAISE INFO 'Error State: %', SQLSTATE; - RETURN false; -END -$$ - LANGUAGE plpgsql - IMMUTABLE; diff --git a/pkg/storage/sqlstorage/migrates/8-add-columns-on-transactions/postgres.sql b/pkg/storage/sqlstorage/migrates/8-add-columns-on-transactions/postgres.sql deleted file mode 100644 index 4a239391e..000000000 --- a/pkg/storage/sqlstorage/migrates/8-add-columns-on-transactions/postgres.sql +++ /dev/null @@ -1,43 +0,0 @@ ---statement -ALTER TABLE "VAR_LEDGER_NAME".transactions -ADD COLUMN "pre_commit_volumes" jsonb; ---statement -ALTER TABLE "VAR_LEDGER_NAME".transactions -ADD COLUMN "post_commit_volumes" jsonb; ---statement -CREATE OR REPLACE FUNCTION "VAR_LEDGER_NAME".handle_log_entry() - RETURNS TRIGGER - LANGUAGE PLPGSQL -AS -$$ -BEGIN - if NEW.type = 'NEW_TRANSACTION' THEN - INSERT INTO "VAR_LEDGER_NAME".transactions(id, timestamp, reference, postings, metadata, pre_commit_volumes, post_commit_volumes) - VALUES ( - (NEW.data ->> 'txid')::bigint, - (NEW.data ->> 'timestamp')::varchar, - CASE - WHEN (NEW.data ->> 'reference')::varchar = '' THEN NULL - ELSE (NEW.data ->> 'reference')::varchar END, - (NEW.data ->> 'postings')::jsonb, - CASE WHEN 
(NEW.data ->> 'metadata')::jsonb IS NULL THEN '{}' ELSE (NEW.data ->> 'metadata')::jsonb END, - (NEW.data ->> 'preCommitVolumes')::jsonb, - (NEW.data ->> 'postCommitVolumes')::jsonb - ); - END IF; - if NEW.type = 'SET_METADATA' THEN - if NEW.data ->> 'targetType' = 'TRANSACTION' THEN - UPDATE "VAR_LEDGER_NAME".transactions - SET metadata = metadata || (NEW.data ->> 'metadata')::jsonb - WHERE id = (NEW.data ->> 'targetId')::bigint; - END IF; - if NEW.data ->> 'targetType' = 'ACCOUNT' THEN - INSERT INTO "VAR_LEDGER_NAME".accounts (address, metadata) - VALUES ((NEW.data ->> 'targetId')::varchar, - (NEW.data ->> 'metadata')::jsonb) - ON CONFLICT (address) DO UPDATE SET metadata = accounts.metadata || (NEW.data ->> 'metadata')::jsonb; - END IF; - END IF; - RETURN NEW; -END; -$$; diff --git a/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes/any.go b/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes/any.go deleted file mode 100644 index f485a964f..000000000 --- a/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes/any.go +++ /dev/null @@ -1,144 +0,0 @@ -package add_pre_post_volumes - -import ( - "context" - "database/sql" - "encoding/json" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/pkg/errors" -) - -func init() { - sqlstorage.RegisterGoMigration(Upgrade) -} - -type Transaction struct { - ID uint64 `json:"txid"` - Postings core.Postings `json:"postings"` -} - -func Upgrade(ctx context.Context, schema sqlstorage.Schema, sqlTx *sql.Tx) error { - sb := sqlbuilder.NewSelectBuilder() - sb. - From(schema.Table("log")). - Select("data"). - Where(sb.E("type", core.NewTransactionType)). - OrderBy("id"). - Asc() - - sqlq, args := sb.BuildWithFlavor(schema.Flavor()) - rows, err := sqlTx.QueryContext(ctx, sqlq, args...) 
- if err != nil { - return errors.Wrap(err, "querying rows") - } - defer rows.Close() - - updates := make([]*sqlbuilder.UpdateBuilder, 0) - - aggregatedVolumes := core.AccountsAssetsVolumes{} - for rows.Next() { - var data string - err := rows.Scan(&data) - if err != nil { - return errors.Wrap(err, "scanning row") - } - - var tx Transaction - err = json.Unmarshal([]byte(data), &tx) - if err != nil { - return errors.Wrap(err, "decoding transaction") - } - - preCommitVolumes := core.AccountsAssetsVolumes{} - postCommitVolumes := core.AccountsAssetsVolumes{} - for _, posting := range tx.Postings { - - preCommitVolumes.SetVolumes( - posting.Source, - posting.Asset, - aggregatedVolumes.GetVolumes(posting.Source, posting.Asset), - ) - - preCommitVolumes.SetVolumes( - posting.Destination, - posting.Asset, - aggregatedVolumes.GetVolumes(posting.Destination, posting.Asset), - ) - - if !postCommitVolumes.HasAccount(posting.Source) { - postCommitVolumes.SetVolumes( - posting.Source, - posting.Asset, - preCommitVolumes.GetVolumes(posting.Source, posting.Asset), - ) - } - - if !postCommitVolumes.HasAccount(posting.Destination) { - postCommitVolumes.SetVolumes( - posting.Destination, - posting.Asset, - preCommitVolumes.GetVolumes(posting.Destination, posting.Asset), - ) - } - - postCommitVolumes.AddOutput( - posting.Source, - posting.Asset, - posting.Amount, - ) - - postCommitVolumes.AddInput( - posting.Destination, - posting.Asset, - posting.Amount, - ) - } - - for account, accountVolumes := range postCommitVolumes { - for asset, volumes := range accountVolumes { - aggregatedVolumes.SetVolumes(account, asset, core.Volumes{ - Input: volumes.Input.OrZero(), - Output: volumes.Output.OrZero(), - }) - } - } - - preCommitVolumesData, err := json.Marshal(preCommitVolumes) - if err != nil { - return err - } - - postCommitVolumesData, err := json.Marshal(postCommitVolumes) - if err != nil { - return err - } - - ub := sqlbuilder.NewUpdateBuilder() - 
ub.Update(schema.Table("transactions")) - ub.Set( - ub.Assign("pre_commit_volumes", preCommitVolumesData), - ub.Assign("post_commit_volumes", postCommitVolumesData), - ) - ub.Where(ub.E("id", tx.ID)) - - updates = append(updates, ub) - } - err = rows.Close() - if err != nil { - return err - } - - for _, update := range updates { - sqlq, args := update.BuildWithFlavor(schema.Flavor()) - - _, err = sqlTx.ExecContext(ctx, sqlq, args...) - if err != nil { - return errors.Wrap(err, "executing update") - } - } - - return nil -} diff --git a/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes/any_test.go b/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes/any_test.go deleted file mode 100644 index 47dbf6988..000000000 --- a/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes/any_test.go +++ /dev/null @@ -1,287 +0,0 @@ -package add_pre_post_volumes_test - -import ( - "context" - "database/sql" - "encoding/json" - "testing" - "time" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage/sqlstorage" - add_pre_post_volumes "github.com/numary/ledger/pkg/storage/sqlstorage/migrates/9-add-pre-post-volumes" - "github.com/pborman/uuid" - "github.com/stretchr/testify/require" -) - -type testCase struct { - postings core.Postings - expectedPreCommitVolumes core.AccountsAssetsVolumes - expectedPostCommitVolumes core.AccountsAssetsVolumes -} - -var testCases = []testCase{ - { - postings: core.Postings{ - { - Source: "world", - Destination: "bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - expectedPreCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "bank": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, - expectedPostCommitVolumes: core.AccountsAssetsVolumes{ - 
"world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - }, - "bank": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, - }, - { - postings: core.Postings{ - { - Source: "world", - Destination: "bank2", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - expectedPreCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - }, - "bank2": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, - expectedPostCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(200), - }, - }, - "bank2": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, - }, - { - postings: core.Postings{ - { - Source: "world", - Destination: "bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - { - Source: "world", - Destination: "bank2", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - expectedPreCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(200), - }, - }, - "bank": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - "bank2": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, - expectedPostCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(400), - }, - }, - "bank2": { - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(0), - }, - }, - "bank": { - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(0), - }, - }, - }, - }, - { - postings: core.Postings{ - { - Source: "bank", - Destination: "user:1", - Amount: core.NewMonetaryInt(10), - Asset: 
"USD", - }, - { - Source: "bank", - Destination: "user:2", - Amount: core.NewMonetaryInt(90), - Asset: "USD", - }, - }, - expectedPreCommitVolumes: core.AccountsAssetsVolumes{ - "bank": { - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(0), - }, - }, - "user:1": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "user:2": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, - expectedPostCommitVolumes: core.AccountsAssetsVolumes{ - "bank": { - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(100), - }, - }, - "user:1": { - "USD": { - Input: core.NewMonetaryInt(10), - Output: core.NewMonetaryInt(0), - }, - }, - "user:2": { - "USD": { - Input: core.NewMonetaryInt(90), - Output: core.NewMonetaryInt(0), - }, - }, - }, - }, -} - -func TestMigrate9(t *testing.T) { - driver, closeFunc, err := ledgertesting.StorageDriver() - require.NoError(t, err) - defer closeFunc() - - require.NoError(t, driver.Initialize(context.Background())) - store, _, err := driver.GetLedgerStore(context.Background(), uuid.New(), true) - require.NoError(t, err) - - schema := store.Schema() - - migrations, err := sqlstorage.CollectMigrationFiles(sqlstorage.MigrationsFS) - require.NoError(t, err) - - modified, err := sqlstorage.Migrate(context.Background(), schema, migrations[0:9]...) 
- require.NoError(t, err) - require.True(t, modified) - - now := time.Now() - for i, tc := range testCases { - txData, err := json.Marshal(struct { - add_pre_post_volumes.Transaction - Date time.Time `json:"timestamp"` - }{ - Transaction: add_pre_post_volumes.Transaction{ - ID: uint64(i), - Postings: tc.postings, - }, - Date: now, - }) - require.NoError(t, err) - - ib := sqlbuilder.NewInsertBuilder() - ib.InsertInto(schema.Table("log")) - ib.Cols("id", "data", "type", "date") - ib.Values(i, string(txData), core.NewTransactionType, now) - sqlq, args := ib.BuildWithFlavor(schema.Flavor()) - - _, err = schema.ExecContext(context.Background(), sqlq, args...) - require.NoError(t, err) - } - - count, err := store.CountTransactions(context.Background(), *ledger.NewTransactionsQuery()) - require.NoError(t, err) - require.Equal(t, count, uint64(len(testCases))) - - sqlTx, err := schema.BeginTx(context.Background(), &sql.TxOptions{}) - require.NoError(t, err) - - require.NoError(t, add_pre_post_volumes.Upgrade(context.Background(), schema, sqlTx)) - require.NoError(t, sqlTx.Commit()) - - for i, tc := range testCases { - - sb := sqlbuilder.NewSelectBuilder() - sqlq, args := sb. - From(schema.Table("transactions")). - Select("pre_commit_volumes", "post_commit_volumes"). - Where(sb.E("id", i)). - BuildWithFlavor(schema.Flavor()) - row := schema.QueryRowContext(context.Background(), sqlq, args...) 
- require.NoError(t, row.Err()) - - preCommitVolumes, postCommitVolumes := core.AccountsAssetsVolumes{}, core.AccountsAssetsVolumes{} - require.NoError(t, row.Scan(&preCommitVolumes, &postCommitVolumes)) - - require.Equal(t, tc.expectedPreCommitVolumes, preCommitVolumes) - require.Equal(t, tc.expectedPostCommitVolumes, postCommitVolumes) - } - -} diff --git a/pkg/storage/sqlstorage/module.go b/pkg/storage/sqlstorage/module.go deleted file mode 100644 index bad545a29..000000000 --- a/pkg/storage/sqlstorage/module.go +++ /dev/null @@ -1,89 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "fmt" - "os" - - "github.com/formancehq/go-libs/health" - "github.com/numary/ledger/pkg/storage" - "go.uber.org/fx" -) - -type SQLiteConfig struct { - Dir string - DBName string -} - -type PostgresConfig struct { - ConnString string -} - -type ModuleConfig struct { - StorageDriver string - SQLiteConfig *SQLiteConfig - PostgresConfig *PostgresConfig -} - -func OpenSQLDB(flavor Flavor, dataSourceName string) (*sql.DB, error) { - c, ok := sqlDrivers[flavor] - if !ok { - panic(fmt.Sprintf("Driver '%s' not found", flavor)) - } - return sql.Open(c.driverName, dataSourceName) -} - -func DriverModule(cfg ModuleConfig) fx.Option { - options := make([]fx.Option, 0) - options = append(options, fx.Provide(func() Flavor { - return FlavorFromString(cfg.StorageDriver) - })) - - switch FlavorFromString(cfg.StorageDriver) { - case PostgreSQL: - options = append(options, fx.Provide(func() (*sql.DB, error) { - return OpenSQLDB(PostgreSQL, cfg.PostgresConfig.ConnString) - })) - options = append(options, fx.Provide(func(db *sql.DB) DB { - return NewPostgresDB(db) - })) - options = append(options, fx.Provide(func(db DB) (*Driver, error) { - return NewDriver(PostgreSQL.String(), db), nil - })) - options = append(options, health.ProvideHealthCheck(func(db *sql.DB) health.NamedCheck { - return health.NewNamedCheck(PostgreSQL.String(), health.CheckFn(db.PingContext)) - })) - case SQLite: 
- options = append(options, fx.Provide(func() DB { - return NewSQLiteDB(cfg.SQLiteConfig.Dir, cfg.SQLiteConfig.DBName) - })) - options = append(options, fx.Provide(func(db DB) (*Driver, error) { - return NewDriver(SQLite.String(), db), nil - })) - options = append(options, health.ProvideHealthCheck(func() health.NamedCheck { - return health.NewNamedCheck(SQLite.String(), health.CheckFn(func(ctx context.Context) error { - _, err := os.Open(cfg.SQLiteConfig.Dir) - return err - })) - })) - default: - panic("Unsupported driver: " + cfg.StorageDriver) - } - options = append(options, fx.Provide(func(driver *Driver) storage.Driver[*Store] { - return driver - })) - options = append(options, fx.Invoke(func(driver storage.Driver[*Store], lifecycle fx.Lifecycle) error { - lifecycle.Append(fx.Hook{ - OnStart: driver.Initialize, - OnStop: driver.Close, - }) - return nil - })) - options = append(options, fx.Provide( - NewLedgerStorageDriverFromRawDriver, - NewDefaultStorageDriverFromRawDriver, - NewIdempotencyStorageDriverFromRawDriver, - )) - return fx.Options(options...) 
-} diff --git a/pkg/storage/sqlstorage/pagination.go b/pkg/storage/sqlstorage/pagination.go deleted file mode 100644 index d542741e8..000000000 --- a/pkg/storage/sqlstorage/pagination.go +++ /dev/null @@ -1,43 +0,0 @@ -package sqlstorage - -import ( - "time" - - "github.com/numary/ledger/pkg/ledger" -) - -type TxsPaginationToken struct { - AfterTxID uint64 `json:"after"` - ReferenceFilter string `json:"reference,omitempty"` - AccountFilter string `json:"account,omitempty"` - SourceFilter string `json:"source,omitempty"` - DestinationFilter string `json:"destination,omitempty"` - StartTime time.Time `json:"startTime,omitempty"` - EndTime time.Time `json:"endTime,omitempty"` - MetadataFilter map[string]string `json:"metadata,omitempty"` - PageSize uint `json:"pageSize,omitempty"` -} - -type AccPaginationToken struct { - PageSize uint `json:"pageSize"` - Offset uint `json:"offset"` - AfterAddress string `json:"after,omitempty"` - AddressRegexpFilter string `json:"address,omitempty"` - MetadataFilter map[string]string `json:"metadata,omitempty"` - BalanceFilter string `json:"balance,omitempty"` - BalanceOperatorFilter ledger.BalanceOperator `json:"balanceOperator,omitempty"` -} - -type BalancesPaginationToken struct { - PageSize uint `json:"pageSize"` - Offset uint `json:"offset"` - AfterAddress string `json:"after,omitempty"` - AddressRegexpFilter string `json:"address,omitempty"` -} - -type LogsPaginationToken struct { - AfterID uint64 `json:"after"` - PageSize uint `json:"pageSize,omitempty"` - StartTime time.Time `json:"startTime,omitempty"` - EndTime time.Time `json:"endTime,omitempty"` -} diff --git a/pkg/storage/sqlstorage/queryable.go b/pkg/storage/sqlstorage/queryable.go deleted file mode 100644 index e436bb6eb..000000000 --- a/pkg/storage/sqlstorage/queryable.go +++ /dev/null @@ -1,12 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" -) - -type executor interface { - QueryContext(ctx context.Context, query string, args ...interface{}) 
(*sql.Rows, error) - QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row - ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) -} diff --git a/pkg/storage/sqlstorage/register.go b/pkg/storage/sqlstorage/register.go deleted file mode 100644 index 00a4d3ed1..000000000 --- a/pkg/storage/sqlstorage/register.go +++ /dev/null @@ -1,38 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "path/filepath" - "runtime" - "strings" - - "github.com/numary/ledger/pkg/core" -) - -var registeredGoMigrations []Migration - -type MigrationFunc func(ctx context.Context, schema Schema, tx *sql.Tx) error - -func RegisterGoMigration(fn MigrationFunc) { - _, filename, _, _ := runtime.Caller(1) - RegisterGoMigrationFromFilename(filename, fn) -} - -func RegisterGoMigrationFromFilename(filename string, fn MigrationFunc) { - rest, goFile := filepath.Split(filename) - directory := filepath.Base(rest) - - version, name := extractMigrationInformation(directory) - engine := strings.Split(goFile, ".")[0] - - registeredGoMigrations = append(registeredGoMigrations, Migration{ - MigrationInfo: core.MigrationInfo{ - Version: version, - Name: name, - }, - Handlers: map[string][]MigrationFunc{ - engine: {fn}, - }, - }) -} diff --git a/pkg/storage/sqlstorage/register_test.go b/pkg/storage/sqlstorage/register_test.go deleted file mode 100644 index 58b73c5b2..000000000 --- a/pkg/storage/sqlstorage/register_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "path/filepath" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestRegister(t *testing.T) { - fn := func(ctx context.Context, schema Schema, tx *sql.Tx) error { - return nil - } - registeredGoMigrations = make([]Migration, 0) - defer func() { - registeredGoMigrations = make([]Migration, 0) - }() - RegisterGoMigrationFromFilename(filepath.Join("XXX", "0-init-schema", "any.go"), fn) - require.Len(t, 
registeredGoMigrations, 1) -} diff --git a/pkg/storage/sqlstorage/schema.go b/pkg/storage/sqlstorage/schema.go deleted file mode 100644 index 1a7f922af..000000000 --- a/pkg/storage/sqlstorage/schema.go +++ /dev/null @@ -1,214 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "fmt" - "os" - "path" - - "github.com/formancehq/go-libs/logging" - "github.com/huandu/go-sqlbuilder" -) - -type Schema interface { - executor - Initialize(ctx context.Context) error - Table(name string) string - Close(ctx context.Context) error - BeginTx(ctx context.Context, s *sql.TxOptions) (*sql.Tx, error) - Flavor() sqlbuilder.Flavor - Name() string - Delete(ctx context.Context) error -} - -type baseSchema struct { - *sql.DB - closeDb bool - name string -} - -func (s *baseSchema) Name() string { - return s.name -} - -func (s *baseSchema) QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) { - logging.GetLogger(ctx).Debugf("QueryContext: %s %s", query, args) - return s.DB.QueryContext(ctx, query, args...) -} -func (s *baseSchema) QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row { - logging.GetLogger(ctx).Debugf("QueryRowContext: %s %s", query, args) - return s.DB.QueryRowContext(ctx, query, args...) -} -func (s *baseSchema) ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) { - logging.GetLogger(ctx).Debugf("ExecContext: %s %s", query, args) - return s.DB.ExecContext(ctx, query, args...) 
-} -func (s *baseSchema) Close(ctx context.Context) error { - if s.closeDb { - return s.DB.Close() - } - return nil -} - -func (s *baseSchema) Table(name string) string { - return name -} - -func (s *baseSchema) Initialize(ctx context.Context) error { - return nil -} - -type PGSchema struct { - baseSchema - prefix string -} - -func (s *PGSchema) Table(name string) string { - return fmt.Sprintf(`"%s".%s`, s.prefix, name) -} - -func (s *PGSchema) Initialize(ctx context.Context) error { - _, err := s.ExecContext(ctx, fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS \"%s\"", s.name)) - return err -} - -func (s *PGSchema) Flavor() sqlbuilder.Flavor { - return sqlbuilder.PostgreSQL -} - -func (s *PGSchema) Delete(ctx context.Context) error { - _, err := s.ExecContext(ctx, fmt.Sprintf("DROP SCHEMA \"%s\"", s.name)) - return err -} - -func (s *PGSchema) QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) { - rows, err := s.baseSchema.QueryContext(ctx, query, args...) - if err != nil { - return nil, errorFromFlavor(PostgreSQL, err) - } - return rows, nil -} - -func (s *PGSchema) ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) { - ret, err := s.baseSchema.ExecContext(ctx, query, args...) - if err != nil { - return nil, errorFromFlavor(PostgreSQL, err) - } - return ret, nil -} - -type SQLiteSchema struct { - baseSchema - file string -} - -func (s SQLiteSchema) Flavor() sqlbuilder.Flavor { - return sqlbuilder.SQLite -} - -func (s SQLiteSchema) Delete(ctx context.Context) error { - err := s.baseSchema.DB.Close() - if err != nil { - return err - } - return os.RemoveAll(s.file) -} - -func (s *SQLiteSchema) QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) { - rows, err := s.baseSchema.QueryContext(ctx, query, args...) 
- if err != nil { - return nil, errorFromFlavor(SQLite, err) - } - return rows, nil -} - -func (s *SQLiteSchema) ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) { - ret, err := s.baseSchema.ExecContext(ctx, query, args...) - if err != nil { - return nil, errorFromFlavor(SQLite, err) - } - return ret, nil -} - -type DB interface { - Initialize(ctx context.Context) error - Schema(ctx context.Context, name string) (Schema, error) - Close(ctx context.Context) error -} - -type postgresDB struct { - db *sql.DB -} - -func (p *postgresDB) Initialize(ctx context.Context) error { - _, err := p.db.ExecContext(ctx, "CREATE EXTENSION IF NOT EXISTS pgcrypto") - if err != nil { - return err - } - _, err = p.db.ExecContext(ctx, "CREATE EXTENSION IF NOT EXISTS pg_trgm") - if err != nil { - return err - } - return nil -} - -func (p *postgresDB) Schema(ctx context.Context, name string) (Schema, error) { - return &PGSchema{ - baseSchema: baseSchema{ - DB: p.db, - name: name, - }, - prefix: name, - }, nil -} - -func (p *postgresDB) Close(ctx context.Context) error { - return p.db.Close() -} - -func NewPostgresDB(db *sql.DB) *postgresDB { - return &postgresDB{ - db: db, - } -} - -type sqliteDB struct { - directory string - dbName string -} - -func (p *sqliteDB) Initialize(ctx context.Context) error { - return nil -} - -func (p *sqliteDB) Schema(ctx context.Context, name string) (Schema, error) { - file := path.Join( - p.directory, - fmt.Sprintf("%s_%s.db", p.dbName, name), - ) - db, err := OpenSQLDB(SQLite, file) - if err != nil { - return nil, err - } - - return &SQLiteSchema{ - baseSchema: baseSchema{ - name: name, - DB: db, - closeDb: true, - }, - file: file, - }, nil -} - -func (p *sqliteDB) Close(ctx context.Context) error { - return nil -} - -func NewSQLiteDB(directory, dbName string) *sqliteDB { - return &sqliteDB{ - directory: directory, - dbName: dbName, - } -} diff --git a/pkg/storage/sqlstorage/sqlite.go 
b/pkg/storage/sqlstorage/sqlite.go deleted file mode 100644 index 96aac3014..000000000 --- a/pkg/storage/sqlstorage/sqlite.go +++ /dev/null @@ -1,160 +0,0 @@ -//go:build cgo -// +build cgo - -// File is part of the build only if cgo is enabled. -// Otherwise, the compilation will complains about missing type sqlite3,Error -// It is due to the fact than sqlite lib use import "C" statement. -// The presence of these statement during the build exclude the file if CGO is disabled. -package sqlstorage - -import ( - "database/sql" - "encoding/json" - "regexp" - "strconv" - - "github.com/buger/jsonparser" - "github.com/mattn/go-sqlite3" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/storage" -) - -func init() { - errorHandlers[SQLite] = func(err error) error { - eerr, ok := err.(sqlite3.Error) - if !ok { - return err - } - if eerr.Code == sqlite3.ErrConstraint { - return storage.NewError(storage.ConstraintFailed, err) - } - return err - } - sql.Register("sqlite3-custom", &sqlite3.SQLiteDriver{ - ConnectHook: func(conn *sqlite3.SQLiteConn) error { - err := conn.RegisterFunc("hash_log", func(v1, v2 string) string { - m1 := make(map[string]interface{}) - m2 := make(map[string]interface{}) - err := json.Unmarshal([]byte(v1), &m1) - if err != nil { - panic(err) - } - err = json.Unmarshal([]byte(v2), &m2) - if err != nil { - panic(err) - } - return core.Hash(m1, m2) - }, true) - if err != nil { - return err - } - err = conn.RegisterFunc("regexp", func(re, s string) (bool, error) { - b, e := regexp.MatchString(re, s) - return b, e - }, true) - if err != nil { - return err - } - err = conn.RegisterFunc("use_account", func(v string, act string) (bool, error) { - r, err := regexp.Compile("^" + act + "$") - if err != nil { - return false, err - } - postings := core.Postings{} - err = json.Unmarshal([]byte(v), &postings) - if err != nil { - return false, nil - } - for _, p := range postings { - if r.MatchString(p.Source) || r.MatchString(p.Destination) { - 
return true, nil - } - } - return false, nil - }, true) - if err != nil { - return err - } - err = conn.RegisterFunc("use_account_as_source", func(v string, act string) (bool, error) { - r, err := regexp.Compile("^" + act + "$") - if err != nil { - return false, err - } - postings := core.Postings{} - err = json.Unmarshal([]byte(v), &postings) - if err != nil { - return false, nil - } - for _, p := range postings { - if r.MatchString(p.Source) { - return true, nil - } - } - return false, nil - }, true) - if err != nil { - return err - } - err = conn.RegisterFunc("use_account_as_destination", func(v string, act string) (bool, error) { - r, err := regexp.Compile("^" + act + "$") - if err != nil { - return false, err - } - postings := core.Postings{} - err = json.Unmarshal([]byte(v), &postings) - if err != nil { - return false, nil - } - for _, p := range postings { - if r.MatchString(p.Destination) { - return true, nil - } - } - return false, nil - }, true) - if err != nil { - return err - } - err = conn.RegisterFunc(SQLCustomFuncMetaCompare, func(metadata string, value string, key ...string) bool { - bytes, dataType, _, err := jsonparser.Get([]byte(metadata), key...) 
- if err != nil { - return false - } - switch dataType { - case jsonparser.String: - str, err := jsonparser.ParseString(bytes) - if err != nil { - return false - } - return value == str - case jsonparser.Boolean: - b, err := jsonparser.ParseBoolean(bytes) - if err != nil { - return false - } - switch value { - case "true": - return b - case "false": - return !b - } - return false - case jsonparser.Number: - i, err := jsonparser.ParseInt(bytes) - if err != nil { - return false - } - vi, err := strconv.ParseInt(value, 10, 64) - if err != nil { - return false - } - return i == vi - default: - return false - } - }, true) - return err - }, - }) - UpdateSQLDriverMapping(SQLite, "sqlite3-custom") -} diff --git a/pkg/storage/sqlstorage/store_ledger.go b/pkg/storage/sqlstorage/store_ledger.go deleted file mode 100644 index 121129d8c..000000000 --- a/pkg/storage/sqlstorage/store_ledger.go +++ /dev/null @@ -1,70 +0,0 @@ -package sqlstorage - -import ( - "context" - - "github.com/formancehq/go-libs/logging" - _ "github.com/jackc/pgx/v4/stdlib" - "github.com/numary/ledger/pkg/ledger" - "github.com/pkg/errors" -) - -const ( - SQLCustomFuncMetaCompare = "meta_compare" -) - -type Store struct { - executorProvider func(ctx context.Context) (executor, error) - schema Schema - onClose func(ctx context.Context) error - onDelete func(ctx context.Context) error -} - -func (s *Store) error(err error) error { - if err == nil { - return nil - } - return errorFromFlavor(Flavor(s.schema.Flavor()), err) -} - -func (s *Store) Schema() Schema { - return s.schema -} - -func (s *Store) Name() string { - return s.schema.Name() -} - -func (s *Store) Delete(ctx context.Context) error { - if err := s.schema.Delete(ctx); err != nil { - return err - } - return errors.Wrap(s.onDelete(ctx), "deleting ledger store") -} - -func (s *Store) Initialize(ctx context.Context) (bool, error) { - logging.GetLogger(ctx).Debug("Initialize store") - - migrations, err := CollectMigrationFiles(MigrationsFS) - if err != 
nil { - return false, err - } - - return Migrate(ctx, s.schema, migrations...) -} - -func (s *Store) Close(ctx context.Context) error { - return s.onClose(ctx) -} - -func NewStore(schema Schema, executorProvider func(ctx context.Context) (executor, error), - onClose, onDelete func(ctx context.Context) error) *Store { - return &Store{ - executorProvider: executorProvider, - schema: schema, - onClose: onClose, - onDelete: onDelete, - } -} - -var _ ledger.Store = &Store{} diff --git a/pkg/storage/sqlstorage/store_ledger_test.go b/pkg/storage/sqlstorage/store_ledger_test.go deleted file mode 100644 index b9efd28ca..000000000 --- a/pkg/storage/sqlstorage/store_ledger_test.go +++ /dev/null @@ -1,821 +0,0 @@ -package sqlstorage_test - -import ( - "context" - "database/sql" - "encoding/json" - "fmt" - "net/http" - "os" - "testing" - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/logging/logginglogrus" - "github.com/google/uuid" - "github.com/numary/ledger/internal/pgtesting" - "github.com/numary/ledger/pkg/api/idempotency" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func TestStore(t *testing.T) { - l := logrus.New() - if testing.Verbose() { - l.Level = logrus.DebugLevel - } - logging.SetFactory(logging.StaticLoggerFactory(logginglogrus.New(l))) - - type testingFunction struct { - name string - fn func(t *testing.T, store *sqlstorage.Store) - } - - for _, tf := range []testingFunction{ - {name: "Commit", fn: testCommit}, - {name: "UpdateTransactionMetadata", fn: testUpdateTransactionMetadata}, - {name: "UpdateAccountMetadata", fn: testUpdateAccountMetadata}, - {name: "GetLastLog", fn: testGetLastLog}, - {name: "GetLogs", fn: 
testGetLogs}, - {name: "CountAccounts", fn: testCountAccounts}, - {name: "GetAssetsVolumes", fn: testGetAssetsVolumes}, - {name: "GetAccounts", fn: testGetAccounts}, - {name: "Transactions", fn: testTransactions}, - {name: "GetTransaction", fn: testGetTransaction}, - {name: "Mapping", fn: testMapping}, - {name: "TooManyClient", fn: testTooManyClient}, - {name: "GetBalances", fn: testGetBalances}, - {name: "GetBalancesAggregated", fn: testGetBalancesAggregated}, - {name: "CreateIK", fn: testIKS}, - } { - t.Run(fmt.Sprintf("%s/%s", ledgertesting.StorageDriverName(), tf.name), func(t *testing.T) { - done := make(chan struct{}) - app := fx.New( - ledgertesting.ProvideStorageDriver(), - fx.Invoke(func(driver *sqlstorage.Driver, lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - defer func() { - close(done) - }() - store, _, err := driver.GetLedgerStore(ctx, uuid.NewString(), true) - if err != nil { - return err - } - defer store.Close(ctx) - - if _, err = store.Initialize(context.Background()); err != nil { - return err - } - - tf.fn(t, store) - return nil - }, - }) - }), - ) - go func() { - require.NoError(t, app.Start(context.Background())) - }() - defer func(app *fx.App, ctx context.Context) { - require.NoError(t, app.Stop(ctx)) - }(app, context.Background()) - - select { - case <-time.After(5 * time.Second): - t.Fatal("timeout") - case <-done: - } - }) - } -} - -var now = time.Now().UTC().Truncate(time.Second) -var tx1 = core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Reference: "tx1", - Timestamp: now.Add(-3 * time.Hour), - }, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - }, - "central_bank": { - "USD": { - Input: 
core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - "central_bank": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, -} -var tx2 = core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 1, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Reference: "tx2", - Timestamp: now.Add(-2 * time.Hour), - }, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(200), - }, - }, - "central_bank": { - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(0), - }, - }, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - }, - "central_bank": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, -} -var tx3 = core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 2, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "central_bank", - Destination: "users:1", - Amount: core.NewMonetaryInt(1), - Asset: "USD", - }, - }, - Reference: "tx3", - Metadata: core.Metadata{ - "priority": json.RawMessage(`"high"`), - }, - Timestamp: now.Add(-1 * time.Hour), - }, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "central_bank": { - "USD": { - Input: core.NewMonetaryInt(200), - Output: core.NewMonetaryInt(0), - }, - }, - "users:1": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "central_bank": { - "USD": { - Input: core.NewMonetaryInt(200), - 
Output: core.NewMonetaryInt(1), - }, - }, - "users:1": { - "USD": { - Input: core.NewMonetaryInt(1), - Output: core.NewMonetaryInt(0), - }, - }, - }, -} - -func testCommit(t *testing.T, store *sqlstorage.Store) { - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 0, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Reference: "foo", - Timestamp: time.Now().Round(time.Second), - }, - }, - } - err := store.Commit(context.Background(), tx) - require.NoError(t, err) - - err = store.Commit(context.Background(), tx) - require.Error(t, err) - require.True(t, storage.IsErrorCode(err, storage.ConstraintFailed)) - - cursor, err := store.GetLogs(context.Background(), ledger.NewLogsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 1) -} - -func testIKS(t *testing.T, store *sqlstorage.Store) { - t.Run("Create and Read", func(t *testing.T) { - response := idempotency.Response{ - RequestHash: "xxx", - StatusCode: http.StatusAccepted, - Header: http.Header{ - "Content-Type": []string{"application/json"}, - }, - Body: "Hello World!", - } - require.NoError(t, store.CreateIK(context.Background(), "foo", response)) - - fromDB, err := store.ReadIK(context.Background(), "foo") - require.NoError(t, err) - require.Equal(t, response, *fromDB) - }) - t.Run("Not found", func(t *testing.T) { - _, err := store.ReadIK(context.Background(), uuid.NewString()) - require.Equal(t, idempotency.ErrIKNotFound, err) - }) -} - -func testUpdateTransactionMetadata(t *testing.T, store *sqlstorage.Store) { - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 0, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Reference: "foo", - Timestamp: time.Now().Round(time.Second), - }, - }, - 
} - err := store.Commit(context.Background(), tx) - require.NoError(t, err) - - err = store.UpdateTransactionMetadata(context.Background(), tx.ID, core.Metadata{ - "foo": "bar", - }, time.Now()) - require.NoError(t, err) - - retrievedTransaction, err := store.GetTransaction(context.Background(), tx.ID) - require.NoError(t, err) - require.EqualValues(t, "bar", retrievedTransaction.Metadata["foo"]) - - cursor, err := store.GetLogs(context.Background(), ledger.NewLogsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 2) -} - -func testUpdateAccountMetadata(t *testing.T, store *sqlstorage.Store) { - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 0, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Reference: "foo", - Timestamp: time.Now().Round(time.Second), - }, - }, - } - err := store.Commit(context.Background(), tx) - require.NoError(t, err) - - err = store.UpdateAccountMetadata(context.Background(), "central_bank", core.Metadata{ - "foo": "bar", - }, time.Now()) - require.NoError(t, err) - - account, err := store.GetAccount(context.Background(), "central_bank") - require.NoError(t, err) - require.EqualValues(t, "bar", account.Metadata["foo"]) - - cursor, err := store.GetLogs(context.Background(), ledger.NewLogsQuery()) - require.NoError(t, err) - require.Len(t, cursor.Data, 2) -} - -func testCountAccounts(t *testing.T, store *sqlstorage.Store) { - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: 0, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Timestamp: time.Now().Round(time.Second), - }, - }, - } - err := store.Commit(context.Background(), tx) - require.NoError(t, err) - - countAccounts, err := 
store.CountAccounts(context.Background(), ledger.AccountsQuery{}) - require.NoError(t, err) - require.EqualValues(t, 2, countAccounts) // world + central_bank -} - -func testGetAssetsVolumes(t *testing.T, store *sqlstorage.Store) { - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Timestamp: time.Now().Round(time.Second), - }, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "central_bank": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "central_bank": core.AssetsVolumes{ - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - }, - } - err := store.Commit(context.Background(), tx) - require.NoError(t, err) - - volumes, err := store.GetAssetsVolumes(context.Background(), "central_bank") - require.NoError(t, err) - require.Len(t, volumes, 1) - require.EqualValues(t, core.NewMonetaryInt(100), volumes["USD"].Input) - require.EqualValues(t, core.NewMonetaryInt(0), volumes["USD"].Output) -} - -func testGetAccounts(t *testing.T, store *sqlstorage.Store) { - require.NoError(t, store.UpdateAccountMetadata(context.Background(), "world", core.Metadata{ - "foo": json.RawMessage(`"bar"`), - }, now)) - require.NoError(t, store.UpdateAccountMetadata(context.Background(), "bank", core.Metadata{ - "hello": json.RawMessage(`"world"`), - }, now)) - require.NoError(t, store.UpdateAccountMetadata(context.Background(), "order:1", core.Metadata{ - "hello": json.RawMessage(`"world"`), - }, now)) - require.NoError(t, store.UpdateAccountMetadata(context.Background(), "order:2", core.Metadata{ - "number": json.RawMessage(`3`), - "boolean": json.RawMessage(`true`), - "a": json.RawMessage(`{"super": {"nested": {"key": "hello"}}}`), - 
}, now)) - - accounts, err := store.GetAccounts(context.Background(), ledger.AccountsQuery{ - PageSize: 1, - }) - require.NoError(t, err) - require.Equal(t, 1, accounts.PageSize) - require.Len(t, accounts.Data, 1) - - accounts, err = store.GetAccounts(context.Background(), ledger.AccountsQuery{ - PageSize: 1, - AfterAddress: accounts.Data[0].Address, - }) - require.NoError(t, err) - require.Equal(t, 1, accounts.PageSize) - - accounts, err = store.GetAccounts(context.Background(), ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Address: ".*der.*", - }, - }) - require.NoError(t, err) - require.Len(t, accounts.Data, 2) - require.Equal(t, 10, accounts.PageSize) - - accounts, err = store.GetAccounts(context.Background(), ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Metadata: map[string]string{ - "foo": "bar", - }, - }, - }) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - - accounts, err = store.GetAccounts(context.Background(), ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Metadata: map[string]string{ - "number": "3", - }, - }, - }) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - - accounts, err = store.GetAccounts(context.Background(), ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Metadata: map[string]string{ - "boolean": "true", - }, - }, - }) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - - accounts, err = store.GetAccounts(context.Background(), ledger.AccountsQuery{ - PageSize: 10, - Filters: ledger.AccountsQueryFilters{ - Metadata: map[string]string{ - "a.super.nested.key": "hello", - }, - }, - }) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) -} - -func testTransactions(t *testing.T, store *sqlstorage.Store) { - err := store.Commit(context.Background(), tx1, tx2, tx3) - require.NoError(t, err) - - t.Run("Count", func(t *testing.T) { - count, err := 
store.CountTransactions(context.Background(), ledger.TransactionsQuery{}) - require.NoError(t, err) - // Should get all the transactions - require.EqualValues(t, 3, count) - - count, err = store.CountTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Account: "world", - }, - }) - require.NoError(t, err) - // Should get the two first transactions involving the 'world' account. - require.EqualValues(t, 2, count) - - count, err = store.CountTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Account: "world", - StartTime: now.Add(-2 * time.Hour), - EndTime: now.Add(-1 * time.Hour), - }, - }) - require.NoError(t, err) - // Should get only tx2, as StartTime is inclusive and EndTime exclusive. - require.EqualValues(t, 1, count) - - count, err = store.CountTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Metadata: map[string]string{ - "priority": "high", - }, - }, - }) - require.NoError(t, err) - require.EqualValues(t, 1, count) - }) - - t.Run("Get", func(t *testing.T) { - cursor, err := store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - PageSize: 1, - }) - require.NoError(t, err) - // Should get only the first transaction. - require.Equal(t, 1, cursor.PageSize) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - AfterTxID: cursor.Data[0].ID, - PageSize: 1, - }) - require.NoError(t, err) - // Should get only the second transaction. - require.Equal(t, 1, cursor.PageSize) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Account: "world", - Reference: "tx1", - }, - PageSize: 1, - }) - require.NoError(t, err) - require.Equal(t, 1, cursor.PageSize) - // Should get only the first transaction. 
- require.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Account: "users:.*", - }, - PageSize: 10, - }) - require.NoError(t, err) - require.Equal(t, 10, cursor.PageSize) - require.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Source: "central_bank", - }, - PageSize: 10, - }) - require.NoError(t, err) - require.Equal(t, 10, cursor.PageSize) - // Should get only the third transaction. - require.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Destination: "users:1", - }, - PageSize: 10, - }) - require.NoError(t, err) - require.Equal(t, 10, cursor.PageSize) - // Should get only the third transaction. - require.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Destination: "users:.*", // Use regex - }, - PageSize: 10, - }) - assert.NoError(t, err) - assert.Equal(t, 10, cursor.PageSize) - // Should get only the third transaction. - assert.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Destination: ".*:1", // Use regex - }, - PageSize: 10, - }) - assert.NoError(t, err) - assert.Equal(t, 10, cursor.PageSize) - // Should get only the third transaction. - assert.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Source: ".*bank", // Use regex - }, - PageSize: 10, - }) - assert.NoError(t, err) - assert.Equal(t, 10, cursor.PageSize) - // Should get only the third transaction. 
- assert.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - StartTime: now.Add(-2 * time.Hour), - EndTime: now.Add(-1 * time.Hour), - }, - PageSize: 10, - }) - require.NoError(t, err) - require.Equal(t, 10, cursor.PageSize) - // Should get only tx2, as StartTime is inclusive and EndTime exclusive. - require.Len(t, cursor.Data, 1) - - cursor, err = store.GetTransactions(context.Background(), ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Metadata: map[string]string{ - "priority": "high", - }, - }, - PageSize: 10, - }) - require.NoError(t, err) - require.Equal(t, 10, cursor.PageSize) - // Should get only the third transaction. - require.Len(t, cursor.Data, 1) - }) -} - -func testMapping(t *testing.T, store *sqlstorage.Store) { - m := core.Mapping{ - Contracts: []core.Contract{ - { - Expr: &core.ExprGt{ - Op1: core.VariableExpr{Name: "balance"}, - Op2: core.ConstantExpr{Value: core.NewMonetaryInt(0)}, - }, - Account: "orders:*", - }, - }, - } - err := store.SaveMapping(context.Background(), m) - assert.NoError(t, err) - - mapping, err := store.LoadMapping(context.Background()) - assert.NoError(t, err) - assert.Len(t, mapping.Contracts, 1) - assert.EqualValues(t, m.Contracts[0], mapping.Contracts[0]) - - m2 := core.Mapping{ - Contracts: []core.Contract{}, - } - err = store.SaveMapping(context.Background(), m2) - assert.NoError(t, err) - - mapping, err = store.LoadMapping(context.Background()) - assert.NoError(t, err) - assert.Len(t, mapping.Contracts, 0) -} - -func testGetTransaction(t *testing.T, store *sqlstorage.Store) { - err := store.Commit(context.Background(), tx1, tx2) - require.NoError(t, err) - - tx, err := store.GetTransaction(context.Background(), tx1.ID) - require.NoError(t, err) - require.Equal(t, tx1.Postings, tx.Postings) - require.Equal(t, tx1.Reference, tx.Reference) - require.Equal(t, tx1.Timestamp, tx.Timestamp) -} - 
-func testTooManyClient(t *testing.T, store *sqlstorage.Store) { - // Use of external server, ignore this test - if os.Getenv("NUMARY_STORAGE_POSTGRES_CONN_STRING") != "" || - ledgertesting.StorageDriverName() != "postgres" { - return - } - - for i := 0; i < pgtesting.MaxConnections; i++ { - tx, err := store.Schema().BeginTx(context.Background(), nil) - require.NoError(t, err) - defer func(tx *sql.Tx) { - require.NoError(t, tx.Rollback()) - }(tx) - } - - _, err := store.CountTransactions(context.Background(), ledger.TransactionsQuery{}) - require.Error(t, err) - require.IsType(t, new(storage.Error), err) - require.Equal(t, storage.TooManyClient, err.(*storage.Error).Code) -} - -func TestInitializeStore(t *testing.T) { - l := logrus.New() - if testing.Verbose() { - l.Level = logrus.DebugLevel - } - logging.SetFactory(logging.StaticLoggerFactory(logginglogrus.New(l))) - - driver, stopFn, err := ledgertesting.StorageDriver() - require.NoError(t, err) - defer stopFn() - defer func(driver storage.Driver[*sqlstorage.Store], ctx context.Context) { - require.NoError(t, driver.Close(ctx)) - }(driver, context.Background()) - - err = driver.Initialize(context.Background()) - require.NoError(t, err) - - store, _, err := driver.GetLedgerStore(context.Background(), uuid.NewString(), true) - require.NoError(t, err) - - modified, err := store.Initialize(context.Background()) - require.NoError(t, err) - require.True(t, modified) - - modified, err = store.Initialize(context.Background()) - require.NoError(t, err) - require.False(t, modified) -} - -func testGetLastLog(t *testing.T, store *sqlstorage.Store) { - err := store.Commit(context.Background(), tx1) - require.NoError(t, err) - - lastLog, err := store.GetLastLog(context.Background()) - require.NoError(t, err) - require.NotNil(t, lastLog) - - require.Equal(t, tx1.Postings, lastLog.Data.(core.Transaction).Postings) - require.Equal(t, tx1.Reference, lastLog.Data.(core.Transaction).Reference) - require.Equal(t, tx1.Timestamp, 
lastLog.Data.(core.Transaction).Timestamp) -} - -func testGetLogs(t *testing.T, store *sqlstorage.Store) { - require.NoError(t, store.Commit(context.Background(), tx1, tx2, tx3)) - - cursor, err := store.GetLogs(context.Background(), ledger.NewLogsQuery()) - require.NoError(t, err) - require.Equal(t, ledger.QueryDefaultPageSize, cursor.PageSize) - - require.Equal(t, 3, len(cursor.Data)) - require.Equal(t, uint64(2), cursor.Data[0].ID) - require.Equal(t, tx3.Postings, cursor.Data[0].Data.(core.Transaction).Postings) - require.Equal(t, tx3.Reference, cursor.Data[0].Data.(core.Transaction).Reference) - require.Equal(t, tx3.Timestamp, cursor.Data[0].Data.(core.Transaction).Timestamp) - - cursor, err = store.GetLogs(context.Background(), &ledger.LogsQuery{ - PageSize: 1, - }) - require.NoError(t, err) - // Should get only the first log. - require.Equal(t, 1, cursor.PageSize) - require.Equal(t, uint64(2), cursor.Data[0].ID) - - cursor, err = store.GetLogs(context.Background(), &ledger.LogsQuery{ - AfterID: cursor.Data[0].ID, - PageSize: 1, - }) - require.NoError(t, err) - // Should get only the second log. - require.Equal(t, 1, cursor.PageSize) - require.Equal(t, uint64(1), cursor.Data[0].ID) - - cursor, err = store.GetLogs(context.Background(), &ledger.LogsQuery{ - Filters: ledger.LogsQueryFilters{ - StartTime: now.Add(-2 * time.Hour), - EndTime: now.Add(-1 * time.Hour), - }, - PageSize: 10, - }) - require.NoError(t, err) - require.Equal(t, 10, cursor.PageSize) - // Should get only the second log, as StartTime is inclusive and EndTime exclusive. 
- require.Len(t, cursor.Data, 1) - require.Equal(t, uint64(1), cursor.Data[0].ID) -} diff --git a/pkg/storage/sqlstorage/store_system.go b/pkg/storage/sqlstorage/store_system.go deleted file mode 100644 index 9ae82f52a..000000000 --- a/pkg/storage/sqlstorage/store_system.go +++ /dev/null @@ -1,118 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "time" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/storage" - "github.com/pkg/errors" -) - -type SystemStore struct { - systemSchema Schema -} - -func (s *SystemStore) GetConfiguration(ctx context.Context, key string) (string, error) { - builder := sqlbuilder. - Select("value"). - From(s.systemSchema.Table("configuration")) - q, args := builder. - Where(builder.E("key", key)). - Limit(1). - BuildWithFlavor(s.systemSchema.Flavor()) - - row := s.systemSchema.QueryRowContext(ctx, q, args...) - if row.Err() != nil { - if row.Err() != sql.ErrNoRows { - return "", nil - } - } - var value string - if err := row.Scan(&value); err != nil { - if err == sql.ErrNoRows { - return "", storage.ErrConfigurationNotFound - } - return "", err - } - - return value, nil -} - -func (s SystemStore) InsertConfiguration(ctx context.Context, key, value string) error { - q, args := sqlbuilder. - InsertInto(s.systemSchema.Table("configuration")). - Cols("key", "value", "addedAt"). - Values(key, value, time.Now().UTC().Truncate(time.Second)). - BuildWithFlavor(s.systemSchema.Flavor()) - _, err := s.systemSchema.ExecContext(ctx, q, args...) - return errors.Wrap(err, "inserting configuration") -} - -func (s SystemStore) ListLedgers(ctx context.Context) ([]string, error) { - q, args := sqlbuilder. - Select("ledger"). - From(s.systemSchema.Table("ledgers")). - BuildWithFlavor(s.systemSchema.Flavor()) - rows, err := s.systemSchema.QueryContext(ctx, q, args...) 
- if err != nil { - return nil, err - } - defer rows.Close() - - res := make([]string, 0) - for rows.Next() { - var ledger string - if err := rows.Scan(&ledger); err != nil { - return nil, err - } - res = append(res, ledger) - } - return res, nil -} - -func (s SystemStore) DeleteLedger(ctx context.Context, name string) error { - b := sqlbuilder.DeleteFrom(s.systemSchema.Table("ledgers")) - b = b.Where(b.E("ledger", name)) - q, args := b.BuildWithFlavor(s.systemSchema.Flavor()) - _, err := s.systemSchema.ExecContext(ctx, q, args...) - return errors.Wrap(err, "delete ledger from system store") -} - -func (s *SystemStore) Register(ctx context.Context, ledger string) (bool, error) { - q, args := sqlbuilder. - InsertInto(s.systemSchema.Table("ledgers")). - Cols("ledger", "addedAt"). - Values(ledger, time.Now()). - SQL("ON CONFLICT DO NOTHING"). - BuildWithFlavor(s.systemSchema.Flavor()) - - ret, err := s.systemSchema.ExecContext(ctx, q, args...) - if err != nil { - return false, err - } - affected, err := ret.RowsAffected() - if err != nil { - return false, err - } - return affected > 0, nil -} - -func (s *SystemStore) exists(ctx context.Context, ledger string) (bool, error) { - b := sqlbuilder. - Select("ledger"). - From(s.systemSchema.Table("ledgers")) - - q, args := b.Where(b.E("ledger", ledger)).BuildWithFlavor(s.systemSchema.Flavor()) - - ret := s.systemSchema.QueryRowContext(ctx, q, args...) 
- if ret.Err() != nil { - return false, nil - } - var t string - _ = ret.Scan(&t) // Trigger close - return true, nil -} - -var _ storage.SystemStore = &SystemStore{} diff --git a/pkg/storage/sqlstorage/transactions.go b/pkg/storage/sqlstorage/transactions.go deleted file mode 100644 index 1a07ae457..000000000 --- a/pkg/storage/sqlstorage/transactions.go +++ /dev/null @@ -1,529 +0,0 @@ -package sqlstorage - -import ( - "context" - "database/sql" - "encoding/base64" - "encoding/json" - "fmt" - "regexp" - "strings" - "time" - - "github.com/formancehq/go-libs/api" - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/pkg/errors" -) - -// this regexp is used to distinguish between deprecated regex queries for -// source, destination and account params and the new wildcard query -// which allows segmented address pattern matching, e.g; "foo:bar:*" -var addressQueryRegexp = regexp.MustCompile(`^(\w+|\*|\.\*)(:(\w+|\*|\.\*))*$`) - -func (s *Store) buildTransactionsQuery(flavor Flavor, p ledger.TransactionsQuery) (*sqlbuilder.SelectBuilder, TxsPaginationToken) { - sb := sqlbuilder.NewSelectBuilder() - t := TxsPaginationToken{} - - var ( - destination = p.Filters.Destination - source = p.Filters.Source - account = p.Filters.Account - reference = p.Filters.Reference - startTime = p.Filters.StartTime - endTime = p.Filters.EndTime - metadata = p.Filters.Metadata - ) - - sb.Select("id", "timestamp", "reference", "metadata", "postings", "pre_commit_volumes", "post_commit_volumes"). 
- Distinct() - sb.From(s.schema.Table("transactions")) - if (source != "" || destination != "" || account != "") && flavor == PostgreSQL { - // new wildcard handling - sb.Join(fmt.Sprintf( - "%s postings on postings.txid = %s.id", - s.schema.Table("postings"), - s.schema.Table("transactions"), - )) - } - if source != "" { - if !addressQueryRegexp.MatchString(source) || flavor == SQLite { - // deprecated regex handling - arg := sb.Args.Add(source) - sb.Where(s.schema.Table("use_account_as_source") + "(postings, " + arg + ")") - } else { - // new wildcard handling - src := strings.Split(source, ":") - sb.Where(fmt.Sprintf("jsonb_array_length(postings.source) = %d", len(src))) - - for i, segment := range src { - if segment == ".*" || segment == "*" || segment == "" { - continue - } - - arg := sb.Args.Add(segment) - sb.Where(fmt.Sprintf("postings.source @@ ('$[%d] == \"' || %s::text || '\"')::jsonpath", i, arg)) - } - } - t.SourceFilter = source - } - if destination != "" { - if !addressQueryRegexp.MatchString(destination) || flavor == SQLite { - // deprecated regex handling - arg := sb.Args.Add(destination) - sb.Where(s.schema.Table("use_account_as_destination") + "(postings, " + arg + ")") - } else { - // new wildcard handling - dst := strings.Split(destination, ":") - sb.Where(fmt.Sprintf("jsonb_array_length(postings.destination) = %d", len(dst))) - for i, segment := range dst { - if segment == ".*" || segment == "*" || segment == "" { - continue - } - - arg := sb.Args.Add(segment) - sb.Where(fmt.Sprintf("postings.destination @@ ('$[%d] == \"' || %s::text || '\"')::jsonpath", i, arg)) - } - } - t.DestinationFilter = destination - } - if account != "" { - if !addressQueryRegexp.MatchString(account) || flavor == SQLite { - // deprecated regex handling - arg := sb.Args.Add(account) - sb.Where(s.schema.Table("use_account") + "(postings, " + arg + ")") - } else { - // new wildcard handling - dst := strings.Split(account, ":") - 
sb.Where(fmt.Sprintf("(jsonb_array_length(postings.destination) = %d OR jsonb_array_length(postings.source) = %d)", len(dst), len(dst))) - for i, segment := range dst { - if segment == ".*" || segment == "*" || segment == "" { - continue - } - - arg := sb.Args.Add(segment) - sb.Where(fmt.Sprintf("(postings.source @@ ('$[%d] == \"' || %s::text || '\"')::jsonpath OR postings.destination @@ ('$[%d] == \"' || %s::text || '\"')::jsonpath)", i, arg, i, arg)) - } - } - t.AccountFilter = account - } - if reference != "" { - sb.Where(sb.E("reference", reference)) - t.ReferenceFilter = reference - } - if !startTime.IsZero() { - sb.Where(sb.GE("timestamp", startTime.UTC())) - t.StartTime = startTime - } - if !endTime.IsZero() { - sb.Where(sb.L("timestamp", endTime.UTC())) - t.EndTime = endTime - } - - for key, value := range metadata { - arg := sb.Args.Add(value) - sb.Where(s.schema.Table( - fmt.Sprintf("%s(metadata, %s, '%s')", - SQLCustomFuncMetaCompare, arg, strings.ReplaceAll(key, ".", "', '")), - )) - } - t.MetadataFilter = metadata - - return sb, t -} - -func (s *Store) GetTransactions(ctx context.Context, q ledger.TransactionsQuery) (api.Cursor[core.ExpandedTransaction], error) { - txs := make([]core.ExpandedTransaction, 0) - - if q.PageSize == 0 { - return api.Cursor[core.ExpandedTransaction]{Data: txs}, nil - } - - sb, t := s.buildTransactionsQuery(Flavor(s.schema.Flavor()), q) - sb.OrderBy("id").Desc() - if q.AfterTxID > 0 { - sb.Where(sb.LE("id", q.AfterTxID)) - } - - // We fetch additional transactions to know if there are more before and/or after. - sb.Limit(int(q.PageSize + 2)) - t.PageSize = q.PageSize - - executor, err := s.executorProvider(ctx) - if err != nil { - return api.Cursor[core.ExpandedTransaction]{}, err - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - rows, err := executor.QueryContext(ctx, sqlq, args...) 
- if err != nil { - return api.Cursor[core.ExpandedTransaction]{}, s.error(err) - } - defer rows.Close() - - for rows.Next() { - var ref sql.NullString - tx := core.ExpandedTransaction{} - if err := rows.Scan( - &tx.ID, - &tx.Timestamp, - &ref, - &tx.Metadata, - &tx.Postings, - &tx.PreCommitVolumes, - &tx.PostCommitVolumes, - ); err != nil { - return api.Cursor[core.ExpandedTransaction]{}, err - } - tx.Reference = ref.String - if tx.Metadata == nil { - tx.Metadata = core.Metadata{} - } - tx.Timestamp = tx.Timestamp.UTC() - txs = append(txs, tx) - } - if rows.Err() != nil { - return api.Cursor[core.ExpandedTransaction]{}, s.error(err) - } - - var previous, next string - - // Page with transactions before - if q.AfterTxID > 0 && len(txs) > 1 && txs[0].ID == q.AfterTxID { - t.AfterTxID = txs[0].ID + uint64(q.PageSize) - txs = txs[1:] - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.ExpandedTransaction]{}, s.error(err) - } - previous = base64.RawURLEncoding.EncodeToString(raw) - } - - // Page with transactions after - if len(txs) > int(q.PageSize) { - txs = txs[:q.PageSize] - t.AfterTxID = txs[len(txs)-1].ID - raw, err := json.Marshal(t) - if err != nil { - return api.Cursor[core.ExpandedTransaction]{}, s.error(err) - } - next = base64.RawURLEncoding.EncodeToString(raw) - } - - hasMore := next != "" - return api.Cursor[core.ExpandedTransaction]{ - PageSize: int(q.PageSize), - HasMore: hasMore, - Previous: previous, - Next: next, - Data: txs, - PageSizeDeprecated: int(q.PageSize), - HasMoreDeprecated: &hasMore, - }, nil -} - -func (s *Store) GetTransaction(ctx context.Context, txId uint64) (*core.ExpandedTransaction, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("id", "timestamp", "reference", "metadata", "postings", "pre_commit_volumes", "post_commit_volumes") - sb.From(s.schema.Table("transactions")) - sb.Where(sb.Equal("id", txId)) - sb.OrderBy("id desc") - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, 
err - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - row := executor.QueryRowContext(ctx, sqlq, args...) - if row.Err() != nil { - return nil, s.error(row.Err()) - } - - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{}, - Metadata: core.Metadata{}, - }, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{}, - PostCommitVolumes: core.AccountsAssetsVolumes{}, - } - - var ref sql.NullString - if err := row.Scan( - &tx.ID, - &tx.Timestamp, - &ref, - &tx.Metadata, - &tx.Postings, - &tx.PreCommitVolumes, - &tx.PostCommitVolumes, - ); err != nil { - if err == sql.ErrNoRows { - return nil, nil - } - return nil, err - } - tx.Timestamp = tx.Timestamp.UTC() - tx.Reference = ref.String - - return &tx, nil -} - -func (s *Store) GetLastTransaction(ctx context.Context) (*core.ExpandedTransaction, error) { - sb := sqlbuilder.NewSelectBuilder() - sb.Select("id", "timestamp", "reference", "metadata", "postings", "pre_commit_volumes", "post_commit_volumes") - sb.From(s.schema.Table("transactions")) - sb.OrderBy("id desc") - sb.Limit(1) - - executor, err := s.executorProvider(ctx) - if err != nil { - return nil, err - } - - sqlq, args := sb.BuildWithFlavor(s.schema.Flavor()) - row := executor.QueryRowContext(ctx, sqlq, args...) 
- if row.Err() != nil { - return nil, s.error(row.Err()) - } - - tx := core.ExpandedTransaction{ - Transaction: core.Transaction{ - TransactionData: core.TransactionData{ - Postings: core.Postings{}, - Metadata: core.Metadata{}, - }, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{}, - PostCommitVolumes: core.AccountsAssetsVolumes{}, - } - - var ref sql.NullString - if err := row.Scan( - &tx.ID, - &tx.Timestamp, - &ref, - &tx.Metadata, - &tx.Postings, - &tx.PreCommitVolumes, - &tx.PostCommitVolumes, - ); err != nil { - if err == sql.ErrNoRows { - return nil, nil - } - return nil, err - } - tx.Timestamp = tx.Timestamp.UTC() - tx.Reference = ref.String - - return &tx, nil -} - -func (s *Store) insertTransactions(ctx context.Context, txs ...core.ExpandedTransaction) error { - var queryTxs string - var argsTxs []any - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - switch s.Schema().Flavor() { - case sqlbuilder.SQLite: - ibTxs := sqlbuilder.NewInsertBuilder() - ibTxs.InsertInto(s.schema.Table("transactions")) - ibTxs.Cols("id", "timestamp", "reference", "postings", "metadata", - "pre_commit_volumes", "post_commit_volumes") - - for _, tx := range txs { - postingsData, err := json.Marshal(tx.Postings) - if err != nil { - panic(err) - } - - metadataData := []byte("{}") - if tx.Metadata != nil { - metadataData, err = json.Marshal(tx.Metadata) - if err != nil { - panic(err) - } - } - - preCommitVolumesData, err := json.Marshal(tx.PreCommitVolumes) - if err != nil { - panic(err) - } - - postCommitVolumesData, err := json.Marshal(tx.PostCommitVolumes) - if err != nil { - panic(err) - } - - var reference *string - if tx.Reference != "" { - cp := tx.Reference - reference = &cp - } - - ibTxs.Values(tx.ID, tx.Timestamp, reference, postingsData, - metadataData, preCommitVolumesData, postCommitVolumesData) - } - - queryTxs, argsTxs = ibTxs.BuildWithFlavor(s.schema.Flavor()) - - case sqlbuilder.PostgreSQL: - txIds := make([]uint64, len(txs)) - 
timestamps := make([]time.Time, len(txs)) - references := make([]*string, len(txs)) - postingDataSet := make([]string, len(txs)) - metadataDataSet := make([]string, len(txs)) - preCommitVolumesDataSet := make([]string, len(txs)) - postCommitVolumesDataSet := make([]string, len(txs)) - - postingTxIds := []uint64{} - postingIndices := []int{} - sources := []string{} - destinations := []string{} - - for i, tx := range txs { - postingsData, err := json.Marshal(tx.Postings) - if err != nil { - panic(err) - } - - metadataData := []byte("{}") - if tx.Metadata != nil { - metadataData, err = json.Marshal(tx.Metadata) - if err != nil { - panic(err) - } - } - - preCommitVolumesData, err := json.Marshal(tx.PreCommitVolumes) - if err != nil { - panic(err) - } - - postCommitVolumesData, err := json.Marshal(tx.PostCommitVolumes) - if err != nil { - panic(err) - } - - txIds[i] = tx.ID - timestamps[i] = tx.Timestamp - postingDataSet[i] = string(postingsData) - metadataDataSet[i] = string(metadataData) - preCommitVolumesDataSet[i] = string(preCommitVolumesData) - postCommitVolumesDataSet[i] = string(postCommitVolumesData) - references[i] = nil - if tx.Reference != "" { - cp := tx.Reference - references[i] = &cp - } - - for i, p := range tx.Postings { - sourcesBy, err := json.Marshal(strings.Split(p.Source, ":")) - if err != nil { - panic(err) - } - destinationsBy, err := json.Marshal(strings.Split(p.Destination, ":")) - if err != nil { - panic(err) - } - postingTxIds = append(postingTxIds, tx.ID) - postingIndices = append(postingIndices, i) - sources = append(sources, string(sourcesBy)) - destinations = append(destinations, string(destinationsBy)) - } - } - - queryTxs = fmt.Sprintf( - `INSERT INTO "%s".transactions (id, timestamp, reference, - postings, metadata, - pre_commit_volumes, - post_commit_volumes) (SELECT * FROM unnest( - $1::int[], - $2::timestamp[], - $3::varchar[], - $4::jsonb[], - $5::jsonb[], - $6::jsonb[], - $7::jsonb[]))`, - s.schema.Name()) - argsTxs = []any{ - 
txIds, timestamps, references, - postingDataSet, metadataDataSet, - preCommitVolumesDataSet, postCommitVolumesDataSet, - } - - queryPostings := fmt.Sprintf( - `INSERT INTO "%s".postings (txid, posting_index, - source, destination) (SELECT * FROM unnest( - $1::int[], - $2::int[], - $3::jsonb[], - $4::jsonb[]))`, - s.schema.Name()) - argsPostings := []any{ - postingTxIds, postingIndices, sources, destinations, - } - - _, err = executor.ExecContext(ctx, queryPostings, argsPostings...) - if err != nil { - return s.error(err) - } - } - - _, err = executor.ExecContext(ctx, queryTxs, argsTxs...) - if err != nil { - return s.error(err) - } - - return nil -} - -func (s *Store) UpdateTransactionMetadata(ctx context.Context, id uint64, metadata core.Metadata, at time.Time) error { - ub := sqlbuilder.NewUpdateBuilder() - - metadataData, err := json.Marshal(metadata) - if err != nil { - return err - } - ub. - Update(s.schema.Table("transactions")). - Where(ub.E("id", id)) - - placeholder := ub.Var(string(metadataData)) - switch Flavor(s.schema.Flavor()) { - case PostgreSQL: - ub.Set(fmt.Sprintf("metadata = metadata || %s", placeholder)) - case SQLite: - ub.Set(fmt.Sprintf("metadata = json_patch(metadata, %s)", placeholder)) - } - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - sqlq, args := ub.BuildWithFlavor(s.schema.Flavor()) - _, err = executor.ExecContext(ctx, sqlq, args...) 
- if err != nil { - return err - } - - lastLog, err := s.GetLastLog(ctx) - if err != nil { - return errors.Wrap(err, "reading last log") - } - - return s.appendLog(ctx, core.NewSetMetadataLog(lastLog, at, core.SetMetadata{ - TargetType: core.MetaTargetTypeTransaction, - TargetID: id, - Metadata: metadata, - })) -} diff --git a/pkg/storage/sqlstorage/transactions_test.go b/pkg/storage/sqlstorage/transactions_test.go deleted file mode 100644 index 915cb253c..000000000 --- a/pkg/storage/sqlstorage/transactions_test.go +++ /dev/null @@ -1,289 +0,0 @@ -package sqlstorage_test - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - "testing" - "time" - - "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/logging/logginglogrus" - "github.com/google/uuid" - "github.com/numary/ledger/pkg/core" - "github.com/numary/ledger/pkg/ledger" - "github.com/numary/ledger/pkg/ledgertesting" - "github.com/numary/ledger/pkg/storage/sqlstorage" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/require" - "go.uber.org/fx" -) - -func BenchmarkStore_GetTransactions(b *testing.B) { - b.StopTimer() - l := logrus.New() - if testing.Verbose() { - l.Level = logrus.DebugLevel - } - logging.SetFactory(logging.StaticLoggerFactory(logginglogrus.New(l))) - - app := fx.New( - fx.NopLogger, - ledgertesting.ProvideStorageDriver(), - fx.Invoke(func(driver *sqlstorage.Driver, lc fx.Lifecycle) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - ledgerName := uuid.NewString() - var store *sqlstorage.Store - var err error - for store == nil { - store, _, err = driver.GetLedgerStore(ctx, ledgerName, true) - if err != nil { - fmt.Printf("sqlstorage.Driver.GetLedgerStore: %s\n", err.Error()) - time.Sleep(3 * time.Second) - } - } - defer func(store ledger.Store, ctx context.Context) { - require.NoError(b, store.Close(ctx)) - }(store, context.Background()) - - if _, err = 
store.Initialize(context.Background()); err != nil { - return err - } - - benchGetTransactions(b, store) - return nil - }, - }) - })) - - require.NoError(b, app.Start(context.Background())) - defer func(app *fx.App, ctx context.Context) { - require.NoError(b, app.Stop(ctx)) - }(app, context.Background()) -} - -func benchGetTransactions(b *testing.B, store *sqlstorage.Store) { - maxPages := 120 - maxPageSize := 500 - id := uint64(0) - var txs []core.ExpandedTransaction - for i := 0; i < maxPages; i++ { - for j := 0; j < maxPageSize; j++ { - acc := uuid.NewString() + ":key1:" + uuid.NewString() + ":key2:" + uuid.NewString() - txs = append(txs, core.ExpandedTransaction{ - Transaction: core.Transaction{ - ID: id, - TransactionData: core.TransactionData{ - Postings: []core.Posting{ - { - Source: acc, - Destination: "world", - Amount: core.NewMonetaryInt(100), - Asset: "USD", - }, - }, - Reference: uuid.NewString(), - Timestamp: time.Now(), - }, - }, - PostCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(100), - Output: core.NewMonetaryInt(0), - }, - }, - acc: { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(100), - }, - }, - }, - PreCommitVolumes: core.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - acc: { - "USD": { - Input: core.NewMonetaryInt(0), - Output: core.NewMonetaryInt(0), - }, - }, - }, - }) - id++ - } - if len(txs) >= 1000 { - require.NoError(b, store.Commit(context.Background(), txs...)) - txs = []core.ExpandedTransaction{} - } - } - if len(txs) > 0 { - require.NoError(b, store.Commit(context.Background(), txs...)) - } - - numTxs := maxPages * maxPageSize - nb, err := store.CountTransactions(context.Background(), ledger.TransactionsQuery{}) - require.NoError(b, err) - require.Equal(b, uint64(numTxs), nb) - - firstQ1, midQ1, lastQ1 := getTxQueries(b, store, 1, maxPages*maxPageSize) - firstQ50, midQ50, lastQ50 
:= getTxQueries(b, store, 50, maxPages*maxPageSize) - firstQ500, midQ500, lastQ500 := getTxQueries(b, store, 500, maxPages*maxPageSize) - var cursor api.Cursor[core.ExpandedTransaction] - - b.ResetTimer() - b.StartTimer() - - b.Run("firstQ1", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *firstQ1) - require.NoError(b, err) - } - require.Equal(b, 1, cursor.PageSize) - require.Len(b, cursor.Data, 1) - require.Equal(b, "", cursor.Previous) - require.NotEqual(b, "", cursor.Next) - }) - - b.Run("midQ1", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *midQ1) - require.NoError(b, err) - } - require.Equal(b, 1, cursor.PageSize) - require.Len(b, cursor.Data, 1) - require.NotEqual(b, "", cursor.Previous) - require.NotEqual(b, "", cursor.Next) - }) - - b.Run("lastQ1", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *lastQ1) - require.NoError(b, err) - } - require.Equal(b, 1, cursor.PageSize) - require.Len(b, cursor.Data, 1) - require.NotEqual(b, "", cursor.Previous) - require.Equal(b, "", cursor.Next) - }) - - b.Run("firstQ50", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *firstQ50) - require.NoError(b, err) - } - require.Equal(b, 50, cursor.PageSize) - require.Len(b, cursor.Data, 50) - require.Equal(b, "", cursor.Previous) - require.NotEqual(b, "", cursor.Next) - }) - - b.Run("midQ50", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *midQ50) - require.NoError(b, err) - } - require.Equal(b, 50, cursor.PageSize) - require.Len(b, cursor.Data, 50) - require.NotEqual(b, "", cursor.Previous) - require.NotEqual(b, "", cursor.Next) - }) - - b.Run("lastQ50", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), 
*lastQ50) - require.NoError(b, err) - } - require.Equal(b, 50, cursor.PageSize) - require.Len(b, cursor.Data, 50) - require.NotEqual(b, "", cursor.Previous) - require.Equal(b, "", cursor.Next) - }) - - b.Run("firstQ500", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *firstQ500) - require.NoError(b, err) - } - require.Equal(b, 500, cursor.PageSize) - require.Len(b, cursor.Data, 500) - require.Equal(b, "", cursor.Previous) - require.NotEqual(b, "", cursor.Next) - }) - - b.Run("midQ500", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *midQ500) - require.NoError(b, err) - } - require.Equal(b, 500, cursor.PageSize) - require.Len(b, cursor.Data, 500) - require.NotEqual(b, "", cursor.Previous) - require.NotEqual(b, "", cursor.Next) - }) - - b.Run("lastQ500", func(b *testing.B) { - for n := 0; n < b.N; n++ { - cursor, err = store.GetTransactions(context.Background(), *lastQ500) - require.NoError(b, err) - } - require.Equal(b, 500, cursor.PageSize) - require.Len(b, cursor.Data, 500) - require.NotEqual(b, "", cursor.Previous) - require.Equal(b, "", cursor.Next) - }) -} - -func getTxQueries(b *testing.B, store *sqlstorage.Store, pageSize, maxNumTxs int) (firstQ, midQ, lastQ *ledger.TransactionsQuery) { - numTxs := 0 - txQuery := &ledger.TransactionsQuery{ - Filters: ledger.TransactionsQueryFilters{ - Source: ".*:key1:.*:key2:.*", - }, - PageSize: uint(pageSize), - } - firstQ = txQuery - cursor := api.Cursor[core.ExpandedTransaction]{ - HasMore: true, - } - var err error - for cursor.HasMore { - if cursor.Next != "" { - res, decErr := base64.RawURLEncoding.DecodeString(cursor.Next) - if decErr != nil { - return - } - - token := sqlstorage.TxsPaginationToken{} - if err = json.Unmarshal(res, &token); err != nil { - return - } - - txQuery = ledger.NewTransactionsQuery(). - WithAfterTxID(token.AfterTxID). - WithSourceFilter(token.SourceFilter). 
- WithPageSize(token.PageSize) - } - - cursor, err = store.GetTransactions(context.Background(), *txQuery) - require.NoError(b, err) - require.Equal(b, pageSize, cursor.PageSize) - require.Len(b, cursor.Data, pageSize) - numTxs += len(cursor.Data) - - if midQ == nil && numTxs > maxNumTxs/2 { - midQ = txQuery - } - } - lastQ = txQuery - require.Equal(b, maxNumTxs, numTxs) - return -} diff --git a/pkg/storage/sqlstorage/volumes.go b/pkg/storage/sqlstorage/volumes.go deleted file mode 100644 index bfe0359a1..000000000 --- a/pkg/storage/sqlstorage/volumes.go +++ /dev/null @@ -1,37 +0,0 @@ -package sqlstorage - -import ( - "context" - - "github.com/huandu/go-sqlbuilder" - "github.com/numary/ledger/pkg/core" -) - -func (s *Store) updateVolumes(ctx context.Context, volumes core.AccountsAssetsVolumes) error { - for account, accountVolumes := range volumes { - for asset, volumes := range accountVolumes { - ib := sqlbuilder.NewInsertBuilder() - inputArg := ib.Var(volumes.Input.String()) - outputArg := ib.Var(volumes.Output.String()) - ib. - InsertInto(s.schema.Table("volumes")). - Cols("account", "asset", "input", "output"). - Values(account, asset, volumes.Input.String(), volumes.Output.String()). - SQL("ON CONFLICT (account, asset) DO UPDATE SET input = " + inputArg + ", output = " + outputArg) - - sqlq, args := ib.BuildWithFlavor(s.schema.Flavor()) - - executor, err := s.executorProvider(ctx) - if err != nil { - return err - } - - _, err = executor.ExecContext(ctx, sqlq, args...) 
- if err != nil { - return s.error(err) - } - } - } - - return nil -} diff --git a/pkg/storage/transactional.go b/pkg/storage/transactional.go deleted file mode 100644 index 289add090..000000000 --- a/pkg/storage/transactional.go +++ /dev/null @@ -1,92 +0,0 @@ -package storage - -import ( - "context" - "errors" -) - -type contextHolder struct { - transactional bool - transaction any - commit func(ctx context.Context) error - rollback func(ctx context.Context) error -} - -type contextHolderKeyStruct struct{} - -var contextKey = contextHolderKeyStruct{} - -func withContextHolder(ctx context.Context, holder *contextHolder) context.Context { - return context.WithValue(ctx, contextKey, holder) -} - -func getContextHolder(ctx context.Context) *contextHolder { - ctxHolder := ctx.Value(contextKey) - if ctxHolder == nil { - return nil - } - return ctxHolder.(*contextHolder) -} - -func RegisteredTransaction(ctx context.Context) any { - holder := getContextHolder(ctx) - if holder == nil { - panic("no context holder") - } - return holder.transaction -} - -func RegisterTransaction(ctx context.Context, transaction any, - commitFn func(ctx context.Context) error, rollbackFn func(ctx context.Context) error) { - holder := getContextHolder(ctx) - if holder == nil { - panic("no context holder") - } - holder.transaction = transaction - holder.commit = commitFn - holder.rollback = rollbackFn -} - -func IsTransactionRegistered(ctx context.Context) bool { - ctxHolder := ctx.Value(contextKey) - if ctxHolder == nil { - return false - } - return ctxHolder.(*contextHolder).transaction != nil -} - -func IsTransactional(ctx context.Context) bool { - ctxHolder := ctx.Value(contextKey) - if ctxHolder == nil { - return false - } - return ctxHolder.(*contextHolder).transactional -} - -func TransactionalContext(ctx context.Context) context.Context { - return withContextHolder(ctx, &contextHolder{ - transactional: true, - }) -} - -func CommitTransaction(ctx context.Context) error { - holder := 
getContextHolder(ctx) - if holder == nil { - panic("context holder is nil") - } - if holder.transaction == nil { - return errors.New("transaction not initialized") - } - return holder.commit(ctx) -} - -func RollbackTransaction(ctx context.Context) error { - holder := getContextHolder(ctx) - if holder == nil { - panic("context holder is nil") - } - if holder.transaction == nil { - return errors.New("transaction not initialized") - } - return holder.rollback(ctx) -} diff --git a/sdk/configs/go.yaml b/sdk/configs/go.yaml deleted file mode 100644 index cb977cddd..000000000 --- a/sdk/configs/go.yaml +++ /dev/null @@ -1,8 +0,0 @@ -templateDir: templates/go -additionalProperties: - packageName: ledgerclient - goImportAlias: client - generateInterfaces: true -files: - Taskfile.yml.mustache: - destinationFilename: Taskfile.yml diff --git a/sdk/configs/java.yaml b/sdk/configs/java.yaml deleted file mode 100644 index 73936a905..000000000 --- a/sdk/configs/java.yaml +++ /dev/null @@ -1,34 +0,0 @@ -templateDir: templates/java -files: - Taskfile.yml.mustache: - destinationFilename: Taskfile.yml -additionalProperties: - invokerPackage: com.formance.ledgerclient - apiPackage: com.formance.ledgerclient.api - modelPackage: com.formance.ledgerclient.model - groupId: com.formance - artifactId: ledgerclient - artifactDescription: Ledger Java Client - artifactUrl: https://github.com/numary/numary-sdk-java - scmConnection: scm:git:git@github.com/numary/numary-sdk-java.git - scmDeveloperConnection: scm:git:git@github.com/numary/numary-sdk-java.git - scmUrl: https://github.com/numary/numary-sdk-java - developerEmail: support@formance.com - developerName: Ledger Java Client Contributors - developerOrganization: Formance - developerOrganizationUrl: https://formance.com - library: retrofit2 - openApiNullable: false - licenseName: MIT License - licenseUrl: http://www.opensource.org/licenses/mit-license.php - gpg_private_key: '${{ secrets.GPG_SIGNING_KEY }}' - gpg_passphrase: '${{ 
secrets.GPG_PASSPHRASE }}' - nexus_username: '${{ secrets.OSSRH_USERNAME }}' - nexus_password: '${{ secrets.OSSRH_PASSWORD }}' - -inlineSchemaNameMappings: - getAccount_200_response: GetAccountResponse - listAccounts_200_response: ListAccountsResponse - listAccounts_200_response_cursor: ListAccountsResponseCursor - listTransactions_200_response: ListTransactionsResponse - listTransactions_200_response_cursor: ListTransactionsResponseCursor diff --git a/sdk/configs/php.yaml b/sdk/configs/php.yaml deleted file mode 100644 index e0df377b2..000000000 --- a/sdk/configs/php.yaml +++ /dev/null @@ -1,6 +0,0 @@ -templateDir: templates/php -additionalProperties: - invokerPackage: Numary\Ledger -files: - Taskfile.yml.mustache: - destinationFilename: Taskfile.yml diff --git a/sdk/configs/python.yaml b/sdk/configs/python.yaml deleted file mode 100644 index 72099385a..000000000 --- a/sdk/configs/python.yaml +++ /dev/null @@ -1,12 +0,0 @@ -templateDir: templates/python -additionalProperties: - packageName: ledgerclient - infoName: Numary ledger Client - infoEmail: support@numary.com - PYPI_SECRET: "${{ secrets.PYPI_SECRET }}" - -files: - Taskfile.yml.mustache: - destinationFilename: Taskfile.yml - .github/workflows/release.yml.mustache: - destinationFilename: .github/workflows/release.yml diff --git a/sdk/configs/typescript-node.yaml b/sdk/configs/typescript-node.yaml deleted file mode 100644 index ada3f25ae..000000000 --- a/sdk/configs/typescript-node.yaml +++ /dev/null @@ -1,14 +0,0 @@ -templateDir: templates/typescript -additionalProperties: - enumUnknownDefaultCase: true - platform: node - npmName: '@numaryhq/ledger-nodejs' - moduleName: ledger - projectName: '@numaryhq/ledger-nodejs' - gitRepository: 'https://github.com/numary/numary-sdk-typescript-node' - NPM_TOKEN: '${{ secrets.NPM_TOKEN }}' -files: - Taskfile.yml.mustache: - destinationFilename: Taskfile.yml - .github/workflows/release.yml.mustache: - destinationFilename: .github/workflows/release.yml diff --git 
a/sdk/templates/go/README.mustache b/sdk/templates/go/README.mustache deleted file mode 100644 index f71cd6e36..000000000 --- a/sdk/templates/go/README.mustache +++ /dev/null @@ -1,231 +0,0 @@ -# Formance Go SDK - -{{#appDescriptionWithNewLines}} -{{{.}}} -{{/appDescriptionWithNewLines}} - -## Overview -This SDK was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the [OpenAPI-spec](https://www.openapis.org/) from a remote server, you can easily generate SDK. - -- API version: {{appVersion}} -- Package version: {{packageVersion}} -{{^hideGenerationTimestamp}} -- Build date: {{generatedDate}} -{{/hideGenerationTimestamp}} -- Build package: {{generatorClass}} -{{#infoUrl}} -For more information, please visit [{{{infoUrl}}}]({{{infoUrl}}}) -{{/infoUrl}} - -## Installation - -```shell -go get github.com/formancehq/numary-sdk-go -``` - -Put the package under your project folder and add the following in import: - -```golang -import sw "./{{packageName}}" -``` - -To use a proxy, set the environment variable `HTTP_PROXY`: - -```golang -os.Setenv("HTTP_PROXY", "http://proxy_name:proxy_port") -``` - -## Configuration of Server URL - -Default configuration comes with `Servers` field that contains server objects as defined in the OpenAPI specification. - -### Select Server Configuration - -For using other server than the one defined on index 0 set context value `sw.ContextServerIndex` of type `int`. - -```golang -ctx := context.WithValue(context.Background(), sw.ContextServerIndex, 1) -``` - -### Templated Server URL - -Templated server URL is formatted using default variables from configuration or from context value `sw.ContextServerVariables` of type `map[string]string`. - -```golang -ctx := context.WithValue(context.Background(), sw.ContextServerVariables, map[string]string{ - "basePath": "v2", -}) -``` - -Note, enum values are always validated and all unused variables are silently ignored. 
- -### URLs Configuration per Operation - -Each operation can use different server URL defined using `OperationServers` map in the `Configuration`. -An operation is uniquely identified by `"{classname}Service.{nickname}"` string. -Similar rules for overriding default operation server index and variables applies by using `sw.ContextOperationServerIndices` and `sw.ContextOperationServerVariables` context maps. - -``` -ctx := context.WithValue(context.Background(), sw.ContextOperationServerIndices, map[string]int{ - "{classname}Service.{nickname}": 2, -}) -ctx = context.WithValue(context.Background(), sw.ContextOperationServerVariables, map[string]map[string]string{ - "{classname}Service.{nickname}": { - "port": "8443", - }, -}) -``` - -## Documentation for API Endpoints - -All URIs are relative to *{{basePath}}* - -Class | Method | HTTP request | Description ------------- | ------------- | ------------- | ------------- -{{#apiInfo}}{{#apis}}{{#operations}}{{#operation}}*{{classname}}* | [**{{operationId}}**]({{apiDocPath}}{{classname}}.md#{{operationIdLowerCase}}) | **{{httpMethod}}** {{path}} | {{summary}} -{{/operation}}{{/operations}}{{/apis}}{{/apiInfo}} - -## Documentation For Models - -{{#models}}{{#model}} - [{{{classname}}}]({{modelDocPath}}{{{classname}}}.md) -{{/model}}{{/models}} - -## Documentation For Authorization - -{{^authMethods}} Endpoints do not require authorization. -{{/authMethods}}{{#authMethods}}{{#last}} Authentication schemes defined for the API:{{/last}}{{/authMethods}} -{{#authMethods}} - -### {{{name}}} - -{{#isApiKey}} -- **Type**: API key -- **API key parameter name**: {{{keyParamName}}} -- **Location**: {{#isKeyInQuery}}URL query string{{/isKeyInQuery}}{{#isKeyInHeader}}HTTP header{{/isKeyInHeader}} - -Note, each API key must be added to a map of `map[string]APIKey` where the key is: {{keyParamName}} and passed in as the auth context for each request. 
- -{{/isApiKey}} -{{#isBasic}} -{{#isBasicBearer}} -- **Type**: HTTP Bearer token authentication - -Example - -```golang -auth := context.WithValue(context.Background(), sw.ContextAccessToken, "BEARERTOKENSTRING") -r, err := client.Service.Operation(auth, args) -``` - -Library provide utility to fetch access token : -```golang -tok, err := sw.FetchToken( - http.DefaultClient, - sw.AuthEndpoint, - "API KEY", -) -if err != nil { - panic(err) -} -auth := context.WithValue(context.Background(), sw.ContextCloudToken, tok) -r, err := client.Service.Operation(auth, args) -``` - -{{/isBasicBearer}} -{{#isBasicBasic}} -- **Type**: HTTP basic authentication - -Example - -```golang -auth := context.WithValue(context.Background(), sw.ContextBasicAuth, sw.BasicAuth{ - UserName: "username", - Password: "password", -}) -r, err := client.Service.Operation(auth, args) -``` - -{{/isBasicBasic}} -{{#isHttpSignature}} -- **Type**: HTTP signature authentication - -Example - -```golang - authConfig := sw.HttpSignatureAuth{ - KeyId: "my-key-id", - PrivateKeyPath: "rsa.pem", - Passphrase: "my-passphrase", - SigningScheme: sw.HttpSigningSchemeHs2019, - SignedHeaders: []string{ - sw.HttpSignatureParameterRequestTarget, // The special (request-target) parameter expresses the HTTP request target. - sw.HttpSignatureParameterCreated, // Time when request was signed, formatted as a Unix timestamp integer value. - "Host", // The Host request header specifies the domain name of the server, and optionally the TCP port number. - "Date", // The date and time at which the message was originated. - "Content-Type", // The Media type of the body of the request. - "Digest", // A cryptographic digest of the request body. 
- }, - SigningAlgorithm: sw.HttpSigningAlgorithmRsaPSS, - SignatureMaxValidity: 5 * time.Minute, - } - var authCtx context.Context - var err error - if authCtx, err = authConfig.ContextWithValue(context.Background()); err != nil { - // Process error - } - r, err = client.Service.Operation(auth, args) - -``` -{{/isHttpSignature}} -{{/isBasic}} -{{#isOAuth}} - -- **Type**: OAuth -- **Flow**: {{{flow}}} -- **Authorization URL**: {{{authorizationUrl}}} -- **Scopes**: {{^scopes}}N/A{{/scopes}} -{{#scopes}} - **{{{scope}}}**: {{{description}}} -{{/scopes}} - -Example - -```golang -auth := context.WithValue(context.Background(), sw.ContextAccessToken, "ACCESSTOKENSTRING") -r, err := client.Service.Operation(auth, args) -``` - -Or via OAuth2 module to automatically refresh tokens and perform user authentication. - -```golang -import "golang.org/x/oauth2" - -/* Perform OAuth2 round trip request and obtain a token */ - -tokenSource := oauth2cfg.TokenSource(createContext(httpClient), &token) -auth := context.WithValue(oauth2.NoContext, sw.ContextOAuth2, tokenSource) -r, err := client.Service.Operation(auth, args) -``` - -{{/isOAuth}} -{{/authMethods}} - -## Documentation for Utility Methods - -Due to the fact that model structure members are all pointers, this package contains -a number of utility functions to easily obtain pointers to values of basic types. 
-Each of these functions takes a value of the given basic type and returns a pointer to it: - -* `PtrBool` -* `PtrInt` -* `PtrInt32` -* `PtrInt64` -* `PtrFloat` -* `PtrFloat32` -* `PtrFloat64` -* `PtrString` -* `PtrTime` - -## Author - -{{#apiInfo}}{{#apis}}{{#-last}}{{infoEmail}} -{{/-last}}{{/apis}}{{/apiInfo}} diff --git a/sdk/templates/go/Taskfile.yml.mustache b/sdk/templates/go/Taskfile.yml.mustache deleted file mode 100644 index be8841fdc..000000000 --- a/sdk/templates/go/Taskfile.yml.mustache +++ /dev/null @@ -1,30 +0,0 @@ -version: '3' - -tasks: - vendor: - cmds: - - go mod vendor - - start-ledger: - cmds: - - > - docker run -d - --name ledger - --rm - -e NUMARY_SERVER_HTTP_BIND_ADDRESS=0.0.0.0:3068 - -p 3068:3068 - ghcr.io/formancehq/ledger:{{appVersion}} - - stop-ledger: - cmds: - - docker stop ledger - - test: - desc: Test client code - deps: - - vendor - cmds: - - task: start-ledger - - defer: - task: stop-ledger - - go test diff --git a/sdk/templates/go/api.mustache b/sdk/templates/go/api.mustache deleted file mode 100644 index 4a94d6aaf..000000000 --- a/sdk/templates/go/api.mustache +++ /dev/null @@ -1,413 +0,0 @@ -{{>partial_header}} -package {{packageName}} - -{{#operations}} -import ( - "bytes" - _context "context" - _ioutil "io/ioutil" - _nethttp "net/http" - _neturl "net/url" -{{#imports}} "{{import}}" -{{/imports}} -) - -// Linger please -var ( - _ _context.Context -) -{{#generateInterfaces}} - -type {{classname}} interface { - {{#operation}} - - /* - {{operationId}} {{{summary}}}{{^summary}}Method for {{operationId}}{{/summary}} - {{#notes}} - - {{{unescapedNotes}}} - {{/notes}} - - @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. 
Passed from http.Request or context.Background().{{#pathParams}} - @param {{paramName}}{{#description}} {{{.}}}{{/description}}{{/pathParams}} - @return {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request - {{#isDeprecated}} - - Deprecated - {{/isDeprecated}} - */ - {{{nickname}}}(ctx _context.Context{{#pathParams}}, {{paramName}} {{{dataType}}}{{/pathParams}}) {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request - - // {{nickname}}Execute executes the request{{#returnType}} - // @return {{{.}}}{{/returnType}} - {{#isDeprecated}} - // Deprecated - {{/isDeprecated}} - {{nickname}}Execute(r {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request) ({{#returnType}}{{{.}}}, {{/returnType}}*_nethttp.Response, error) - {{/operation}} -} -{{/generateInterfaces}} - -// {{classname}}Service {{classname}} service -type {{classname}}Service service -{{#operation}} - -type {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request struct { - ctx _context.Context - ApiService {{#generateInterfaces}}{{classname}}{{/generateInterfaces}}{{^generateInterfaces}}*{{classname}}Service{{/generateInterfaces}} -{{#allParams}} - {{paramName}} {{^isPathParam}}*{{/isPathParam}}{{{dataType}}} -{{/allParams}} -} -{{#allParams}}{{^isPathParam}} -{{#description}} -// {{.}} -{{/description}} -{{#isDeprecated}} -// Deprecated -{{/isDeprecated}} -func (r {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request) {{vendorExtensions.x-export-param-name}}({{paramName}} {{{dataType}}}) {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request { - r.{{paramName}} = &{{paramName}} - return r -}{{/isPathParam}}{{/allParams}} - -func (r {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request) Execute() ({{#returnType}}{{{.}}}, {{/returnType}}*_nethttp.Response, error) { - return r.ApiService.{{nickname}}Execute(r) -} - -/* -{{operationId}} {{{summary}}}{{^summary}}Method for 
{{operationId}}{{/summary}} -{{#notes}} - -{{{unescapedNotes}}} -{{/notes}} - - @param ctx _context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background().{{#pathParams}} - @param {{paramName}}{{#description}} {{{.}}}{{/description}}{{/pathParams}} - @return {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request -{{#isDeprecated}} - -Deprecated -{{/isDeprecated}} -*/ -func (a *{{{classname}}}Service) {{{nickname}}}(ctx _context.Context{{#pathParams}}, {{paramName}} {{{dataType}}}{{/pathParams}}) {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request { - return {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request{ - ApiService: a, - ctx: ctx, - {{#pathParams}} - {{paramName}}: {{paramName}}, - {{/pathParams}} - } -} - -// Execute executes the request{{#returnType}} -// @return {{{.}}}{{/returnType}} -{{#isDeprecated}} -// Deprecated -{{/isDeprecated}} -func (a *{{{classname}}}Service) {{nickname}}Execute(r {{#structPrefix}}{{&classname}}{{/structPrefix}}Api{{operationId}}Request) ({{#returnType}}{{{.}}}, {{/returnType}}*_nethttp.Response, error) { - var ( - localVarHTTPMethod = _nethttp.Method{{httpMethod}} - localVarPostBody interface{} - formFiles []formFile - {{#returnType}} - localVarReturnValue {{{.}}} - {{/returnType}} - ) - - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "{{{classname}}}Service.{{{nickname}}}") - if err != nil { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, GenericOpenAPIError{error: err.Error()} - } - - localVarPath := localBasePath + "{{{path}}}"{{#pathParams}} - localVarPath = strings.Replace(localVarPath, "{"+"{{baseName}}"+"}", _neturl.PathEscape(parameterToString(r.{{paramName}}, "{{collectionFormat}}")), -1){{/pathParams}} - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := _neturl.Values{} - localVarFormParams := _neturl.Values{} - {{#allParams}} 
- {{#required}} - {{^isPathParam}} - if r.{{paramName}} == nil { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, reportError("{{paramName}} is required and must be specified") - } - {{/isPathParam}} - {{#minItems}} - if len({{^isPathParam}}*{{/isPathParam}}r.{{paramName}}) < {{minItems}} { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, reportError("{{paramName}} must have at least {{minItems}} elements") - } - {{/minItems}} - {{#maxItems}} - if len({{^isPathParam}}*{{/isPathParam}}r.{{paramName}}) > {{maxItems}} { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, reportError("{{paramName}} must have less than {{maxItems}} elements") - } - {{/maxItems}} - {{#minLength}} - if strlen({{^isPathParam}}*{{/isPathParam}}r.{{paramName}}) < {{minLength}} { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, reportError("{{paramName}} must have at least {{minLength}} elements") - } - {{/minLength}} - {{#maxLength}} - if strlen({{^isPathParam}}*{{/isPathParam}}r.{{paramName}}) > {{maxLength}} { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, reportError("{{paramName}} must have less than {{maxLength}} elements") - } - {{/maxLength}} - {{#minimum}} - {{#isString}} - {{paramName}}Txt, err := atoi({{^isPathParam}}*{{/isPathParam}}r.{{paramName}}) - if {{paramName}}Txt < {{minimum}} { - {{/isString}} - {{^isString}} - if {{^isPathParam}}*{{/isPathParam}}r.{{paramName}} < {{minimum}} { - {{/isString}} - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, reportError("{{paramName}} must be greater than {{minimum}}") - } - {{/minimum}} - {{#maximum}} - {{#isString}} - {{paramName}}Txt, err := atoi({{^isPathParam}}*{{/isPathParam}}r.{{paramName}}) - if {{paramName}}Txt > {{maximum}} { - {{/isString}} - {{^isString}} - if {{^isPathParam}}*{{/isPathParam}}r.{{paramName}} > {{maximum}} { - {{/isString}} - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, reportError("{{paramName}} 
must be less than {{maximum}}") - } - {{/maximum}} - {{/required}} - {{/allParams}} - - {{#queryParams}} - {{#required}} - {{#isCollectionFormatMulti}} - { - t := *r.{{paramName}} - if reflect.TypeOf(t).Kind() == reflect.Slice { - s := reflect.ValueOf(t) - for i := 0; i < s.Len(); i++ { - localVarQueryParams.Add("{{baseName}}", parameterToString(s.Index(i), "{{collectionFormat}}")) - } - } else { - localVarQueryParams.Add("{{baseName}}", parameterToString(t, "{{collectionFormat}}")) - } - } - {{/isCollectionFormatMulti}} - {{^isCollectionFormatMulti}} - localVarQueryParams.Add("{{baseName}}", parameterToString(*r.{{paramName}}, "{{collectionFormat}}")) - {{/isCollectionFormatMulti}} - {{/required}} - {{^required}} - if r.{{paramName}} != nil { - {{#isCollectionFormatMulti}} - t := *r.{{paramName}} - if reflect.TypeOf(t).Kind() == reflect.Slice { - s := reflect.ValueOf(t) - for i := 0; i < s.Len(); i++ { - localVarQueryParams.Add("{{baseName}}", parameterToString(s.Index(i), "{{collectionFormat}}")) - } - } else { - localVarQueryParams.Add("{{baseName}}", parameterToString(t, "{{collectionFormat}}")) - } - {{/isCollectionFormatMulti}} - {{^isCollectionFormatMulti}} - localVarQueryParams.Add("{{baseName}}", parameterToString(*r.{{paramName}}, "{{collectionFormat}}")) - {{/isCollectionFormatMulti}} - } - {{/required}} - {{/queryParams}} - // to determine the Content-Type header -{{=<% %>=}} - localVarHTTPContentTypes := []string{<%#consumes%>"<%&mediaType%>"<%^-last%>, <%/-last%><%/consumes%>} -<%={{ }}=%> - - // set Content-Type header - localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) - if localVarHTTPContentType != "" { - localVarHeaderParams["Content-Type"] = localVarHTTPContentType - } - - // to determine the Accept header -{{=<% %>=}} - localVarHTTPHeaderAccepts := []string{<%#produces%>"<%&mediaType%>"<%^-last%>, <%/-last%><%/produces%>} -<%={{ }}=%> - - // set Accept header - localVarHTTPHeaderAccept := 
selectHeaderAccept(localVarHTTPHeaderAccepts) - if localVarHTTPHeaderAccept != "" { - localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept - } -{{#headerParams}} - {{#required}} - localVarHeaderParams["{{baseName}}"] = parameterToString(*r.{{paramName}}, "{{collectionFormat}}") - {{/required}} - {{^required}} - if r.{{paramName}} != nil { - localVarHeaderParams["{{baseName}}"] = parameterToString(*r.{{paramName}}, "{{collectionFormat}}") - } - {{/required}} -{{/headerParams}} -{{#formParams}} -{{#isFile}} - var {{paramName}}LocalVarFormFileName string - var {{paramName}}LocalVarFileName string - var {{paramName}}LocalVarFileBytes []byte - - {{paramName}}LocalVarFormFileName = "{{baseName}}" - -{{#required}} - {{paramName}}LocalVarFile := *r.{{paramName}} -{{/required}} -{{^required}} - var {{paramName}}LocalVarFile {{dataType}} - if r.{{paramName}} != nil { - {{paramName}}LocalVarFile = *r.{{paramName}} - } -{{/required}} - if {{paramName}}LocalVarFile != nil { - fbs, _ := _ioutil.ReadAll({{paramName}}LocalVarFile) - {{paramName}}LocalVarFileBytes = fbs - {{paramName}}LocalVarFileName = {{paramName}}LocalVarFile.Name() - {{paramName}}LocalVarFile.Close() - } - formFiles = append(formFiles, formFile{fileBytes: {{paramName}}LocalVarFileBytes, fileName: {{paramName}}LocalVarFileName, formFileName: {{paramName}}LocalVarFormFileName}) -{{/isFile}} -{{^isFile}} -{{#required}} - localVarFormParams.Add("{{baseName}}", parameterToString(*r.{{paramName}}, "{{collectionFormat}}")) -{{/required}} -{{^required}} -{{#isModel}} - if r.{{paramName}} != nil { - paramJson, err := parameterToJson(*r.{{paramName}}) - if err != nil { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, err - } - localVarFormParams.Add("{{baseName}}", paramJson) - } -{{/isModel}} -{{^isModel}} - if r.{{paramName}} != nil { - localVarFormParams.Add("{{baseName}}", parameterToString(*r.{{paramName}}, "{{collectionFormat}}")) - } -{{/isModel}} -{{/required}} -{{/isFile}} -{{/formParams}} 
-{{#bodyParams}} - // body params - localVarPostBody = r.{{paramName}} -{{/bodyParams}} -{{#authMethods}} -{{#isApiKey}} -{{^isKeyInCookie}} - if r.ctx != nil { - // API Key Authentication - if auth, ok := r.ctx.Value(ContextAPIKeys).(map[string]APIKey); ok { - {{#vendorExtensions.x-auth-id-alias}} - if apiKey, ok := auth["{{.}}"]; ok { - var key string - if prefix, ok := auth["{{name}}"]; ok && prefix.Prefix != "" { - key = prefix.Prefix + " " + apiKey.Key - } else { - key = apiKey.Key - } - {{/vendorExtensions.x-auth-id-alias}} - {{^vendorExtensions.x-auth-id-alias}} - if apiKey, ok := auth["{{name}}"]; ok { - var key string - if apiKey.Prefix != "" { - key = apiKey.Prefix + " " + apiKey.Key - } else { - key = apiKey.Key - } - {{/vendorExtensions.x-auth-id-alias}} - {{#isKeyInHeader}} - localVarHeaderParams["{{keyParamName}}"] = key - {{/isKeyInHeader}} - {{#isKeyInQuery}} - localVarQueryParams.Add("{{keyParamName}}", key) - {{/isKeyInQuery}} - } - } - } -{{/isKeyInCookie}} -{{/isApiKey}} -{{/authMethods}} - req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) - if err != nil { - return {{#returnType}}localVarReturnValue, {{/returnType}}nil, err - } - - localVarHTTPResponse, err := a.client.callAPI(req) - if err != nil || localVarHTTPResponse == nil { - return {{#returnType}}localVarReturnValue, {{/returnType}}localVarHTTPResponse, err - } - - localVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body) - localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = _ioutil.NopCloser(bytes.NewBuffer(localVarBody)) - if err != nil { - return {{#returnType}}localVarReturnValue, {{/returnType}}localVarHTTPResponse, err - } - - if localVarHTTPResponse.StatusCode >= 300 { - newErr := GenericOpenAPIError{ - body: localVarBody, - error: localVarHTTPResponse.Status, - } - {{#responses}} - {{#dataType}} - {{^is1xx}} - {{^is2xx}} - {{#range}} - 
{{#is3xx}} - if localVarHTTPResponse.StatusCode >= 300 && localVarHTTPResponse.StatusCode < 400 { - {{/is3xx}} - {{#is4xx}} - if localVarHTTPResponse.StatusCode >= 400 && localVarHTTPResponse.StatusCode < 500 { - {{/is4xx}} - {{#is5xx}} - if localVarHTTPResponse.StatusCode >= 500 - {{/is5xx}} - {{/range}} - {{^range}} - {{^wildcard}} - if localVarHTTPResponse.StatusCode == {{{code}}} { - {{/wildcard}} - {{/range}} - var v {{{dataType}}} - err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) - if err != nil { - newErr.error = err.Error() - return {{#returnType}}localVarReturnValue, {{/returnType}}localVarHTTPResponse, newErr - } - newErr.model = v - {{^-last}} - return {{#returnType}}localVarReturnValue, {{/returnType}}localVarHTTPResponse, newErr - {{/-last}} - {{^wildcard}} - } - {{/wildcard}} - {{/is2xx}} - {{/is1xx}} - {{/dataType}} - {{/responses}} - return {{#returnType}}localVarReturnValue, {{/returnType}}localVarHTTPResponse, newErr - } - - {{#returnType}} - err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) - if err != nil { - newErr := GenericOpenAPIError{ - body: localVarBody, - error: err.Error(), - } - return {{#returnType}}localVarReturnValue, {{/returnType}}localVarHTTPResponse, newErr - } - - {{/returnType}} - return {{#returnType}}localVarReturnValue, {{/returnType}}localVarHTTPResponse, nil -} -{{/operation}} -{{/operations}} diff --git a/sdk/templates/go/api_doc.mustache b/sdk/templates/go/api_doc.mustache deleted file mode 100644 index c0c67ced0..000000000 --- a/sdk/templates/go/api_doc.mustache +++ /dev/null @@ -1,92 +0,0 @@ -# {{invokerPackage}}\{{classname}}{{#description}} - -{{.}}{{/description}} - -All URIs are relative to *{{basePath}}* - -Method | HTTP request | Description -------------- | ------------- | ------------- -{{#operations}}{{#operation}}[**{{operationId}}**]({{classname}}.md#{{operationId}}) | **{{httpMethod}}** {{path}} | {{summary}} 
-{{/operation}}{{/operations}} - -{{#operations}} -{{#operation}} - -## {{{operationId}}} - -> {{#returnType}}{{{.}}} {{/returnType}}{{{operationId}}}(ctx{{#pathParams}}, {{paramName}}{{/pathParams}}){{#allParams}}{{^isPathParam}}.{{vendorExtensions.x-export-param-name}}({{paramName}}){{/isPathParam}}{{/allParams}}.Execute() - -{{{summary}}}{{#notes}} - -{{{unespacedNotes}}}{{/notes}} - -### Example - -```go -package main - -import ( - "context" - "fmt" - "os" -{{#vendorExtensions.x-go-import}} -{{{vendorExtensions.x-go-import}}} -{{/vendorExtensions.x-go-import}} - {{goImportAlias}} "github.com/numary/numary-go" -) - -func main() { - {{#allParams}} - {{paramName}} := {{{vendorExtensions.x-go-example}}} // {{{dataType}}} | {{{description}}}{{^required}} (optional){{/required}}{{#defaultValue}} (default to {{{.}}}){{/defaultValue}} - {{/allParams}} - - configuration := {{goImportAlias}}.NewConfiguration() - api_client := {{goImportAlias}}.NewAPIClient(configuration) - resp, r, err := api_client.{{classname}}.{{operationId}}(context.Background(){{#pathParams}}, {{paramName}}{{/pathParams}}){{#allParams}}{{^isPathParam}}.{{vendorExtensions.x-export-param-name}}({{paramName}}){{/isPathParam}}{{/allParams}}.Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `{{classname}}.{{operationId}}``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - {{#returnType}} - // response from `{{operationId}}`: {{{.}}} - fmt.Fprintf(os.Stdout, "Response from `{{classname}}.{{operationId}}`: %v\n", resp) - {{/returnType}} -} -``` - -### Path Parameters - -{{^allParams}}This endpoint does not need any parameter.{{/allParams}}{{#pathParams}}{{#-last}} -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- -**ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc.{{/-last}}{{/pathParams}}{{#pathParams}} -**{{paramName}}** | 
{{^isPrimitiveType}}{{^isFile}}[{{/isFile}}{{/isPrimitiveType}}**{{dataType}}**{{^isPrimitiveType}}{{^isFile}}]({{baseType}}.md){{/isFile}}{{/isPrimitiveType}} | {{description}} | {{#defaultValue}}[default to {{.}}]{{/defaultValue}}{{/pathParams}} - -### Other Parameters - -Other parameters are passed through a pointer to a api{{{nickname}}}Request struct via the builder pattern -{{#allParams}}{{#-last}} - -Name | Type | Description | Notes -------------- | ------------- | ------------- | -------------{{/-last}}{{/allParams}}{{#allParams}} -{{^isPathParam}} **{{paramName}}** | {{#isContainer}}{{#isArray}}{{#items}}{{^isPrimitiveType}}{{^isFile}}[{{/isFile}}{{/isPrimitiveType}}**[]{{dataType}}**{{^isPrimitiveType}}{{^isFile}}]({{^baseType}}{{dataType}}{{/baseType}}{{baseType}}.md){{/isFile}}{{/isPrimitiveType}}{{/items}}{{/isArray}}{{#isMap}}{{#items}}{{^isPrimitiveType}}{{^isFile}}[{{/isFile}}{{/isPrimitiveType}}**map[string]{{dataType}}**{{^isPrimitiveType}}{{^isFile}}]({{^baseType}}{{dataType}}{{/baseType}}{{baseType}}.md){{/isFile}}{{/isPrimitiveType}}{{/items}}{{/isMap}}{{/isContainer}}{{^isContainer}}{{^isPrimitiveType}}{{^isFile}}[{{/isFile}}{{/isPrimitiveType}}**{{dataType}}**{{^isPrimitiveType}}{{^isFile}}]({{^baseType}}{{dataType}}{{/baseType}}{{baseType}}.md){{/isFile}}{{/isPrimitiveType}}{{/isContainer}} | {{description}} | {{#defaultValue}}[default to {{.}}]{{/defaultValue}}{{/isPathParam}}{{/allParams}} - -### Return type - -{{#returnType}}{{#returnTypeIsPrimitive}}**{{{returnType}}}**{{/returnTypeIsPrimitive}}{{^returnTypeIsPrimitive}}[**{{{returnType}}}**]({{returnBaseType}}.md){{/returnTypeIsPrimitive}}{{/returnType}}{{^returnType}} (empty response body){{/returnType}} - -### Authorization - -{{^authMethods}}No authorization required{{/authMethods}}{{#authMethods}}[{{{name}}}](../README.md#{{{name}}}){{^-last}}, {{/-last}}{{/authMethods}} - -### HTTP request headers - -- **Content-Type**: {{#consumes}}{{{mediaType}}}{{^-last}}, 
{{/-last}}{{/consumes}}{{^consumes}}Not defined{{/consumes}} -- **Accept**: {{#produces}}{{{mediaType}}}{{^-last}}, {{/-last}}{{/produces}}{{^produces}}Not defined{{/produces}} - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) -[[Back to Model list]](../README.md#documentation-for-models) -[[Back to README]](../README.md) - -{{/operation}} -{{/operations}} diff --git a/sdk/templates/go/client.mustache b/sdk/templates/go/client.mustache deleted file mode 100644 index 609b362a8..000000000 --- a/sdk/templates/go/client.mustache +++ /dev/null @@ -1,608 +0,0 @@ -{{>partial_header}} -package {{packageName}} - -import ( - "bytes" - "context" - "encoding/json" - "encoding/xml" - "errors" - "fmt" - "io" - "io/ioutil" - "log" - "mime/multipart" - "net/http" - "net/http/httputil" - "net/url" - "os" - "path/filepath" - "reflect" - "regexp" - "strconv" - "strings" - "time" - "unicode/utf8" - - "golang.org/x/oauth2" - {{#withAWSV4Signature}} - awsv4 "github.com/aws/aws-sdk-go/aws/signer/v4" - awscredentials "github.com/aws/aws-sdk-go/aws/credentials" - {{/withAWSV4Signature}} -) - -var ( - jsonCheck = regexp.MustCompile(`(?i:(?:application|text)/(?:vnd\.[^;]+\+)?json)`) - xmlCheck = regexp.MustCompile(`(?i:(?:application|text)/xml)`) -) - -// APIClient manages communication with the {{appName}} API v{{version}} -// In most cases there should be only one, shared, APIClient. -type APIClient struct { - cfg *Configuration - common service // Reuse a single struct instead of allocating one for each service on the heap. - - // API Services -{{#apiInfo}} -{{#apis}} -{{#operations}} - - {{classname}} {{#generateInterfaces}}{{classname}}{{/generateInterfaces}}{{^generateInterfaces}}*{{classname}}Service{{/generateInterfaces}} -{{/operations}} -{{/apis}} -{{/apiInfo}} -} - -type service struct { - client *APIClient -} - -// NewAPIClient creates a new API client. Requires a userAgent string describing your application. 
-// optionally a custom http.Client to allow for advanced features such as caching. -func NewAPIClient(cfg *Configuration) *APIClient { - if cfg.HTTPClient == nil { - cfg.HTTPClient = http.DefaultClient - } - - c := &APIClient{} - c.cfg = cfg - c.common.client = c - -{{#apiInfo}} - // API Services -{{#apis}} -{{#operations}} - c.{{classname}} = (*{{classname}}Service)(&c.common) -{{/operations}} -{{/apis}} -{{/apiInfo}} - - return c -} - -func atoi(in string) (int, error) { - return strconv.Atoi(in) -} - -// selectHeaderContentType select a content type from the available list. -func selectHeaderContentType(contentTypes []string) string { - if len(contentTypes) == 0 { - return "" - } - if contains(contentTypes, "application/json") { - return "application/json" - } - return contentTypes[0] // use the first content type specified in 'consumes' -} - -// selectHeaderAccept join all accept types and return -func selectHeaderAccept(accepts []string) string { - if len(accepts) == 0 { - return "" - } - - if contains(accepts, "application/json") { - return "application/json" - } - - return strings.Join(accepts, ",") -} - -// contains is a case insensitive match, finding needle in a haystack -func contains(haystack []string, needle string) bool { - for _, a := range haystack { - if strings.ToLower(a) == strings.ToLower(needle) { - return true - } - } - return false -} - -// Verify optional parameters are of the correct type. -func typeCheckParameter(obj interface{}, expected string, name string) error { - // Make sure there is an object. - if obj == nil { - return nil - } - - // Check the type is as expected. - if reflect.TypeOf(obj).String() != expected { - return fmt.Errorf("Expected %s to be of type %s but received %s.", name, expected, reflect.TypeOf(obj).String()) - } - return nil -} - -// parameterToString convert interface{} parameters to string, using a delimiter if format is provided. 
-func parameterToString(obj interface{}, collectionFormat string) string { - var delimiter string - - switch collectionFormat { - case "pipes": - delimiter = "|" - case "ssv": - delimiter = " " - case "tsv": - delimiter = "\t" - case "csv": - delimiter = "," - } - - if reflect.TypeOf(obj).Kind() == reflect.Slice { - return strings.Trim(strings.Replace(fmt.Sprint(obj), " ", delimiter, -1), "[]") - } else if t, ok := obj.(time.Time); ok { - return t.Format(time.RFC3339) - } - - return fmt.Sprintf("%v", obj) -} - -// helper for converting interface{} parameters to json strings -func parameterToJson(obj interface{}) (string, error) { - jsonBuf, err := json.Marshal(obj) - if err != nil { - return "", err - } - return string(jsonBuf), err -} - -// callAPI do the request. -func (c *APIClient) callAPI(request *http.Request) (*http.Response, error) { - if c.cfg.Debug { - dump, err := httputil.DumpRequestOut(request, true) - if err != nil { - return nil, err - } - log.Printf("\n%s\n", string(dump)) - } - - resp, err := c.cfg.HTTPClient.Do(request) - if err != nil { - return resp, err - } - - if c.cfg.Debug { - dump, err := httputil.DumpResponse(resp, true) - if err != nil { - return resp, err - } - log.Printf("\n%s\n", string(dump)) - } - return resp, err -} - -// Allow modification of underlying config for alternate implementations and testing -// Caution: modifying the configuration while live can cause data races and potentially unwanted behavior -func (c *APIClient) GetConfig() *Configuration { - return c.cfg -} - -type formFile struct { - fileBytes []byte - fileName string - formFileName string -} - -// prepareRequest build the request -func (c *APIClient) prepareRequest( - ctx context.Context, - path string, method string, - postBody interface{}, - headerParams map[string]string, - queryParams url.Values, - formParams url.Values, - formFiles []formFile) (localVarRequest *http.Request, err error) { - - var body *bytes.Buffer - - // Detect postBody type and post. 
- if postBody != nil { - contentType := headerParams["Content-Type"] - if contentType == "" { - contentType = detectContentType(postBody) - headerParams["Content-Type"] = contentType - } - - body, err = setBody(postBody, contentType) - if err != nil { - return nil, err - } - } - - // add form parameters and file if available. - if strings.HasPrefix(headerParams["Content-Type"], "multipart/form-data") && len(formParams) > 0 || (len(formFiles) > 0) { - if body != nil { - return nil, errors.New("Cannot specify postBody and multipart form at the same time.") - } - body = &bytes.Buffer{} - w := multipart.NewWriter(body) - - for k, v := range formParams { - for _, iv := range v { - if strings.HasPrefix(k, "@") { // file - err = addFile(w, k[1:], iv) - if err != nil { - return nil, err - } - } else { // form value - w.WriteField(k, iv) - } - } - } - for _, formFile := range formFiles { - if len(formFile.fileBytes) > 0 && formFile.fileName != "" { - w.Boundary() - part, err := w.CreateFormFile(formFile.formFileName, filepath.Base(formFile.fileName)) - if err != nil { - return nil, err - } - _, err = part.Write(formFile.fileBytes) - if err != nil { - return nil, err - } - } - } - - // Set the Boundary in the Content-Type - headerParams["Content-Type"] = w.FormDataContentType() - - // Set Content-Length - headerParams["Content-Length"] = fmt.Sprintf("%d", body.Len()) - w.Close() - } - - if strings.HasPrefix(headerParams["Content-Type"], "application/x-www-form-urlencoded") && len(formParams) > 0 { - if body != nil { - return nil, errors.New("Cannot specify postBody and x-www-form-urlencoded form at the same time.") - } - body = &bytes.Buffer{} - body.WriteString(formParams.Encode()) - // Set Content-Length - headerParams["Content-Length"] = fmt.Sprintf("%d", body.Len()) - } - - // Setup path and query parameters - url, err := url.Parse(path) - if err != nil { - return nil, err - } - - // Override request host, if applicable - if c.cfg.Host != "" { - url.Host = c.cfg.Host - } 
- - // Override request scheme, if applicable - if c.cfg.Scheme != "" { - url.Scheme = c.cfg.Scheme - } - - // Adding Query Param - query := url.Query() - for k, v := range queryParams { - for _, iv := range v { - query.Add(k, iv) - } - } - - // Encode the parameters. - url.RawQuery = query.Encode() - - // Generate a new request - if body != nil { - localVarRequest, err = http.NewRequest(method, url.String(), body) - } else { - localVarRequest, err = http.NewRequest(method, url.String(), nil) - } - if err != nil { - return nil, err - } - - // add header parameters, if any - if len(headerParams) > 0 { - headers := http.Header{} - for h, v := range headerParams { - headers[h] = []string{v} - } - localVarRequest.Header = headers - } - - // Add the user agent to the request. - localVarRequest.Header.Add("User-Agent", c.cfg.UserAgent) - - if ctx != nil { - // add context to the request - localVarRequest = localVarRequest.WithContext(ctx) - - // Walk through any authentication. - - // OAuth2 authentication - if tok, ok := ctx.Value(ContextOAuth2).(oauth2.TokenSource); ok { - // We were able to grab an oauth2 token from the context - var latestToken *oauth2.Token - if latestToken, err = tok.Token(); err != nil { - return nil, err - } - - latestToken.SetAuthHeader(localVarRequest) - } - - // Basic HTTP Authentication - if auth, ok := ctx.Value(ContextBasicAuth).(BasicAuth); ok { - localVarRequest.SetBasicAuth(auth.UserName, auth.Password) - } - - // AccessToken Authentication - if auth, ok := ctx.Value(ContextAccessToken).(string); ok { - localVarRequest.Header.Add("Authorization", "Bearer "+auth) - } - - {{#withAWSV4Signature}} - // AWS Signature v4 Authentication - if auth, ok := ctx.Value(ContextAWSv4).(AWSv4); ok { - creds := awscredentials.NewStaticCredentials(auth.AccessKey, auth.SecretKey, auth.SessionToken) - signer := awsv4.NewSigner(creds) - var reader *strings.Reader - if body == nil { - reader = strings.NewReader("") - } else { - reader = 
strings.NewReader(body.String()) - } - - // Define default values for region and service to maintain backward compatibility - region := auth.Region - if region == "" { - region = "eu-west-2" - } - service := auth.Service - if service == "" { - service = "oapi" - } - - timestamp := time.Now() - _, err := signer.Sign(localVarRequest, reader, service, region, timestamp) - if err != nil { - return nil, err - } - } - {{/withAWSV4Signature}} - } - - for header, value := range c.cfg.DefaultHeader { - localVarRequest.Header.Add(header, value) - } -{{#withCustomMiddlewareFunction}} - - if c.cfg.Middleware != nil { - c.cfg.Middleware(localVarRequest) - } - -{{/withCustomMiddlewareFunction}} -{{#hasHttpSignatureMethods}} - if ctx != nil { - // HTTP Signature Authentication. All request headers must be set (including default headers) - // because the headers may be included in the signature. - if auth, ok := ctx.Value(ContextHttpSignatureAuth).(HttpSignatureAuth); ok { - err = SignRequest(ctx, localVarRequest, auth) - if err != nil { - return nil, err - } - } - } -{{/hasHttpSignatureMethods}} - return localVarRequest, nil -} - -func (c *APIClient) decode(v interface{}, b []byte, contentType string) (err error) { - if len(b) == 0 { - return nil - } - if s, ok := v.(*string); ok { - *s = string(b) - return nil - } - if f, ok := v.(**os.File); ok { - *f, err = ioutil.TempFile("", "HttpClientFile") - if err != nil { - return - } - _, err = (*f).Write(b) - if err != nil { - return - } - _, err = (*f).Seek(0, io.SeekStart) - return - } - if xmlCheck.MatchString(contentType) { - if err = xml.Unmarshal(b, v); err != nil { - return err - } - return nil - } - if jsonCheck.MatchString(contentType) { - if actualObj, ok := v.(interface{ GetActualInstance() interface{} }); ok { // oneOf, anyOf schemas - if unmarshalObj, ok := actualObj.(interface{ UnmarshalJSON([]byte) error }); ok { // make sure it has UnmarshalJSON defined - if err = unmarshalObj.UnmarshalJSON(b); err != nil { - return 
err - } - } else { - return errors.New("Unknown type with GetActualInstance but no unmarshalObj.UnmarshalJSON defined") - } - } else if err = json.Unmarshal(b, v); err != nil { // simple model - return err - } - return nil - } - return errors.New("undefined response type") -} - -// Add a file to the multipart request -func addFile(w *multipart.Writer, fieldName, path string) error { - file, err := os.Open(path) - if err != nil { - return err - } - defer file.Close() - - part, err := w.CreateFormFile(fieldName, filepath.Base(path)) - if err != nil { - return err - } - _, err = io.Copy(part, file) - - return err -} - -// Prevent trying to import "fmt" -func reportError(format string, a ...interface{}) error { - return fmt.Errorf(format, a...) -} - -// Set request body from an interface{} -func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err error) { - if bodyBuf == nil { - bodyBuf = &bytes.Buffer{} - } - - if reader, ok := body.(io.Reader); ok { - _, err = bodyBuf.ReadFrom(reader) - } else if fp, ok := body.(**os.File); ok { - _, err = bodyBuf.ReadFrom(*fp) - } else if b, ok := body.([]byte); ok { - _, err = bodyBuf.Write(b) - } else if s, ok := body.(string); ok { - _, err = bodyBuf.WriteString(s) - } else if s, ok := body.(*string); ok { - _, err = bodyBuf.WriteString(*s) - } else if jsonCheck.MatchString(contentType) { - err = json.NewEncoder(bodyBuf).Encode(body) - } else if xmlCheck.MatchString(contentType) { - err = xml.NewEncoder(bodyBuf).Encode(body) - } - - if err != nil { - return nil, err - } - - if bodyBuf.Len() == 0 { - err = fmt.Errorf("Invalid body type %s\n", contentType) - return nil, err - } - return bodyBuf, nil -} - -// detectContentType method is used to figure out `Request.Body` content type for request header -func detectContentType(body interface{}) string { - contentType := "text/plain; charset=utf-8" - kind := reflect.TypeOf(body).Kind() - - switch kind { - case reflect.Struct, reflect.Map, reflect.Ptr: - 
contentType = "application/json; charset=utf-8" - case reflect.String: - contentType = "text/plain; charset=utf-8" - default: - if b, ok := body.([]byte); ok { - contentType = http.DetectContentType(b) - } else if kind == reflect.Slice { - contentType = "application/json; charset=utf-8" - } - } - - return contentType -} - -// Ripped from https://github.com/gregjones/httpcache/blob/master/httpcache.go -type cacheControl map[string]string - -func parseCacheControl(headers http.Header) cacheControl { - cc := cacheControl{} - ccHeader := headers.Get("Cache-Control") - for _, part := range strings.Split(ccHeader, ",") { - part = strings.Trim(part, " ") - if part == "" { - continue - } - if strings.ContainsRune(part, '=') { - keyval := strings.Split(part, "=") - cc[strings.Trim(keyval[0], " ")] = strings.Trim(keyval[1], ",") - } else { - cc[part] = "" - } - } - return cc -} - -// CacheExpires helper function to determine remaining time before repeating a request. -func CacheExpires(r *http.Response) time.Time { - // Figure out when the cache expires. - var expires time.Time - now, err := time.Parse(time.RFC1123, r.Header.Get("date")) - if err != nil { - return time.Now() - } - respCacheControl := parseCacheControl(r.Header) - - if maxAge, ok := respCacheControl["max-age"]; ok { - lifetime, err := time.ParseDuration(maxAge + "s") - if err != nil { - expires = now - } else { - expires = now.Add(lifetime) - } - } else { - expiresHeader := r.Header.Get("Expires") - if expiresHeader != "" { - expires, err = time.Parse(time.RFC1123, expiresHeader) - if err != nil { - expires = now - } - } - } - return expires -} - -func strlen(s string) int { - return utf8.RuneCountInString(s) -} - -// GenericOpenAPIError Provides access to the body, error and model on returned errors. -type GenericOpenAPIError struct { - body []byte - error string - model interface{} -} - -// Error returns non-empty string if there was an error. 
-func (e GenericOpenAPIError) Error() string { - return e.error -} - -// Body returns the raw bytes of the response -func (e GenericOpenAPIError) Body() []byte { - return e.body -} - -// Model returns the unpacked model of the error -func (e GenericOpenAPIError) Model() interface{} { - return e.model -} diff --git a/sdk/templates/go/gitignore.mustache b/sdk/templates/go/gitignore.mustache deleted file mode 100644 index 3b6195563..000000000 --- a/sdk/templates/go/gitignore.mustache +++ /dev/null @@ -1,28 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof - -vendor -.idea -cloud_test.go diff --git a/sdk/templates/go/go.mod.mustache b/sdk/templates/go/go.mod.mustache deleted file mode 100644 index 3921ee182..000000000 --- a/sdk/templates/go/go.mod.mustache +++ /dev/null @@ -1,13 +0,0 @@ -module {{gitHost}}/{{gitUserId}}/{{gitRepoId}}{{#isGoSubmodule}}/{{packageName}}{{/isGoSubmodule}} - -go 1.13 - -require ( - github.com/pborman/uuid v1.2.1 - github.com/pkg/errors v0.9.1 - github.com/stretchr/testify v1.4.0 - golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99 - {{#withAWSV4Signature}} - github.com/aws/aws-sdk-go v1.34.14 - {{/withAWSV4Signature}} -) diff --git a/sdk/templates/go/model_simple.mustache b/sdk/templates/go/model_simple.mustache deleted file mode 100644 index 97884f910..000000000 --- a/sdk/templates/go/model_simple.mustache +++ /dev/null @@ -1,419 +0,0 @@ -// {{classname}} {{{description}}}{{^description}}struct for {{{classname}}}{{/description}} -type {{classname}} struct { -{{#parent}} -{{^isMap}} -{{^isArray}} - {{{parent}}} -{{/isArray}} -{{/isMap}} -{{#isArray}} - Items {{{parent}}} -{{/isArray}} -{{/parent}} -{{#vars}} -{{^-first}} -{{/-first}} -{{#description}} - // {{{.}}} -{{/description}} 
-{{#deprecated}} - // Deprecated -{{/deprecated}} - {{name}} {{^required}}{{^isNullable}}*{{/isNullable}}{{/required}}{{{dataType}}} `json:"{{baseName}}{{^required}},omitempty{{/required}}"{{#withXml}} xml:"{{baseName}}{{#isXmlAttribute}},attr{{/isXmlAttribute}}"{{/withXml}}{{#vendorExtensions.x-go-custom-tag}} {{{.}}}{{/vendorExtensions.x-go-custom-tag}}` -{{/vars}} -{{#isAdditionalPropertiesTrue}} - AdditionalProperties map[string]interface{} -{{/isAdditionalPropertiesTrue}} -} - -{{#isAdditionalPropertiesTrue}} -type _{{{classname}}} {{{classname}}} - -{{/isAdditionalPropertiesTrue}} -// New{{classname}} instantiates a new {{classname}} object -// This constructor will assign default values to properties that have it defined, -// and makes sure properties required by API are set, but the set of arguments -// will change when the set of required properties is changed -func New{{classname}}({{#requiredVars}}{{nameInCamelCase}} {{dataType}}{{^-last}}, {{/-last}}{{/requiredVars}}) *{{classname}} { - this := {{classname}}{} -{{#allVars}} -{{#required}} - this.{{name}} = {{nameInCamelCase}} -{{/required}} -{{^required}} -{{#defaultValue}} -{{^vendorExtensions.x-golang-is-container}} -{{^isReadOnly}} -{{#isNullable}} - var {{nameInCamelCase}} {{{datatypeWithEnum}}} = {{{.}}} - this.{{name}} = *New{{{dataType}}}(&{{nameInCamelCase}}) -{{/isNullable}} -{{^isNullable}} - var {{nameInCamelCase}} {{{dataType}}} = {{{.}}} - this.{{name}} = &{{nameInCamelCase}} -{{/isNullable}} -{{/isReadOnly}} -{{/vendorExtensions.x-golang-is-container}} -{{/defaultValue}} -{{/required}} -{{/allVars}} - return &this -} - -// New{{classname}}WithDefaults instantiates a new {{classname}} object -// This constructor will only assign default values to properties that have it defined, -// but it doesn't guarantee that properties required by API are set -func New{{classname}}WithDefaults() *{{classname}} { - this := {{classname}}{} -{{#vars}} -{{#defaultValue}} 
-{{^vendorExtensions.x-golang-is-container}} -{{^isReadOnly}} -{{#isNullable}} -{{!we use datatypeWithEnum here, since it will represent the non-nullable name of the datatype, e.g. int64 for NullableInt64}} - var {{nameInCamelCase}} {{{datatypeWithEnum}}} = {{{.}}} - this.{{name}} = *New{{{dataType}}}(&{{nameInCamelCase}}) -{{/isNullable}} -{{^isNullable}} - var {{nameInCamelCase}} {{{dataType}}} = {{{.}}} - this.{{name}} = {{^required}}&{{/required}}{{nameInCamelCase}} -{{/isNullable}} -{{/isReadOnly}} -{{/vendorExtensions.x-golang-is-container}} -{{/defaultValue}} -{{/vars}} - return &this -} - -{{#vars}} -{{#required}} -// Get{{name}} returns the {{name}} field value -{{#isNullable}} -// If the value is explicit nil, the zero value for {{vendorExtensions.x-go-base-type}} will be returned -{{/isNullable}} -{{#deprecated}} -// Deprecated -{{/deprecated}} -func (o *{{classname}}) Get{{name}}() {{vendorExtensions.x-go-base-type}} { - if o == nil{{#isNullable}}{{^vendorExtensions.x-golang-is-container}} || o.{{name}}.Get() == nil{{/vendorExtensions.x-golang-is-container}}{{/isNullable}} { - var ret {{vendorExtensions.x-go-base-type}} - return ret - } - -{{#isNullable}} -{{#vendorExtensions.x-golang-is-container}} - return o.{{name}} -{{/vendorExtensions.x-golang-is-container}} -{{^vendorExtensions.x-golang-is-container}} - return *o.{{name}}.Get() -{{/vendorExtensions.x-golang-is-container}} -{{/isNullable}} -{{^isNullable}} - return o.{{name}} -{{/isNullable}} -} - -// Get{{name}}Ok returns a tuple with the {{name}} field value -// and a boolean to check if the value has been set. 
-{{#isNullable}} -// NOTE: If the value is an explicit nil, `nil, true` will be returned -{{/isNullable}} -{{#deprecated}} -// Deprecated -{{/deprecated}} -func (o *{{classname}}) Get{{name}}Ok() (*{{vendorExtensions.x-go-base-type}}, bool) { - if o == nil {{#isNullable}}{{#vendorExtensions.x-golang-is-container}}|| o.{{name}} == nil{{/vendorExtensions.x-golang-is-container}}{{/isNullable}} { - return nil, false - } -{{#isNullable}} -{{#vendorExtensions.x-golang-is-container}} - return &o.{{name}}, true -{{/vendorExtensions.x-golang-is-container}} -{{^vendorExtensions.x-golang-is-container}} - return o.{{name}}.Get(), o.{{name}}.IsSet() -{{/vendorExtensions.x-golang-is-container}} -{{/isNullable}} -{{^isNullable}} - return &o.{{name}}, true -{{/isNullable}} -} - -// Set{{name}} sets field value -{{#deprecated}} -// Deprecated -{{/deprecated}} -func (o *{{classname}}) Set{{name}}(v {{vendorExtensions.x-go-base-type}}) { -{{#isNullable}} -{{#vendorExtensions.x-golang-is-container}} - o.{{name}} = v -{{/vendorExtensions.x-golang-is-container}} -{{^vendorExtensions.x-golang-is-container}} - o.{{name}}.Set(&v) -{{/vendorExtensions.x-golang-is-container}} -{{/isNullable}} -{{^isNullable}} - o.{{name}} = v -{{/isNullable}} -} - -{{/required}} -{{^required}} -// Get{{name}} returns the {{name}} field value if set, zero value otherwise{{#isNullable}} (both if not set or set to explicit null){{/isNullable}}. 
-{{#deprecated}} -// Deprecated -{{/deprecated}} -func (o *{{classname}}) Get{{name}}() {{vendorExtensions.x-go-base-type}} { - if o == nil {{^isNullable}}|| o.{{name}} == nil{{/isNullable}}{{#isNullable}}{{^vendorExtensions.x-golang-is-container}}|| o.{{name}}.Get() == nil{{/vendorExtensions.x-golang-is-container}}{{/isNullable}} { - var ret {{vendorExtensions.x-go-base-type}} - return ret - } -{{#isNullable}} -{{#vendorExtensions.x-golang-is-container}} - return o.{{name}} -{{/vendorExtensions.x-golang-is-container}} -{{^vendorExtensions.x-golang-is-container}} - return *o.{{name}}.Get() -{{/vendorExtensions.x-golang-is-container}} -{{/isNullable}} -{{^isNullable}} - return *o.{{name}} -{{/isNullable}} -} - -// Get{{name}}Ok returns a tuple with the {{name}} field value if set, nil otherwise -// and a boolean to check if the value has been set. -{{#isNullable}} -// NOTE: If the value is an explicit nil, `nil, true` will be returned -{{/isNullable}} -{{#deprecated}} -// Deprecated -{{/deprecated}} -func (o *{{classname}}) Get{{name}}Ok() (*{{vendorExtensions.x-go-base-type}}, bool) { - if o == nil {{^isNullable}}|| o.{{name}} == nil{{/isNullable}}{{#isNullable}}{{#vendorExtensions.x-golang-is-container}}|| o.{{name}} == nil{{/vendorExtensions.x-golang-is-container}}{{/isNullable}} { - return nil, false - } -{{#isNullable}} -{{#vendorExtensions.x-golang-is-container}} - return &o.{{name}}, true -{{/vendorExtensions.x-golang-is-container}} -{{^vendorExtensions.x-golang-is-container}} - return o.{{name}}.Get(), o.{{name}}.IsSet() -{{/vendorExtensions.x-golang-is-container}} -{{/isNullable}} -{{^isNullable}} - return o.{{name}}, true -{{/isNullable}} -} - -// Has{{name}} returns a boolean if a field has been set. 
-func (o *{{classname}}) Has{{name}}() bool { - if o != nil && {{^isNullable}}o.{{name}} != nil{{/isNullable}}{{#isNullable}}{{#vendorExtensions.x-golang-is-container}}o.{{name}} != nil{{/vendorExtensions.x-golang-is-container}}{{^vendorExtensions.x-golang-is-container}}o.{{name}}.IsSet(){{/vendorExtensions.x-golang-is-container}}{{/isNullable}} { - return true - } - - return false -} - -// Set{{name}} gets a reference to the given {{dataType}} and assigns it to the {{name}} field. -{{#deprecated}} -// Deprecated -{{/deprecated}} -func (o *{{classname}}) Set{{name}}(v {{vendorExtensions.x-go-base-type}}) { -{{#isNullable}} -{{#vendorExtensions.x-golang-is-container}} - o.{{name}} = v -{{/vendorExtensions.x-golang-is-container}} -{{^vendorExtensions.x-golang-is-container}} - o.{{name}}.Set(&v) -{{/vendorExtensions.x-golang-is-container}} -{{/isNullable}} -{{^isNullable}} - o.{{name}} = &v -{{/isNullable}} -} -{{#isNullable}} -{{^vendorExtensions.x-golang-is-container}} -// Set{{name}}Nil sets the value for {{name}} to be an explicit nil -func (o *{{classname}}) Set{{name}}Nil() { - o.{{name}}.Set(nil) -} - -// Unset{{name}} ensures that no value is present for {{name}}, not even an explicit nil -func (o *{{classname}}) Unset{{name}}() { - o.{{name}}.Unset() -} -{{/vendorExtensions.x-golang-is-container}} -{{/isNullable}} - -{{/required}} -{{/vars}} -func (o {{classname}}) MarshalJSON() ([]byte, error) { - toSerialize := {{#isArray}}make([]interface{}, len(o.Items)){{/isArray}}{{^isArray}}map[string]interface{}{}{{/isArray}} - {{#parent}} - {{^isMap}} - {{^isArray}} - serialized{{parent}}, err{{parent}} := json.Marshal(o.{{parent}}) - if err{{parent}} != nil { - return []byte{}, err{{parent}} - } - err{{parent}} = json.Unmarshal([]byte(serialized{{parent}}), &toSerialize) - if err{{parent}} != nil { - return []byte{}, err{{parent}} - } - {{/isArray}} - {{/isMap}} - {{#isArray}} - for i, item := range o.Items { - toSerialize[i] = item - } - {{/isArray}} - {{/parent}} 
- {{#vars}} - {{! if argument is nullable, only serialize it if it is set}} - {{#isNullable}} - {{#vendorExtensions.x-golang-is-container}} - {{! support for container fields is not ideal at this point because of lack of Nullable* types}} - if o.{{name}} != nil { - toSerialize["{{baseName}}"] = o.{{name}} - } - {{/vendorExtensions.x-golang-is-container}} - {{^vendorExtensions.x-golang-is-container}} - if {{#required}}true{{/required}}{{^required}}o.{{name}}.IsSet(){{/required}} { - toSerialize["{{baseName}}"] = o.{{name}}.Get() - } - {{/vendorExtensions.x-golang-is-container}} - {{/isNullable}} - {{! if argument is not nullable, don't set it if it is nil}} - {{^isNullable}} - if {{#required}}true{{/required}}{{^required}}o.{{name}} != nil{{/required}} { - toSerialize["{{baseName}}"] = o.{{name}} - } - {{/isNullable}} - {{/vars}} - {{#isAdditionalPropertiesTrue}} - - for key, value := range o.AdditionalProperties { - toSerialize[key] = value - } - - {{/isAdditionalPropertiesTrue}} - return json.Marshal(toSerialize) -} - -{{#isAdditionalPropertiesTrue}} -func (o *{{{classname}}}) UnmarshalJSON(bytes []byte) (err error) { -{{#parent}} -{{^isMap}} - type {{classname}}WithoutEmbeddedStruct struct { - {{#vars}} - {{^-first}} - {{/-first}} - {{#description}} - // {{{.}}} - {{/description}} - {{#deprecated}} - // Deprecated - {{/deprecated}} - {{name}} {{^required}}{{^isNullable}}*{{/isNullable}}{{/required}}{{{dataType}}} `json:"{{baseName}}{{^required}},omitempty{{/required}}"{{#withXml}} xml:"{{baseName}}{{#isXmlAttribute}},attr{{/isXmlAttribute}}"{{/withXml}}{{#vendorExtensions.x-go-custom-tag}} {{{.}}}{{/vendorExtensions.x-go-custom-tag}}` - {{/vars}} - } - - var{{{classname}}}WithoutEmbeddedStruct := {{{classname}}}WithoutEmbeddedStruct{} - - err = json.Unmarshal(bytes, &var{{{classname}}}WithoutEmbeddedStruct) - if err == nil { - var{{{classname}}} := _{{{classname}}}{} - {{#vars}} - var{{{classname}}}.{{{name}}} = var{{{classname}}}WithoutEmbeddedStruct.{{{name}}} 
- {{/vars}} - *o = {{{classname}}}(var{{{classname}}}) - } else { - return err - } - - var{{{classname}}} := _{{{classname}}}{} - - err = json.Unmarshal(bytes, &var{{{classname}}}) - if err == nil { - o.{{{parent}}} = var{{{classname}}}.{{{parent}}} - } else { - return err - } - - additionalProperties := make(map[string]interface{}) - - if err = json.Unmarshal(bytes, &additionalProperties); err == nil { - {{#vars}} - delete(additionalProperties, "{{{baseName}}}") - {{/vars}} - - // remove fields from embedded structs - reflect{{{parent}}} := reflect.ValueOf(o.{{{parent}}}) - for i := 0; i < reflect{{{parent}}}.Type().NumField(); i++ { - t := reflect{{{parent}}}.Type().Field(i) - - if jsonTag := t.Tag.Get("json"); jsonTag != "" { - fieldName := "" - if commaIdx := strings.Index(jsonTag, ","); commaIdx > 0 { - fieldName = jsonTag[:commaIdx] - } else { - fieldName = jsonTag - } - if fieldName != "AdditionalProperties" { - delete(additionalProperties, fieldName) - } - } - } - - o.AdditionalProperties = additionalProperties - } - - return err -{{/isMap}} -{{#isMap}} - var{{{classname}}} := _{{{classname}}}{} - - if err = json.Unmarshal(bytes, &var{{{classname}}}); err == nil { - *o = {{{classname}}}(var{{{classname}}}) - } - - additionalProperties := make(map[string]interface{}) - - if err = json.Unmarshal(bytes, &additionalProperties); err == nil { - {{#vars}} - delete(additionalProperties, "{{{baseName}}}") - {{/vars}} - o.AdditionalProperties = additionalProperties - } - - return err -{{/isMap}} -{{/parent}} -{{^parent}} - var{{{classname}}} := _{{{classname}}}{} - - if err = json.Unmarshal(bytes, &var{{{classname}}}); err == nil { - *o = {{{classname}}}(var{{{classname}}}) - } - - additionalProperties := make(map[string]interface{}) - - if err = json.Unmarshal(bytes, &additionalProperties); err == nil { - {{#vars}} - delete(additionalProperties, "{{{baseName}}}") - {{/vars}} - o.AdditionalProperties = additionalProperties - } - - return err -{{/parent}} -} - 
-{{/isAdditionalPropertiesTrue}} -{{#isArray}} -func (o *{{{classname}}}) UnmarshalJSON(bytes []byte) (err error) { - return json.Unmarshal(bytes, &o.Items) -} - -{{/isArray}} -{{>nullable_model}} diff --git a/sdk/templates/java/Taskfile.yml.mustache b/sdk/templates/java/Taskfile.yml.mustache deleted file mode 100644 index e69de29bb..000000000 diff --git a/sdk/templates/java/maven.yml.mustache b/sdk/templates/java/maven.yml.mustache deleted file mode 100644 index 288b4e54f..000000000 --- a/sdk/templates/java/maven.yml.mustache +++ /dev/null @@ -1,24 +0,0 @@ -name: Publish -on: - push: - tags: - - '*' - -jobs: - Publish: - name: 'Publish' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Setup Java JDK - uses: actions/setup-java@v3 - with: - java-version: 14 - cache: "maven" - - name: Release to Nexus Repository - uses: samuelmeuli/action-maven-publish@v1 - with: - gpg_private_key: {{ gpg_private_key }} - gpg_passphrase: {{ gpg_passphrase }} - nexus_username: {{ nexus_username }} - nexus_password: {{ nexus_password }} diff --git a/sdk/templates/php/README.mustache b/sdk/templates/php/README.mustache deleted file mode 100644 index 2937cf76c..000000000 --- a/sdk/templates/php/README.mustache +++ /dev/null @@ -1,166 +0,0 @@ -# PHP API client for Numary - -{{#appDescriptionWithNewLines}} -{{{.}}} -{{/appDescriptionWithNewLines}} - -{{#infoUrl}} -For more information, please visit [{{{infoUrl}}}]({{{infoUrl}}}). -{{/infoUrl}} - -## Installation & Usage - -### Requirements - -PHP 7.3 and later. -Should also work with PHP 8.0 but has not been tested. 
- -### Composer - -To install the bindings via [Composer](https://getcomposer.org/), add the following to `composer.json`: - -```json -{ - "repositories": [ - { - "type": "vcs", - "url": "https://{{gitHost}}/{{gitUserId}}/{{gitRepoId}}.git" - } - ], - "require": { - "{{gitUserId}}/{{gitRepoId}}": "*@dev" - } -} -``` - -Then run `composer install` - -### Manual Installation - -Download the files and include `autoload.php`: - -```php - php_doc_auth_partial}} -$apiInstance = new {{invokerPackage}}\Api\{{classname}}( - // If you want use custom http client, pass your client which implements `GuzzleHttp\ClientInterface`. - // This is optional, `GuzzleHttp\Client` will be used as default. - new GuzzleHttp\Client(){{#hasAuthMethods}}, - $config{{/hasAuthMethods}} -); -{{#allParams}}${{paramName}} = {{{example}}}; // {{{dataType}}}{{#description}} | {{{.}}}{{/description}} -{{/allParams}} - -try { - {{#returnType}}$result = {{/returnType}}$apiInstance->{{{operationId}}}({{#allParams}}${{paramName}}{{^-last}}, {{/-last}}{{/allParams}});{{#returnType}} - print_r($result);{{/returnType}} -} catch (Exception $e) { - echo 'Exception when calling {{classname}}->{{operationId}}: ', $e->getMessage(), PHP_EOL; -} -{{/-first}}{{/operation}}{{/operations}}{{/-first}}{{/apis}}{{/apiInfo}} -``` - -## API Endpoints - -All URIs are relative to *{{basePath}}* - -Class | Method | HTTP request | Description ------------- | ------------- | ------------- | ------------- -{{#apiInfo}}{{#apis}}{{#operations}}{{#operation}}*{{classname}}* | [**{{operationId}}**]({{apiDocPath}}/{{classname}}.md#{{operationIdLowerCase}}) | **{{httpMethod}}** {{path}} | {{summary}} -{{/operation}}{{/operations}}{{/apis}}{{/apiInfo}} -## Models - -{{#models}}{{#model}}- [{{{classname}}}]({{modelDocPath}}/{{{classname}}}.md){{/model}} -{{/models}} - -## Authorization -{{^authMethods}} -All endpoints do not require authorization. 
-{{/authMethods}} -{{#authMethods}} -{{#last}} Authentication schemes defined for the API:{{/last}} - -### {{{name}}} -{{#isApiKey}} - -- **Type**: API key -- **API key parameter name**: {{{keyParamName}}} -- **Location**: {{#isKeyInQuery}}URL query string{{/isKeyInQuery}}{{#isKeyInHeader}}HTTP header{{/isKeyInHeader}} - -{{/isApiKey}} -{{#isBasic}} -{{#isBasicBasic}} - -- **Type**: HTTP basic authentication -{{/isBasicBasic}} -{{#isBasicBearer}} - -- **Type**: Bearer authentication{{#bearerFormat}} ({{{.}}}){{/bearerFormat}} - -Library provide utility to fetch access token from Numary authorization server - -```php -use Numary\Ledger\Api\TransactionsApi; -use Numary\Ledger\Api\Configuration; -use Numary\Ledger\Cloud\TokenFetcher; -use GuzzleHttp\Client; - -$tokenFetcher = new TokenFetcher(TokenFetcher::$endpoint, "API KEY"); -$token = $tokenFetcher->fetchToken(); -$config = new Configuration(); -$config->setAccessToken($token); -$config->setHost("https://api.numary.cloud/ledger"); -$transactionApi = new TransactionsApi(new Client(), $config); -``` - -{{/isBasicBearer}} -{{/isBasic}} -{{#isOAuth}} - -- **Type**: `OAuth` -- **Flow**: `{{{flow}}}` -- **Authorization URL**: `{{{authorizationUrl}}}` -- **Scopes**: {{^scopes}}N/A{{/scopes}} -{{#scopes}} - - **{{{scope}}}**: {{{description}}} -{{/scopes}} -{{/isOAuth}} - -{{/authMethods}} -## Tests - -To run the tests, use: - -```bash -composer install -vendor/bin/phpunit -``` - -## Author - -{{#apiInfo}}{{#apis}}{{#-last}}{{infoEmail}} -{{/-last}}{{/apis}}{{/apiInfo}} -## About this package - -This PHP package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: - -- API version: `{{appVersion}}` -{{#artifactVersion}} - - Package version: `{{.}}` -{{/artifactVersion}} -{{^hideGenerationTimestamp}} - - Build date: `{{generatedDate}}` -{{/hideGenerationTimestamp}} -- Build package: `{{generatorClass}}` diff --git a/sdk/templates/php/Taskfile.yml.mustache 
b/sdk/templates/php/Taskfile.yml.mustache deleted file mode 100644 index 75217e2be..000000000 --- a/sdk/templates/php/Taskfile.yml.mustache +++ /dev/null @@ -1,30 +0,0 @@ -version: '3' - -tasks: - update: - cmds: - - composer update - - start-ledger: - cmds: - - > - docker run -d - --name ledger - --rm - -e NUMARY_SERVER_HTTP_BIND_ADDRESS=0.0.0.0:3068 - -p 3068:3068 - ghcr.io/formancehq/ledger:{{appVersion}} - - stop-ledger: - cmds: - - docker stop ledger - - test: - desc: Test client code - deps: - - update - cmds: - - task: start-ledger - - defer: - task: stop-ledger - - ./vendor/bin/phpunit diff --git a/sdk/templates/python/README.mustache b/sdk/templates/python/README.mustache deleted file mode 100644 index 015381e0b..000000000 --- a/sdk/templates/python/README.mustache +++ /dev/null @@ -1,56 +0,0 @@ -# Python API client for Numary - -{{#appDescriptionWithNewLines}} -{{{.}}} -{{/appDescriptionWithNewLines}} - -This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: - -- API version: {{appVersion}} -- Package version: {{packageVersion}} -{{^hideGenerationTimestamp}} -- Build date: {{generatedDate}} -{{/hideGenerationTimestamp}} -- Build package: {{generatorClass}} -{{#infoUrl}} -For more information, please visit [{{{infoUrl}}}]({{{infoUrl}}}) -{{/infoUrl}} - -## Requirements. - -Python >= 3.6 - -## Installation & Usage -### pip install - -If the python package is hosted on a repository, you can install directly using: - -```sh -pip install git+https://{{gitHost}}/{{{gitUserId}}}/{{{gitRepoId}}}.git -``` -(you may need to run `pip` with root permission: `sudo pip install git+https://{{gitHost}}/{{{gitUserId}}}/{{{gitRepoId}}}.git`) - -Then import the package: -```python -import {{{packageName}}} -``` - -### Setuptools - -Install via [Setuptools](http://pypi.python.org/pypi/setuptools). 
- -```sh -python setup.py install --user -``` -(or `sudo python setup.py install` to install the package for all users) - -Then import the package: -```python -import {{{packageName}}} -``` - -## Getting Started - -Please follow the [installation procedure](#installation--usage) and then run the following: - -{{> README_common }} diff --git a/sdk/templates/python/Taskfile.yml.mustache b/sdk/templates/python/Taskfile.yml.mustache deleted file mode 100644 index 85353b16e..000000000 --- a/sdk/templates/python/Taskfile.yml.mustache +++ /dev/null @@ -1,37 +0,0 @@ -version: '3' - -tasks: - update: - cmds: - - pip install -r requirements.txt - - start-ledger: - cmds: - - > - docker run -d - --name ledger - --rm - -e NUMARY_SERVER_HTTP_BIND_ADDRESS=0.0.0.0:3068 - -p 3068:3068 - ghcr.io/formancehq/ledger:{{appVersion}} - - stop-ledger: - cmds: - - docker stop ledger - - test: - desc: Test client code - deps: - - update - cmds: - - task: start-ledger - - defer: - task: stop-ledger - - ./venv/bin/python -m unittest - - build: - desc: Publish Package - cmds: - - pip install twine - - python setup.py sdist bdist_wheel - - twine check dist/* diff --git a/sdk/templates/python/setup_cfg.mustache b/sdk/templates/python/setup_cfg.mustache deleted file mode 100644 index 205f16e7a..000000000 --- a/sdk/templates/python/setup_cfg.mustache +++ /dev/null @@ -1,16 +0,0 @@ -{{#useNose}} -[nosetests] -logging-clear-handlers=true -verbosity=2 -randomize=true -exe=true -with-coverage=true -cover-package={{{packageName}}} -cover-erase=true - -{{/useNose}} -[flake8] -max-line-length=99 - -[metadata] -description-file=README.md diff --git a/sdk/templates/typescript/.gitignore.mustache b/sdk/templates/typescript/.gitignore.mustache deleted file mode 100644 index e69de29bb..000000000 diff --git a/sdk/templates/typescript/README.mustache b/sdk/templates/typescript/README.mustache deleted file mode 100644 index 3da8a1113..000000000 --- a/sdk/templates/typescript/README.mustache +++ /dev/null @@ -1,30 
+0,0 @@ -## {{npmName}}@{{npmVersion}} - -This generator creates TypeScript/JavaScript client that utilizes {{framework}}. - -### Building - -To build and compile the typescript sources to javascript use: -``` -npm install -npm run build -``` - -### Publishing - -First build the package then run ```npm publish``` - -### Consuming - -navigate to the folder of your consuming project and run one of the following commands. - -_published:_ - -``` -npm install {{npmName}}@{{npmVersion}} --save -``` - -_unPublished (not recommended):_ - -``` -npm install PATH_TO_GENERATED_PACKAGE --save diff --git a/sdk/templates/typescript/Taskfile.yml.mustache b/sdk/templates/typescript/Taskfile.yml.mustache deleted file mode 100644 index ea9fef41f..000000000 --- a/sdk/templates/typescript/Taskfile.yml.mustache +++ /dev/null @@ -1,30 +0,0 @@ -version: '3' - -tasks: - update: - cmds: - - npm install - - start-ledger: - cmds: - - > - docker run -d - --name ledger - --rm - -e NUMARY_SERVER_HTTP_BIND_ADDRESS=0.0.0.0:3068 - -p 3068:3068 - ghcr.io/formancehq/ledger:{{appVersion}} - - stop-ledger: - cmds: - - docker stop ledger - - test: - desc: Test client code - deps: - - update - cmds: - - task: start-ledger - - defer: - task: stop-ledger - - npm test