diff --git a/.github/workflows/CI-jest.yml b/.github/workflows/CI-jest.yml
deleted file mode 100644
index 481dffa..0000000
--- a/.github/workflows/CI-jest.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-#
-name: FE Tests
-
-on:
- push:
- branches:
- - main
- pull_request:
- branches: [main]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- Jest-TSC-ESLint:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- steps:
- - uses: actions/checkout@v1
- - name: Run Linter
- run: |
- npm install
- npm run lint
- working-directory: ./frontend
-
- - name: Run TS Checker
- run: |
- npm install
- npm run tsc
- working-directory: ./frontend
diff --git a/.github/workflows/artifacts_cleanup.yml b/.github/workflows/artifacts_cleanup.yml
deleted file mode 100644
index 286393e..0000000
--- a/.github/workflows/artifacts_cleanup.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-name: "nightly artifacts cleanup"
-on:
- schedule:
- - cron: "0 1 * * *" # every night at 1 am UTC
- workflow_dispatch:
-
-jobs:
- delete-artifacts:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- steps:
- - uses: kolpav/purge-artifacts-action@v1
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- expire-in: 1day # Set this to 0 to delete all artifacts
diff --git a/.github/workflows/backend-tests.yml b/.github/workflows/backend-tests.yml
deleted file mode 100644
index 9235372..0000000
--- a/.github/workflows/backend-tests.yml
+++ /dev/null
@@ -1,87 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-#
-name: Build
-
-env:
- DISABLE_DEEPCHECKS_ANONYMOUS_TELEMETRY: "true"
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- DEEPCHECKS_CI_TOKEN: ${{ secrets.DEEPCHECKS_CI_TOKEN }}
- DEEPCHECKS_BUCKET: deepchecks-monitoring-ci
-
-on:
- push:
- branches: [main]
- pull_request:
- branches: [main]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- tests:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- strategy:
- matrix:
- python-version: ["3.11"]
-
- steps:
- - uses: actions/checkout@v3
- - name: Configure AWS credentials
- uses: aws-actions/configure-aws-credentials@v1
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-1
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v3
- with:
- python-version: ${{ matrix.python-version }}
- - name: Set Up Env
- run: make env
- - name: pip freeze
- run: make freeze
- - name: Verify migrations linearity
- run: make check-migrations-liniarity
- - name: Run Tests
- run: make test
-# coverage:
-# runs-on: ubuntu-latest
-# name: coveralls
-# steps:
-# - uses: actions/checkout@v3
-# - uses: actions/setup-python@v3
-# with:
-# python-version: "3.9"
-# - name: Set Up Env
-# run: make env
-# working-directory: ./backend
-# - name: Run Coverage
-# run: make coveralls
-# working-directory: ./backend
-# env:
-# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-# tests-windows:
-# runs-on: windows-latest
-# strategy:
-# matrix:
-# python-version: ["3.8"]
-# steps:
-# - uses: actions/checkout@v3
-# - name: Set up Python ${{ matrix.python-version }}
-# uses: actions/setup-python@v3
-# with:
-# python-version: ${{ matrix.python-version }}
-# - name: Run Tests
-# run: make test-win
-# working-directory: ./backend
diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml
deleted file mode 100644
index 36a44a8..0000000
--- a/.github/workflows/e2e-tests.yml
+++ /dev/null
@@ -1,276 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-#
-name: PR Tests
-
-env:
- DISABLE_DEEPCHECKS_ANONYMOUS_TELEMETRY: "true"
-
-on:
- pull_request:
- branches:
- - main
- push:
- branches:
- - main
- # Manual run
- workflow_dispatch:
- push:
- branches: [main]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- build:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
- - name: Build and push
- uses: docker/build-push-action@v4
- with:
- context: .
- cache-from: type=gha
- cache-to: type=gha,mode=max
- tags: ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }}
- build-args: |
- DEEPCHECKS_CI_TOKEN=${{ secrets.DEEPCHECKS_CI_TOKEN }}
- MIXPANEL_ID=${{ secrets.MIXPANEL_ID }}
- outputs: type=docker,dest=/tmp/commImage.tar
- - name: Upload artifact
- uses: actions/upload-artifact@v2
- with:
- name: commImage
- path: /tmp/commImage.tar
- docker-push:
- runs-on: ubuntu-latest
- if: github.event_name != 'pull_request'
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
- - name: Configure AWS credentials
- uses: aws-actions/configure-aws-credentials@v1
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-1
- - name: Login to Amazon ECR
- id: login-ecr
- uses: aws-actions/amazon-ecr-login@v1
- - name: Configure AWS credentials (Public)
- uses: aws-actions/configure-aws-credentials@v1
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: us-east-1
- - name: Login to Amazon ECR Public
- id: login-ecr-public
- uses: aws-actions/amazon-ecr-login@v1
- with:
- registry-type: public
- # Sadly, buildx does not support loading images from file to reuse the image from the previous step
- # https://github.com/docker/buildx/issues/847
- - name: Build and push - private
- uses: docker/build-push-action@v4
- with:
- context: .
- cache-from: type=gha
- cache-to: type=gha,mode=max
- platforms: linux/amd64,linux/arm64/v8
- push: true
- tags: |
- ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }}
- ${{ secrets.MON_IMAGE_NAME }}:latest
- build-args: |
- DEEPCHECKS_CI_TOKEN=${{ secrets.DEEPCHECKS_CI_TOKEN }}
- MIXPANEL_ID=${{ secrets.MIXPANEL_ID }}
- - name: Build and push - public
- uses: docker/build-push-action@v4
- with:
- context: .
- cache-from: type=gha
- cache-to: type=gha,mode=max
- platforms: linux/amd64,linux/arm64/v8
- push: true
- tags: |
- ${{ secrets.PUBLIC_IMAGE_NAME }}:${{ github.sha }}
- ${{ secrets.PUBLIC_IMAGE_NAME }}:latest
- build-args: |
- DEEPCHECKS_CI_TOKEN=${{ secrets.DEEPCHECKS_CI_TOKEN }}
- MIXPANEL_ID=${{ secrets.MIXPANEL_ID }}
- IS_DEEPCHECKS_OSS="true"
- - name: Logout from Amazon ECR
- if: always()
- run: |
- docker logout ${{ steps.login-ecr.outputs.registry }}
- docker logout ${{ steps.login-ecr-public.outputs.registry }}
- dev-deploy:
- if: github.event_name != 'pull_request'
- runs-on: ubuntu-latest
- needs: docker-push
- steps:
- - name: Configure AWS credentials
- uses: aws-actions/configure-aws-credentials@v1
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-1
- - name: Download task definition
- run: |
- aws ecs describe-task-definition --task-definition mon-commercial --query taskDefinition > task-definition.json
- - name: Fill in the new image ID in the Amazon ECS task definition - app
- id: render-app-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: task-definition.json
- container-name: server
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }}
- - name: Fill in the new image ID in the Amazon ECS task definition - alert-scheduler
- id: render-alert-scheduler-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: ${{ steps.render-app-container.outputs.task-definition }}
- container-name: alert-scheduler
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }}
- - name: Fill in the new image ID in the Amazon ECS task definition - task queuer
- id: render-task-queuer-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: ${{ steps.render-alert-scheduler-container.outputs.task-definition }}
- container-name: task-queuer
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }}
- - name: Fill in the new image ID in the Amazon ECS task definition - task runner
- id: render-task-runner-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: ${{ steps.render-task-queuer-container.outputs.task-definition }}
- container-name: task-runner
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }}
- - name: Deploy Amazon ECS task definition
- uses: aws-actions/amazon-ecs-deploy-task-definition@v1
- with:
- task-definition: ${{ steps.render-task-runner-container.outputs.task-definition }}
- service: mon-commercial
- cluster: DevMonitoring
- e2e:
- runs-on: ubuntu-latest-4-cores
- needs: build
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
- - name: Download artifact
- uses: actions/download-artifact@v2
- with:
- name: commImage
- path: /tmp
- - name: Load Docker image
- run: |
- docker load --input /tmp/commImage.tar
- docker tag ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }} deepchecks-enterprise-testing
- docker image ls -a
- - name: Run e2e tests
- env:
- LAUCHDARKLY_SDK_KEY: ${{ secrets.LAUCHDARKLY_SDK_KEY }}
- OAUTH_CLIENT_ID: ${{ secrets.OAUTH_CLIENT_ID }}
- OAUTH_CLIENT_SECRET: ${{ secrets.OAUTH_CLIENT_SECRET }}
- run: make cypress
- - name: Upload artifact
- if: always()
- uses: actions/upload-artifact@v2
- with:
- name: cypress-artifacts
- path: |
- ./cypress/screenshots
- ./cypress/videos
- retention-days: 1
- docs:
- needs: build
- runs-on: ubuntu-latest
- steps:
- - name: Checkout deepchecks
- uses: actions/checkout@v2
-
- - name: Setup Python
- uses: actions/setup-python@v2
- with:
- python-version: "3.11"
- - name: Install pandoc
- run: |
- sudo apt-get update
- sudo apt-get install pandoc dvisvgm texlive texlive-latex-extra postgresql-client
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
- - name: Download artifact
- uses: actions/download-artifact@v2
- with:
- name: commImage
- path: /tmp
- - name: Load Docker image
- run: |
- docker load --input /tmp/commImage.tar
- docker tag ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }} deepchecks-enterprise-testing
- docker image ls -a
- - name: Setup environment
- run: |
- make external-services-setup
- - name: DB load
- run: |
- docker run --env-file e2e/.development.env --network deepchecks deepchecks-enterprise-testing init-test-db.sh
- echo "DEEPCHECKS_API_TOKEN=ZTJlLXRlc3RpbmdAZGVlcGNoZWNrcy5jb20=.ASB8rB13J5i_4Nma7zZfIA" >> $GITHUB_ENV
- echo "DEEPCHECKS_API_HOST=http://127.0.0.1:8000" >> $GITHUB_ENV
- env:
- DEEPCHECKS_CI_TOKEN: ${{ secrets.DEEPCHECKS_CI_TOKEN }}
-
- - name: Build documentation
- env:
- DEEPCHECKS_CI_TOKEN: ${{ secrets.DEEPCHECKS_CI_TOKEN }}
- DEEPCHECKS_API_TOKEN: ${{ env.DEEPCHECKS_API_TOKEN }}
- DEEPCHECKS_API_HOST: ${{ env.DEEPCHECKS_API_HOST }}
- LAUCHDARKLY_SDK_KEY: ${{ secrets.LAUCHDARKLY_SDK_KEY }}
- OAUTH_CLIENT_ID: ${{ secrets.OAUTH_CLIENT_ID }}
- OAUTH_CLIENT_SECRET: ${{ secrets.OAUTH_CLIENT_SECRET }}
- run: |
- make env-setup
- make docs
- # - name: Debugging with ssh
- # uses: lhotari/action-upterm@v1
- # with:
- # ## limits ssh access and adds the ssh public key for the user which triggered the workflow
- # limit-access-to-actor: true
- # env:
- # DEEPCHECKS_CI_TOKEN: ${{ secrets.DEEPCHECKS_CI_TOKEN }}
- # DEEPCHECKS_API_TOKEN: ${{ env.DEEPCHECKS_API_TOKEN }}
- # DEEPCHECKS_API_HOST: ${{ env.DEEPCHECKS_API_HOST }}
- - name: Print errors
- if: always()
- run: cat docs/docs.error.log
- - name: Print server logs
- if: always()
- run: docker logs `docker ps | grep :8000 | awk '{print $1}'`
- - name: Checkout deepchecks.github.io
- if: github.event_name != 'pull_request'
- uses: actions/checkout@v2
- with:
- repository: deepchecks/deepchecks.github.io
- path: deepchecks.github.io
- token: ${{ secrets.DEEPCHECKS_CI_TOKEN }}
-
- - name: Push documentation
- if: github.event_name != 'pull_request'
- run: ${{ github.workspace }}/.github/scripts/push_docs.sh
diff --git a/.github/workflows/label-new-issue.yml b/.github/workflows/label-new-issue.yml
deleted file mode 100644
index 436dc8a..0000000
--- a/.github/workflows/label-new-issue.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-#
-name: Label Blank Issues
-
-on:
- issues:
- types: [opened]
-
-jobs:
- label-blank-issues:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- steps:
- - uses: andymckay/labeler@1.0.4
- with:
- add-labels: "monitoring, linear"
diff --git a/.github/workflows/license-check.yml b/.github/workflows/license-check.yml
deleted file mode 100644
index 94260d9..0000000
--- a/.github/workflows/license-check.yml
+++ /dev/null
@@ -1,87 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-#
-name: License Check
-
-env:
- DISABLE_DEEPCHECKS_ANONYMOUS_TELEMETRY: "true"
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- DEEPCHECKS_CI_TOKEN: ${{ secrets.DEEPCHECKS_CI_TOKEN }}
-
-on:
- push:
- branches: [main]
- pull_request:
- branches: [main]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- backend-license-check:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- steps:
- - name: Checkout the code
- uses: actions/checkout@v3
- with:
- fetch-depth: 0
- - name: Setup Python
- uses: actions/setup-python@v3
- with:
- python-version: 3.11
- - name: Get explicit and transitive dependencies
- run: |
- pip install -U pip==22.0.4 setuptools==58.3.0
- pip install -q -r requirements.txt
- pip freeze > requirements-all.txt
- working-directory: ./backend
- - name: Check python
- id: license_check_report
- uses: pilosus/action-pip-license-checker@v0.6.1
- with:
- requirements: "backend/requirements-all.txt"
- fail: "Copyleft,Other,Error"
- exclude: '(psycopg2.*2\.9\.3|fqdn.*1\.5\.1|pyzmq.*25\.1\.2|debugpy.*1\.6\.7|certifi.*2023\.11\.17|tqdm.*4\.66\.1|webencodings.*0\.5\.1|torch.*1\.10\.2.*|torch.*1\.11\.0.*|pytorch-ignite.*0\.4\.10.*|torchaudio.*0\.11\.0.*|torchvision.*0\.12\.0.*|terminado.*0\.15\.0|qudida.*0\.0\.4|expiringdict.*1\.2\.2|botocore.*1\.29\.80|orderedmultidict.*1\.0\.1|deepchecks.*)'
- # psycopg2 is LGPL 2
- # pyzmq is Revised BSD https://github.com/zeromq/pyzmq/blob/main/examples/LICENSE
- # debugpy is MIT https://github.com/microsoft/debugpy/blob/main/LICENSE
- # certifi is MPL-2.0 https://github.com/certifi/python-certifi/blob/master/LICENSE
- # tqdm is MPL-2.0 https://github.com/tqdm/tqdm/blob/master/LICENCE
- # webencodings is BSD https://github.com/gsnedders/python-webencodings/blob/master/LICENSE
- # torch is BSD https://github.com/pytorch/pytorch/blob/master/LICENSE
- # torchvision is BSD https://github.com/pytorch/vision/blob/main/LICENSE
- # torchaudio is BSD https://github.com/pytorch/audio/blob/main/LICENSE
- # terminado is BSD https://github.com/jupyter/terminado/blob/main/LICENSE
-# orderedmultidict is freely distributed https://github.com/gruns/orderedmultidict/blob/master/LICENSE.md
- - name: Print report
- if: ${{ always() }}
- run: echo "${{ steps.license_check_report.outputs.report }}"
- frontend-license-check:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout the code
- uses: actions/checkout@v3
- with:
- fetch-depth: 0
- - name: Setup Node
- uses: actions/setup-node@v3
- with:
- node-version: 16
- - run: npm install --production=false
- working-directory: ./frontend
- - run: ./node_modules/license-checker/bin/license-checker --production --onlyAllow "MIT;ISC;BSD;Apache-2.0;Python-2.0;CC0-1.0;Unlicense" --excludePackages "axe-core@4.4.3;caniuse-lite@1.0.30001400;lz-string@1.4.4;@mapbox/jsonlint-lines-primitives@2.0.2;gl-mat4@1.2.0" --summary --excludePrivatePackages
- working-directory: ./frontend
- # axe-core is MPL-2.0 https://www.npmjs.com/package/axe-core
- # caniuse-lite is CC-BY-4.0 https://github.com/browserslist/caniuse-lite/blob/main/LICENSE
- # lz-string is WTFPL (MIT) https://github.com/pieroxy/lz-string/blob/master/LICENSE
- # @mapbox/jsonlint-lines-primitives is MIT https://github.com/tmcw/jsonlint
-# gl-mat4 is zlib licensed
diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml
deleted file mode 100644
index 0d60998..0000000
--- a/.github/workflows/pylint.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-
-name: Code Style
-
-env:
- DISABLE_DEEPCHECKS_ANONYMOUS_TELEMETRY: "true"
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- DEEPCHECKS_CI_TOKEN: ${{ secrets.DEEPCHECKS_CI_TOKEN }}
-
-on:
- push:
- branches: [main]
- pull_request:
- branches: [main]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- pylint:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- steps:
- - uses: actions/checkout@v2
- - name: Set up Python 3.11
- uses: actions/setup-python@v2
- with:
- python-version: 3.11
-
- - name: Run Pylint
- run: make pylint
- working-directory: ./backend
- - name: Check import order
- uses: jamescurtin/isort-action@master
- with:
- sortPaths: backend/deepchecks_monitoring
- docstring:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Set up Python 3.11
- uses: actions/setup-python@v2
- with:
- python-version: 3.11
- - name: Run Pydocstyle
- run: make docstring
- working-directory: ./backend
-# license-check:
-# runs-on: ubuntu-latest
-#
-# needs: docstring
-#
-# steps:
-# - uses: actions/checkout@v2
-# - name: Set up Python 3.9
-# uses: actions/setup-python@v2
-# with:
-# python-version: 3.9
-# - name: Run License eye
-# run: make license-check
-# working-directory: ./backend
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index 6fffcc6..0000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,117 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-#
-name: Release
-
-on:
- push:
- tags:
- - "[0-9]+.[0-9]+.[0-9]+"
-
-env:
- DISABLE_DEEPCHECKS_ANONYMOUS_TELEMETRY: "true"
-
-jobs:
- release:
- runs-on: ubuntu-latest
- if: "!github.event.pull_request.draft"
- steps:
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
- - name: Configure AWS credentials
- uses: aws-actions/configure-aws-credentials@v1
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-1
- - name: Login to Amazon ECR
- id: login-ecr
- uses: aws-actions/amazon-ecr-login@v1
- - name: Configure AWS credentials (Public)
- uses: aws-actions/configure-aws-credentials@v1
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: us-east-1
- - name: Login to Amazon ECR Public
- id: login-ecr-public
- uses: aws-actions/amazon-ecr-login@v1
- with:
- registry-type: public
- - name: Set TAG env var
- run: |
- echo "TAG=$(echo -n "${{ github.event.ref }}" | cut -d '/' -f3)" >> $GITHUB_ENV
- - name: Tag image as latest-release - private
- run: |
- docker buildx imagetools create \
- --tag ${{ secrets.MON_IMAGE_NAME }}:$(echo -n "${{ github.event.ref }}" | cut -d '/' -f3) \
- ${{ secrets.MON_IMAGE_NAME }}:${{ github.sha }}
- - name: Tag image as latest-release - public
- run: |
- docker buildx imagetools create \
- --tag ${{ secrets.PUBLIC_IMAGE_NAME }}:$(echo -n "${{ github.event.ref }}" | cut -d '/' -f3) \
- --tag ${{ secrets.PUBLIC_IMAGE_NAME }}:latest-release \
- ${{ secrets.PUBLIC_IMAGE_NAME }}:${{ github.sha }}
- - name: Logout from Amazon ECR
- if: always()
- run: |
- docker logout ${{ steps.login-ecr.outputs.registry }}
- docker logout ${{ steps.login-ecr-public.outputs.registry }}
-
- update-demo-task-definition:
- runs-on: ubuntu-latest
- needs: release
- steps:
- - name: Configure AWS credentials
- uses: aws-actions/configure-aws-credentials@v1
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-1
- - name: Download task definition
- run: |
- aws ecs describe-task-definition --task-definition mon-commercial-staging --query taskDefinition > task-definition.json
- - name: Set TAG env var
- run: |
- echo "TAG=$(echo -n "${{ github.event.ref }}" | cut -d '/' -f3)" >> $GITHUB_ENV
- - name: Fill in the new image ID in the Amazon ECS task definition - app
- id: render-app-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: task-definition.json
- container-name: app
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ env.TAG }}
- - name: Fill in the new image ID in the Amazon ECS task definition - alert-scheduler
- id: render-alert-scheduler-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: ${{ steps.render-app-container.outputs.task-definition }}
- container-name: alert-scheduler
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ env.TAG }}
- - name: Fill in the new image ID in the Amazon ECS task definition - task queuer
- id: render-task-queuer-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: ${{ steps.render-alert-scheduler-container.outputs.task-definition }}
- container-name: task-queuer
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ env.TAG }}
- - name: Fill in the new image ID in the Amazon ECS task definition - task runner
- id: render-task-runner-container
- uses: aws-actions/amazon-ecs-render-task-definition@v1
- with:
- task-definition: ${{ steps.render-task-queuer-container.outputs.task-definition }}
- container-name: task-runner
- image: ${{ secrets.MON_IMAGE_NAME }}:${{ env.TAG }}
- - name: Deploy Amazon ECS task definition
- uses: aws-actions/amazon-ecs-deploy-task-definition@v1
- with:
- task-definition: ${{ steps.render-task-runner-container.outputs.task-definition }}
- service: demo
- cluster: DevMonitoring
diff --git a/AGPL_LICENSE b/AGPL_LICENSE
deleted file mode 100644
index eaa3b0a..0000000
--- a/AGPL_LICENSE
+++ /dev/null
@@ -1,692 +0,0 @@
-Deepchecks Monitoring
-
-Copyright (C) 2021-2023 Deepchecks
-
- This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU Affero General Public License as
-published by the Free Software Foundation, either version 3 of the
-License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU Affero General Public License for more details.
-
- A copy of the GNU Affero General Public License is set out below and is
-also found at <https://www.gnu.org/licenses/>.
-
-
-Additional permission under GNU AGPL version 3 section 7
-
- If you modify this Program, or any covered work, by linking or combining it
-with the sub-library in the “**/ee/” sub-directory (or a modified version of
-that library), covered by the Terms & Conditions of the sub-library in the
-“**/ee/” sub-directory, found at https://deepchecks.com/terms-and-conditions
-(the "Commercial License"), the licensors of this Program grant you
-additional permission to convey the resulting work provided that use of the
-sub-library in the “**/ee/” sub-directory is subject to the terms of the
-Commercial License.
-
---------------------------------
-
- GNU AFFERO GENERAL PUBLIC LICENSE
- Version 3, 19 November 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU Affero General Public License is a free, copyleft license for
-software and other kinds of works, specifically designed to ensure
-cooperation with the community in the case of network server software.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-our General Public Licenses are intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- Developers that use our General Public Licenses protect your rights
-with two steps: (1) assert copyright on the software, and (2) offer
-you this License which gives you legal permission to copy, distribute
-and/or modify the software.
-
- A secondary benefit of defending all users' freedom is that
-improvements made in alternate versions of the program, if they
-receive widespread use, become available for other developers to
-incorporate. Many developers of free software are heartened and
-encouraged by the resulting cooperation. However, in the case of
-software used on network servers, this result may fail to come about.
-The GNU General Public License permits making a modified version and
-letting the public access it on a server without ever releasing its
-source code to the public.
-
- The GNU Affero General Public License is designed specifically to
-ensure that, in such cases, the modified source code becomes available
-to the community. It requires the operator of a network server to
-provide the source code of the modified version running there to the
-users of that server. Therefore, public use of a modified version, on
-a publicly accessible server, gives the public access to the source
-code of the modified version.
-
- An older license, called the Affero General Public License and
-published by Affero, was designed to accomplish similar goals. This is
-a different license, not a version of the Affero GPL, but Affero has
-released a new version of the Affero GPL which permits relicensing under
-this license.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU Affero General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Remote Network Interaction; Use with the GNU General Public License.
-
- Notwithstanding any other provision of this License, if you modify the
-Program, your modified version must prominently offer all users
-interacting with it remotely through a computer network (if your version
-supports such interaction) an opportunity to receive the Corresponding
-Source of your version by providing access to the Corresponding Source
-from a network server at no charge, through some standard or customary
-means of facilitating copying of software. This Corresponding Source
-shall include the Corresponding Source for any work covered by version 3
-of the GNU General Public License that is incorporated pursuant to the
-following paragraph.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the work with which it is combined will remain governed by version
-3 of the GNU General Public License.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU Affero General Public License from time to time. Such new versions
-will be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU Affero General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU Affero General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU Affero General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year>  <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published
- by the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
-
- You should have received a copy of the GNU Affero General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If your software can interact with users remotely through a computer
-network, you should also make sure that it provides a way for users to
-get its source. For example, if your program is a web application, its
-interface could display a "Source" link that leads users to an archive
-of the code. There are many ways you could offer source, and different
-solutions will be better for different programs; see section 13 for the
-specific requirements.
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU AGPL, see
-<https://www.gnu.org/licenses/>.
diff --git a/LICENSE b/LICENSE
index 95211ea..eaa3b0a 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,11 +1,692 @@
-Copyright © 2021-2023 Deepchecks
-Portions of this software are licensed as follows:
-* The sub-library under the sub-directory named "**/ee/", if exists,
- is licensed under the license defined in "**/ee/LICENSE".
-* All third party components incorporated into the Deepchecks Software
- are licensed under the original license provided by the owner of the
- applicable component.
-* Content outside of the above mentioned directories or restrictions above,
- is available under the AGPL license as set forth in "AGPL_LICENSE".
\ No newline at end of file
+Deepchecks Monitoring
+Copyright (C) 2021-2023 Deepchecks
+ This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as
+published by the Free Software Foundation, either version 3 of the
+License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Affero General Public License for more details.
+
+ A copy of the GNU Affero General Public License is set out below and is
+also found at <https://www.gnu.org/licenses/>.
+
+
+Additional permission under GNU AGPL version 3 section 7
+
+ If you modify this Program, or any covered work, by linking or combining it
+with the sub-library in the “**/ee/” sub-directory (or a modified version of
+that library), covered by the Terms & Conditions of the sub-library in the
+“**/ee/” sub-directory, found at https://deepchecks.com/terms-and-conditions
+(the "Commercial License"), the licensors of this Program grant you
+additional permission to convey the resulting work provided that use of the
+sub-library in the “**/ee/” sub-directory is subject to the terms of the
+Commercial License.
+
+--------------------------------
+
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/backend/deepchecks_monitoring/ee/LICENSE b/backend/deepchecks_monitoring/ee/LICENSE
deleted file mode 100644
index 7fca747..0000000
--- a/backend/deepchecks_monitoring/ee/LICENSE
+++ /dev/null
@@ -1,6 +0,0 @@
-Copyright © 2023 Deepchecks
-
-The sub-library in this directory (“**/ee/”) is licensed under the
-Terms & Conditions found at https://deepchecks.com/terms-and-conditions/.
-
-
diff --git a/backend/deepchecks_monitoring/ee/__init__.py b/backend/deepchecks_monitoring/ee/__init__.py
deleted file mode 100644
index 5cf2d32..0000000
--- a/backend/deepchecks_monitoring/ee/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from . import api, bgtasks, config, integrations, middlewares, notifications, resources, utils
-
-__all__ = []
diff --git a/backend/deepchecks_monitoring/ee/api/__init__.py b/backend/deepchecks_monitoring/ee/api/__init__.py
deleted file mode 100644
index 326cdf4..0000000
--- a/backend/deepchecks_monitoring/ee/api/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from . import v1
diff --git a/backend/deepchecks_monitoring/ee/api/v1/__init__.py b/backend/deepchecks_monitoring/ee/api/v1/__init__.py
deleted file mode 100644
index c6c1991..0000000
--- a/backend/deepchecks_monitoring/ee/api/v1/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from . import billing, data_sources, members, slack
-from .routers import cloud_router, ee_router
-
-__all__ = ['cloud_router', 'ee_router', 'slack', 'billing', 'members']
diff --git a/backend/deepchecks_monitoring/ee/api/v1/billing.py b/backend/deepchecks_monitoring/ee/api/v1/billing.py
deleted file mode 100644
index 66f64ba..0000000
--- a/backend/deepchecks_monitoring/ee/api/v1/billing.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-"""Module representing the endpoints for billing."""
-import typing as t
-from datetime import datetime
-
-import sqlalchemy as sa
-import stripe
-from fastapi import Depends, Request
-from pydantic.main import BaseModel
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from deepchecks_monitoring.api.v1.global_api import global_router
-from deepchecks_monitoring.dependencies import AsyncSessionDep, SettingsDep
-from deepchecks_monitoring.exceptions import AccessForbidden, BadRequest
-from deepchecks_monitoring.public_models import Billing, Organization, User
-from deepchecks_monitoring.public_models.organization import OrgTier
-from deepchecks_monitoring.utils import auth
-
-from .routers import cloud_router as router
-
-
-class BillingSchema(BaseModel):
- """Billing schema."""
-
- id: int
- subscription_id: t.Optional[str]
- bought_models: int
- last_update: t.Optional[datetime]
- started_at: datetime
- organization_id: int
-
- class Config:
- """Schema config."""
-
- orm_mode = True
-
-
-class CheckoutSchema(BaseModel):
- """Schema for the request of create subscription endpoint."""
-
- price_id: str
- quantity: int
-
-
-class SubscriptionCreationResponse(BaseModel):
- """Schema for the response of create subscription endpoint."""
-
- client_secret: str
- subscription_id: str
-
-
-class PaymentMethodSchema(BaseModel):
- """Schema for the payment method update endpoint."""
-
- payment_method_id: str
-
-
-class ProductResponseSchema(BaseModel):
- """Schema that represent a product from stripe."""
-
- id: str
- default_price: str
- unit_amount: int
- name: str
- description: t.Optional[str]
-
-
-class SubscriptionSchema(BaseModel):
- """Schema for the subscription object."""
-
- models: int
- subscription_id: str
- status: str
- start_date: int
- current_period_end: int
- cancel_at_period_end: bool
- plan: str
-
-
-class ChargeSchema(BaseModel):
- """Schema for the charge object."""
-
- id: str
- plan: t.Optional[str]
- models: t.Optional[int]
- paid: bool
- amount: int
- receipt_url: str
- created: int
-
-
-def _get_subscription(stripe_customer_id: str, status: t.Optional[str]) -> t.List[SubscriptionSchema]:
- subscriptions = stripe.Subscription.list(customer=stripe_customer_id, status=status)["data"]
- product_dict = {product["id"]: product["name"] for product in stripe.Product.list()["data"]}
- subscriptions_schemas = []
- for subscription in subscriptions:
- subscription_schema = SubscriptionSchema(
- models=subscription["quantity"],
- subscription_id=subscription["id"],
- status=subscription["status"],
- start_date=subscription["start_date"],
- current_period_end=subscription["current_period_end"],
- cancel_at_period_end=subscription["cancel_at_period_end"],
- plan=product_dict[subscription["items"]["data"][0]["price"]["product"]]
- )
- subscriptions_schemas.append(subscription_schema)
- return subscriptions_schemas
-
-
-@router.get("/billing/charges", tags=["billing"], response_model=t.List[ChargeSchema])
-async def list_all_charges(
- user: User = Depends(auth.OwnerUser()) # pylint: disable=unused-argument
-):
- """Get the list of available products from stripe."""
- try:
- charges_list = stripe.Charge.list(customer=user.organization.stripe_customer_id)["data"]
- invoices_dict = {invoice["id"]: invoice for invoice in
- stripe.Invoice.list(customer=user.organization.stripe_customer_id)["data"]}
- sub_dict = {sub.subscription_id: sub for sub in
- _get_subscription(user.organization.stripe_customer_id, "all")}
- charges_schema = []
- for charge in charges_list:
- invoice = invoices_dict.get(charge["invoice"]) if charge.get("invoice") else None
- sub = sub_dict.get(invoice["subscription"]) if invoice is not None else None
- if sub:
- charge_schema = ChargeSchema(plan=sub.plan, models=sub.models, **charge)
- else:
- charge_schema = ChargeSchema(**charge)
- charges_schema.append(charge_schema)
- return charges_schema
- except stripe.error.StripeError as e:
- raise BadRequest(str(e)) from e
-
-
-@router.get("/billing/available-products", tags=["billing"], response_model=t.List[ProductResponseSchema])
-async def list_all_products(
- user: User = Depends(auth.OwnerUser()) # pylint: disable=unused-argument
-):
- """Get the list of available products from stripe."""
- try:
- product_list = stripe.Product.list()
- price_dict = {price["id"]: price["unit_amount"] for price in stripe.Price.list()["data"]}
- return [ProductResponseSchema(unit_amount=price_dict[x["default_price"]], **x) for x in product_list["data"]]
- except stripe.error.StripeError as e:
- raise BadRequest(str(e)) from e
-
-
-@router.post("/billing/payment-method", tags=["billing"])
-async def update_payment_method(body: PaymentMethodSchema, user: User = Depends(auth.OwnerUser())):
- """Update the payment method on stripe."""
- try:
- stripe.PaymentMethod.attach(
- body.payment_method_id,
- customer=user.organization.stripe_customer_id,
- )
- # Set the default payment method on the customer
- stripe.Customer.modify(
- user.organization.stripe_customer_id,
- invoice_settings={
- "default_payment_method": body.payment_method_id,
- },
- )
-
- return
- except stripe.error.StripeError as e:
- raise BadRequest(str(e)) from e
-
-
-@router.get("/billing/payment-method", tags=["billing"], response_model=t.List)
-async def get_payment_method(user: User = Depends(auth.OwnerUser())) -> t.List:
- """Return the payment method of the organization."""
- customer_id = user.organization.stripe_customer_id
-
- try:
- return stripe.Customer.list_payment_methods(
- customer_id,
- type="card"
- )["data"]
- except stripe.error.StripeError as e:
- raise AccessForbidden(str(e)) from e
-
-
-@router.get("/billing/subscription", tags=["billing"], response_model=t.List)
-async def get_subscriptions(user: User = Depends(auth.OwnerUser())) -> t.List:
- """Return a list of subscription of the organization."""
- try:
- subscriptions = stripe.Subscription.list(customer=user.organization.stripe_customer_id,
- expand=["data.latest_invoice.payment_intent"])
- return subscriptions["data"]
- except stripe.error.StripeError as e:
- raise AccessForbidden(str(e)) from e
-
-
-@router.post("/billing/subscription", tags=["billing"], response_model=SubscriptionCreationResponse)
-async def create_subscription(
- body: CheckoutSchema,
- user: User = Depends(auth.OwnerUser())
-) -> SubscriptionCreationResponse:
- """Creates a checkout session with stripe"""
- try:
- # Create the subscription
- subscription = stripe.Subscription.create(
- customer=user.organization.stripe_customer_id,
- items=[
- {
- "price": body.price_id,
- "quantity": body.quantity - 1
- }
- ],
- payment_behavior="default_incomplete",
- expand=["latest_invoice.payment_intent"],
- )
-
- return SubscriptionCreationResponse(
- client_secret=subscription.latest_invoice.payment_intent.client_secret,
- subscription_id=subscription.id
- )
- except stripe.error.StripeError as e:
- raise BadRequest(str(e)) from e
-
-
-@router.delete("/billing/subscription/{subscription_id}", tags=["billing"])
-def cancel_subscription(
- subscription_id: str,
- user: User = Depends(auth.OwnerUser()) # pylint: disable=unused-argument
-):
- """Cancel the subscription."""
- try:
- # Cancel the subscription by deleting it
- deleted_subscription = stripe.Subscription.delete(
- subscription_id)
- return deleted_subscription
- except stripe.error.StripeError as e:
- return AccessForbidden(str(e))
-
-
-@router.put("/billing/subscription/{subscription_id}", tags=["billing"], response_model=SubscriptionCreationResponse)
-def update_subscription(
- subscription_id: str,
- body: CheckoutSchema,
- user: User = Depends(auth.OwnerUser()), # pylint: disable=unused-argument
-) -> SubscriptionCreationResponse:
- """Update the subscription for the organization."""
- try:
- subscription = stripe.Subscription.retrieve(subscription_id)
-
- updated_subscription = stripe.Subscription.modify(
- subscription_id,
- cancel_at_period_end=False,
- items=[{
- "id": subscription["items"]["data"][0].id,
- "price": body.price_id,
- "quantity": body.quantity - 1
- }],
- expand=["latest_invoice.payment_intent"]
- )
- return SubscriptionCreationResponse(
- client_secret=updated_subscription.latest_invoice.payment_intent.client_secret,
- subscription_id=updated_subscription.id
- )
- except stripe.error.StripeError as e:
- return AccessForbidden(str(e))
-
-
-@global_router.post("/billing/webhook", tags=["billing"])
-async def stripe_webhook(request: Request,
- settings=SettingsDep,
- session: AsyncSession = AsyncSessionDep):
- """Webhook to catch stripe events."""
- # You can use webhooks to receive information about asynchronous payment events.
- # For more about our webhook events check out https://stripe.com/docs/webhooks.
- webhook_secret = settings.stripe_webhook_secret
- request_data = await request.json()
-
- if webhook_secret:
- # Retrieve the event by verifying the signature using the raw body and secret if webhook signing is configured.
- signature = request.headers.get("stripe-signature")
- try:
- event = stripe.Webhook.construct_event(
- payload=request_data, sig_header=signature, secret=webhook_secret)
- data = event["data"]
- except stripe.error.StripeError as e:
- return e
- # Get the type of webhook event sent - used to check the status of PaymentIntents.
- event_type = event["type"]
- else:
- data = request_data["data"]
- event_type = request_data["type"]
-
- if event_type in [
- "charge.succeeded",
- "customer.subscription.created",
- "customer.subscription.deleted",
- "customer.subscription.updated"
- ]:
- org: Organization = await session.scalar(sa.select(Organization)
- .where(Organization.stripe_customer_id == data["object"]["customer"]))
- if org is not None:
- billing: Billing = await session.scalar(sa.select(Billing).where(Billing.organization_id == org.id))
- if billing is None:
- billing = Billing(organization_id=org.id)
- session.add(billing)
- subs = _get_subscription(org.stripe_customer_id, "active")
- if len(subs) > 0:
- billing.bought_models = subs[0].models
- billing.subscription_id = subs[0].subscription_id
- org.tier = OrgTier.BASIC
- else:
- billing.bought_models = 0
- billing.subscription_id = None
- org.tier = OrgTier.FREE
-
- await session.commit()
- await session.flush()
-
- return {"status": "success"}
diff --git a/backend/deepchecks_monitoring/ee/api/v1/data_sources.py b/backend/deepchecks_monitoring/ee/api/v1/data_sources.py
deleted file mode 100644
index d43153b..0000000
--- a/backend/deepchecks_monitoring/ee/api/v1/data_sources.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see <http://www.gnu.org/licenses/>.
-# ----------------------------------------------------------------------------
-"""Module representing the endpoints for data sources."""
-import typing as t
-
-import boto3
-import sqlalchemy as sa
-from botocore.config import Config
-from botocore.exceptions import BotoCoreError
-from fastapi import Depends, Response
-from pydantic.main import BaseModel
-from sqlalchemy.ext.asyncio import AsyncSession
-from starlette import status
-
-from deepchecks_monitoring.config import Tags
-from deepchecks_monitoring.dependencies import AsyncSessionDep
-from deepchecks_monitoring.exceptions import BadRequest
-from deepchecks_monitoring.monitoring_utils import fetch_or_404
-from deepchecks_monitoring.public_models import User
-from deepchecks_monitoring.schema_models import DataSource
-from deepchecks_monitoring.utils import auth
-
-from .routers import ee_router as router
-
-
-class DataSourceCreationSchema(BaseModel):
- """Data Source creation schema."""
-
- type: str
- parameters: t.Dict[str, t.Any]
-
-
-class DataSourceSchema(DataSourceCreationSchema):
- """Data Source schema."""
-
- id: int
-
- class Config:
- orm_mode = True
-
-
-@router.get('/data-sources', response_model=t.List[DataSourceSchema], tags=[Tags.DATA_SOURCES])
-async def get_data_sources(session: AsyncSession = AsyncSessionDep,
- current_user: User = Depends(auth.AdminUser()) # pylint: disable=unused-argument
- ):
- data_sources = await session.scalars(sa.select(DataSource))
- return data_sources.all()
-
-
-@router.put('/data-sources', tags=[Tags.DATA_SOURCES])
-async def new_data_source(body: DataSourceCreationSchema,
- session: AsyncSession = AsyncSessionDep,
- current_user: User = Depends(auth.AdminUser()), # pylint: disable=unused-argument
- ):
- data_source = DataSource(**body.dict())
- if data_source.type == 's3':
- # Test parameters name given
- expected = {'aws_access_key_id', 'aws_secret_access_key', 'region'}
- if set(data_source.parameters.keys()) != expected:
- raise BadRequest(f'Invalid parameters for S3 data source, expected: {sorted(expected)}')
- # Test credentials to AWS
- try:
- sts = boto3.client(
- 'sts',
- aws_access_key_id=data_source.parameters['aws_access_key_id'],
- aws_secret_access_key=data_source.parameters['aws_secret_access_key'],
- config=Config(region_name=data_source.parameters['region'])
- )
- sts.get_caller_identity()
- except BotoCoreError as e:
- raise BadRequest('Invalid credentials to AWS') from e
- else:
- raise BadRequest('Invalid data source type')
-
- session.add(data_source)
- await session.commit()
- return Response(status_code=status.HTTP_200_OK)
-
-
-@router.delete('/data-sources/{data_source_id}', tags=[Tags.DATA_SOURCES])
-async def delete_data_source(data_source_id: int,
- session: AsyncSession = AsyncSessionDep,
- current_user: User = Depends(auth.AdminUser()), # pylint: disable=unused-argument
- ):
- data_source = await fetch_or_404(session, DataSource, id=data_source_id)
- await session.delete(data_source)
- await session.commit()
- return Response(status_code=status.HTTP_200_OK)
diff --git a/backend/deepchecks_monitoring/ee/api/v1/members.py b/backend/deepchecks_monitoring/ee/api/v1/members.py
deleted file mode 100644
index 1cfd525..0000000
--- a/backend/deepchecks_monitoring/ee/api/v1/members.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see .
-# ----------------------------------------------------------------------------
-"""Module representing the endpoints for members advanced edit."""
-import typing as t
-
-import sqlalchemy as sa
-from fastapi import Depends
-from pydantic.main import BaseModel
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.orm import joinedload
-
-from deepchecks_monitoring.api.v1.global_api.users import UserSchema
-from deepchecks_monitoring.config import Tags
-from deepchecks_monitoring.dependencies import AsyncSessionDep, ResourcesProviderDep
-from deepchecks_monitoring.exceptions import BadRequest, PaymentRequired
-from deepchecks_monitoring.features_control import FeaturesControl
-from deepchecks_monitoring.monitoring_utils import exists_or_404, fetch_or_404
-from deepchecks_monitoring.public_models import User
-from deepchecks_monitoring.public_models.role import Role, RoleEnum
-from deepchecks_monitoring.resources import ResourcesProvider
-from deepchecks_monitoring.schema_models.model import Model
-from deepchecks_monitoring.schema_models.model_memeber import ModelMember
-from deepchecks_monitoring.utils import auth
-
-from .routers import ee_router as router
-
-
-class RoleUpdateSchema(BaseModel):
- """Role update schema."""
-
- roles: t.List[RoleEnum]
- replace: t.Optional[bool] = True
-
-
-class MemberUpdateSchema(BaseModel):
- """Member update schema."""
-
- model_ids: t.List[int]
- replace: t.Optional[bool] = True
-
-
-class BatchModelMemberUpdateSchema(BaseModel):
-    """Batch model member update schema."""
-
- user_ids: t.List[int]
- replace: t.Optional[bool] = True
-
-
-@router.put("/users/{user_id}/roles", response_model=UserSchema, tags=[Tags.USERS])
-async def update_user_role(roles_schema: RoleUpdateSchema,
- user_id: int,
- session: AsyncSession = AsyncSessionDep,
- resources_provider: ResourcesProvider = ResourcesProviderDep,
- current_user: User = Depends(auth.OwnerUser()), # pylint: disable=unused-argument
- ) -> UserSchema:
- """Update user roles."""
-
- features_control: FeaturesControl = resources_provider.get_features_control(current_user)
- if not features_control.update_roles:
-        raise PaymentRequired("Updating roles requires setting up a subscription. "
- f"Set up through {resources_provider.settings.deployment_url}"
- f"/workspace-settings")
- user = await fetch_or_404(session, User, id=user_id)
- if user.organization_id != current_user.organization_id:
-        raise BadRequest("User doesn't exist in your organization.")
-
- if user_id == current_user.id and roles_schema.replace and RoleEnum.OWNER not in roles_schema.roles:
- owner = await session.scalar(
- sa.select(Role).where(sa.and_(Role.role == RoleEnum.OWNER,
- Role.user_id != current_user.id,
- User.organization_id == current_user.organization_id))
- .join(User, Role.user_id == User.id)
- )
- if not owner:
- raise BadRequest("Owner cannot remove their owner role if there are no other owners in the organization.")
-
- roles: t.List[Role] = (await session.execute(sa.select(Role).where(Role.user_id == user_id))).scalars().all()
- roles_to_create = []
- roles_to_delete = []
- for role in roles:
- if role.role not in roles_schema.roles:
- if roles_schema.replace:
- roles_to_delete.append(role.id)
- existing_roles = [role.role for role in roles]
- for role in roles_schema.roles:
- if role not in existing_roles:
- roles_to_create.append(Role(user_id=user_id, role=role))
-
- await session.execute(sa.delete(Role).where(Role.id.in_(roles_to_delete)))
- session.add_all(roles_to_create)
- await session.flush()
-
- user = await session.scalar(sa.select(User).where(User.id == user_id).options(joinedload(User.roles)))
- return UserSchema(id=user.id, email=user.email, created_at=user.created_at, full_name=user.full_name,
- picture_url=user.picture_url, organization=user.organization,
- roles=[role.role for role in user.roles])
-
-
-@router.post("/users/{user_id}/models", tags=[Tags.USERS])
-async def assign_models_to_user(
- user_id: int,
- member_schema: MemberUpdateSchema,
- session: AsyncSession = AsyncSessionDep,
- resources_provider: ResourcesProvider = ResourcesProviderDep,
- current_user: User = Depends(auth.AdminUser()), # pylint: disable=unused-argument
-):
- """Assign models to user."""
-
- features_control: FeaturesControl = resources_provider.get_features_control(current_user)
- if not features_control.model_assignment:
-        raise PaymentRequired("Model assignment requires setting up a dedicated plan. "
- "Contact Deepchecks.")
- user = await fetch_or_404(session, User, id=user_id)
- if user.organization_id != current_user.organization_id:
-        raise BadRequest("User doesn't exist in your organization.")
-
- for model_id in member_schema.model_ids:
- await exists_or_404(session, Model, id=model_id)
-
- model_memebers: t.List[ModelMember] = (
- await session.execute(sa.select(ModelMember)
- .where(ModelMember.user_id == user_id))
- ).scalars().all()
- models_to_create = []
- models_to_delete = []
- for model_memeber in model_memebers:
- if model_memeber.model_id not in member_schema.model_ids:
- if member_schema.replace:
- models_to_delete.append(model_memeber.id)
- existing_models = [member.model_id for member in model_memebers]
- for model_id in member_schema.model_ids:
- if model_id not in existing_models:
- models_to_create.append(ModelMember(user_id=user_id, model_id=model_id))
-
- await session.execute(sa.delete(ModelMember).where(ModelMember.id.in_(models_to_delete)))
- session.add_all(models_to_create)
- await session.flush()
-
-
-@router.post("/models/{model_id}/members", tags=[Tags.MODELS])
-async def assign_users_to_model(
- model_id: int,
- member_schema: BatchModelMemberUpdateSchema,
- session: AsyncSession = AsyncSessionDep,
- resources_provider: ResourcesProvider = ResourcesProviderDep,
- current_user: User = Depends(auth.AdminUser()), # pylint: disable=unused-argument
-):
- """Assign users to model."""
-
- features_control: FeaturesControl = resources_provider.get_features_control(current_user)
- if not features_control.model_assignment:
-        raise PaymentRequired("Model assignment requires setting up a dedicated plan. "
- "Contact Deepchecks.")
- await exists_or_404(session, Model, id=model_id)
-
- for user_id in member_schema.user_ids:
- user = await fetch_or_404(session, User, id=user_id)
- if user.organization_id != current_user.organization_id:
-            raise BadRequest(f"User(id:{user_id}) doesn't exist in your organization.")
-
- model_memebers: t.List[ModelMember] = (
- await session.execute(sa.select(ModelMember)
- .where(ModelMember.model_id == model_id))
- ).scalars().all()
- users_to_create = []
- users_to_delete = []
- for model_memeber in model_memebers:
- if model_memeber.user_id not in member_schema.user_ids:
- if member_schema.replace:
- users_to_delete.append(model_memeber.id)
- existing_users = [member.user_id for member in model_memebers]
- for user_id in member_schema.user_ids:
- if user_id not in existing_users:
- users_to_create.append(ModelMember(user_id=user_id, model_id=model_id))
-
- await session.execute(sa.delete(ModelMember).where(ModelMember.id.in_(users_to_delete)))
- session.add_all(users_to_create)
- await session.flush()
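
All three endpoints in this module share the same reconciliation step: compare the existing assignments with the requested ones, delete what is no longer wanted (only when replace=True) and create what is missing. A minimal, framework-free sketch of that step; the names here are illustrative, not part of the API:

    import typing as t


    def reconcile(existing: t.List[int], desired: t.List[int],
                  replace: bool = True) -> t.Tuple[t.List[int], t.List[int]]:
        """Return (items_to_delete, items_to_create) for a membership update."""
        to_delete = [item for item in existing if item not in desired] if replace else []
        to_create = [item for item in desired if item not in existing]
        return to_delete, to_create


    assert reconcile([1, 2], [2, 3], replace=True) == ([1], [3])
    assert reconcile([1, 2], [2, 3], replace=False) == ([], [3])
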
diff --git a/backend/deepchecks_monitoring/ee/api/v1/routers.py b/backend/deepchecks_monitoring/ee/api/v1/routers.py
deleted file mode 100644
index 1e41c40..0000000
--- a/backend/deepchecks_monitoring/ee/api/v1/routers.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from fastapi import APIRouter
-
-ee_router = APIRouter(prefix='/api/v1')
-cloud_router = APIRouter(prefix='/api/v1')
diff --git a/backend/deepchecks_monitoring/ee/api/v1/slack.py b/backend/deepchecks_monitoring/ee/api/v1/slack.py
deleted file mode 100644
index e307938..0000000
--- a/backend/deepchecks_monitoring/ee/api/v1/slack.py
+++ /dev/null
@@ -1,223 +0,0 @@
-"""Represent the API for the slack integration."""
-import typing as t
-
-from fastapi import Depends, Query, Request, status
-from fastapi.responses import PlainTextResponse, RedirectResponse
-from pydantic import BaseModel
-from sqlalchemy import delete, select
-from sqlalchemy.dialects.postgresql import insert as pginsert
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from deepchecks_monitoring.dependencies import AsyncSessionDep, SettingsDep
-from deepchecks_monitoring.ee.config import Settings
-from deepchecks_monitoring.ee.integrations.slack import SlackInstallationError, SlackInstallationUtils
-from deepchecks_monitoring.monitoring_utils import exists_or_404
-from deepchecks_monitoring.public_models.user import User
-from deepchecks_monitoring.schema_models.slack import SlackInstallation, SlackInstallationState
-from deepchecks_monitoring.utils import auth
-
-from .routers import ee_router
-
-
-@ee_router.get('/slack.authorize', name='slack-authorization-redirect', tags=['slack'])
-async def installation_redirect(
- request: Request,
- settings: Settings = SettingsDep,
- session: AsyncSession = AsyncSessionDep,
- user: User = Depends(auth.AdminUser()) # pylint: disable=unused-argument
-):
- """Redirect user to the slack authorization page.
-
-    Code flow:
-    1. Authenticate user
-    2. Verify whether the user has permission to perform the operation
- 3. Issue 'installation state' to prevent forgery attack
- 4. Generate redirection URL
- 5. Set 'installation state' cookie
- 6. Redirect user to slack authorization page.
-
- Slack authorization URL description:
- https://slack.com/oauth/v2/authorize?state=&client_id=&scope=&user_scope=
- state - installation state, slack will include it in request with exchange code
- client_id - application client id
- scope - list of bot permissions
- user_scope -
-
- """
- state = await SlackInstallationState.issue(session, ttl=settings.slack_state_ttl)
- redirect_path = request.url_for('slack-installation-callback')
- utils = SlackInstallationUtils(settings)
- return RedirectResponse(
- url=utils.generate_authorization_url(state, str(redirect_path)),
- headers={'set-cookie': utils.generate_state_cookies(state)}
- )
-
-
-@ee_router.get('/slack.install', name='slack-installation-callback', tags=['slack'])
-async def installation_callback(
- request: Request,
- code: t.Optional[str] = Query(...),
- error: t.Optional[str] = Query(default=None),
- state: t.Optional[str] = Query(default=None),
- settings: Settings = SettingsDep,
- user: User = Depends(auth.AdminUser()), # pylint: disable=unused-argument
- session: AsyncSession = AsyncSessionDep,
-):
- """Finish slack installation.
-
- When a user confirms application (bot) installation,
-    slack redirects them back to the 'redirect_uri' URL
-    provided within the authorization request.
-
-    Slack includes the following query parameters in the redirect URL:
- code - access token exchange code
- error - error message if something went wrong
- state - installation state token that was passed with an authorization request.
- """
- utils = SlackInstallationUtils(settings)
- headers = {'set-cookie': utils.generate_state_cookies_removal()}
-
- if error is not None:
- # TODO:
- # what page should we show in this case?
- # where user should be redirected in this case?
- return PlainTextResponse(
- status_code=status.HTTP_200_OK,
- content=f'Failed to install slack into workspace.\nError: {error}',
- headers=headers
- )
-
- if code is None:
- # TODO:
- # what page should we show in this case?
- # where user should be redirected in this case
- return PlainTextResponse(
- status_code=status.HTTP_401_UNAUTHORIZED,
- content='Redirection request missing exchange code',
- headers=headers
- )
-
- if state is None:
- # TODO:
- # what page should we show in this case?
- # where user should be redirected in this case
- return PlainTextResponse(
- status_code=status.HTTP_401_UNAUTHORIZED,
- content='Missing installation state code',
- headers=headers
- )
-
- if not utils.is_valid_state_cookies(state, request.headers):
- # TODO:
- # what page should we show in this case?
- # where user should be redirected in this case
- return PlainTextResponse(
- status_code=status.HTTP_401_UNAUTHORIZED,
-            content='Invalid or missing installation state cookie',
- headers=headers
- )
-
- is_active_state = await SlackInstallationState.is_active(session, state)
-
- if not is_active_state:
- # TODO:
- # what page should we show in this case?
- # where user should be redirected in this case
- return PlainTextResponse(
- status_code=status.HTTP_401_UNAUTHORIZED,
- content='Expired installation state code',
- headers=headers
- )
-
- try:
- redirect_path = str(request.url_for('slack-installation-callback'))
- installation = utils.finish_installation(code, redirect_path)
- except SlackInstallationError as exception:
- # TODO:
- # what page should we show in this case?
- # where user should be redirected in this case
- return PlainTextResponse(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- content=str(exception),
- headers=headers
- )
-
- await session.execute(pginsert(SlackInstallation).values(
- app_id=installation.app_id,
- client_id=settings.slack_client_id,
- scope=installation.scope,
- token_type=installation.token_type,
- access_token=installation.access_token,
- bot_user_id=installation.bot_user_id,
- team_id=installation.team.id,
- team_name=installation.team.name,
- authed_user_id=installation.authed_user.id,
- incoming_webhook_channel_id=installation.incoming_webhook.channel_id,
- incoming_webhook_channel=installation.incoming_webhook.channel,
- incoming_webhook_url=installation.incoming_webhook.url,
- incoming_webhook_configuration_url=installation.incoming_webhook.configuration_url,
- created_by=user.id,
- updated_by=user.id,
- ).on_conflict_do_update(
- constraint='slackapp_per_workspace',
- set_=dict(
- scope=installation.scope,
- token_type=installation.token_type,
- access_token=installation.access_token,
- bot_user_id=installation.bot_user_id,
- authed_user_id=installation.authed_user.id,
- incoming_webhook_channel_id=installation.incoming_webhook.channel_id,
- incoming_webhook_channel=installation.incoming_webhook.channel,
- incoming_webhook_url=installation.incoming_webhook.url,
- incoming_webhook_configuration_url=installation.incoming_webhook.configuration_url,
- created_by=user.id,
- updated_by=user.id,
- )
- ))
-
- # TODO:
- # what page should we show in this case?
- # where user should be redirected in this case
- return PlainTextResponse(
- status_code=status.HTTP_201_CREATED,
- content='Slack app installed',
- headers=headers
- )
-
-
-class SlackBotSchema(BaseModel):
- """Slack Installation endpoint output schema."""
-
- id: int
- team_name: str
- scope: str
-
- class Config:
- """Pydantic config."""
-
- orm_mode = True
-
-
-@ee_router.get('/slack/apps', tags=['slack'])
-async def retrieve_installations(
- session: AsyncSession = AsyncSessionDep,
- user: User = Depends(auth.AdminUser()) # pylint: disable=unused-argument
-):
- """Return list of slack installations."""
- q = select(SlackInstallation)
- installations = (await session.scalars(q)).all()
- return [SlackBotSchema.from_orm(it).dict() for it in installations]
-
-
-@ee_router.delete('/slack/apps/{app_id}', tags=['slack'])
-async def remove_installation(
- app_id: int,
- session: AsyncSession = AsyncSessionDep,
- user: User = Depends(auth.AdminUser()) # pylint: disable=unused-argument
-):
- """Remove slack installation."""
- await exists_or_404(session, SlackInstallation, id=app_id)
- await session.execute(
- delete(SlackInstallation)
- .where(SlackInstallation.id == app_id)
- )
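
The installation callback above persists the installation with a PostgreSQL "insert ... on conflict do update" statement. Below is a condensed sketch of that upsert pattern with SQLAlchemy Core, using a simplified placeholder table rather than the real SlackInstallation model:

    import sqlalchemy as sa
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.dialects.postgresql import insert as pginsert

    metadata = sa.MetaData()
    installations = sa.Table(
        'slack_installations', metadata,
        sa.Column('team_id', sa.String, primary_key=True),
        sa.Column('access_token', sa.String),
    )

    stmt = pginsert(installations).values(team_id='T123', access_token='xoxb-placeholder')
    stmt = stmt.on_conflict_do_update(
        index_elements=['team_id'],  # the real code targets a named constraint instead
        set_={'access_token': stmt.excluded.access_token},
    )
    print(stmt.compile(dialect=postgresql.dialect()))  # INSERT ... ON CONFLICT (team_id) DO UPDATE ...
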
diff --git a/backend/deepchecks_monitoring/ee/bgtasks/__init__.py b/backend/deepchecks_monitoring/ee/bgtasks/__init__.py
deleted file mode 100644
index c9a5f7c..0000000
--- a/backend/deepchecks_monitoring/ee/bgtasks/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .object_storage_ingestor import ObjectStorageIngestor
-
-__all__ = ['ObjectStorageIngestor']
diff --git a/backend/deepchecks_monitoring/ee/bgtasks/object_storage_ingestor.py b/backend/deepchecks_monitoring/ee/bgtasks/object_storage_ingestor.py
deleted file mode 100644
index f48ef10..0000000
--- a/backend/deepchecks_monitoring/ee/bgtasks/object_storage_ingestor.py
+++ /dev/null
@@ -1,259 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see .
-# ----------------------------------------------------------------------------
-#
-import io
-from urllib.parse import urlparse
-
-import boto3
-import pandas as pd
-import pendulum as pdl
-from botocore.config import Config
-from botocore.exceptions import ClientError, EndpointConnectionError
-from pandas.core.dtypes.common import is_integer_dtype
-from redis.asyncio.lock import Lock
-from sqlalchemy import delete, select
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.orm import selectinload
-
-from deepchecks_monitoring.logic.data_ingestion import DataIngestionBackend, save_failures
-from deepchecks_monitoring.monitoring_utils import configure_logger
-from deepchecks_monitoring.public_models import Organization
-from deepchecks_monitoring.public_models.task import BackgroundWorker, Task
-from deepchecks_monitoring.resources import ResourcesProvider
-from deepchecks_monitoring.schema_models import Model, ModelVersion
-from deepchecks_monitoring.schema_models.column_type import SAMPLE_ID_COL, SAMPLE_TS_COL
-from deepchecks_monitoring.schema_models.data_sources import DataSource
-from deepchecks_monitoring.utils import database
-
-__all__ = ['ObjectStorageIngestor']
-
-
-class ObjectStorageIngestor(BackgroundWorker):
-    """Worker that ingests files from S3."""
-
- def __init__(self, resources_provider: ResourcesProvider):
- super().__init__()
- self.ingestion_backend = DataIngestionBackend(resources_provider)
- self.logger = configure_logger(self.__class__.__name__)
-
- @classmethod
- def queue_name(cls) -> str:
- return 'object_storage_ingestion'
-
- @classmethod
- def delay_seconds(cls) -> int:
- return 0
-
- async def run(self, task: 'Task', session: AsyncSession, resources_provider: ResourcesProvider, lock: Lock):
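-        """Run the object storage ingestion task for a single model."""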
- await session.execute(delete(Task).where(Task.id == task.id))
-
- organization_id = task.params['organization_id']
- model_id = task.params['model_id']
-
- self.logger.info({'message': 'starting job', 'worker name': str(type(self)),
- 'task': task.id, 'model_id': model_id, 'org_id': organization_id})
-
- organization_schema = (await session.scalar(
- select(Organization.schema_name)
- .where(Organization.id == organization_id)
- ))
-
- errors = []
-
- if organization_schema is None:
- self._handle_error(errors, f'Could not locate organization with id {organization_id}')
- await self._finalize_before_exit(session, errors)
- return
-
- await database.attach_schema_switcher_listener(
- session=session,
- schema_search_path=[organization_schema, 'public']
- )
-
- model: Model = (await session.scalar(select(Model).options(selectinload(Model.versions))
- .where(Model.id == model_id)))
- if model is None:
- self._handle_error(errors, f'Could not locate model with id {model_id}')
- await self._finalize_before_exit(session, errors)
- return
-
- # Get s3 authentication info
- s3_data_source = (await session.scalar(select(DataSource).where(DataSource.type == 's3')))
- if s3_data_source is None:
- self._handle_error(errors, 'No data source of type s3 found', model_id)
- await self._finalize_before_exit(session, errors)
- return
-
- access_key = s3_data_source.parameters['aws_access_key_id']
- secret_key = s3_data_source.parameters['aws_secret_access_key']
- region = s3_data_source.parameters['region']
-
- # first - test connectivity to AWS
- try:
- sts = boto3.client(
- 'sts',
- aws_access_key_id=access_key,
- aws_secret_access_key=secret_key,
- config=Config(region_name=region)
- )
- sts.get_caller_identity()
- except (ClientError, EndpointConnectionError):
- self.logger.exception({'message': 'Invalid credentials to AWS'})
- self._handle_error(errors, 'Invalid credentials to AWS', model_id)
- await self._finalize_before_exit(session, errors)
- return
-
- s3 = boto3.client(
- 's3',
- aws_access_key_id=access_key,
- aws_secret_access_key=secret_key
- )
-
- if model.obj_store_path is None:
- self._handle_error(errors, 'Model is missing object store path configuration', model_id)
- await self._finalize_before_exit(session, errors)
- return
-
- try:
- s3_url = urlparse(model.obj_store_path)
- model_path = s3_url.path[1:]
- if model_path[-1] == '/':
- model_path = model_path[:-1]
- bucket = s3_url.netloc
- new_scan_time = pdl.now()
- # First ever scan of model - will scan all files
- if model.obj_store_last_scan_time is None:
- model_prefixes = ['']
- # Else scan only new files since last scan (by date)
- else:
- model_prefixes = []
- date = pdl.instance(model.obj_store_last_scan_time).date()
- while date <= new_scan_time.date():
- date = date.add(days=1)
- model_prefixes.append(date.isoformat())
-
- version: ModelVersion
- for version in model.versions:
- version_path = f'{model_path}/{version.name}'
- # If first scan of specific version - scan all files under version, else scan only new files by date
- version_prefixes = model_prefixes if version.latest_file_time is not None else ['']
- for prefix in version_prefixes:
- for df, time in self.ingest_prefix(s3, bucket, f'{version_path}/{prefix}', version.latest_file_time,
- errors, version.model_id, version.id, need_ts=True):
- # For each file, set lock expiry to 360 seconds from now
- await lock.extend(360, replace_ttl=True)
- await self.ingestion_backend.log_samples(version, df, session, organization_id, new_scan_time)
- version.latest_file_time = max(version.latest_file_time or
- pdl.datetime(year=1970, month=1, day=1), time)
-
- # Ingest labels
- for prefix in model_prefixes:
- labels_path = f'{model_path}/labels/{prefix}'
- for df, time in self.ingest_prefix(s3, bucket, labels_path, model.latest_labels_file_time,
- errors, model_id):
- # For each file, set lock expiry to 360 seconds from now
- await lock.extend(360, replace_ttl=True)
- await self.ingestion_backend.log_labels(model, df, session, organization_id)
- model.latest_labels_file_time = max(model.latest_labels_file_time
- or pdl.datetime(year=1970, month=1, day=1), time)
-
- model.obj_store_last_scan_time = new_scan_time
- except Exception: # pylint: disable=broad-except
- self.logger.exception({'message': 'General Error when ingesting data'})
- self._handle_error(errors, 'General Error when ingesting data', model_id)
- finally:
- await self._finalize_before_exit(session, errors)
- s3.close()
-
- self.logger.info({'message': 'finished job', 'worker name': str(type(self)),
- 'task': task.id, 'model_id': model_id, 'org_id': organization_id})
-
- def ingest_prefix(self, s3, bucket, prefix, last_file_time, errors,
- model_id, version_id=None, need_ts: bool = False):
-        """Ingest all files under the prefix, yielding (dataframe, file time) pairs."""
- last_file_time = last_file_time or pdl.datetime(year=1970, month=1, day=1)
- # First read all file names, then retrieve them sorted by date
- resp = s3.list_objects_v2(Bucket=bucket, Prefix=prefix)
- files = []
- if 'Contents' not in resp:
- self._handle_error(errors, f'No files found for bucket {bucket} and prefix {prefix}', model_id, version_id)
- return
-
- # Iterate over files in prefix
- for obj in resp['Contents']:
- key = obj['Key']
- file_with_extension = key.rsplit('/', maxsplit=1)[-1]
- if len(file_with_extension) == 0: # ingesting the folder
- continue
- if file_with_extension.count('.') != 1:
- self._handle_error(errors, f'Expected single dot in file name: {key}', model_id,
- version_id)
- continue
- file_name, extension = file_with_extension.split('.')
- if extension not in ['csv', 'parquet']:
- self._handle_error(errors, f'Invalid file extension: {extension}, for file {key}',
- model_id, version_id)
- continue
- try:
- file_time = pdl.parse(file_name)
- except pdl.parsing.exceptions.ParserError:
- self._handle_error(errors, f'Invalid date format in file name: {key}', model_id,
- version_id)
- continue
-            # Skip files whose timestamp is at or before the last ingested file time
- if file_time > last_file_time:
- files.append({'key': key, 'time': file_time, 'extension': extension})
- else:
- self.logger.info({'message': f'file {key} is before latest file time {last_file_time} - skipping'})
-
- files = sorted(files, key=lambda x: x['time'])
- for file in files:
- self.logger.info({'message': f'Ingesting file {file["key"]}'})
- file_response = s3.get_object(Bucket=bucket, Key=file['key'])
- value = io.BytesIO(file_response.get('Body').read())
- if file['extension'] == 'csv':
- df = pd.read_csv(value)
- df[SAMPLE_ID_COL] = df[SAMPLE_ID_COL].astype(str)
- elif file['extension'] == 'parquet':
- df = pd.read_parquet(value)
- else:
- self._handle_error(errors, f'Invalid file extension: {file["extension"]}, for file: {file["key"]}',
- model_id, version_id)
- continue
- if need_ts:
- if SAMPLE_TS_COL not in df or not is_integer_dtype(df[SAMPLE_TS_COL]):
- self._handle_error(errors, f'Invalid timestamp column: {SAMPLE_TS_COL}, in file: {file["key"]}',
- model_id, version_id)
- continue
- # The user facing API requires unix timestamps, but for the ingestion we convert it to ISO format
- df[SAMPLE_TS_COL] = df[SAMPLE_TS_COL].apply(lambda x: pdl.from_timestamp(x).isoformat())
- # Sort by timestamp
- df = df.sort_values(by=[SAMPLE_TS_COL])
- yield df, file['time']
-
- def _handle_error(self, errors, error_message, model_id=None, model_version_id=None, set_warning_in_logs=True):
-
- error_message = f'S3 integration - {error_message}'
-
- log_message = {'message': f'{error_message}, model_id: {model_id}, version_id: {model_version_id}'}
- if set_warning_in_logs:
- self.logger.warning(log_message)
- else:
- self.logger.error(log_message)
-
- errors.append(dict(sample=None,
- sample_id=None,
- error=error_message,
- model_id=model_id,
- model_version_id=model_version_id))
-
- async def _finalize_before_exit(self, session, errors):
- await save_failures(session, errors, self.logger)
- await session.commit()
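
The ingestor above relies on a simple file-name convention: every data file is named by an ISO timestamp, and only files strictly newer than the last ingested one are picked up, in time order. A self-contained sketch of that selection logic, with illustrative keys:

    import pendulum as pdl

    keys = [
        'my-model/v1/2023-01-01T00:00:00.csv',
        'my-model/v1/2023-01-02T00:00:00.parquet',
        'my-model/v1/notes.txt',
    ]
    last_file_time = pdl.datetime(2023, 1, 1)

    new_files = []
    for key in keys:
        name, _, extension = key.rsplit('/', 1)[-1].rpartition('.')
        if extension not in ('csv', 'parquet'):
            continue  # not a data file
        try:
            file_time = pdl.parse(name)
        except pdl.parsing.exceptions.ParserError:
            continue  # file name is not a timestamp
        if file_time > last_file_time:
            new_files.append((file_time, key))

    print(sorted(new_files))  # only the 2023-01-02 parquet file remains
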
diff --git a/backend/deepchecks_monitoring/ee/config.py b/backend/deepchecks_monitoring/ee/config.py
deleted file mode 100644
index 66f3750..0000000
--- a/backend/deepchecks_monitoring/ee/config.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see .
-# ----------------------------------------------------------------------------
-"""Module defining the configuration for the deepchecks_monitoring package."""
-import pathlib
-import typing as t
-
-from pydantic import SecretStr, validator
-
-from deepchecks_monitoring.config import BaseDeepchecksSettings
-from deepchecks_monitoring.config import Settings as OpenSourceSettings
-
-__all__ = [
- 'Settings',
- 'TelemetrySettings',
- 'StripeSettings',
- 'SlackSettings'
-]
-
-
-PROJECT_DIR = pathlib.Path(__file__).parent.parent.absolute()
-
-
-class TelemetrySettings(BaseDeepchecksSettings):
- """Telemetry settings."""
-
- instrument_telemetry: bool = False
- sentry_dsn: t.Optional[str] = None
- sentry_env: str = 'dev'
-
-
-class StripeSettings(BaseDeepchecksSettings):
- """Stripe settings."""
-
- stripe_secret_api_key: str = ''
- stripe_public_api_key: str = ''
- stripe_webhook_secret: str = ''
-
-
-class SlackSettings(BaseDeepchecksSettings):
- """Settings for Slack."""
-
- slack_client_id: t.Optional[str]
- slack_client_secret: t.Optional[SecretStr]
- slack_scopes: str = 'chat:write,incoming-webhook'
- slack_state_ttl: int = 300
-
- @validator('slack_scopes')
- def validate_scopes(cls, value: str): # pylint: disable=no-self-argument
- """Validate scopes of slack."""
- minimal_required_scopes = ['chat:write', 'incoming-webhook']
- assert all(it in value for it in minimal_required_scopes)
- return value
-
-
-class Settings(
- OpenSourceSettings,
- SlackSettings,
- TelemetrySettings,
- StripeSettings
-):
- """Settings for the deepchecks_monitoring package."""
-
- enviroment: str = 'dev'
- debug_mode: bool = False
- lauchdarkly_sdk_key: str = ''
- access_audit: bool = False
- hotjar_sv: str = ''
- hotjar_id: str = ''
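
These settings classes compose through multiple inheritance and, assuming BaseDeepchecksSettings behaves like pydantic's BaseSettings (the orm_mode/validator usage above implies pydantic v1), each field can be overridden from an environment variable of the same name. A small illustrative sketch with a throwaway class:

    import os

    from pydantic import BaseSettings, SecretStr


    class DemoSlackSettings(BaseSettings):
        slack_client_id: str = ''
        slack_client_secret: SecretStr = SecretStr('')
        slack_state_ttl: int = 300


    os.environ['SLACK_CLIENT_ID'] = 'demo-client-id'
    os.environ['SLACK_CLIENT_SECRET'] = 'demo-secret'

    settings = DemoSlackSettings()
    print(settings.slack_client_id)      # 'demo-client-id', read from the environment
    print(settings.slack_client_secret)  # '**********', SecretStr hides the value
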
diff --git a/backend/deepchecks_monitoring/ee/features_control_cloud.py b/backend/deepchecks_monitoring/ee/features_control_cloud.py
deleted file mode 100644
index 16d02ad..0000000
--- a/backend/deepchecks_monitoring/ee/features_control_cloud.py
+++ /dev/null
@@ -1,141 +0,0 @@
-from ldclient import Context
-from ldclient.client import LDClient
-from pydantic import BaseModel
-from sqlalchemy import select
-
-from deepchecks_monitoring.features_control import FeaturesControl
-from deepchecks_monitoring.public_models import Billing, User
-
-
-class TierConfSchema(BaseModel):
- """Tier configuration which is loaded from launchdarkly."""
-
- custom_checks: bool = False
- data_retention_months: int = 3
- max_models: int = 1
- monthly_predictions_limit: int = 500_000
- sso: bool = False
- rows_per_minute: int = 500_000
- update_roles: bool = False
- model_assignment: bool = False
-
-
-class CloudFeaturesControl(FeaturesControl):
- """Feature controls class for the cloud version."""
-
- def __init__(self, user: User, ld_client: LDClient, settings):
- super().__init__(settings)
- self.user = user
- self.ld_client = ld_client
- self._max_models = None
- self._allowed_models = None
- self._rows_per_minute = None
- self._custom_checks_enabled = None
- self._data_retention_months = None
- self._monthly_predictions_limit = None
- self._sso_enabled = None
- self._signup_enabled = None
- self._onboarding_enabled = None
- self._update_roles = None
- self._model_assignment = None
-
- @property
- def max_models(self) -> int:
- if self._max_models is None:
- self._load_tier()
- return self._max_models
-
- async def get_allowed_models(self, session) -> int:
- if self._allowed_models is None:
- self._allowed_models = await session.scalar(
- select(Billing.bought_models).where(Billing.organization_id == self.user.organization_id)
- )
- if self._allowed_models is None:
- return 1
-
- return self._allowed_models + 1
-
- @property
- def update_roles(self) -> bool:
- if self._update_roles is None:
- self._load_tier()
- return self._update_roles
-
- @property
- def model_assignment(self) -> bool:
- if self._model_assignment is None:
- self._load_tier()
- return self._model_assignment
-
- @property
- def signup_enabled(self) -> bool:
- if self._signup_enabled is None:
- self._load_tier()
- return self._signup_enabled
-
- @property
- def onboarding_enabled(self) -> bool:
- if self._onboarding_enabled is None:
- self._load_tier()
- return self._onboarding_enabled
-
- @property
- def slack_enabled(self) -> bool:
- return True
-
- @property
- def rows_per_minute(self) -> int:
- if self._rows_per_minute is None:
- self._load_tier()
- return self._rows_per_minute
-
- @property
- def custom_checks_enabled(self) -> bool:
- if self._custom_checks_enabled is None:
- self._load_tier()
- return self._custom_checks_enabled
-
- @property
- def data_retention_months(self) -> int:
- if self._data_retention_months is None:
- self._load_tier()
- return self._data_retention_months
-
- @property
- def monthly_predictions_limit(self) -> int:
- if self._monthly_predictions_limit is None:
- self._load_tier()
- return self._monthly_predictions_limit
-
- @property
- def sso_enabled(self) -> bool:
- if self._sso_enabled is None:
- self._load_tier()
- return self._sso_enabled
-
- @property
- def multi_tenant(self) -> bool:
- return True
-
- def _load_tier(self):
- context = Context.builder(self.user.email).set("email", self.user.email)
- if self.user.organization:
- context.set("organization_id", self.user.organization.id)
- context.set("tier", self.user.organization.tier)
-
- ld_user = context.build()
- tier_conf = self.ld_client.variation("paid-features", ld_user, default={})
- if getattr(self.user, "email_verified", False):
- self._signup_enabled = self.ld_client.variation("signUpEnabled", ld_user, default=True)
- else:
- self._signup_enabled = False
- tier_conf = TierConfSchema(**tier_conf)
- self._custom_checks_enabled = tier_conf.custom_checks
- self._data_retention_months = tier_conf.data_retention_months
- self._max_models = tier_conf.max_models
- self._monthly_predictions_limit = tier_conf.monthly_predictions_limit
- self._sso_enabled = tier_conf.sso
- self._rows_per_minute = tier_conf.rows_per_minute
- self._update_roles = tier_conf.update_roles
- self._model_assignment = tier_conf.model_assignment
- self._onboarding_enabled = self.ld_client.variation("onBoardingEnabled", ld_user, default=False)
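
CloudFeaturesControl resolves its tier lazily: nothing is fetched until the first property access, and a single LaunchDarkly lookup then fills every cached field. A dependency-free sketch of that lazy-load-and-cache pattern, where the loader callable stands in for the LaunchDarkly variation call:

    import typing as t


    class LazyTier:
        def __init__(self, loader: t.Callable[[], dict]):
            self._loader = loader
            self._max_models: t.Optional[int] = None

        @property
        def max_models(self) -> int:
            if self._max_models is None:
                self._load_tier()
            return self._max_models

        def _load_tier(self) -> None:
            conf = self._loader()  # one remote lookup, results cached on the instance
            self._max_models = conf.get('max_models', 1)


    tier = LazyTier(lambda: {'max_models': 8})
    print(tier.max_models)  # 8; the loader runs only on the first access
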
diff --git a/backend/deepchecks_monitoring/ee/features_control_on_prem.py b/backend/deepchecks_monitoring/ee/features_control_on_prem.py
deleted file mode 100644
index fa7adb6..0000000
--- a/backend/deepchecks_monitoring/ee/features_control_on_prem.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from deepchecks_monitoring.features_control import FeaturesControl
-
-
-class OnPremFeaturesControl(FeaturesControl):
-    """Feature controls class for the on-prem version.
-
-    TODO: implement license check.
-    """
-
- @property
- def max_models(self) -> int:
- return 9999
-
- async def get_allowed_models(self, session) -> int:
- return 10
-
- @property
- def update_roles(self) -> bool:
- return True
-
- @property
- def model_assignment(self) -> bool:
- return True
-
- @property
- def signup_enabled(self) -> bool:
- return True
-
- @property
- def onboarding_enabled(self) -> bool:
- return True
-
- @property
- def slack_enabled(self) -> bool:
- return True
-
- @property
- def rows_per_minute(self) -> int:
- return 500_000
-
- @property
- def custom_checks_enabled(self) -> bool:
- return False
-
- @property
- def data_retention_months(self) -> int:
- return 12
-
- @property
- def monthly_predictions_limit(self) -> int:
- return 10_000_000
-
- @property
- def sso_enabled(self) -> bool:
- return False
-
- @property
- def multi_tenant(self) -> bool:
- return False
diff --git a/backend/deepchecks_monitoring/ee/integrations/__init__.py b/backend/deepchecks_monitoring/ee/integrations/__init__.py
deleted file mode 100644
index f95b5b2..0000000
--- a/backend/deepchecks_monitoring/ee/integrations/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from . import slack
-
-__all__ = []
diff --git a/backend/deepchecks_monitoring/ee/integrations/slack.py b/backend/deepchecks_monitoring/ee/integrations/slack.py
deleted file mode 100644
index ecb61c6..0000000
--- a/backend/deepchecks_monitoring/ee/integrations/slack.py
+++ /dev/null
@@ -1,325 +0,0 @@
-"""Represent slack utilities module."""
-import abc
-import logging
-import typing as t
-
-from deepchecks.core.checks import CheckConfig
-from pydantic import BaseModel, ValidationError, validator
-from slack_sdk import WebClient as SlackClient
-from slack_sdk.errors import SlackApiError
-from slack_sdk.oauth import AuthorizeUrlGenerator, OAuthStateUtils
-from slack_sdk.webhook import WebhookResponse
-
-from deepchecks_monitoring.config import Settings as OpenSourceSettings
-from deepchecks_monitoring.ee.config import SlackSettings
-from deepchecks_monitoring.monitoring_utils import CheckParameterTypeEnum as CheckParameterKind
-from deepchecks_monitoring.monitoring_utils import MonitorCheckConfSchema as MonitorConfig
-from deepchecks_monitoring.schema_models import Alert, AlertRule, AlertSeverity, Check, Model, Monitor
-from deepchecks_monitoring.utils.alerts import prepare_alert_link
-from deepchecks_monitoring.utils.text import format_float
-
-__all__ = ["SlackInstallationSchema", "SlackInstallationError", "SlackAlertNotification", "SlackInstallationUtils",
- "SlackSender"]
-
-
-class SlackTeamSchema(BaseModel):
- """Schema for Slack team."""
-
- id: str
- name: str
-
-
-class SlackIncomingWebhookSchema(BaseModel):
- """Schema for Slack incoming webhook."""
-
- channel_id: str
- channel: str
- url: str
- configuration_url: str
-
-
-class AuthedUserSchema(BaseModel):
- """Schema for Slack authed user."""
-
- id: str
-
-
-# NOTE:
-# the current schema does not describe the installation response payload completely;
-# it contains only the fields we currently need
-
-class SlackInstallationSchema(BaseModel):
- """Schema for Slack installation."""
-
- ok: bool
- app_id: str
- scope: str
- token_type: str
- access_token: str
- bot_user_id: str
- authed_user: AuthedUserSchema
- team: SlackTeamSchema
- incoming_webhook: SlackIncomingWebhookSchema
-
- @validator("token_type")
- def validate_token_type(cls, value: str): # pylint: disable=no-self-argument
- """Validate token type."""
- assert value == "bot", "Expected to receive bot token type"
- return value
-
-
-class SlackInstallationError(Exception):
- """Exception for Slack installation."""
-
- pass
-
-
-class SlackInstallationUtils:
- """Represent Slack installation utilities."""
-
- def __init__(
- self,
- settings: SlackSettings,
- logger: t.Optional[logging.Logger] = None
- ):
- self.settings = settings
- self.client = SlackClient()
- self.logger = logger or logging.getLogger("slack.installation")
- self.state_utils = OAuthStateUtils()
-
- def generate_authorization_url(self, state, redirect_path) -> str:
- """Generate the authorization URL."""
- return AuthorizeUrlGenerator(
- client_id=self.settings.slack_client_id,
- scopes=self.settings.slack_scopes.split(","), # TODO: error prone, consider changing it
- redirect_uri=redirect_path
- ).generate(state=state)
-
- def finish_installation(
- self,
- code: str,
- redirect_uri: t.Optional[str] = None
- ) -> SlackInstallationSchema:
- """Finish the slack installation."""
- try:
- response = self.client.oauth_v2_access(
- code=code,
- client_id=self.settings.slack_client_id,
- client_secret=self.settings.slack_client_secret.get_secret_value(),
- redirect_uri=redirect_uri
- ).validate()
- except SlackApiError as e:
-            msg = "Failed to obtain access token."
- self.logger.error(f"{msg}\nError: {e}")
- raise SlackInstallationError(msg) from e
-
- try:
- installation = SlackInstallationSchema(**t.cast(dict, response.data))
- except ValidationError as e:
-            msg = "Received a response with an unsupported structure from the 'oauth.v2.access' endpoint."
- self.logger.error("%s\nError: %s", msg, e)
- raise SlackInstallationError(msg) from e
-
- try:
- self.client.auth_test(token=installation.access_token).validate()
- except SlackApiError as e:
- msg = "Access token validation failed."
- self.logger.error("%s\nError: %s", msg, e)
- raise SlackInstallationError(msg) from e
-
- return installation
-
- def generate_state_cookies(self, state) -> str:
- """Generate the state cookies."""
- return self.state_utils.build_set_cookie_for_new_state(state)
-
- def generate_state_cookies_removal(self) -> str:
-        """Generate the cookie header that removes the state cookies."""
- return self.state_utils.build_set_cookie_for_deletion()
-
- def is_valid_state_cookies(self, state, headers) -> bool:
- """Check if the state cookies are valid."""
- return self.state_utils.is_valid_browser(state=state, request_headers=headers)
-
-
-class BaseSlackNotification(abc.ABC):
- """Represent the abstract class for the slack notification."""
-
- @abc.abstractmethod
-    def blocks(self, *args, **kwargs) -> t.List[t.Dict[str, t.Any]]:
- """Return the slack blocks."""
- raise NotImplementedError()
-
-
-class SlackAlertNotification(BaseSlackNotification):
- """Represent the slack alert notification."""
-
- def __init__(self, alert: Alert, deepchecks_host: str):
- super().__init__()
- self.alert = alert
- self.rule = t.cast(AlertRule, self.alert.alert_rule)
- self.monitor = t.cast(Monitor, self.rule.monitor)
- self.check = t.cast(Check, self.monitor.check)
- self.check_config = t.cast(CheckConfig, self.check.config)
- self.model = t.cast(Model, self.check.model)
-
- self.alert_link = prepare_alert_link(
- deepchecks_host=deepchecks_host,
- model_id=t.cast(int, self.model.id),
- severity=self.rule.alert_severity.value
- )
- self.monitor_config = t.cast(
- t.Optional[MonitorConfig],
- self.monitor.additional_kwargs
- )
- self.features = (
- t.cast(t.List[str], self.monitor_config.check_conf.get(CheckParameterKind.FEATURE))
- if self.monitor_config is not None
- else None
- ) or []
-
- def prepare_header(self) -> t.Dict[str, t.Any]:
- """Prepare the header for the notification."""
- # TODO:
- return {
- "type": "header",
- "text": {
- "type": "plain_text",
- "text": self.monitor.name,
- "emoji": True
- }
- }
-
- def prepare_alert_status_section(self) -> t.Dict[str, t.Any]:
- """Prepare the alert status section in the notification."""
- rule = t.cast(AlertRule, self.alert.alert_rule)
- icons = {
- # TODO:
- # ask for a list of icons
- AlertSeverity.CRITICAL: "🔥",
- AlertSeverity.HIGH: "🚨",
- AlertSeverity.MEDIUM: "⚠",
- AlertSeverity.LOW: "😟",
- }
- return {
- "type": "context",
- "elements": [
- # {
- # "type": "image",
- # "image_url": icons[rule.alert_severity],
- # "alt_text": "alert icon"
- # },
- {
- "type": "mrkdwn",
- "text": icons[rule.alert_severity] # type: ignore
- },
- {
- "type": "mrkdwn",
- "text": f"Severity: {rule.alert_severity.value.capitalize()}"
- }
- ]
- }
-
- def prepare_check_result_status_section(self) -> t.Dict[str, t.Any]:
- """Prepare the check result section."""
- # Take first failed value (can be failed values for multiple versions)
- fail_value = list(self.alert.failed_values.values())[0]
- fail_value = format_float(fail_value)
- return {
- "type": "section",
- "text": {
- "type": "mrkdwn",
- "text": f"{self.check.name}={fail_value}"
- },
- "accessory": {
- "type": "button",
- "text": {
- "type": "plain_text",
- "text": "View On Deepchecks",
- "emoji": True
- },
- "url": str(self.alert_link),
- "value": "click_me_123",
- "action_id": "button-action"
- }
- }
-
- def prepare_metadata_section(self) -> t.List[t.Dict[str, t.Any]]:
- """Prepare the metadata section."""
- features = ", ".join(self.features)
- return [
- {
- "type": "context",
- "elements": [
- {
- "type": "plain_text",
- "text": f"Model: {self.model.name}",
- "emoji": True
- }
- ]
- },
- {
- "type": "context",
- "elements": [
- {
- "type": "plain_text",
- "text": f"Check: {self.check_config['class_name']}",
- "emoji": True
- }
- ]
- },
- {
- "type": "context",
- "elements": [
- {
- "type": "plain_text",
- "text": f"Feature: {features}",
- "emoji": True
- }
- ]
- },
- {
- "type": "context",
- "elements": [
- {
- "type": "plain_text",
- "text": f"Alert Condition: {str(self.rule.condition)}",
- "emoji": True
- }
- ]
- }
- ]
-
- def blocks(self) -> t.List[t.Any]:
-        """Return the notification message blocks."""
- return [
- self.prepare_header(),
- self.prepare_alert_status_section(),
- self.prepare_check_result_status_section(),
- {"type": "divider"},
- *self.prepare_metadata_section()
- ]
-
-
-class SlackSenderSettings(OpenSourceSettings, SlackSettings):
- pass
-
-
-class SlackSender:
- """Sends slack messages."""
-
- settings: SlackSenderSettings
-
- def __init__(self, settings: SlackSenderSettings):
- self.settings = settings
-
- @property
- def is_slack_available(self) -> bool:
- """Return whether slack services are available on this instance (i.e. settings are in place)."""
- # TODO: improve this
- return self.settings.slack_client_id is not None
-
- def send_alert(self, alert, app) -> WebhookResponse:
- """Send slack message."""
- notification = SlackAlertNotification(alert, self.settings.deployment_url).blocks()
- return app.webhook_client().send(blocks=notification)
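
Delivery of the blocks built by SlackAlertNotification goes through the workspace's incoming webhook; slack_sdk's WebhookClient performs the POST. A minimal sketch with hand-written blocks and a placeholder webhook URL:

    from slack_sdk.webhook import WebhookClient

    blocks = [
        {'type': 'header', 'text': {'type': 'plain_text', 'text': 'My monitor', 'emoji': True}},
        {'type': 'section', 'text': {'type': 'mrkdwn', 'text': 'Accuracy=0.71'}},
    ]

    # Replace with the real incoming-webhook URL stored for the installation.
    client = WebhookClient('https://hooks.slack.com/services/T000/B000/XXXX')
    response = client.send(text='Deepchecks alert', blocks=blocks)
    print(response.status_code, response.body)  # 200 / 'ok' on success
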
diff --git a/backend/deepchecks_monitoring/ee/middlewares.py b/backend/deepchecks_monitoring/ee/middlewares.py
deleted file mode 100644
index 2cd1328..0000000
--- a/backend/deepchecks_monitoring/ee/middlewares.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see .
-# ----------------------------------------------------------------------------
-"""Middlewares to be used in the application."""
-import logging
-import time
-
-import watchtower
-from fastapi import Depends
-from pyinstrument import Profiler
-from starlette.datastructures import MutableHeaders
-from starlette.requests import Request
-from starlette.types import ASGIApp, Message, Receive, Scope, Send
-
-from deepchecks_monitoring.exceptions import LicenseError
-from deepchecks_monitoring.public_models import User
-from deepchecks_monitoring.utils.auth import CurrentUser
-
-
-class ProfilingMiddleware:
-    """Middleware that returns runtime profiling output for requests that opt in via the 'profile' query parameter."""
-
- def __init__(self, app: ASGIApp) -> None:
- self.app = app
-
- async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
- """Middleware entrypoint."""
- if scope["type"] != "http":
- return await self.app(scope, receive, send)
-
- request = Request(scope, receive=receive)
- profiling = request.query_params.get("profile", False)
- if not profiling:
- return await self.app(scope, receive, send)
-
- timeline = request.query_params.get("timeline", "false").lower() == "true"
-
- profiler = Profiler()
- output_html = None
-
- async def wrapped_send(message: Message) -> None:
- nonlocal profiler
- nonlocal output_html
- # For start message, editing the response headers
- if message["type"] == "http.response.start":
- profiler.stop()
- output_html = profiler.output_html(timeline=timeline).encode()
- # This modifies the "message" Dict in place, which is used by the "send" function below
- response_headers = MutableHeaders(scope=message)
- response_headers["content-encoding"] = ""
- response_headers["content-length"] = str(len(output_html))
- response_headers["content-type"] = "text/html; charset=utf-8"
- await send(message)
- # The body is sent in a second message
- elif message["type"] == "http.response.body":
- message["body"] = output_html
- await send(message)
- else:
- await send(message)
-
- profiler.start()
- return await self.app(scope, receive, wrapped_send)
-
-
-class SecurityAuditMiddleware:
- """Access audit middleware."""
-
- def __init__(
- self,
- app: ASGIApp,
- log_group_name: str = "deepchecks-access-audit",
- log_stream_name: str = "deepchecks-access-audit",
- ):
- h = watchtower.CloudWatchLogHandler(
- log_group_name=log_group_name,
- log_stream_name=log_stream_name,
- )
- h.setLevel(logging.INFO)
- self.logger = logging.getLogger("access-audit")
- self.logger.addHandler(h)
- self.app = app
-
- async def __call__(
- self,
- scope: Scope,
- receive: Receive,
- send: Send
- ):
- """Execute middleware."""
- from deepchecks_monitoring.utils import auth # pylint: disable=import-outside-toplevel
-
- if scope["type"] != "http":
- return await self.app(scope, receive, send)
-
- response_status_code = None
-
- async def wrapped_send(message: Message):
- nonlocal response_status_code
- if message["type"] == "http.response.start":
- response_status_code = message["status"]
- await send(message)
-
- start = time.time()
- await self.app(scope, receive, wrapped_send)
- end = time.time()
-
- info = {
- "duration": end - start,
- "client": scope["client"],
- "scheme": scope["scheme"],
- "http_version": scope["http_version"],
- "method": scope["method"],
- "path": scope["path"],
- "query_string": scope["query_string"],
- "status": response_status_code,
- "headers": {},
- "user": None,
- "access_token": None
- }
-
- for k, v in scope["headers"]:
- name = k.decode() if isinstance(k, bytes) else k
- value = v.decode() if isinstance(v, bytes) else v
-
- if name == "authorization":
- value = "bearer *****"
-
- info["headers"][name] = value
-
- state = scope.get("state")
-
- if state and isinstance(access_token := state.get("access_token"), auth.UserAccessToken):
- info["access_token"] = {
- "email": access_token.email,
- "is_admin": access_token.is_admin,
- "exp": access_token.exp,
- "email_verified": access_token.email_verified,
- }
-
- if state and (user := state.get("user")):
- info["user"] = {
- "id": user.id,
- "full_name": user.full_name,
- "email": user.email,
- "organization_id": user.organization_id,
- }
-
-        self.logger.info(info)
-
-
-class NoCacheMiddleware:
-    """Responsible for disabling client-side caching of REST API responses."""
-
- def __init__(self, app):
- self.app = app
-
- async def __call__(
- self,
- scope: Scope,
- receive: Receive,
- send: Send
- ):
- """Execute middleware."""
- if scope["type"] != "http" or not scope["path"].startswith("/api/"):
- return await self.app(scope, receive, send)
-
- async def wrapped_send(message: Message):
- if message["type"] == "http.response.start":
- message["headers"].append((b"cache-control", b"no-cache, stale-if-error=0"))
-
- await send(message)
-
- await self.app(scope, receive, wrapped_send)
-
-
-class LicenseCheckDependency:
- """Dependency used to validate license."""
-
- async def __call__(
- self,
- request: Request,
- user: User = Depends(CurrentUser()),
- ):
-        """Validate that the deployment is licensed to use enterprise features.
-
- Parameters
- ----------
- request: Request
- http request instance
- """
- # TODO: implement license check, for open source all enterprise features are disabled.
- if request.app.state.settings.is_cloud is False and request.app.state.settings.is_on_prem is False:
- raise LicenseError("Can't use enterprise features in open source version.")
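
All the middlewares in this module follow the same raw-ASGI shape: skip non-HTTP scopes, wrap send, adjust the 'http.response.start' message, and pass everything else through untouched. A minimal self-contained variant of that pattern, adding a single response header:

    from starlette.types import ASGIApp, Message, Receive, Scope, Send


    class AddHeaderMiddleware:
        """Append one static header to every HTTP response."""

        def __init__(self, app: ASGIApp, header: bytes = b'x-demo', value: bytes = b'1'):
            self.app = app
            self.header = header
            self.value = value

        async def __call__(self, scope: Scope, receive: Receive, send: Send):
            if scope['type'] != 'http':
                return await self.app(scope, receive, send)

            async def wrapped_send(message: Message):
                if message['type'] == 'http.response.start':
                    message['headers'].append((self.header, self.value))
                await send(message)

            await self.app(scope, receive, wrapped_send)
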
diff --git a/backend/deepchecks_monitoring/ee/notifications.py b/backend/deepchecks_monitoring/ee/notifications.py
deleted file mode 100644
index ab7c366..0000000
--- a/backend/deepchecks_monitoring/ee/notifications.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see .
-# ----------------------------------------------------------------------------
-#
-"""Alert execution logic."""
-import asyncio
-import typing as t
-
-import httpx
-import sqlalchemy as sa
-
-from deepchecks_monitoring import __version__
-from deepchecks_monitoring.ee.integrations.slack import SlackAlertNotification
-from deepchecks_monitoring.notifications import AlertNotificator as BaseAlertNotificator
-from deepchecks_monitoring.schema_models.alert_webhook import AlertWebhook
-from deepchecks_monitoring.schema_models.slack import SlackInstallation
-
-__all__ = ["AlertNotificator"]
-
-
-class AlertNotificator(BaseAlertNotificator):
- """Class to send notification about alerts."""
-
- async def send_slack_messages(self) -> bool:
- """Send slack message."""
- org = self.organization
- alert = self.alert
- alert_rule = self.alert_rule
-
- if alert_rule.alert_severity not in org.slack_notification_levels:
- notification_levels = ",".join(t.cast(t.List[t.Any], org.slack_notification_levels))
- self.logger.info(
- "AlertRule(id:%s) severity (%s) is not included in "
- "Organization(id:%s) slack notification levels config (%s)",
- alert_rule.id,
- alert_rule.alert_severity,
- org.id,
- notification_levels
- )
- return False
-
- q = sa.select(SlackInstallation)
- slack_apps = (await self.session.scalars(q)).all()
- slack_apps = t.cast(t.List[SlackInstallation], slack_apps)
-
- if not slack_apps:
- self.logger.info(
- "Organization(id:%s) does not have connected slack bots",
- org.id,
- )
- return False
-
- errors: t.List[t.Tuple[SlackInstallation, str]] = []
- settings = self.resources_provider.settings
-
- for app in slack_apps:
- notification = SlackAlertNotification(alert, settings.deployment_url).blocks()
- response = app.webhook_client().send(blocks=notification)
-
- if response.status_code != 200:
- errors.append((app, response.body))
- else:
- self.logger.info(
- "Alert(id:%s) slack notification was sent to Organization(id:%s) %s:%s:%s slack workspace",
- alert.id, org.id, app.app_id, app.team_name, app.team_id,
- )
-
- if errors:
- msg = ";\n".join(
- f"app:{app.id} - {message}"
- for app, message in errors
- )
- self.logger.error(
- "Failed to send Alert(id:%s) slack notification to the "
- "next Organization(id:%s) slack workspaces.\n%s",
- alert.id, org.id, msg
- )
-
- return len(errors) < len(slack_apps)
-
- async def execute_webhooks(self) -> bool:
- """Execute organization webhooks."""
- org = self.organization
- alert = self.alert
- webhooks = await self.session.scalars(sa.select(AlertWebhook))
-
- if not webhooks:
- return False
-
- webhooks = t.cast(t.Sequence[AlertWebhook], webhooks)
-
- async with httpx.AsyncClient() as client:
- results = await asyncio.gather(
- *(
- w.execute(
- alert=alert,
- client=client,
- settings=self.resources_provider.settings,
- logger=self.logger
- )
- for w in webhooks
- ),
- return_exceptions=True
- )
-
- if any(isinstance(it, Exception) or it is False for it in results):
- self.logger.warning(
-                f"Not all Organization(id:{org.id}) "
-                "webhooks executed successfully"
- )
- return False
-
- await self.session.flush()
- await self.session.commit()
- return True
-
- async def notify(self):
- """Send notifications."""
- await self.send_emails()
- await self.send_slack_messages()
- await self.execute_webhooks()
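
execute_webhooks above fans all webhooks out concurrently and tolerates individual failures; the core of that behaviour is asyncio.gather(..., return_exceptions=True), which collects exceptions instead of raising the first one. A tiny runnable illustration:

    import asyncio


    async def call_webhook(i: int) -> bool:
        if i == 1:
            raise RuntimeError('webhook down')
        return True


    async def main() -> None:
        results = await asyncio.gather(*(call_webhook(i) for i in range(3)), return_exceptions=True)
        all_ok = not any(isinstance(r, Exception) or r is False for r in results)
        print(results, all_ok)  # [True, RuntimeError('webhook down'), True] False


    asyncio.run(main())
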
diff --git a/backend/deepchecks_monitoring/ee/resources.py b/backend/deepchecks_monitoring/ee/resources.py
deleted file mode 100644
index 12794de..0000000
--- a/backend/deepchecks_monitoring/ee/resources.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see .
-# ----------------------------------------------------------------------------
-# pylint: disable=unnecessary-ellipsis
-"""Module with resources instatiation logic."""
-import logging
-from typing import TYPE_CHECKING, cast
-
-import ldclient
-from ldclient import Context
-from ldclient.client import LDClient
-from ldclient.config import Config as LDConfig
-
-from deepchecks_monitoring.ee import utils
-from deepchecks_monitoring.ee.config import Settings, SlackSettings, StripeSettings, TelemetrySettings
-from deepchecks_monitoring.ee.features_control_cloud import CloudFeaturesControl
-from deepchecks_monitoring.ee.features_control_on_prem import OnPremFeaturesControl
-from deepchecks_monitoring.ee.notifications import AlertNotificator as EEAlertNotificator
-from deepchecks_monitoring.features_control import FeaturesControl
-from deepchecks_monitoring.integrations.email import EmailSender
-from deepchecks_monitoring.public_models import User
-from deepchecks_monitoring.resources import ResourcesProvider as OpenSourceResourcesProvider
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import
- from ray.util.actor_pool import ActorPool # noqa
-
-__all__ = ["ResourcesProvider"]
-
-
-class ResourcesProvider(OpenSourceResourcesProvider):
- """Provider of resources."""
-
- ALERT_NOTIFICATOR_TYPE = EEAlertNotificator
-
- def __init__(self, settings: Settings):
- super().__init__(settings)
- self._lauchdarkly_client = None
- self._is_telemetry_initialized = False
-
- @property
- def telemetry_settings(self) -> TelemetrySettings:
- """Get the telemetry settings."""
- if not isinstance(self._settings, TelemetrySettings):
- raise AssertionError(
- "Provided settings instance type is not a subclass of "
- "the 'TelemetrySettings', you need to provide instance "
- "of 'TelemetrySettings' to the 'ResourcesProvider' constructor"
- )
- return self._settings
-
- @property
- def slack_settings(self) -> SlackSettings:
-        """Get the Slack settings."""
- if not isinstance(self._settings, SlackSettings):
- raise AssertionError(
-                "Provided settings instance is not a subclass of "
-                "'SlackSettings'; you need to pass a 'SlackSettings' "
-                "instance to the 'ResourcesProvider' constructor"
- )
- return self._settings
-
- @property
- def stripe_settings(self) -> StripeSettings:
-        """Get the Stripe settings."""
- if not isinstance(self._settings, StripeSettings):
- raise AssertionError(
-                "Provided settings instance is not a subclass of "
-                "'StripeSettings'; you need to pass a 'StripeSettings' "
-                "instance to the 'ResourcesProvider' constructor"
- )
- return self._settings
-
- @property
- def email_sender(self) -> EmailSender:
- """Email sender."""
- if self._email_sender is None:
- self._email_sender = EmailSender(self.email_settings)
- return self._email_sender
-
- @property
- def lauchdarkly_client(self) -> LDClient:
- """Launchdarkly client."""
- if self.settings.is_cloud is False:
- raise Exception("Launchdarkly client is only available in cloud mode")
- if self._lauchdarkly_client:
- return self._lauchdarkly_client
- ldclient.set_config(LDConfig(self.settings.lauchdarkly_sdk_key))
- self._lauchdarkly_client = ldclient.get()
- return self._lauchdarkly_client
-
- def get_features_control(self, user: User) -> FeaturesControl:
- """Return features control."""
- if self.settings.is_cloud:
- return CloudFeaturesControl(user, self.lauchdarkly_client, self.settings)
- # TODO add license check -
- elif self.settings.is_on_prem:
- return OnPremFeaturesControl(self.settings)
- return FeaturesControl(self.settings)
-
- @property
- def parallel_check_executors_pool(self) -> "ActorPool | None":
- if self.settings.is_cloud is False:
- parallel_check_executor_flag = True
- else:
- parallel_check_executor_flag = self.lauchdarkly_client.variation(
- "parallelCheckExecutorEnabled",
- context=Context.builder("parallelCheckExecutorEnabled").build(),
- default=True
- )
-
- logging.getLogger("server").info({
-            "message": f"'parallelCheckExecutorEnabled' is set to {parallel_check_executor_flag}"
- })
- if parallel_check_executor_flag:
- return super().parallel_check_executors_pool
-
- def initialize_telemetry_collectors(
- self,
- *targets,
- traces_sample_rate: float = 0.6,
- ):
- """Initialize telemetry."""
- settings = self.telemetry_settings
-
- if settings.sentry_dsn and not self._is_telemetry_initialized:
- import sentry_sdk # pylint: disable=import-outside-toplevel
-
- sentry_sdk.init(
- dsn=settings.sentry_dsn,
- traces_sample_rate=traces_sample_rate,
- environment=settings.sentry_env,
- before_send_transaction=utils.sentry.sentry_send_hook
- )
-
- self._is_telemetry_initialized = True
-
- if self._is_telemetry_initialized:
- for it in targets:
- utils.telemetry.collect_telemetry(it)
-
- def get_client_configuration(self) -> dict:
- if self.settings.is_cloud:
- settings = cast(Settings, self.settings)
- return {
- "sentryDsn": settings.sentry_dsn,
- "stripeApiKey": settings.stripe_public_api_key,
- "lauchdarklySdkKey": settings.lauchdarkly_sdk_key,
- "environment": settings.enviroment,
- "mixpanel_id": settings.mixpanel_id,
- "is_cloud": True,
- "hotjar_id": settings.hotjar_id,
- "hotjar_sv": settings.hotjar_sv
- }
- return super().get_client_configuration()
-
- def get_feature_flags(self, user: User) -> dict:
- """Return feature flags."""
- if self.settings.is_cloud:
- return {"slack_enabled": True}
- return super().get_feature_flags(user)
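
The deleted resources provider gates the parallel check executor pool behind a LaunchDarkly flag that is only consulted in cloud mode, with the client created lazily on first access. Here is a hedged sketch of that gate using the same `ldclient` calls as the code above; the `FlagGate` class and the `LD_SDK_KEY` environment variable are illustrative assumptions, not part of the project.

```python
# Sketch of the lazy LaunchDarkly client + flag check behind
# parallel_check_executors_pool. FlagGate and LD_SDK_KEY are hypothetical.
import os

import ldclient
from ldclient import Context
from ldclient.config import Config as LDConfig


class FlagGate:
    def __init__(self, sdk_key: str | None):
        self._sdk_key = sdk_key
        self._client = None

    @property
    def client(self):
        # Lazily initialise the shared LaunchDarkly client exactly once.
        if self._client is None:
            if not self._sdk_key:
                raise RuntimeError("LaunchDarkly is only configured in cloud mode")
            ldclient.set_config(LDConfig(self._sdk_key))
            self._client = ldclient.get()
        return self._client

    def parallel_executor_enabled(self) -> bool:
        if self._sdk_key is None:  # on-prem / open-source: default to enabled
            return True
        return self.client.variation(
            "parallelCheckExecutorEnabled",
            context=Context.builder("parallelCheckExecutorEnabled").build(),
            default=True,
        )


if __name__ == "__main__":
    gate = FlagGate(os.environ.get("LD_SDK_KEY"))
    print(gate.parallel_executor_enabled())
```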
diff --git a/backend/deepchecks_monitoring/ee/utils/__init__.py b/backend/deepchecks_monitoring/ee/utils/__init__.py
deleted file mode 100644
index 845c69a..0000000
--- a/backend/deepchecks_monitoring/ee/utils/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from . import sentry, telemetry
-
-__all__ = []
diff --git a/backend/deepchecks_monitoring/ee/utils/sentry.py b/backend/deepchecks_monitoring/ee/utils/sentry.py
deleted file mode 100644
index fc2dfba..0000000
--- a/backend/deepchecks_monitoring/ee/utils/sentry.py
+++ /dev/null
@@ -1,28 +0,0 @@
-
-def sentry_send_hook(event, *args, **kwargs): # pylint: disable=unused-argument
- """Sentry transaction send hook.
-
-    Sentry's "N+1 DB queries" detector incorrectly flags the monitoring-data
-    load performed during monitor execution as an N+1 problem. To prevent this,
-    we rewrite the span's 'op' key to 'monitoring-data-load'. Sentry uses this
-    key to identify database queries and expects it to equal 'db' or 'db.query'.
- """
- if event.get('type') == 'transaction':
- for span in event.get('spans', tuple()):
- if (
- span.get('op') in ['db', 'db.query', 'db.sql.query']
- and '_monitor_data_' in span.get('description', '')
- ):
- span['op'] = 'monitoring-data-load'
- return event
-
-
-def traces_sampler(sampling_context):
- """Return trace sampling rate for given context."""
- source = sampling_context['transaction_context']['source']
- # Filtering out say-hello messages completely
- if source == 'route' and sampling_context['asgi_scope'].get('path') == '/api/v1/say-hello':
- return 0
- # For everything else return default rate
- return 0.1
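
The two hooks above are designed to be handed to `sentry_sdk.init`: `before_send_transaction` for the span relabelling and `traces_sampler` for per-route sampling (the deleted resources.py wires only the former and uses a fixed `traces_sample_rate` instead). The snippet below is one possible wiring, assuming the module is still importable; the DSN and environment values are placeholders.

```python
# Sketch of wiring the deleted hooks into the Sentry SDK. DSN and environment
# are placeholders; traces_sampler wiring is an assumed alternative to a fixed rate.
import sentry_sdk

from deepchecks_monitoring.ee.utils.sentry import sentry_send_hook, traces_sampler

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send_transaction=sentry_send_hook,  # relabel monitoring-data-load spans
    traces_sampler=traces_sampler,  # drop /api/v1/say-hello traces, sample 10% otherwise
    environment="dev",
)
```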
diff --git a/backend/deepchecks_monitoring/ee/utils/telemetry.py b/backend/deepchecks_monitoring/ee/utils/telemetry.py
deleted file mode 100644
index cbbd9e8..0000000
--- a/backend/deepchecks_monitoring/ee/utils/telemetry.py
+++ /dev/null
@@ -1,388 +0,0 @@
-# ----------------------------------------------------------------------------
-# Copyright (C) 2021-2022 Deepchecks (https://www.deepchecks.com)
-#
-# This file is part of Deepchecks.
-# Deepchecks is distributed under the terms of the GNU Affero General
-# Public License (version 3 or later).
-# You should have received a copy of the GNU Affero General Public License
-# along with Deepchecks. If not, see .
-# ----------------------------------------------------------------------------
-#
-# pylint: disable=unused-import
-"""Open-telementy instrumentors."""
-import enum
-import json
-import logging
-import typing as t
-from functools import wraps
-from time import perf_counter
-
-import anyio
-import pendulum as pdl
-import sentry_sdk
-
-from deepchecks_monitoring import __version__
-from deepchecks_monitoring.public_models import Organization, User
-from deepchecks_monitoring.schema_models import Model, ModelVersion
-
-if t.TYPE_CHECKING:
- from pendulum.datetime import DateTime as PendulumDateTime
- from sqlalchemy.ext.asyncio import AsyncSession
-
- from deepchecks_monitoring.bgtasks.scheduler import AlertsScheduler
- from deepchecks_monitoring.bgtasks.tasks_queuer import TasksQueuer
- from deepchecks_monitoring.bgtasks.tasks_runner import TaskRunner
- from deepchecks_monitoring.logic.data_ingestion import DataIngestionBackend
-
-
-__all__ = [
- "collect_telemetry",
- "SchedulerInstrumentor",
- "DataIngetionInstrumentor"
-]
-
-
-class SpanStatus(str, enum.Enum):
- CANCELED = "Coroutine Canceled"
- FAILED = "Execution Failed"
- OK = "Ok"
-
-
-def collect_telemetry(routine: t.Any):
-    """Instrument open-telemetry for the given routine."""
- # pylint: disable=redefined-outer-name,import-outside-toplevel
- from deepchecks_monitoring.bgtasks.scheduler import AlertsScheduler
- from deepchecks_monitoring.bgtasks.tasks_queuer import TasksQueuer
- from deepchecks_monitoring.bgtasks.tasks_runner import TaskRunner
- from deepchecks_monitoring.logic.data_ingestion import DataIngestionBackend
-
- logger = logging.getLogger("instrumentation")
-
- if issubclass(routine, AlertsScheduler):
- SchedulerInstrumentor(scheduler_type=routine).instrument()
- logger.info("Instrumented alerts scheduler telemetry collectors")
- return routine
-
- if issubclass(routine, DataIngestionBackend):
- DataIngetionInstrumentor(data_ingestion_backend_type=routine).instrument()
- logger.info("Instrumented data ingestion backend telemetry collectors")
- return routine
-
- if issubclass(routine, TaskRunner):
- TaskRunerInstrumentor(task_runner_type=routine).instrument()
- logger.info("Instrumented task runner telemetry collectors")
- return routine
-
- if issubclass(routine, TasksQueuer):
- TasksQueuerInstrumentor(task_queuer_type=routine).instrument()
- logger.info("Instrumented task queuer telemetry collectors")
- return routine
-
- raise ValueError(
-        "Unknown routine; do not know how to apply "
-        "open-telemetry instrumentation to it."
- )
-
-
-class SchedulerInstrumentor:
- """Alerts scheduler open-telemetry instrumentor."""
-
- def __init__(self, scheduler_type: "t.Type[AlertsScheduler]"):
- self.scheduler_type = scheduler_type
- self.original_run_all_organizations = self.scheduler_type.run_all_organizations
- self.original_run_organization = self.scheduler_type.run_organization
-
- def instrument(self):
-        """Instrument open-telemetry for the given scheduler type."""
-
- @wraps(self.original_run_all_organizations)
- async def run_all_organizations(scheduler: "AlertsScheduler", *args, **kwargs):
- db_url = scheduler.engine.url
- with sentry_sdk.start_transaction(name="Alerts Execution"):
- sentry_sdk.set_context("deepchecks_monitoring", {
- "version": __version__
- })
- sentry_sdk.set_context("database", {
- "name": str(db_url.database),
- "uri": str(db_url),
- "user": str(db_url.username)
- })
- with sentry_sdk.start_span(op="AlertsScheduler.run_all_organizations") as span:
- span.set_data("sleep_seconds", scheduler.sleep_seconds)
- try:
- await self.original_run_all_organizations(
- scheduler,
- *args,
- **kwargs
- )
- except Exception as error:
- sentry_sdk.capture_exception(error)
- if isinstance(error, anyio.get_cancelled_exc_class()):
- span.set_status(SpanStatus.CANCELED)
- else:
- span.set_status(SpanStatus.FAILED)
- raise
- else:
- span.set_status(SpanStatus.OK)
-
- @wraps(self.original_run_organization)
- async def run_organization(
- scheduler: "AlertsScheduler",
- organization: "Organization",
- *args,
- **kwargs
- ):
- with sentry_sdk.start_span(op="AlertsScheduler.run_organization") as span:
- span.set_data("organization.id", organization.id)
- span.set_data("organization.schema_name", organization.schema_name)
- kwargs = {**kwargs, "organization": organization}
- try:
- enqueued_tasks = await self.original_run_organization(
- scheduler,
- *args,
- **kwargs
- )
- except Exception as error:
- sentry_sdk.capture_exception(error)
- span.set_status(
- SpanStatus.CANCELED
- if isinstance(error, anyio.get_cancelled_exc_class())
- else SpanStatus.FAILED
- )
- raise
- else:
- span.set_status(SpanStatus.OK)
-
- if enqueued_tasks is not None:
- stringified_tasks = "\n".join([repr(task) for task in enqueued_tasks])
- span.set_data("enqueued_tasks", stringified_tasks)
- span.set_data("description", f"Enqueued {len(enqueued_tasks)} tasks")
- else:
- span.set_data("description", "Enqueued 0 tasks")
-
- return enqueued_tasks
-
- self.scheduler_type.run_all_organizations = run_all_organizations
- self.scheduler_type.run_organization = run_organization
-
- def uninstrument(self):
- self.scheduler_type.run_all_organizations = self.original_run_all_organizations
- self.scheduler_type.run_organization = self.original_run_organization
-
-
-class DataIngetionInstrumentor:
- """Data ingestion backend open-telemetry instrumentor."""
-
- def __init__(self, data_ingestion_backend_type: t.Type["DataIngestionBackend"]):
- self.data_ingestion_backend_type = data_ingestion_backend_type
- self.original_log_samples = self.data_ingestion_backend_type.log_samples
- self.original_log_labels = self.data_ingestion_backend_type.log_labels
-
- def instrument(self):
-        """Instrument the data ingestion backend."""
-
- @wraps(self.data_ingestion_backend_type.log_samples)
- async def log_samples(
- data_ingestion_backend: "DataIngestionBackend",
- model_version: ModelVersion,
- data: t.List[t.Dict[str, t.Any]],
- session: "AsyncSession",
- organization_id: int,
- log_time: "PendulumDateTime",
- ):
- settings = data_ingestion_backend.resources_provider.settings
-
- with sentry_sdk.start_transaction(name="Log Samples"):
- sentry_sdk.set_context("deepchecks_monitoring", {
- "version": __version__
- })
- sentry_sdk.set_context("kafka", {
- "host": settings.kafka_host,
- "username": settings.kafka_username,
- "security_protocol": settings.kafka_security_protocol,
- "max_metadata_age": settings.kafka_max_metadata_age,
- "replication_factor": settings.kafka_replication_factor,
- "sasl_mechanism": settings.kafka_sasl_mechanism,
- })
- sentry_sdk.set_context("redis", {
- "uri": settings.redis_uri
- })
- sentry_sdk.set_context("database", {
- "uri": settings.database_uri
- })
- with sentry_sdk.start_span(op="DataIngestionBackend.log_or_update") as span:
- span.set_data("organization_id", organization_id)
- span.set_data("n_of_samples", len(data))
- try:
- result = await self.original_log_samples(
- data_ingestion_backend,
- model_version,
- data,
- session,
- organization_id,
- log_time
- )
- except Exception as error:
- span.set_status(SpanStatus.FAILED)
- sentry_sdk.capture_exception(error)
- raise
- else:
- return result
-
- @wraps(self.data_ingestion_backend_type.log_labels)
- async def log_labels(
- data_ingestion_backend: "DataIngestionBackend",
- model: Model,
- data: t.List[t.Dict[str, t.Any]],
- session: "AsyncSession",
- organization_id: int,
- ):
- settings = data_ingestion_backend.resources_provider.settings
-
- with sentry_sdk.start_transaction(name="Log Labels"):
- sentry_sdk.set_context("deepchecks_monitoring", {
- "version": __version__
- })
- sentry_sdk.set_context("kafka", {
- "host": settings.kafka_host,
- "username": settings.kafka_username,
- "security_protocol": settings.kafka_security_protocol,
- "max_metadata_age": settings.kafka_max_metadata_age,
- "replication_factor": settings.kafka_replication_factor,
- "sasl_mechanism": settings.kafka_sasl_mechanism,
- })
- sentry_sdk.set_context("redis", {
- "uri": settings.redis_uri
- })
- sentry_sdk.set_context("database", {
- "uri": settings.database_uri
- })
- with sentry_sdk.start_span(op="DataIngestionBackend.log_or_update") as span:
- span.set_data("organization_id", organization_id)
- span.set_data("n_of_samples", len(data))
- try:
- result = await self.original_log_labels(
- data_ingestion_backend,
- model,
- data,
- session,
- organization_id
- )
- except Exception as error:
- span.set_status(SpanStatus.FAILED)
- sentry_sdk.capture_exception(error)
- raise
- else:
- return result
-
- self.data_ingestion_backend_type.log_samples = log_samples
- self.data_ingestion_backend_type.log_labels = log_labels
-
- def uninstrument(self):
- self.data_ingestion_backend_type.log_samples = self.original_log_samples
- self.data_ingestion_backend_type.log_labels = self.original_log_labels
-
-
-class TaskRunerInstrumentor:
- """Task runner open-telemetry instrumentor."""
-
- def __init__(self, task_runner_type: t.Type["TaskRunner"]):
- self.task_runner_type = task_runner_type
- self.original_run_task = self.task_runner_type._run_task
-
- def instrument(self):
- """Instrument the task runner functions we want to monitor."""
-
- @wraps(self.original_run_task)
- async def _run_task(runner: "TaskRunner", task, session, queued_time, lock):
- redis_uri = runner.resource_provider.redis_settings.redis_uri
- database_uri = runner.resource_provider.database_settings.database_uri
- kafka_settings = runner.resource_provider.kafka_settings
-
- with sentry_sdk.start_transaction(name="Task Runner"):
- sentry_sdk.set_context("deepchecks_monitoring", {
- "version": __version__
- })
- sentry_sdk.set_context("kafka", {
- "host": kafka_settings.kafka_host,
- "username": kafka_settings.kafka_username,
- "security_protocol": kafka_settings.kafka_security_protocol,
- "max_metadata_age": kafka_settings.kafka_max_metadata_age,
- "replication_factor": kafka_settings.kafka_replication_factor,
- "sasl_mechanism": kafka_settings.kafka_sasl_mechanism,
- })
- sentry_sdk.set_context("redis", {
- "uri": redis_uri
- })
- sentry_sdk.set_context("database", {
- "uri": database_uri
- })
- with sentry_sdk.start_span(op="TaskRunner.run_single_task") as span:
- span.set_data("task.num-pushed", str(task.num_pushed))
- span.set_data("task.params", json.dumps(task.params, indent=3))
- span.set_data("task.type", str(task.bg_worker_task))
- span.set_data("task.creation-time", str(task.creation_time))
- span.set_data("task.name", task.name)
- span.set_data("task.duration-in-queue", pdl.now().int_timestamp - queued_time)
-
- try:
- start = perf_counter()
- result = await self.original_run_task(runner, task, session, queued_time, lock)
- span.set_data("task.execution-duration", perf_counter() - start)
- span.set_status(SpanStatus.OK)
- except Exception as error:
- span.set_status(SpanStatus.FAILED)
- sentry_sdk.capture_exception(error)
- raise
- else:
- return result
-
- self.task_runner_type._run_task = _run_task # pylint: disable=protected-access
-
- def uninstrument(self):
- self.task_runner_type._run_task = self.original_run_task # pylint: disable=protected-access
-
-
-class TasksQueuerInstrumentor:
-    """Tasks queuer open-telemetry instrumentor."""
-
- def __init__(self, task_queuer_type: t.Type["TasksQueuer"]):
- self.task_queuer_type = task_queuer_type
- self.original_move_tasks_to_queue = self.task_queuer_type.move_tasks_to_queue
-
- def instrument(self):
-        """Instrument the tasks queuer functions we want to monitor."""
-
- @wraps(self.original_move_tasks_to_queue)
- async def move_tasks_to_queue(queuer: "TasksQueuer", session):
- redis_uri = queuer.resource_provider.redis_settings.redis_uri
- database_uri = queuer.resource_provider.database_settings.database_uri
-
- with sentry_sdk.start_transaction(name="Tasks Queuer"):
- sentry_sdk.set_context("deepchecks_monitoring", {
- "version": __version__
- })
- sentry_sdk.set_context("redis", {
- "uri": redis_uri
- })
- sentry_sdk.set_context("database", {
- "uri": database_uri
- })
- with sentry_sdk.start_span(op="TasksQueuer.move_tasks_to_queue") as span:
- try:
- start = perf_counter()
- result = await self.original_move_tasks_to_queue(queuer, session)
- span.set_data("execution-duration", perf_counter() - start)
- span.set_data("queued-tasks-amount", result)
- span.set_status(SpanStatus.OK)
- except Exception as error:
- span.set_status(SpanStatus.FAILED)
- sentry_sdk.capture_exception(error)
- raise
- else:
- return result
-
- self.task_queuer_type.move_tasks_to_queue = move_tasks_to_queue
-
- def uninstrument(self):
- self.task_queuer_type.move_tasks_to_queue = self.original_move_tasks_to_queue
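
All four instrumentors in the deleted telemetry module follow the same monkey-patching recipe: keep a reference to the original coroutine method, replace it with a wrapper that runs the call inside a Sentry transaction and span, record status and exceptions, and restore the original on uninstrument. Below is a condensed, runnable sketch of that recipe against a hypothetical `Worker` class; it is an illustration of the pattern, not the project's instrumentors.

```python
# Minimal sketch of the wrap-and-swap instrumentor pattern used above.
# Worker and WorkerInstrumentor are hypothetical stand-ins.
import asyncio
from functools import wraps

import sentry_sdk


class Worker:
    async def run(self, n: int) -> int:
        await asyncio.sleep(0)
        return n * 2


class WorkerInstrumentor:
    def __init__(self, worker_type: type[Worker]):
        self.worker_type = worker_type
        self.original_run = worker_type.run  # keep the unpatched coroutine

    def instrument(self) -> None:
        @wraps(self.original_run)
        async def run(worker: Worker, *args, **kwargs):
            with sentry_sdk.start_transaction(name="Worker Run"):
                with sentry_sdk.start_span(op="Worker.run") as span:
                    try:
                        result = await self.original_run(worker, *args, **kwargs)
                    except Exception as error:
                        span.set_status("Execution Failed")
                        sentry_sdk.capture_exception(error)
                        raise
                    else:
                        span.set_status("Ok")
                        return result

        self.worker_type.run = run  # swap in the instrumented version

    def uninstrument(self) -> None:
        self.worker_type.run = self.original_run  # restore the original


if __name__ == "__main__":
    WorkerInstrumentor(Worker).instrument()
    print(asyncio.run(Worker().run(21)))  # prints 42; spans are no-ops without sentry_sdk.init
```

Patching at the class level (rather than per instance) is what lets the instrumentors above attach once at startup and still cover every scheduler, queuer, or runner instance created later.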