From 8e237b3c985bb63e5e1ce5edc8f18f4cfb6c28a8 Mon Sep 17 00:00:00 2001 From: Marx Date: Mon, 9 Oct 2023 21:15:21 +0800 Subject: [PATCH 1/7] ANTs-745-complete-lever-integration --- .../connectors/source-lever/.dockerignore | 6 + .../connectors/source-lever/Dockerfile | 38 +++ .../connectors/source-lever/README.md | 133 +++++++++ .../source-lever/acceptance-test-config.yml | 39 +++ .../source-lever/acceptance-test-docker.sh | 3 + .../connectors/source-lever/build.gradle | 8 + .../integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 5 + .../integration_tests/acceptance.py | 16 ++ .../integration_tests/configured_catalog.json | 169 +++++++++++ .../integration_tests/invalid_config.json | 3 + .../integration_tests/sample_config.json | 3 + .../integration_tests/sample_state.json | 5 + .../connectors/source-lever/main.py | 13 + .../connectors/source-lever/metadata.yaml | 25 ++ .../connectors/source-lever/requirements.txt | 1 + .../connectors/source-lever/setup.py | 30 ++ .../source-lever/source_lever/__init__.py | 8 + .../source-lever/source_lever/schemas/TODO.md | 25 ++ .../source_lever/schemas/applications.json | 163 +++++++++++ .../source_lever/schemas/archive_reasons.json | 18 ++ .../source_lever/schemas/contacts.json | 46 +++ .../source_lever/schemas/employees.json | 19 ++ .../source_lever/schemas/feedback.json | 114 ++++++++ .../source_lever/schemas/interviews.json | 77 +++++ .../source_lever/schemas/notes.json | 56 ++++ .../source_lever/schemas/offers.json | 44 +++ .../source_lever/schemas/opportunities.json | 209 ++++++++++++++ .../source_lever/schemas/panels.json | 103 +++++++ .../source_lever/schemas/postings.json | 165 +++++++++++ .../schemas/requisition_fields.json | 32 +++ .../source_lever/schemas/requisitions.json | 198 +++++++++++++ .../source_lever/schemas/sources.json | 12 + .../source_lever/schemas/stages.json | 12 + .../source_lever/schemas/tags.json | 12 + .../source_lever/schemas/users.json | 39 +++ 
.../source-lever/source_lever/source.py | 267 ++++++++++++++++++ .../source-lever/source_lever/spec.yaml | 23 ++ .../source-lever/unit_tests/__init__.py | 3 + .../unit_tests/test_incremental_streams.py | 59 ++++ .../source-lever/unit_tests/test_source.py | 22 ++ .../source-lever/unit_tests/test_streams.py | 83 ++++++ docs/integrations/sources/lever.md | 46 +++ 43 files changed, 2355 insertions(+) create mode 100644 airbyte-integrations/connectors/source-lever/.dockerignore create mode 100644 airbyte-integrations/connectors/source-lever/Dockerfile create mode 100644 airbyte-integrations/connectors/source-lever/README.md create mode 100644 airbyte-integrations/connectors/source-lever/acceptance-test-config.yml create mode 100755 airbyte-integrations/connectors/source-lever/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-lever/build.gradle create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-lever/main.py create mode 100644 airbyte-integrations/connectors/source-lever/metadata.yaml create mode 100644 airbyte-integrations/connectors/source-lever/requirements.txt create mode 100644 airbyte-integrations/connectors/source-lever/setup.py create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/__init__.py create mode 100644 
airbyte-integrations/connectors/source-lever/source_lever/schemas/TODO.md create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/archive_reasons.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/contacts.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/employees.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/feedback.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/interviews.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/notes.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/panels.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/requisition_fields.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/requisitions.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/sources.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/stages.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/tags.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/users.json create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/source.py create mode 100644 airbyte-integrations/connectors/source-lever/source_lever/spec.yaml create mode 100644 
airbyte-integrations/connectors/source-lever/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-lever/unit_tests/test_incremental_streams.py create mode 100644 airbyte-integrations/connectors/source-lever/unit_tests/test_source.py create mode 100644 airbyte-integrations/connectors/source-lever/unit_tests/test_streams.py create mode 100644 docs/integrations/sources/lever.md diff --git a/airbyte-integrations/connectors/source-lever/.dockerignore b/airbyte-integrations/connectors/source-lever/.dockerignore new file mode 100644 index 000000000000..338cc134eccd --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_lever +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-lever/Dockerfile b/airbyte-integrations/connectors/source-lever/Dockerfile new file mode 100644 index 000000000000..888e0d679ecb --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_lever ./source_lever + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-lever diff --git a/airbyte-integrations/connectors/source-lever/README.md b/airbyte-integrations/connectors/source-lever/README.md new file mode 100644 index 000000000000..60558937cd9b --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/README.md @@ -0,0 +1,133 @@ +# Lever Source + +This is the repository for the Lever source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/lever). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. 
+ +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-lever:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/lever) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_lever/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source lever test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-lever:dev +``` + + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-lever:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-lever:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-lever:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-lever:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-lever:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
+To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-lever:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-lever:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/source-lever/acceptance-test-config.yml b/airbyte-integrations/connectors/source-lever/acceptance-test-config.yml new file mode 100644 index 000000000000..82a5b128a72e --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/acceptance-test-config.yml @@ -0,0 +1,39 @@ +# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-lever:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_lever/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.jsonl" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state: +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-lever/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-lever/acceptance-test-docker.sh new file mode 100755 index 000000000000..b6d65deeccb4 
--- /dev/null +++ b/airbyte-integrations/connectors/source-lever/acceptance-test-docker.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh + +source "$(git rev-parse --show-toplevel)/airbyte-integrations/bases/connector-acceptance-test/acceptance-test-docker.sh" diff --git a/airbyte-integrations/connectors/source-lever/build.gradle b/airbyte-integrations/connectors/source-lever/build.gradle new file mode 100644 index 000000000000..6c8b103244fe --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/build.gradle @@ -0,0 +1,8 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' +} + +airbytePython { + moduleDirectory 'source_lever' +} diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/__init__.py b/airbyte-integrations/connectors/source-lever/integration_tests/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-lever/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..52b0f2c2118f --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-lever/integration_tests/acceptance.py new file mode 100644 index 000000000000..9e6409236281 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("connector_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..79d73c4599fe --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json @@ -0,0 +1,169 @@ +{ + "streams": [ + { + "stream": { + "name": "opportunities", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "offers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "feedback", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "interviews", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "applications", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": 
"full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "requisitions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "stages", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "postings", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "archive_reasons", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "panels", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "tags", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "sources", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + 
"source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "requisition_fields", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "notes", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-lever/integration_tests/invalid_config.json new file mode 100644 index 000000000000..f3732995784f --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "todo-wrong-field": "this should be an incomplete config file, used in standard tests" +} diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-lever/integration_tests/sample_config.json new file mode 100644 index 000000000000..ecc4913b84c7 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "fix-me": "TODO" +} diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-lever/integration_tests/sample_state.json new file mode 100644 index 000000000000..3587e579822d --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-lever/main.py b/airbyte-integrations/connectors/source-lever/main.py 
new file mode 100644 index 000000000000..f6080ffde1b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_lever import SourceLever + +if __name__ == "__main__": + source = SourceLever() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-lever/metadata.yaml b/airbyte-integrations/connectors/source-lever/metadata.yaml new file mode 100644 index 000000000000..a5c369ad0807 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/metadata.yaml @@ -0,0 +1,25 @@ +data: + allowedHosts: + hosts: + - TODO # Please change to the hostname of the source. + registries: + oss: + enabled: false + cloud: + enabled: false + connectorSubtype: api + connectorType: source + definitionId: f20c575a-6d92-46b3-be9b-269074340169 + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-lever + githubIssueLabel: source-lever + icon: lever.svg + license: MIT + name: Lever + releaseDate: TODO + supportLevel: community + releaseStage: alpha + documentationUrl: https://docs.airbyte.com/integrations/sources/lever + tags: + - language:python +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-lever/requirements.txt b/airbyte-integrations/connectors/source-lever/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/source-lever/setup.py b/airbyte-integrations/connectors/source-lever/setup.py new file mode 100644 index 000000000000..3fc105985e84 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/setup.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "requests-mock~=1.9.3", + "pytest~=6.2", + "pytest-mock~=3.6.1", + "connector-acceptance-test", +] + +setup( + name="source_lever", + description="Source implementation for Lever.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-lever/source_lever/__init__.py b/airbyte-integrations/connectors/source-lever/source_lever/__init__.py new file mode 100644 index 000000000000..0db479832e53 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceLever + +__all__ = ["SourceLever"] diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/TODO.md b/airbyte-integrations/connectors/source-lever/source_lever/schemas/TODO.md new file mode 100644 index 000000000000..cf1efadb3c9c --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/TODO.md @@ -0,0 +1,25 @@ +# TODO: Define your stream schemas +Your connector must describe the schema of each stream it can output using [JSONSchema](https://json-schema.org). + +The simplest way to do this is to describe the schema of your streams using one `.json` file per stream. You can also dynamically generate the schema of your stream in code, or you can combine both approaches: start with a `.json` file and dynamically add properties to it. + +The schema of a stream is the return value of `Stream.get_json_schema`. 
+ +## Static schemas +By default, `Stream.get_json_schema` reads a `.json` file in the `schemas/` directory whose name is equal to the value of the `Stream.name` property. In turn `Stream.name` by default returns the name of the class in snake case. Therefore, if you have a class `class EmployeeBenefits(HttpStream)` the default behavior will look for a file called `schemas/employee_benefits.json`. You can override any of these behaviors as you need. + +Important note: any objects referenced via `$ref` should be placed in the `shared/` directory in their own `.json` files. + +## Dynamic schemas +If you'd rather define your schema in code, override `Stream.get_json_schema` in your stream class to return a `dict` describing the schema using [JSONSchema](https://json-schema.org). + +## Dynamically modifying static schemas +Override `Stream.get_json_schema` to run the default behavior, edit the returned value, then return the edited value: +``` +def get_json_schema(self): + schema = super().get_json_schema() + schema['dynamically_determined_property'] = "property" + return schema +``` + +Delete this file once you're done. Or don't. 
Up to you :) diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json new file mode 100644 index 000000000000..533f05b4ef33 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json @@ -0,0 +1,163 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "archived": { + "properties": { + "archivedAt": { + "type": "number" + }, + "reason": { + "type": "string" + } + }, + "type": "object" + }, + "candidateId": { + "type": "string" + }, + "comments": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "createdAt": { + "type": "number" + }, + "customQuestions": { + "items": { + "properties": { + "baseTemplateId": { + "type": "string" + }, + "completedAt": { + "type": "number" + }, + "createdAt": { + "type": "number" + }, + "fields": { + "items": { + "properties": { + "description": { + "type": "string" + }, + "id": { + "type": "string" + }, + "options": { + "items": { + "properties": { + "text": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "prompt": { + "type": "string" + }, + "required": { + "type": "boolean" + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + }, + "value": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + } + }, + "type": "object" + }, + "type": "array" + }, + "id": { + "type": "string" + }, + "instructions": { + "type": "string" + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "email": { + "type": ["null", "string"] + }, + "id": { + "type": "string" + }, + "links": { + "type": "array" + }, + "name": { + "type": ["null", "string"] + }, + "opportunityId": { + "type": "string" + }, + "phone": { + "properties": { + "value": { + 
"type": "string" + } + }, + "type": ["object", "null"] + }, + "posting": { + "type": "string" + }, + "postingHiringManager": { + "type": ["null", "string"] + }, + "postingOwner": { + "type": ["null", "string"] + }, + "requisitionForHire": { + "properties": { + "hiringManagerOnHire": { + "type": "string" + }, + "id": { + "type": "string" + }, + "requisitionCode": { + "type": "string" + } + }, + "type": ["object", "null"] + }, + "type": { + "type": "string" + }, + "user": { + "type": ["null", "string"] + }, + "opportunity": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/archive_reasons.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/archive_reasons.json new file mode 100644 index 000000000000..132e6b21db8a --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/archive_reasons.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/contacts.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/contacts.json new file mode 100644 index 000000000000..d42faec2625d --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/contacts.json @@ -0,0 +1,46 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "emails": { + "items": { + "type": "string" + }, + "type": "array" + }, + "headline": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isAnonymized": { + "type": "boolean" + }, + "location": { + "properties": { + "name": { + "type": "string" + } + }, + "type": "object" + }, + "name": { + "type": "string" + }, + "phones": { + "items": { + "properties": { + "type": 
{ + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/employees.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/employees.json new file mode 100644 index 000000000000..2fa01a0fa1ff --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/employees.json @@ -0,0 +1,19 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "years_of_service": { + "type": ["null", "integer"] + }, + "start_date": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/feedback.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/feedback.json new file mode 100644 index 000000000000..1f20a343381f --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/feedback.json @@ -0,0 +1,114 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "baseTemplateId": { + "type": "string" + }, + "completedAt": { + "type": ["null", "number"] + }, + "createdAt": { + "type": "number" + }, + "fields": { + "items": { + "properties": { + "description": { + "type": "string" + }, + "id": { + "type": "string" + }, + "options": { + "items": { + "properties": { + "text": { + "type": "string" + } + }, + "type": "object" + }, + "type": ["array", "null"] + }, + "prompt": { + "type": ["string", "null"] + }, + "required": { + "type": "boolean" + }, + "scores": { + "items": { + "properties": { + "description": { + "type": "string" + }, + "text": { + "type": "string" + } + }, + "type": "object" + }, + "type": ["array", "null"] + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + }, + 
"value": { + "anyOf": [ + { + "type": ["null", "string", "number"] + }, + { + "items": { + "properties": { + "comment": { + "type": ["null", "string"] + }, + "score": { + "type": ["null", "number"] + } + }, + "type": "object" + }, + "type": "array" + } + ] + } + }, + "type": "object" + }, + "type": ["null", "array"] + }, + "id": { + "type": "string" + }, + "instructions": { + "type": "string" + }, + "interview": { + "type": ["null", "string"] + }, + "panel": { + "type": ["null", "string"] + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + }, + "updatedAt": { + "type": ["null", "number"] + }, + "user": { + "type": "string" + }, + "opportunity": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/interviews.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/interviews.json new file mode 100644 index 000000000000..0ec143d831c8 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/interviews.json @@ -0,0 +1,77 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "createdAt": { + "type": "number" + }, + "date": { + "type": "number" + }, + "duration": { + "type": "number" + }, + "feedbackForms": { + "items": { + "type": "string" + }, + "type": "array" + }, + "feedbackReminder": { + "type": "string" + }, + "feedbackTemplate": { + "type": "string" + }, + "id": { + "type": "string" + }, + "interviewers": { + "items": { + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "location": { + "type": "string" + }, + "note": { + "type": "string" + }, + "panel": { + "type": "string" + }, + "postings": { + "items": { + "type": "string" + }, + "type": "array" + }, + "stage": { + "type": "string" + }, + "subject": { + "type": "string" + }, + "timezone": { + "type": "string" + }, + 
"user": { + "type": "string" + }, + "opportunity": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/notes.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/notes.json new file mode 100644 index 000000000000..addb73f81f24 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/notes.json @@ -0,0 +1,56 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "completedAt": { + "type": "number" + }, + "createdAt": { + "type": "number" + }, + "fields": { + "items": { + "properties": { + "createdAt": { + "type": "number" + }, + "score": { + "type": ["null", "number"] + }, + "stage": { + "type": "string" + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + }, + "user": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "id": { + "type": "string" + }, + "secret": { + "type": "boolean" + }, + "text": { + "type": "string" + }, + "user": { + "type": "string" + }, + "opportunity": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json new file mode 100644 index 000000000000..32070c499ef9 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json @@ -0,0 +1,44 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "createdAt": { + "type": ["number", "null"] + }, + "creator": { + "type": ["string", "null"] + }, + "fields": { + "items": { + "properties": { + "identifier": { + "type": "string" + }, + "text": { + "type": "string" + }, + "value": { + "type": ["boolean", "null", "number", "string"] + } + }, + "type": "object" + }, + "type": ["null", "array"] + }, + "id": { + "type": ["string", "null"] + }, + 
"posting": { + "type": ["string", "null"] + }, + "signatures": { + "type": ["object", "null"] + }, + "status": { + "type": ["string", "null"] + }, + "opportunity": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json new file mode 100644 index 000000000000..af22fcfd99df --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json @@ -0,0 +1,209 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "applications": { + "items": { + "type": "string" + }, + "type": "array" + }, + "archived": { + "properties": { + "archivedAt": { + "type": "number" + }, + "reason": { + "type": "string" + } + }, + "type": "object" + }, + "confidentiality": { + "type": "string" + }, + "contact": { + "properties": { + "emails": { + "items": { + "type": "string" + }, + "type": "array" + }, + "headline": { + "type": ["null", "string"] + }, + "id": { + "type": "string" + }, + "isAnonymized": { + "type": "boolean" + }, + "location": { + "properties": { + "name": { + "type": "string" + } + }, + "type": "object" + }, + "name": { + "type": "string" + }, + "phones": { + "items": { + "properties": { + "type": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "createdAt": { + "type": "number" + }, + "dataProtection": { + "properties": { + "contact": { + "properties": { + "allowed": { + "type": "boolean" + }, + "expiresAt": { + "type": "number" + } + }, + "type": "object" + }, + "store": { + "properties": { + "allowed": { + "type": "boolean" + }, + "expiresAt": { + "type": "number" + } + }, + "type": "object" + } + }, + "type": ["object", "null"] + }, + "emails": { + "items": { + "type": "string" + }, + "type": "array" + }, + "followers": { + "items": { + 
"type": "string" + }, + "type": "array" + }, + "headline": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isAnonymized": { + "type": "boolean" + }, + "lastAdvancedAt": { + "type": "number" + }, + "lastInteractionAt": { + "type": "number" + }, + "links": { + "items": { + "type": "string" + }, + "type": "array" + }, + "location": { + "type": "string" + }, + "name": { + "type": "string" + }, + "origin": { + "type": "string" + }, + "owner": { + "type": ["null", "string"] + }, + "phones": { + "items": { + "properties": { + "type": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "sourcedBy": { + "type": ["null", "string"] + }, + "sources": { + "items": { + "type": "string" + }, + "type": "array" + }, + "stage": { + "type": "string" + }, + "stageChanges": { + "items": { + "properties": { + "toStageId": { + "type": "string" + }, + "toStageIndex": { + "type": "number" + }, + "updatedAt": { + "type": "number" + }, + "userId": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "tags": { + "items": { + "type": "string" + }, + "type": "array" + }, + "urls": { + "properties": { + "list": { + "type": "string" + }, + "show": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/panels.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/panels.json new file mode 100644 index 000000000000..40047016a58d --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/panels.json @@ -0,0 +1,103 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "applications": { + "items": [ + { + "type": "string" + } + ], + "type": "array" + }, + "canceledAt": { + "type": "null" + }, + "createdAt": { + "type": "integer" + }, + "end": { + "type": "integer" + }, + "externalUrl": { + "type": "string" + }, + 
"externallyManaged": { + "type": "boolean" + }, + "id": { + "type": "string" + }, + "interviews": { + "items": [ + { + "properties": { + "date": { + "type": "integer" + }, + "duration": { + "type": "integer" + }, + "feedbackReminder": { + "type": "string" + }, + "feedbackTemplate": { + "type": "string" + }, + "id": { + "type": "string" + }, + "interviewers": { + "items": [ + { + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "location": { + "type": "string" + }, + "note": { + "type": "string" + }, + "subject": { + "type": "string" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "note": { + "type": "string" + }, + "stage": { + "type": "string" + }, + "start": { + "type": "integer" + }, + "timezone": { + "type": "string" + }, + "user": { + "type": "string" + }, + "opportunity": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json new file mode 100644 index 000000000000..f9c0052326d3 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json @@ -0,0 +1,165 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "data": { + "items": { + "properties": { + "categories": { + "properties": { + "commitment": { + "type": ["null", "string"] + }, + "department": { + "type": "string" + }, + "location": { + "type": "string" + }, + "team": { + "type": "string" + } + }, + "type": "object" + }, + "confidentiality": { + "type": "string" + }, + "content": { + "properties": { + "closing": { + "type": "string" + }, + "closingHtml": { + "type": "string" + }, + "description": { + "type": "string" + }, + "descriptionHtml": { + "type": "string" + }, + "lists": { + "items": { + "properties": { + "content": { + 
"type": "string" + }, + "text": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "country": { + "type": "string" + }, + "createdAt": { + "type": "number" + }, + "distributionChannels": { + "items": { + "type": "string" + }, + "type": ["array", "null"] + }, + "followers": { + "items": { + "type": "string" + }, + "type": "array" + }, + "hiringManager": { + "type": ["null", "string"] + }, + "id": { + "type": "string" + }, + "owner": { + "type": "string" + }, + "reqCode": { + "type": ["null", "string"] + }, + "requisitionCodes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "salaryDescription": { + "type": "string" + }, + "salaryDescriptionHtml": { + "type": "string" + }, + "salaryRange": { + "properties": { + "currency": { + "type": "string" + }, + "interval": { + "type": "string" + }, + "max": { + "type": "number" + }, + "min": { + "type": "number" + } + }, + "type": "object" + }, + "state": { + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "type": "array" + }, + "text": { + "type": "string" + }, + "updatedAt": { + "type": "number" + }, + "urls": { + "properties": { + "apply": { + "type": "string" + }, + "list": { + "type": "string" + }, + "show": { + "type": "string" + } + }, + "type": ["object", "null"] + }, + "user": { + "type": "string" + }, + "workplaceType": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "hasNext": { + "type": "boolean" + }, + "next": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/requisition_fields.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/requisition_fields.json new file mode 100644 index 000000000000..ac5752baf568 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/requisition_fields.json @@ -0,0 +1,32 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", 
+ "properties": { + "id": { + "type": "string" + }, + "isRequired": { + "type": "boolean" + }, + "options": { + "items": { + "properties": { + "id": { + "type": "string" + }, + "text": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/requisitions.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/requisitions.json new file mode 100644 index 000000000000..e97dd193c851 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/requisitions.json @@ -0,0 +1,198 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "approval": { + "anyOf": [ + { + "type": "null" + }, + { + "properties": { + "accountId": { + "type": "string" + }, + "approvedAt": { + "type": ["null", "number"] + }, + "createdBy": { + "type": "string" + }, + "id": { + "type": "string" + }, + "startedAt": { + "type": "number" + }, + "status": { + "type": ["null", "string"] + }, + "steps": { + "items": { + "properties": { + "approvalsRequired": { + "type": "number" + }, + "approvers": { + "items": { + "properties": { + "approved": { + "type": "boolean" + }, + "approvedAt": { + "type": "number" + }, + "id": { + "type": "string" + }, + "isDynamic": { + "type": "boolean" + }, + "type": { + "type": "string" + }, + "user": { + "anyOf": [ + { + "type": "string" + }, + { + "properties": { + "email": { + "type": "string" + }, + "userId": { + "type": "string" + } + }, + "type": "object" + } + ] + } + }, + "type": "object" + }, + "type": "array" + }, + "completed": { + "type": "boolean" + }, + "conditions": { + "type": "array" + }, + "outOfBandOnly": { + "type": "boolean" + }, + "status": { + "type": ["null", "string"] + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + } + ] + }, + "backfill": { + "type": 
"boolean" + }, + "closedAt": { + "type": ["null", "number"] + }, + "compensationBand": { + "properties": { + "currency": { + "type": "string" + }, + "interval": { + "type": "string" + }, + "max": { + "type": "number" + }, + "min": { + "type": "number" + } + }, + "type": "object" + }, + "confidentiality": { + "type": "string" + }, + "createdAt": { + "type": "number" + }, + "creator": { + "type": ["null", "string"] + }, + "customFields": { + "type": "object" + }, + "department": { + "type": ["null", "string"] + }, + "employmentStatus": { + "type": "string" + }, + "headcountHired": { + "type": "number" + }, + "headcountInfinite": { + "type": "boolean" + }, + "headcountTotal": { + "type": ["number", "string"] + }, + "hiringManager": { + "type": ["null", "string"] + }, + "id": { + "type": "string" + }, + "internalNotes": { + "type": "string" + }, + "location": { + "type": "string" + }, + "name": { + "type": "string" + }, + "offerIds": { + "items": { + "type": "string" + }, + "type": "array" + }, + "owner": { + "type": "string" + }, + "postings": { + "items": { + "type": "string" + }, + "type": "array" + }, + "requisitionCode": { + "type": "string" + }, + "status": { + "type": "string" + }, + "team": { + "type": "string" + }, + "timeToFillEndAt": { + "type": ["null", "number"] + }, + "timeToFillStartAt": { + "type": ["null", "number"] + }, + "updatedAt": { + "type": "number" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/sources.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/sources.json new file mode 100644 index 000000000000..2802939c1f37 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/sources.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "count": { + "type": "number" + }, + "text": { + "type": "string" + } + }, + "type": "object" +} diff --git 
a/airbyte-integrations/connectors/source-lever/source_lever/schemas/stages.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/stages.json new file mode 100644 index 000000000000..98fc6c632f1a --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/stages.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "id": { + "type": "string" + }, + "text": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/tags.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/tags.json new file mode 100644 index 000000000000..2802939c1f37 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/tags.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "count": { + "type": "number" + }, + "text": { + "type": "string" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/users.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/users.json new file mode 100644 index 000000000000..46f5dfd7b6c6 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/users.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "accessRole": { + "type": "string" + }, + "createdAt": { + "type": "number" + }, + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "jobTitle": { + "type": ["null", "string"] + }, + "linkedContactIds": { + "items": { + "type": "string" + }, + "type": ["array", "null"] + }, + "managerId": { + "type": ["null", "string"] + }, + "name": { + "type": "string" + }, + "photo": { + "type": ["null", "string"] + }, + "username": { + "type": "string" + } + }, + "type": "object" +} diff --git 
a/airbyte-integrations/connectors/source-lever/source_lever/source.py b/airbyte-integrations/connectors/source-lever/source_lever/source.py new file mode 100644 index 000000000000..30aeb5ecb0f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/source.py @@ -0,0 +1,267 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.auth import BasicHttpAuthenticator + +def _auth_from_config(config): + try: + if config["api_key"]: + return BasicHttpAuthenticator(username=config["api_key"], password=None, auth_method="Basic") + else: + print("Auth type was not configured properly") + return None + except Exception as e: + print(f"{e.__class__} occurred, there's an issue with credentials in your config") + raise e + + +class SourceLever(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + try: + authenticator = _auth_from_config(config) + _ = authenticator.get_auth_header() + except Exception as e: + return False, str(e) + return True, None + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + authenticator = _auth_from_config(config) + # TODO: use extract_start_date + return [ + Opportunities(authenticator=authenticator), + Offers(authenticator=authenticator), + Feedback(authenticator=authenticator), + Interviews(authenticator=authenticator), + Applications(authenticator=authenticator), + Requisitions(authenticator=authenticator), + Users(authenticator=authenticator), + Stages(authenticator=authenticator), + Postings(authenticator=authenticator), + ArchiveReasons(authenticator=authenticator), + Panels(authenticator=authenticator), + 
Tags(authenticator=authenticator), + Sources(authenticator=authenticator), + RequisitionFields(authenticator=authenticator), + Notes(authenticator=authenticator) + ] + +# Basic full refresh stream +class LeverStream(HttpStream, ABC): + page_size = 100 + stream_params = {} + + API_VERSION = "v1" + base_url = "https://api.lever.co" + + @property + def url_base(self) -> str: + return f"{self.base_url}/{self.API_VERSION}/" + + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + response_data = response.json() + if response_data.get("hasNext"): + return {"offset": response_data["next"]} + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + params = {"limit": self.page_size} + params.update(self.stream_params) + if next_page_token: + params.update(next_page_token) + return params + + def parse_response(self, response: requests.Response, stream_slice:Mapping[str, Any], **kwargs) -> Iterable[Mapping]: + yield from response.json()["data"] + +class Opportunities(LeverStream): + primary_key = "id" + stream_params = {"confidentiality": "all", "expand": "contact"} + # 1797 opportunities + # 1 record read + # stream_params = {"confidentiality": "all", "expand": "contact", "stage_id": "e54475bb-d3ad-43ff-b8b9-76c4fc38e78c" } + + # 8311 opportunities + # stream_params = {"confidentiality": "all", "expand": "contact", "stage_id": "3a255cc8-0732-4bee-92bd-62acfec3572c" } + + + @property + def use_cache(self) -> bool: + return True + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "opportunities" + +class Requisitions(LeverStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + 
return "requisitions" + +class Users(LeverStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "users" + +class Stages(LeverStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "stages" + +class Postings(LeverStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "postings" + +class Tags(LeverStream): + primary_key = "text" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "tags" + +class Sources(LeverStream): + primary_key = "text" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "sources" + +class RequisitionFields(LeverStream): + primary_key = "text" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "requisition_fields" + + +class ArchiveReasons(LeverStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "archive_reasons" + + +# TODO: Basic incremental stream +class IncrementalLeverStream(LeverStream, ABC): + """ + TODO fill in details of this class to implement functionality related to incremental syncs for your connector. + if you do not need to implement incremental sync for any streams, remove this class. 
+ """ + + # TODO: Fill in to checkpoint stream reads after N records. This prevents re-reading of data if the stream fails for any reason. + state_checkpoint_interval = None + + @property + def cursor_field(self) -> str: + """ + TODO + Override to return the cursor field used by this stream e.g: an API entity might always use created_at as the cursor field. This is + usually id or date based. This field's presence tells the framework this in an incremental stream. Required for incremental. + + :return str: The name of the cursor field. + """ + return [] + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + """ + Override to determine the latest state after reading the latest record. This typically compared the cursor_field from the latest record and + the current state and picks the 'most' recent cursor. This is how a stream's state is determined. Required for incremental. + """ + return {} + + +# Basic Sub streams using Opportunity id +class OpportunitySubStream(LeverStream, ABC): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + # def __init__(self, start_date: str, **kwargs): + # super().__init__(**kwargs) + # self._start_date = start_date + + def path(self, stream_slice: Mapping[str, any] = None, **kwargs) -> str: + return f"opportunities/{stream_slice['opportunity_id']}/{self.name}" + + def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + for stream_slice in super().stream_slices(**kwargs): + # opportunities_stream = Opportunities(authenticator=self.authenticator, base_url=self.base_url, start_date=self._start_date) + opportunities_stream = Opportunities(authenticator=self.authenticator) + for opportunity in opportunities_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice): + yield {"opportunity_id": opportunity["id"]} + + + def parse_response(self, response: requests.Response, stream_slice:[Mapping[str, Any]], 
**kwargs) -> Iterable[Mapping]: + records = response.json()["data"] + if not records: + records = [{}] + + for record in records: + record["opportunity"] = stream_slice["opportunity_id"] + yield from records + + +class Offers(OpportunitySubStream): + """ + Offers stream: https://hire.lever.co/developer/documentation#list-all-offers + """ + primary_key = "id" + +class Feedback(OpportunitySubStream): + """ + Feedback stream: https://hire.lever.co/developer/documentation#list-all-feedback + """ + primary_key = "id" + +class Interviews(OpportunitySubStream): + """ + Interviews stream: https://hire.lever.co/developer/documentation#list-all-interviews + """ + primary_key = "id" + +class Applications(OpportunitySubStream): + """ + Applications stream: https://hire.lever.co/developer/documentation#list-all-applications + """ + primary_key = "id" + +class Panels(OpportunitySubStream): + """ + Panels stream: https://hire.lever.co/developer/documentation#list-all-panels + """ + primary_key = "id" + +class Notes(OpportunitySubStream): + """ + Notes stream: https://hire.lever.co/developer/documentation#list-all-notes + """ + primary_key = "id" diff --git a/airbyte-integrations/connectors/source-lever/source_lever/spec.yaml b/airbyte-integrations/connectors/source-lever/source_lever/spec.yaml new file mode 100644 index 000000000000..c25733f64ac7 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/source_lever/spec.yaml @@ -0,0 +1,23 @@ +documentationUrl: https://hire.lever.co/developer/documentation +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Lever connection + type: object + required: + - api_key + - extract_start_date + additionalProperties: true + properties: + api_key: + type: string + order: 0 + description: "The API key for the Lever account." 
+ airbyte_secret: true + extract_start_date: + type: string + order: 1 + title: Extract Start Date + format: date + default: "1990-01-01" + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + description: "The date to start getting data from. " diff --git a/airbyte-integrations/connectors/source-lever/unit_tests/__init__.py b/airbyte-integrations/connectors/source-lever/unit_tests/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-lever/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-lever/unit_tests/test_incremental_streams.py new file mode 100644 index 000000000000..9e40754feb28 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/unit_tests/test_incremental_streams.py @@ -0,0 +1,59 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_lever.source import IncrementalLeverStream + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(IncrementalLeverStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalLeverStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalLeverStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): + stream = IncrementalLeverStream() + # TODO: replace this with your expected cursor field + expected_cursor_field = [] + assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class): + stream = IncrementalLeverStream() + # TODO: replace this with your input parameters + inputs = {"current_stream_state": None, "latest_record": None} + # TODO: replace this with your expected updated stream state + expected_state = {} + assert stream.get_updated_state(**inputs) == expected_state + + +def test_stream_slices(patch_incremental_base_class): + stream = IncrementalLeverStream() + # TODO: replace this with your input parameters + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + # TODO: replace this with your expected stream slices list + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): + mocker.patch.object(IncrementalLeverStream, "cursor_field", "dummy_field") + stream = IncrementalLeverStream() + assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class): + stream = IncrementalLeverStream() + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + stream = IncrementalLeverStream() + # TODO: replace this with your expected checkpoint 
interval + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-lever/unit_tests/test_source.py b/airbyte-integrations/connectors/source-lever/unit_tests/test_source.py new file mode 100644 index 000000000000..dcfba91fb19c --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/unit_tests/test_source.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +from source_lever.source import SourceLever + + +def test_check_connection(mocker): + source = SourceLever() + logger_mock, config_mock = MagicMock(), MagicMock() + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker): + source = SourceLever() + config_mock = MagicMock() + streams = source.streams(config_mock) + # TODO: replace this with your streams number + expected_streams_number = 2 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-lever/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-lever/unit_tests/test_streams.py new file mode 100644 index 000000000000..26eae6cb02b6 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/unit_tests/test_streams.py @@ -0,0 +1,83 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_lever.source import LeverStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(LeverStream, "path", "v0/example_endpoint") + mocker.patch.object(LeverStream, "primary_key", "test_primary_key") + mocker.patch.object(LeverStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = LeverStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request parameters + expected_params = {} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = LeverStream() + # TODO: replace this with your input parameters + inputs = {"response": MagicMock()} + # TODO: replace this with your expected next page token + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class): + stream = LeverStream() + # TODO: replace this with your input parameters + inputs = {"response": MagicMock()} + # TODO: replace this with your expected parced object + expected_parsed_object = {} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(patch_base_class): + stream = LeverStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request headers + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = LeverStream() + # TODO: replace this with your expected http request method + expected_method = "GET" + assert stream.http_method == expected_method + + 
+@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = LeverStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = LeverStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/docs/integrations/sources/lever.md b/docs/integrations/sources/lever.md new file mode 100644 index 000000000000..72cde7350894 --- /dev/null +++ b/docs/integrations/sources/lever.md @@ -0,0 +1,46 @@ +# Lever + +## Sync overview + +The Lever Hiring source supports both Full Refresh and Incremental syncs. You can choose if this connector will copy only the new or updated data, or all rows in the tables and columns you set up for replication, every time a sync is run. + +This source can sync data for the [Lever API](https://hire.lever.co/developer/documentation#introduction). 
+
+### Output schema
+
+This Source is capable of syncing the following core Streams:
+
+- [Applications](https://hire.lever.co/developer/documentation#list-all-applications)
+- [Interviews](https://hire.lever.co/developer/documentation#list-all-interviews)
+- [Notes](https://hire.lever.co/developer/documentation#list-all-notes)
+- [Offers](https://hire.lever.co/developer/documentation#list-all-offers)
+- [Opportunities](https://hire.lever.co/developer/documentation#list-all-opportunities)
+- [Feedback](https://hire.lever.co/developer/documentation#list-all-feedback)
+- [Users](https://hire.lever.co/developer/documentation#list-all-users)
+
+### Features
+
+| Feature | Supported?\(Yes/No\) | Notes |
+| :------------------------ | :------------------- | :---- |
+| Full Refresh Sync | Yes | |
+| Incremental - Append Sync | Yes | |
+| SSL connection | Yes | |
+| Namespaces | No | |
+
+### Performance considerations
+
+The Lever connector should not run into Lever API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully.
+ +## Getting started + +### Requirements + +- Lever Hiring Client Id +- Lever Hiring Client Secret +- Lever Hiring Refresh Token + +## Changelog + +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :------------------------------------------------------- | :---------------------- | +| 0.2.0 | 2023-05-25 | [26564](https://github.com/airbytehq/airbyte/pull/26564) | Migrate to advancedAuth | From dd00e530d10e0de587bb83f23429a34d82d174a0 Mon Sep 17 00:00:00 2001 From: Marx Date: Wed, 11 Oct 2023 16:31:45 +0800 Subject: [PATCH 2/7] Fixed postings schema --- .../source_lever/schemas/postings.json | 277 +++++++++--------- 1 file changed, 131 insertions(+), 146 deletions(-) diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json index f9c0052326d3..cf659206bf19 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/postings.json @@ -1,165 +1,150 @@ { "$schema": "http://json-schema.org/draft-07/schema#", "properties": { - "data": { - "items": { - "properties": { - "categories": { - "properties": { - "commitment": { - "type": ["null", "string"] - }, - "department": { - "type": "string" - }, - "location": { - "type": "string" - }, - "team": { - "type": "string" - } - }, - "type": "object" - }, - "confidentiality": { - "type": "string" - }, - "content": { - "properties": { - "closing": { - "type": "string" - }, - "closingHtml": { - "type": "string" - }, - "description": { - "type": "string" - }, - "descriptionHtml": { - "type": "string" - }, - "lists": { - "items": { - "properties": { - "content": { - "type": "string" - }, - "text": { - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - } - }, - "type": "object" - }, - "country": { - "type": "string" - }, - "createdAt": { - "type": "number" - }, - 
"distributionChannels": { - "items": { - "type": "string" - }, - "type": ["array", "null"] - }, - "followers": { - "items": { - "type": "string" - }, - "type": "array" - }, - "hiringManager": { - "type": ["null", "string"] - }, - "id": { - "type": "string" - }, - "owner": { - "type": "string" - }, - "reqCode": { - "type": ["null", "string"] - }, - "requisitionCodes": { - "items": { - "type": "string" - }, - "type": "array" - }, - "salaryDescription": { - "type": "string" - }, - "salaryDescriptionHtml": { - "type": "string" - }, - "salaryRange": { + "categories": { + "properties": { + "commitment": { + "type": ["null", "string"] + }, + "department": { + "type": "string" + }, + "location": { + "type": "string" + }, + "team": { + "type": "string" + } + }, + "type": "object" + }, + "confidentiality": { + "type": "string" + }, + "content": { + "properties": { + "closing": { + "type": "string" + }, + "closingHtml": { + "type": "string" + }, + "description": { + "type": "string" + }, + "descriptionHtml": { + "type": "string" + }, + "lists": { + "items": { "properties": { - "currency": { + "content": { "type": "string" }, - "interval": { + "text": { "type": "string" - }, - "max": { - "type": "number" - }, - "min": { - "type": "number" } }, "type": "object" }, - "state": { - "type": "string" - }, - "tags": { - "items": { - "type": "string" - }, - "type": "array" - }, - "text": { - "type": "string" - }, - "updatedAt": { - "type": "number" - }, - "urls": { - "properties": { - "apply": { - "type": "string" - }, - "list": { - "type": "string" - }, - "show": { - "type": "string" - } - }, - "type": ["object", "null"] - }, - "user": { - "type": "string" - }, - "workplaceType": { - "type": "string" - } + "type": "array" + } + }, + "type": "object" + }, + "country": { + "type": "string" + }, + "createdAt": { + "type": "number" + }, + "distributionChannels": { + "items": { + "type": "string" + }, + "type": ["array", "null"] + }, + "followers": { + "items": { + "type": "string" + }, + 
"type": "array" + }, + "hiringManager": { + "type": ["null", "string"] + }, + "id": { + "type": "string" + }, + "owner": { + "type": "string" + }, + "reqCode": { + "type": ["null", "string"] + }, + "requisitionCodes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "salaryDescription": { + "type": "string" + }, + "salaryDescriptionHtml": { + "type": "string" + }, + "salaryRange": { + "properties": { + "currency": { + "type": "string" + }, + "interval": { + "type": "string" + }, + "max": { + "type": "number" }, - "type": "object" + "min": { + "type": "number" + } + }, + "type": "object" + }, + "state": { + "type": "string" + }, + "tags": { + "items": { + "type": "string" }, "type": "array" }, - "hasNext": { - "type": "boolean" + "text": { + "type": "string" + }, + "updatedAt": { + "type": "number" + }, + "urls": { + "properties": { + "apply": { + "type": "string" + }, + "list": { + "type": "string" + }, + "show": { + "type": "string" + } + }, + "type": ["object", "null"] + }, + "user": { + "type": "string" }, - "next": { + "workplaceType": { "type": "string" } - }, - "type": "object" + } } From ff2bc87ce522a0e90f67606087ce07340f9b267c Mon Sep 17 00:00:00 2001 From: Marx Date: Mon, 16 Oct 2023 14:20:34 +0800 Subject: [PATCH 3/7] Update offers schema to include approved data --- .../source_lever/schemas/offers.json | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json index 32070c499ef9..35df9e329fab 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/offers.json @@ -38,6 +38,43 @@ }, "opportunity": { "type": "string" + }, + "signedDocument": { + "type": ["object", "null"], + "properties": { + "fileName": { + "type": "string" + }, + "uploadedAt": { + "type": "number" + }, 
+ "downloadUrl": { + "type": "string" + } + } + }, + "sentDocument": { + "type": ["object", "null"], + "properties": { + "fileName": { + "type": "string" + }, + "uploadedAt": { + "type": "number" + }, + "downloadUrl": { + "type": "string" + } + } + }, + "sentAt": { + "type": ["number", "null"] + }, + "approved": { + "type": ["boolean", "null"] + }, + "approvedAt": { + "type": ["number", "null"] } }, "type": "object" From bbfce94664979e95bc146a7abfc0c2c5d3cfafe6 Mon Sep 17 00:00:00 2001 From: Marx Date: Mon, 16 Oct 2023 14:46:30 +0800 Subject: [PATCH 4/7] Remove applications sub stream & Expand applications in Opportunities --- .../integration_tests/configured_catalog.json | 11 -- .../source_lever/schemas/applications.json | 163 ------------------ .../source_lever/schemas/opportunities.json | 161 ++++++++++++++++- .../source-lever/source_lever/source.py | 15 +- 4 files changed, 161 insertions(+), 189 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json index 79d73c4599fe..605a64476c47 100644 --- a/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json @@ -44,17 +44,6 @@ "sync_mode": "full_refresh", "destination_sync_mode": "append" }, - { - "stream": { - "name": "applications", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": true, - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append" - }, { "stream": { "name": "requisitions", diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json 
b/airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json deleted file mode 100644 index 533f05b4ef33..000000000000 --- a/airbyte-integrations/connectors/source-lever/source_lever/schemas/applications.json +++ /dev/null @@ -1,163 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "archived": { - "properties": { - "archivedAt": { - "type": "number" - }, - "reason": { - "type": "string" - } - }, - "type": "object" - }, - "candidateId": { - "type": "string" - }, - "comments": { - "type": ["null", "string"] - }, - "company": { - "type": ["null", "string"] - }, - "createdAt": { - "type": "number" - }, - "customQuestions": { - "items": { - "properties": { - "baseTemplateId": { - "type": "string" - }, - "completedAt": { - "type": "number" - }, - "createdAt": { - "type": "number" - }, - "fields": { - "items": { - "properties": { - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "options": { - "items": { - "properties": { - "text": { - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "prompt": { - "type": "string" - }, - "required": { - "type": "boolean" - }, - "text": { - "type": "string" - }, - "type": { - "type": "string" - }, - "value": { - "anyOf": [ - { - "type": "string" - }, - { - "items": { - "type": "string" - }, - "type": "array" - } - ] - } - }, - "type": "object" - }, - "type": "array" - }, - "id": { - "type": "string" - }, - "instructions": { - "type": "string" - }, - "text": { - "type": "string" - }, - "type": { - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "email": { - "type": ["null", "string"] - }, - "id": { - "type": "string" - }, - "links": { - "type": "array" - }, - "name": { - "type": ["null", "string"] - }, - "opportunityId": { - "type": "string" - }, - "phone": { - "properties": { - "value": { - "type": "string" - } - }, - "type": ["object", "null"] - }, - "posting": { - "type": "string" - }, - 
"postingHiringManager": { - "type": ["null", "string"] - }, - "postingOwner": { - "type": ["null", "string"] - }, - "requisitionForHire": { - "properties": { - "hiringManagerOnHire": { - "type": "string" - }, - "id": { - "type": "string" - }, - "requisitionCode": { - "type": "string" - } - }, - "type": ["object", "null"] - }, - "type": { - "type": "string" - }, - "user": { - "type": ["null", "string"] - }, - "opportunity": { - "type": "string" - } - }, - "type": "object" -} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json b/airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json index af22fcfd99df..adab30c74700 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json +++ b/airbyte-integrations/connectors/source-lever/source_lever/schemas/opportunities.json @@ -3,7 +3,166 @@ "properties": { "applications": { "items": { - "type": "string" + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "candidateId": { + "type": "string" + }, + "opportunityId": { + "type": "string" + }, + "posting": { + "type": "string" + }, + "postingHiringManager": { + "type": ["null", "string"] + }, + "postingOwner": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "phone": { + "properties": { + "type": { + "type": ["null", "string"] + }, + "value": { + "type": "string" + } + }, + "type": ["object", "null"] + }, + "email": { + "type": ["null", "string"] + }, + "links": { + "type": "array" + }, + "comments": { + "type": ["null", "string"] + }, + "user": { + "type": ["null", "string"] + }, + "customQuestions": { + "items": { + "properties": { + "baseTemplateId": { + "type": "string" + }, + "completedAt": { + "type": "number" + }, + "createdAt": { + "type": "number" + }, + "fields": { + "items": { + "properties": { + "description": { + "type": "string" + }, + 
"id": { + "type": "string" + }, + "options": { + "items": { + "properties": { + "text": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "prompt": { + "type": "string" + }, + "required": { + "type": "boolean" + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + }, + "value": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + } + }, + "type": "object" + }, + "type": "array" + }, + "id": { + "type": "string" + }, + "instructions": { + "type": "string" + }, + "text": { + "type": "string" + }, + "type": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "createdAt": { + "type": "number" + }, + "archived": { + "properties": { + "archivedAt": { + "type": "number" + }, + "reason": { + "type": "string" + } + }, + "type": "object" + }, + "requisitionForHire": { + "properties": { + "hiringManagerOnHire": { + "type": "string" + }, + "id": { + "type": "string" + }, + "requisitionCode": { + "type": "string" + } + }, + "type": ["object", "null"] + } + }, + "type": "object" }, "type": "array" }, diff --git a/airbyte-integrations/connectors/source-lever/source_lever/source.py b/airbyte-integrations/connectors/source-lever/source_lever/source.py index 30aeb5ecb0f6..e690fe9e1e03 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/source.py +++ b/airbyte-integrations/connectors/source-lever/source_lever/source.py @@ -42,7 +42,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Offers(authenticator=authenticator), Feedback(authenticator=authenticator), Interviews(authenticator=authenticator), - Applications(authenticator=authenticator), Requisitions(authenticator=authenticator), Users(authenticator=authenticator), Stages(authenticator=authenticator), @@ -87,13 +86,7 @@ def parse_response(self, response: requests.Response, stream_slice:Mapping[str, class Opportunities(LeverStream): primary_key = "id" - 
stream_params = {"confidentiality": "all", "expand": "contact"} - # 1797 opportunities - # 1 record read - # stream_params = {"confidentiality": "all", "expand": "contact", "stage_id": "e54475bb-d3ad-43ff-b8b9-76c4fc38e78c" } - - # 8311 opportunities - # stream_params = {"confidentiality": "all", "expand": "contact", "stage_id": "3a255cc8-0732-4bee-92bd-62acfec3572c" } + stream_params = {"confidentiality": "all", "expand": "contact", "expand": "applications"} @property @@ -248,12 +241,6 @@ class Interviews(OpportunitySubStream): """ primary_key = "id" -class Applications(OpportunitySubStream): - """ - Applications stream: https://hire.lever.co/developer/documentation#list-all-applications - """ - primary_key = "id" - class Panels(OpportunitySubStream): """ Panels stream: https://hire.lever.co/developer/documentation#list-all-panels From 3b7bfbee7da5e759e3d833ef463838052c7daf9c Mon Sep 17 00:00:00 2001 From: Marx Date: Thu, 19 Oct 2023 21:16:01 +0800 Subject: [PATCH 5/7] Complete incremental stream --- .../integration_tests/configured_catalog.json | 24 +-- .../integration_tests/input_state.json | 5 + .../source-lever/source_lever/source.py | 196 +++++++++++++----- .../source-lever/source_lever/spec.yaml | 9 - .../source-lever/unit_tests/test_source.py | 3 +- 5 files changed, 159 insertions(+), 78 deletions(-) create mode 100644 airbyte-integrations/connectors/source-lever/integration_tests/input_state.json diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json index 605a64476c47..712605d75ec3 100644 --- a/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-lever/integration_tests/configured_catalog.json @@ -4,44 +4,44 @@ "stream": { "name": "opportunities", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], + "supported_sync_modes": 
["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]] }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "destination_sync_mode": "append" }, { "stream": { "name": "offers", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]] }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "destination_sync_mode": "append" }, { "stream": { "name": "feedback", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]] }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "destination_sync_mode": "append" }, { "stream": { "name": "interviews", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]] }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "destination_sync_mode": "append" }, { @@ -103,11 +103,11 @@ "stream": { "name": "panels", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]] }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "destination_sync_mode": "append" }, { @@ -147,11 +147,11 @@ "stream": { "name": "notes", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]] }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "destination_sync_mode": "append" } ] diff --git a/airbyte-integrations/connectors/source-lever/integration_tests/input_state.json 
b/airbyte-integrations/connectors/source-lever/integration_tests/input_state.json new file mode 100644 index 000000000000..717fa5034116 --- /dev/null +++ b/airbyte-integrations/connectors/source-lever/integration_tests/input_state.json @@ -0,0 +1,5 @@ +{ + "opportunities": { + "updatedAt": 631152000000 + } +} diff --git a/airbyte-integrations/connectors/source-lever/source_lever/source.py b/airbyte-integrations/connectors/source-lever/source_lever/source.py index e690fe9e1e03..20a8b9a650a5 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/source.py +++ b/airbyte-integrations/connectors/source-lever/source_lever/source.py @@ -6,10 +6,11 @@ from abc import ABC from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +import pendulum import requests from airbyte_cdk.models import SyncMode from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams import Stream, IncrementalMixin from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.http.auth import BasicHttpAuthenticator @@ -36,25 +37,31 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: def streams(self, config: Mapping[str, Any]) -> List[Stream]: authenticator = _auth_from_config(config) - # TODO: use extract_start_date + + incremental_stream_params = { + "authenticator":authenticator, + "start_date": '1990-01-01' + } + + opportunity_steam = Opportunities(**incremental_stream_params) + return [ - Opportunities(authenticator=authenticator), - Offers(authenticator=authenticator), - Feedback(authenticator=authenticator), - Interviews(authenticator=authenticator), + opportunity_steam, + Offers(**incremental_stream_params, parent_stream=opportunity_steam), + Feedback(**incremental_stream_params, parent_stream=opportunity_steam), + Interviews(**incremental_stream_params, parent_stream=opportunity_steam), + Notes(**incremental_stream_params, 
parent_stream=opportunity_steam), + Panels(**incremental_stream_params, parent_stream=opportunity_steam), Requisitions(authenticator=authenticator), Users(authenticator=authenticator), Stages(authenticator=authenticator), Postings(authenticator=authenticator), ArchiveReasons(authenticator=authenticator), - Panels(authenticator=authenticator), Tags(authenticator=authenticator), Sources(authenticator=authenticator), RequisitionFields(authenticator=authenticator), - Notes(authenticator=authenticator) ] -# Basic full refresh stream class LeverStream(HttpStream, ABC): page_size = 100 stream_params = {} @@ -73,7 +80,7 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, return {"offset": response_data["next"]} def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> MutableMapping[str, Any]: params = {"limit": self.page_size} params.update(self.stream_params) @@ -84,14 +91,97 @@ def request_params( def parse_response(self, response: requests.Response, stream_slice:Mapping[str, Any], **kwargs) -> Iterable[Mapping]: yield from response.json()["data"] -class Opportunities(LeverStream): - primary_key = "id" - stream_params = {"confidentiality": "all", "expand": "contact", "expand": "applications"} +""" +TODO: Temp workaround. + +For incremental substreams to work, make sure the connector is set up with the state: +[ + { + "streamDescriptor": { + "name": "opportunities" + }, + "streamState": { + "updatedAt": 631152000000 + } + } +] + +There is an issue where an empty state {} will cause the parent Opportunity stream to update state wrongly. 
+The state after the first run of an incremental stream will become: +[ + { + "streamDescriptor": { + "name": "opportunities" + }, + "streamState": { + "updatedAt": "None" + } + } +] + +This breaks the implementation we have below since we **reasonably** assume `updatedAt: int`. By setting the initial state +to a valid `updatedAt: number` value, we can work around this issue and subsequent updates will work correctly using the Opportunity's latest record +""" +# +# +class IncrementalLeverStream(LeverStream, IncrementalMixin): + state_checkpoint_interval = 100 + cursor_field = "updatedAt" + + def __init__(self, start_date: str, **kwargs): + super().__init__(**kwargs) + self._start_timestamp = int(pendulum.parse(start_date).timestamp()) * 1000 + self._cursor_value = None + self._initial_cursor_value = None - @property - def use_cache(self) -> bool: - return True + def state(self) -> Mapping[str, Any]: + return {self.cursor_field: str(self._cursor_value)} + + @state.setter + def state(self, value: Mapping[str, Any]): + if value[self.cursor_field] and value[self.cursor_field] != 'None' : + self._cursor_value = value[self.cursor_field] + if not self._initial_cursor_value: + self._initial_cursor_value = value[self.cursor_field] + else: + self._cursor_value = self._start_timestamp + self._initial_cursor_value = self._start_timestamp + + """ + We use this function so sub streams can reference the inital state the parent stream started with e.g: + - initial cursor value = 2023-01-01 + - after stream completes, cursor value = now() + substream run should use initial cursor value(2023-01-01). 
Using now() will give us too little records + """ + def initial_state(self) -> Mapping[str, Any]: + return {self.cursor_field: self._initial_cursor_value} + + def read_records(self, *args, **kwargs) -> Iterable[Mapping[str, Any]]: + for record in super().read_records(*args, **kwargs): + if self._cursor_value: + latest_record_date = record[self.cursor_field] + self._cursor_value = max(int(self._cursor_value), int(latest_record_date)) + yield record + + + def request_params(self, stream_state: Mapping[str, Any] = None, **kwargs): + stream_state = stream_state or {} + params = super().request_params(stream_state=stream_state, **kwargs) + stream_state_timestamp = stream_state.get(self.cursor_field, 0) or 0 + params["updated_at_start"] = max(int(stream_state_timestamp), self._start_timestamp) + print("> Request params:", params) + + return params + +class Opportunities(IncrementalLeverStream): + """ + Opportunities stream: https://hire.lever.co/developer/documentation#opportunities + """ + + use_cache = True + primary_key = "id" + stream_params = {"confidentiality": "all", "expand": ["contact", "applications"], "stage_id": "offer"} def path( self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None @@ -99,6 +189,7 @@ def path( return "opportunities" class Requisitions(LeverStream): + # https://hire.lever.co/developer/documentation#requisitions primary_key = "id" def path( @@ -107,6 +198,9 @@ def path( return "requisitions" class Users(LeverStream): + """ + Users stream: https://hire.lever.co/developer/documentation#users + """ primary_key = "id" def path( @@ -115,6 +209,9 @@ def path( return "users" class Stages(LeverStream): + """ + Stages stream: https://hire.lever.co/developer/documentation#stages + """ primary_key = "id" def path( @@ -123,6 +220,9 @@ def path( return "stages" class Postings(LeverStream): + """ + Postings stream: https://hire.lever.co/developer/documentation#postings + """ 
primary_key = "id" def path( @@ -131,6 +231,9 @@ def path( return "postings" class Tags(LeverStream): + """ + Tags stream: https://hire.lever.co/developer/documentation#tags + """ primary_key = "text" def path( @@ -139,6 +242,9 @@ def path( return "tags" class Sources(LeverStream): + """ + Sources stream: https://hire.lever.co/developer/documentation#sources + """ primary_key = "text" def path( @@ -147,6 +253,9 @@ def path( return "sources" class RequisitionFields(LeverStream): + """ + Requisiton fields stream: https://hire.lever.co/developer/documentation#requisition-fields + """ primary_key = "text" def path( @@ -154,8 +263,10 @@ def path( ) -> str: return "requisition_fields" - class ArchiveReasons(LeverStream): + """ + Archive Reasons stream: https://hire.lever.co/developer/documentation#archive-reasons + """ primary_key = "id" def path( @@ -164,60 +275,35 @@ def path( return "archive_reasons" -# TODO: Basic incremental stream -class IncrementalLeverStream(LeverStream, ABC): - """ - TODO fill in details of this class to implement functionality related to incremental syncs for your connector. - if you do not need to implement incremental sync for any streams, remove this class. - """ - - # TODO: Fill in to checkpoint stream reads after N records. This prevents re-reading of data if the stream fails for any reason. - state_checkpoint_interval = None - - @property - def cursor_field(self) -> str: - """ - TODO - Override to return the cursor field used by this stream e.g: an API entity might always use created_at as the cursor field. This is - usually id or date based. This field's presence tells the framework this in an incremental stream. Required for incremental. - - :return str: The name of the cursor field. - """ - return [] - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Override to determine the latest state after reading the latest record. 
This typically compared the cursor_field from the latest record and - the current state and picks the 'most' recent cursor. This is how a stream's state is determined. Required for incremental. - """ - return {} - - # Basic Sub streams using Opportunity id class OpportunitySubStream(LeverStream, ABC): - def __init__(self, **kwargs): + + def __init__(self, start_date:str, parent_stream:IncrementalLeverStream, **kwargs): super().__init__(**kwargs) - - # def __init__(self, start_date: str, **kwargs): - # super().__init__(**kwargs) - # self._start_date = start_date + self._start_date = start_date + self.parent_stream = parent_stream + + - def path(self, stream_slice: Mapping[str, any] = None, **kwargs) -> str: + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: return f"opportunities/{stream_slice['opportunity_id']}/{self.name}" def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: for stream_slice in super().stream_slices(**kwargs): - # opportunities_stream = Opportunities(authenticator=self.authenticator, base_url=self.base_url, start_date=self._start_date) - opportunities_stream = Opportunities(authenticator=self.authenticator) - for opportunity in opportunities_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice): + for opportunity in self.parent_stream.read_records(stream_state=self.parent_stream.initial_state(), stream_slice=stream_slice, sync_mode=SyncMode.incremental): yield {"opportunity_id": opportunity["id"]} def parse_response(self, response: requests.Response, stream_slice:[Mapping[str, Any]], **kwargs) -> Iterable[Mapping]: records = response.json()["data"] + + # https://airbytehq.slack.com/archives/C027KKE4BCZ/p1696509193002769 + # Fixes the issue where an empty array returned was not refreshing our heartbeat and causing a timeout issue. 
+ # if records = [], we will add an empty object in the array if not records: records = [{}] + # Adds the parent stream's ID in each substream record for record in records: record["opportunity"] = stream_slice["opportunity_id"] yield from records diff --git a/airbyte-integrations/connectors/source-lever/source_lever/spec.yaml b/airbyte-integrations/connectors/source-lever/source_lever/spec.yaml index c25733f64ac7..f61fbf2ed9fa 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/spec.yaml +++ b/airbyte-integrations/connectors/source-lever/source_lever/spec.yaml @@ -5,7 +5,6 @@ connectionSpecification: type: object required: - api_key - - extract_start_date additionalProperties: true properties: api_key: @@ -13,11 +12,3 @@ connectionSpecification: order: 0 description: "The API key for the Lever account." airbyte_secret: true - extract_start_date: - type: string - order: 1 - title: Extract Start Date - format: date - default: "1990-01-01" - pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ - description: "The date to start getting data from. 
" diff --git a/airbyte-integrations/connectors/source-lever/unit_tests/test_source.py b/airbyte-integrations/connectors/source-lever/unit_tests/test_source.py index dcfba91fb19c..4a3184a322a1 100644 --- a/airbyte-integrations/connectors/source-lever/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-lever/unit_tests/test_source.py @@ -17,6 +17,5 @@ def test_streams(mocker): source = SourceLever() config_mock = MagicMock() streams = source.streams(config_mock) - # TODO: replace this with your streams number - expected_streams_number = 2 + expected_streams_number = 14 assert len(streams) == expected_streams_number From 407da6c42b52cbd9dd5175acf0c28fecc3421897 Mon Sep 17 00:00:00 2001 From: Marx Date: Thu, 19 Oct 2023 21:17:34 +0800 Subject: [PATCH 6/7] Remove offer stage_id param used for testing --- .../connectors/source-lever/source_lever/source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-lever/source_lever/source.py b/airbyte-integrations/connectors/source-lever/source_lever/source.py index 20a8b9a650a5..2a96bb4b9ed7 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/source.py +++ b/airbyte-integrations/connectors/source-lever/source_lever/source.py @@ -181,7 +181,7 @@ class Opportunities(IncrementalLeverStream): use_cache = True primary_key = "id" - stream_params = {"confidentiality": "all", "expand": ["contact", "applications"], "stage_id": "offer"} + stream_params = {"confidentiality": "all", "expand": ["contact", "applications"]} def path( self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None From a08e00c536237680509caf691967970af3684254 Mon Sep 17 00:00:00 2001 From: Marx Date: Thu, 19 Oct 2023 23:37:28 +0800 Subject: [PATCH 7/7] Fix issue where cursor_field can be null --- .../connectors/source-lever/source_lever/source.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/airbyte-integrations/connectors/source-lever/source_lever/source.py b/airbyte-integrations/connectors/source-lever/source_lever/source.py index 2a96bb4b9ed7..bf575acd0a82 100644 --- a/airbyte-integrations/connectors/source-lever/source_lever/source.py +++ b/airbyte-integrations/connectors/source-lever/source_lever/source.py @@ -159,8 +159,8 @@ def initial_state(self) -> Mapping[str, Any]: def read_records(self, *args, **kwargs) -> Iterable[Mapping[str, Any]]: for record in super().read_records(*args, **kwargs): - if self._cursor_value: - latest_record_date = record[self.cursor_field] + latest_record_date = record[self.cursor_field] + if self._cursor_value and record[self.cursor_field]: self._cursor_value = max(int(self._cursor_value), int(latest_record_date)) yield record