Standard Tests: gradle command for running tests #2913

Merged
2 changes: 1 addition & 1 deletion airbyte-integrations/bases/standard-test/Dockerfile
@@ -10,4 +10,4 @@ RUN pip install .
LABEL io.airbyte.version=0.1.0
LABEL io.airbyte.name=airbyte/standard-test

ENTRYPOINT ["python", "-m", "pytest", "standard_test/tests", "-rsx", "-vvv"]
ENTRYPOINT ["python", "-m", "pytest", "standard_test/tests", "-rsxv"]
10 changes: 7 additions & 3 deletions airbyte-integrations/bases/standard-test/standard_test/config.py
@@ -22,13 +22,15 @@
SOFTWARE.
"""

from typing import List, Optional, Mapping
from typing import List, Mapping, Optional

from pydantic import BaseModel, Field

config_path: str = Field(default="secrets/config.json", description="Path to a JSON object representing a valid connector configuration")
invalid_config_path: str = Field(description="Path to a JSON object representing an invalid connector configuration")
spec_path: str = Field(default="secrets/spec.json", description="Path to a JSON object representing the spec expected to be output by this connector")
spec_path: str = Field(
default="secrets/spec.json", description="Path to a JSON object representing the spec expected to be output by this connector"
)
configured_catalog_path: str = Field(default="sample_files/configured_catalog.json", description="Path to configured catalog")


@@ -65,7 +67,9 @@ class FullRefreshConfig(BaseConfig):
class IncrementalConfig(BaseConfig):
config_path: str = config_path
configured_catalog_path: str = configured_catalog_path
cursor_paths: Optional[Mapping[str, List[str]]] = Field(description="For each stream, the path of its cursor field in the output state messages.")
cursor_paths: Optional[Mapping[str, List[str]]] = Field(
description="For each stream, the path of its cursor field in the output state messages."
)
state_path: Optional[str] = Field(description="Path to state file")
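
These module-level Field(...) objects are shared as defaults across the per-test config models. A minimal sketch of constructing one of them directly, assuming BaseConfig behaves like a plain pydantic (v1) BaseModel; the values mirror the HubSpot test configuration added later in this PR:

# Sketch only: exercises the IncrementalConfig model defined above, assuming
# BaseConfig adds no extra required fields.
from standard_test.config import IncrementalConfig

incremental = IncrementalConfig(
    config_path="secrets/config.json",
    configured_catalog_path="sample_files/configured_catalog.json",
    cursor_paths={"email_events": ["timestamp"]},
    state_path="sample_files/abnormal_state.json",
)
print(incremental.cursor_paths)  # {'email_events': ['timestamp']}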


@@ -28,9 +28,7 @@
from typing import Optional

import pytest
from airbyte_protocol import (AirbyteCatalog, ConfiguredAirbyteCatalog,
ConnectorSpecification)

from airbyte_protocol import AirbyteCatalog, ConfiguredAirbyteCatalog, ConnectorSpecification
from standard_test.connector_runner import ConnectorRunner
from standard_test.utils import load_config

@@ -22,10 +22,10 @@
SOFTWARE.
"""

from functools import reduce
from typing import List

import pendulum
from functools import reduce


class JsonSchemaHelper:
@@ -34,21 +34,21 @@ def __init__(self, schema):

def get_ref(self, path):
node = self._schema
for segment in path.split('/')[1:]:
for segment in path.split("/")[1:]:
node = node[segment]
return node

def get_property(self, path: List[str]):
node = self._schema
for segment in path:
if '$ref' in node:
node = self.get_ref(node['$ref'])
node = node['properties'][segment]
if "$ref" in node:
node = self.get_ref(node["$ref"])
node = node["properties"][segment]
return node

def get_type_for_key_path(self, path: List[str]):
try:
return self.get_property(path)['type']
return self.get_property(path)["type"]
except KeyError:
return None

@@ -23,7 +23,6 @@
"""

import pytest

from standard_test.utils import load_config


@@ -27,7 +27,6 @@
import pytest
from airbyte_protocol import ConnectorSpecification, Status, Type
from docker.errors import ContainerError

from standard_test.base import BaseTest
from standard_test.connector_runner import ConnectorRunner

@@ -26,7 +26,6 @@

import pytest
from airbyte_protocol import Type

from standard_test.base import BaseTest
from standard_test.connector_runner import ConnectorRunner
from standard_test.utils import full_refresh_only_catalog
@@ -23,16 +23,14 @@
"""

import json

from pathlib import Path

import pytest
from airbyte_protocol import Type, ConfiguredAirbyteCatalog

from airbyte_protocol import ConfiguredAirbyteCatalog, Type
from standard_test import BaseTest
from standard_test.connector_runner import ConnectorRunner
from standard_test.utils import filter_output, incremental_only_catalog
from standard_test.json_schema_helper import JsonSchemaHelper
from standard_test.utils import filter_output, incremental_only_catalog


@pytest.fixture(name="future_state_path")
@@ -85,10 +83,7 @@ def test_read(self, connector_config, configured_catalog_for_incremental, docker

def test_two_sequential_reads(self, connector_config, configured_catalog_for_incremental, cursor_paths, docker_runner: ConnectorRunner):
output = docker_runner.call_read(connector_config, configured_catalog_for_incremental)
stream_mapping = {
stream.stream.name: stream
for stream in configured_catalog_for_incremental.streams
}
stream_mapping = {stream.stream.name: stream for stream in configured_catalog_for_incremental.streams}

records_1 = filter_output(output, type_=Type.RECORD)
states_1 = filter_output(output, type_=Type.STATE)
@@ -99,7 +94,9 @@ def test_two_sequential_reads(self, connector_config, configured_catalog_for_inc
helper = JsonSchemaHelper(schema=stream.stream.json_schema)
record_value = helper.get_cursor_value(record=record.record.data, cursor_path=stream.cursor_field)
state_value = helper.get_state_value(state=latest_state[stream_name], cursor_path=cursor_paths[stream_name])
assert record_value <= state_value, "First incremental sync should produce records younger or equal to cursor value from the state"
assert (
record_value <= state_value
), "First incremental sync should produce records younger or equal to cursor value from the state"

output = docker_runner.call_read_with_state(connector_config, configured_catalog_for_incremental, state=latest_state)
records_2 = filter_output(output, type_=Type.RECORD)
@@ -110,7 +107,9 @@ def test_two_sequential_reads(self, connector_config, configured_catalog_for_inc
helper = JsonSchemaHelper(schema=stream.stream.json_schema)
record_value = helper.get_cursor_value(record=record.record.data, cursor_path=stream.cursor_field)
state_value = helper.get_state_value(state=latest_state[stream_name], cursor_path=cursor_paths[stream_name])
assert record_value >= state_value, "Second incremental sync should produce records older or equal to cursor value from the state"
assert (
record_value >= state_value
), "Second incremental sync should produce records older or equal to cursor value from the state"

def test_state_with_abnormally_large_values(self, connector_config, configured_catalog, future_state, docker_runner: ConnectorRunner):
configured_catalog = incremental_only_catalog(configured_catalog)
@@ -34,7 +34,6 @@
from yaml import Loader

from airbyte_protocol import AirbyteMessage, ConfiguredAirbyteCatalog, SyncMode

from standard_test.config import Config


2 changes: 1 addition & 1 deletion airbyte-integrations/connectors/source-hubspot/Dockerfile
@@ -10,7 +10,7 @@ ENV AIRBYTE_IMPL_PATH="SourceHubspot"
WORKDIR /airbyte/integration_code
COPY $CODE_PATH ./$CODE_PATH
COPY setup.py ./
RUN pip install ".[main]"
RUN pip install .

LABEL io.airbyte.version=0.1.1
LABEL io.airbyte.name=airbyte/source-hubspot
20 changes: 1 addition & 19 deletions airbyte-integrations/connectors/source-hubspot/build.gradle
@@ -1,32 +1,14 @@
plugins {
id 'airbyte-python'
id 'airbyte-docker'
id 'airbyte-standard-source-test-file'
id 'airbyte-standard-test'
}

airbytePython {
moduleDirectory 'source_hubspot'
}

airbyteStandardSourceTestFile {
// For more information on standard source tests, see https://docs.airbyte.io/contributing-to-airbyte/building-new-connector/testing-connectors

// All these input paths must live inside this connector's directory (or subdirectories)
// TODO update the spec JSON file
specPath = "source_hubspot/spec.json"

// configPath points to a config file which matches the spec.json supplied above. secrets/ is gitignored by default, so place your config file
// there (in case it contains any credentials)
// TODO update the config file to contain actual credentials
configPath = "secrets/config.json"
// TODO update the sample configured_catalog JSON for use in testing
// Note: If your source supports incremental syncing, then make sure that the catalog that is returned in the get_catalog method is configured
// for incremental syncing (e.g. include cursor fields, etc).
configuredCatalogPath = "sample_files/configured_catalog.json"
}

dependencies {
implementation files(project(':airbyte-integrations:bases:base-standard-source-test-file').airbyteDocker.outputs)
implementation files(project(':airbyte-integrations:bases:base-python').airbyteDocker.outputs)
}

4 changes: 2 additions & 2 deletions airbyte-integrations/connectors/source-hubspot/setup.py
@@ -33,8 +33,8 @@
]

TEST_REQUIREMENTS = [
"pytest",
"requests_mock==1.8.0",
"pytest==6.1.2",
"requests_mock==1.8.0"
]

setup(
@@ -0,0 +1,23 @@
connector_image: airbyte/source-hubspot:dev
tests:
spec:
- spec_path: "source_hubspot/spec.json"
connection:
- config_path: "secrets/config.json"
invalid_config_path: "sample_files/invalid_config.json"
discovery:
- config_path: "secrets/config.json"
basic_read:
- config_path: "secrets/config.json"
configured_catalog_path: "sample_files/configured_catalog.json"
validate_output_from_all_streams: yes
incremental:
- config_path: "secrets/config.json"
configured_catalog_path: "sample_files/configured_catalog.json"
state_path: "sample_files/abnormal_state.json"
cursor_paths:
subscription_changes: ["timestamp"]
email_events: ["timestamp"]
full_refresh:
- config_path: "secrets/config.json"
configured_catalog_path: "sample_files/configured_catalog.json"
44 changes: 44 additions & 0 deletions buildSrc/src/main/groovy/airbyte-standard-test.gradle
@@ -0,0 +1,44 @@
import org.gradle.api.Plugin
import org.gradle.api.Project

class AirbyteStandardTestPlugin implements Plugin<Project> {
void apply(Project project) {
project.task('standardTest') {
doFirst {
project.exec {
def targetMountDirectory = "/test_input"
def args = [
'docker', 'run', '--rm', '-i',
// provide access to the docker daemon
'-v', "/var/run/docker.sock:/var/run/docker.sock",
// A container within a container mounts from the host filesystem, not the parent container.
// this forces /tmp to be the same directory for host, parent container, and child container.
'-v', "/tmp:/tmp",
// mount the project dir. all provided input paths must be relative to that dir.
'-v', "${project.projectDir.absolutePath}:${targetMountDirectory}",
'airbyte/standard-test:dev',
'--standard_test_config', "$targetMountDirectory",
]
commandLine args
}
}

outputs.upToDateWhen { false }
}

project.standardTest.dependsOn(':airbyte-integrations:bases:standard-test:airbyteDocker')
project.standardTest.dependsOn(project.build)
project.standardTest.dependsOn(project.airbyteDocker)
if (project.hasProperty('airbyteDockerTest')){
project.standardTest.dependsOn(project.airbyteDockerTest)
}

// make sure we create the integrationTest task once in case a java integration test was already initialized
if (!project.hasProperty('integrationTest')) {
project.task('integrationTest')
}

project.integrationTest.dependsOn(project.standardTest)
}
}
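
With the plugin applied in the HubSpot build.gradle above, the suite is presumably run with something like ./gradlew :airbyte-integrations:connectors:source-hubspot:standardTest, which builds the connector image and then runs the standard-test container against the mounted project directory. A hedged sketch of how the --standard_test_config argument passed by the task might be consumed on the pytest side (the conftest contents below are an assumption, not part of this PR):

# Hypothetical conftest.py fragment for the standard-test image: register the
# --standard_test_config option passed by the gradle task and expose it to tests.
import pytest


def pytest_addoption(parser):
    parser.addoption(
        "--standard_test_config",
        action="store",
        default="/test_input",
        help="Directory containing the connector's standard test configuration",
    )


@pytest.fixture(scope="session")
def standard_test_config_dir(pytestconfig):
    return pytestconfig.getoption("--standard_test_config")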