Fix errors in markdown and codspeed workflows #15841

Merged · 8 commits · Oct 29, 2024
3 changes: 2 additions & 1 deletion .github/workflows/codspeed-benchmarks.yaml

@@ -61,7 +61,8 @@ jobs:
       - name: Install packages
         run: |
           python -m pip install -U uv
-          uv pip install --upgrade --system .[dev]
+          uv pip install --upgrade --system .[dev] pytest-codspeed
+          uv pip uninstall --system pytest-benchmark

       - name: Start server
         run: |
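Note (not part of the diff): pytest-codspeed and pytest-benchmark both register a fixture named `benchmark`, so having both plugins installed at once lets one shadow the other. That appears to be why this workflow now installs pytest-codspeed explicitly and uninstalls pytest-benchmark before the benches run; the markdown-tests workflow below gains the same uninstall step. A minimal sketch of a bench that works under whichever plugin remains installed, since both appear to expose a compatible callable `benchmark` fixture (`bench_noop` is an illustrative name, not code from this PR):

```python
# Sketch, assuming the repo's pytest config collects `bench_*` functions.
# With only one of the two plugins installed, the `benchmark` fixture name
# resolves unambiguously and the same bench body runs under either plugin.
def bench_noop(benchmark):
    benchmark(lambda: None)  # time a no-op callable
```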
1 change: 1 addition & 0 deletions .github/workflows/markdown-tests.yaml

@@ -95,6 +95,7 @@ jobs:
           python -m pip install -U uv
           uv pip install --upgrade --system -e '.[dev]'
           uv pip install --upgrade --system -r requirements-markdown-tests.txt
+          uv pip uninstall --system pytest-benchmark

       - name: Start server
         run: |
22 changes: 13 additions & 9 deletions benches/bench_flows.py

@@ -2,12 +2,16 @@
 TODO: Add benches for higher number of tasks; blocked by engine deadlocks in CI.
 """
 
+from typing import TYPE_CHECKING
+
 import anyio
 import pytest
-from pytest_benchmark.fixture import BenchmarkFixture
 
 from prefect import flow, task
 
+if TYPE_CHECKING:
+    from pytest_benchmark.fixture import BenchmarkFixture
+
 
 def noop_function():
     pass

@@ -17,12 +21,12 @@ async def anoop_function():
     pass
 
 
-def bench_flow_decorator(benchmark: BenchmarkFixture):
+def bench_flow_decorator(benchmark: "BenchmarkFixture"):
     benchmark(flow, noop_function)
 
 
 @pytest.mark.parametrize("options", [{}, {"timeout_seconds": 10}])
-def bench_flow_call(benchmark: BenchmarkFixture, options):
+def bench_flow_call(benchmark: "BenchmarkFixture", options):
     noop_flow = flow(**options)(noop_function)
     benchmark(noop_flow)

@@ -35,7 +39,7 @@ def bench_flow_call(benchmark: BenchmarkFixture, options):
 
 
 @pytest.mark.parametrize("num_tasks", [10, 50, 100])
-def bench_flow_with_submitted_tasks(benchmark: BenchmarkFixture, num_tasks: int):
+def bench_flow_with_submitted_tasks(benchmark: "BenchmarkFixture", num_tasks: int):
     test_task = task(noop_function)
 
     @flow

@@ -47,7 +51,7 @@ def benchmark_flow():
 
 
 @pytest.mark.parametrize("num_tasks", [10, 50, 100, 250])
-def bench_flow_with_called_tasks(benchmark: BenchmarkFixture, num_tasks: int):
+def bench_flow_with_called_tasks(benchmark: "BenchmarkFixture", num_tasks: int):
     test_task = task(noop_function)
 
     @flow

@@ -62,7 +66,7 @@ def benchmark_flow():
 
 
 @pytest.mark.parametrize("num_tasks", [10, 50, 100, 250])
-def bench_async_flow_with_async_tasks(benchmark: BenchmarkFixture, num_tasks: int):
+def bench_async_flow_with_async_tasks(benchmark: "BenchmarkFixture", num_tasks: int):
     test_task = task(anoop_function)
 
     @flow

@@ -78,7 +82,7 @@ async def benchmark_flow():
 
 
 @pytest.mark.parametrize("num_flows", [5, 10, 20])
-def bench_flow_with_subflows(benchmark: BenchmarkFixture, num_flows: int):
+def bench_flow_with_subflows(benchmark: "BenchmarkFixture", num_flows: int):
     test_flow = flow(noop_function)
 
     @flow

@@ -91,7 +95,7 @@ def benchmark_flow():
 
 @pytest.mark.parametrize("num_flows", [5, 10, 20])
 def bench_async_flow_with_sequential_subflows(
-    benchmark: BenchmarkFixture, num_flows: int
+    benchmark: "BenchmarkFixture", num_flows: int
 ):
     test_flow = flow(anoop_function)
 

@@ -105,7 +109,7 @@ async def benchmark_flow():
 
 @pytest.mark.parametrize("num_flows", [5, 10, 20])
 def bench_async_flow_with_concurrent_subflows(
-    benchmark: BenchmarkFixture, num_flows: int
+    benchmark: "BenchmarkFixture", num_flows: int
 ):
     test_flow = flow(anoop_function)
 
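The change above, repeated in `bench_import.py` and `bench_tasks.py` below, moves the `BenchmarkFixture` import behind `typing.TYPE_CHECKING` and quotes the annotations. Since `TYPE_CHECKING` is `False` at runtime, the import only happens under a static type checker, so the bench modules stay importable after the workflows uninstall pytest-benchmark. A minimal self-contained sketch of the idiom (`bench_example` is a hypothetical name, not from this PR):

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by static type checkers (mypy, pyright), never at
    # runtime, so this file imports cleanly even without pytest-benchmark.
    from pytest_benchmark.fixture import BenchmarkFixture


def bench_example(benchmark: "BenchmarkFixture") -> None:
    # The quoted annotation is a forward reference; it is not resolved when
    # the function is defined, so no runtime dependency on the fixture type
    # remains.
    benchmark(lambda: None)
```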
9 changes: 6 additions & 3 deletions benches/bench_import.py

@@ -1,9 +1,12 @@
 import importlib
 import sys
+from typing import TYPE_CHECKING
 
 import pytest
 from prometheus_client import REGISTRY
-from pytest_benchmark.fixture import BenchmarkFixture
 
+if TYPE_CHECKING:
+    from pytest_benchmark.fixture import BenchmarkFixture
+
 
 def reset_imports():

@@ -21,7 +24,7 @@ def reset_imports():
 
 
 @pytest.mark.benchmark(group="imports")
-def bench_import_prefect(benchmark: BenchmarkFixture):
+def bench_import_prefect(benchmark: "BenchmarkFixture"):
     def import_prefect():
         reset_imports()
 

@@ -32,7 +35,7 @@ def import_prefect():
 
 @pytest.mark.timeout(180)
 @pytest.mark.benchmark(group="imports")
-def bench_import_prefect_flow(benchmark: BenchmarkFixture):
+def bench_import_prefect_flow(benchmark: "BenchmarkFixture"):
     def import_prefect_flow():
         reset_imports()
 
11 changes: 7 additions & 4 deletions benches/bench_tasks.py

@@ -1,4 +1,7 @@
-from pytest_benchmark.fixture import BenchmarkFixture
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from pytest_benchmark.fixture import BenchmarkFixture
 
 from prefect import flow, task
 

@@ -7,11 +10,11 @@ def noop_function():
     pass
 
 
-def bench_task_decorator(benchmark: BenchmarkFixture):
+def bench_task_decorator(benchmark: "BenchmarkFixture"):
     benchmark(task, noop_function)
 
 
-def bench_task_call(benchmark: BenchmarkFixture):
+def bench_task_call(benchmark: "BenchmarkFixture"):
     noop_task = task(noop_function)
 
     @flow

@@ -21,7 +24,7 @@ def benchmark_flow():
     benchmark_flow()
 
 
-def bench_task_submit(benchmark: BenchmarkFixture):
+def bench_task_submit(benchmark: "BenchmarkFixture"):
     noop_task = task(noop_function)
 
     # The benchmark occurs within the flow to measure _submission_ time without
22 changes: 0 additions & 22 deletions benches/conftest.py

This file was deleted.

1 change: 0 additions & 1 deletion requirements-dev.txt

@@ -12,7 +12,6 @@ pluggy >= 1.4.0
 pytest >= 8.3
 pytest-asyncio >= 0.24
 pytest-benchmark
-pytest-codspeed
 pytest-cov
 pytest-env
 pytest-flakefinder
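Taken together, the changes read as making pytest-codspeed a CI-only dependency: it is dropped from `requirements-dev.txt`, installed explicitly in the codspeed workflow, and the conflicting pytest-benchmark plugin is uninstalled in both workflows while remaining available for local development.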