diff --git a/.ci_support/migrations/aws_crt_cpp0230.yaml b/.ci_support/migrations/aws_crt_cpp0230.yaml
deleted file mode 100644
index b6887f27e..000000000
--- a/.ci_support/migrations/aws_crt_cpp0230.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-__migrator:
-  build_number: 1
-  kind: version
-  migration_number: 1
-  automerge: true
-aws_crt_cpp:
-- 0.23.0
-migrator_ts: 1692537530.8322937
diff --git a/conda-forge.yml b/conda-forge.yml
index f9c486a89..cb8108c7c 100644
--- a/conda-forge.yml
+++ b/conda-forge.yml
@@ -3,6 +3,7 @@ azure:
   max_parallel: 20
 bot:
   abi_migration_branches:
+    - 12.x
     - 11.0.x
     - 10.0.x
     - 9.0.x
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index 40ef681bb..0a3b556b4 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -1,4 +1,4 @@
-{% set version = "12.0.1" %}
+{% set version = "13.0.0" %}
 {% set cuda_enabled = cuda_compiler_version != "None" %}
 {% set build_ext_version = "4.0.0" %}
 {% set build_ext = "cuda" if cuda_enabled else "cpu" %}
@@ -13,15 +13,20 @@ package:
   version: {{ version }}

 source:
+  # arrow has the unfortunate habit of changing tags of X.0.0 in the
+  # lead-up until release -> don't use github sources on main
+  # - url: https://github.com/apache/arrow/archive/refs/tags/apache-arrow-{{ version }}.tar.gz
   - url: https://dist.apache.org/repos/dist/release/arrow/arrow-{{ version }}/apache-arrow-{{ version }}.tar.gz
-    sha256: 3481c411393aa15c75e88d93cf8315faf7f43e180fe0790128d3840d417de858
+    sha256: 35dfda191262a756be934eef8afee8d09762cad25021daa626eb249e251ac9e6
+    patches:
+      - patches/0001-GH-15017-Python-Harden-test_memory.py-for-use-with-A.patch
   # testing-submodule not part of release tarball
   - git_url: https://github.com/apache/arrow-testing.git
     git_rev: 47f7b56b25683202c1fd957668e13f2abafc0f12
     folder: testing

 build:
-  number: 9
+  number: 0
   # for cuda support, building with one version is enough to be compatible with
   # all later versions, since arrow is only using libcuda, and not libcudart.
   skip: true  # [cuda_compiler_version not in ("None", cuda_compiler_version_min)]
@@ -147,7 +152,7 @@ outputs:
     test:
       commands:
         {% set headers = [
-            "arrow/api.h", "arrow/acero/groupby.h", "arrow/flight/types.h",
+            "arrow/api.h", "arrow/acero/api.h", "arrow/flight/types.h",
             "arrow/flight/sql/api.h", "gandiva/engine.h", "parquet/api/reader.h"
         ] %}
         {% for each_header in headers %}
@@ -375,8 +380,6 @@ outputs:
         # skip tests that raise SIGINT and crash the test suite
         {% set tests_to_skip = tests_to_skip + " or (test_csv and test_cancellation)" %}  # [linux]
         {% set tests_to_skip = tests_to_skip + " or (test_flight and test_interrupt)" %}  # [linux]
-        # tests that may crash the agent due to out-of-bound memory writes or other risky stuff
-        {% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool" %}  # [aarch64 or ppc64le]
         # cannot pass -D_LIBCPP_DISABLE_AVAILABILITY to test suite for our older macos sdk
         {% set tests_to_skip = tests_to_skip + " or test_cpp_extension_in_python" %}  # [osx]
         # skip tests that make invalid(-for-conda) assumptions about the compilers setup
@@ -386,22 +389,13 @@ outputs:
         {% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool_disabled" %}  # [aarch64 or ppc64le]
         {% set tests_to_skip = tests_to_skip + " or test_env_var_io_thread_count" %}  # [aarch64 or ppc64le]
         # vvvvvvv TESTS THAT SHOULDN'T HAVE TO BE SKIPPED vvvvvvv
-        {% set tests_to_skip = tests_to_skip + " or test_extension_to_pandas_storage_type" %}  # segfaults on OSX: to investigate ASAP
         {% set tests_to_skip = tests_to_skip + " or test_flight" %}  # [osx]
         # gandiva tests are segfaulting on ppc
         {% set tests_to_skip = tests_to_skip + " or test_gandiva" %}  # [ppc64le]
-        # test failures on ppc
+        # test failures on ppc (both failing with: Float value was truncated converting to int32)
         {% set tests_to_skip = tests_to_skip + " or test_safe_cast_from_float_with_nans_to_int" %}  # [ppc64le]
-        # gandiva tests are segfaulting on ppc
-        {% set tests_to_skip = tests_to_skip + " or test_float_with_null_as_integer" %}  # [ppc64le]
-        # "Unsupported backend 'nonexistent' specified in ARROW_DEFAULT_MEMORY_POOL"
-        {% set tests_to_skip = tests_to_skip + " or (test_memory and test_env_var)" %}  # [unix]
-        # test is broken; header is in $PREFIX, not $SP_DIR
-        {% set tests_to_skip = tests_to_skip + " or (test_misc and test_get_include)" %}  # [unix]
-        # flaky tests that fail occasionally
-        {% set tests_to_skip = tests_to_skip + " or test_total_bytes_allocated " %}  # [linux]
-        {% set tests_to_skip = tests_to_skip + " or test_feather_format " %}  # [linux]
+        {% set tests_to_skip = tests_to_skip + " or test_float_with_null_as_integer" %}  # [ppc64le]
         # ^^^^^^^ TESTS THAT SHOULDN'T HAVE TO BE SKIPPED ^^^^^^^
         - pytest -rfEs -k "not ({{ tests_to_skip }})"
       {% endif %}

diff --git a/recipe/patches/0001-GH-15017-Python-Harden-test_memory.py-for-use-with-A.patch b/recipe/patches/0001-GH-15017-Python-Harden-test_memory.py-for-use-with-A.patch
new file mode 100644
index 000000000..4fb714c3d
--- /dev/null
+++ b/recipe/patches/0001-GH-15017-Python-Harden-test_memory.py-for-use-with-A.patch
@@ -0,0 +1,52 @@
+From 0ff739a0d3c8c4df4b46625f0bb0bc87c6b0d29d Mon Sep 17 00:00:00 2001
+From: h-vetinari
+Date: Fri, 28 Jul 2023 13:31:01 +1100
+Subject: [PATCH] GH-15017: [Python] Harden test_memory.py for use with
+ ARROW_USE_GLOG=ON (#36901)
+
+Accept output pattern for ARROW_USE_GLOG=ON too.
+
+* Closes: #15017
+
+Lead-authored-by: H. Vetinari
+Co-authored-by: Sutou Kouhei
+Signed-off-by: Sutou Kouhei
+---
+ dev/tasks/conda-recipes/arrow-cpp/meta.yaml |  2 --
+ python/pyarrow/tests/test_memory.py         | 10 ++++++++--
+ 2 files changed, 8 insertions(+), 4 deletions(-)
+
+diff --git a/dev/tasks/conda-recipes/arrow-cpp/meta.yaml b/dev/tasks/conda-recipes/arrow-cpp/meta.yaml
+index 2f79bbe95..24c0a6ba7 100644
+--- a/dev/tasks/conda-recipes/arrow-cpp/meta.yaml
++++ b/dev/tasks/conda-recipes/arrow-cpp/meta.yaml
+@@ -381,8 +381,6 @@ outputs:
+         {% set tests_to_skip = tests_to_skip + " or test_safe_cast_from_float_with_nans_to_int" %}  # [ppc64le]
+         # gandiva tests are segfaulting on ppc
+         {% set tests_to_skip = tests_to_skip + " or test_float_with_null_as_integer" %}  # [ppc64le]
+-        # "Unsupported backend 'nonexistent' specified in ARROW_DEFAULT_MEMORY_POOL"
+-        {% set tests_to_skip = tests_to_skip + " or (test_memory and test_env_var)" %}  # [unix]
+         # test is broken; header is in $PREFIX, not $SP_DIR
+         {% set tests_to_skip = tests_to_skip + " or (test_misc and test_get_include)" %}  # [unix]
+         # ^^^^^^^ TESTS THAT SHOULDN'T HAVE TO BE SKIPPED ^^^^^^^
+diff --git a/python/pyarrow/tests/test_memory.py b/python/pyarrow/tests/test_memory.py
+index 092c50de3..d9fdeb152 100644
+--- a/python/pyarrow/tests/test_memory.py
++++ b/python/pyarrow/tests/test_memory.py
+@@ -134,8 +134,14 @@ def check_env_var(name, expected, *, expect_warning=False):
+         res.check_returncode()  # fail
+     errlines = res.stderr.splitlines()
+     if expect_warning:
+-        assert len(errlines) == 1
+-        assert f"Unsupported backend '{name}'" in errlines[0]
++        assert len(errlines) in (1, 2)
++        if len(errlines) == 1:
++            # ARROW_USE_GLOG=OFF
++            assert f"Unsupported backend '{name}'" in errlines[0]
++        else:
++            # ARROW_USE_GLOG=ON
++            assert "InitGoogleLogging()" in errlines[0]
++            assert f"Unsupported backend '{name}'" in errlines[1]
+     else:
+         assert len(errlines) == 0
+
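Note on the vendored patch above: `check_env_var` in pyarrow's `test_memory.py` launches a child interpreter with `ARROW_DEFAULT_MEMORY_POOL` set to a bogus backend and inspects its stderr. When Arrow is built with `ARROW_USE_GLOG=ON` (evidently the case here, since the `(test_memory and test_env_var)` skip is dropped alongside the patch), glog prints an extra `InitGoogleLogging()` warning line first, which is why the assertion now accepts one or two stderr lines. A minimal, self-contained sketch of that pattern follows; the helper name `probe_memory_pool_backend` is made up for illustration and this is not the upstream test code:

```python
# Sketch only: mimics the check the patch hardens, assuming pyarrow is installed
# and that an unsupported ARROW_DEFAULT_MEMORY_POOL merely warns on stderr.
import os
import subprocess
import sys


def probe_memory_pool_backend(name: str) -> None:
    child = "import pyarrow as pa; pa.default_memory_pool()"
    env = dict(os.environ, ARROW_DEFAULT_MEMORY_POOL=name)
    res = subprocess.run(
        [sys.executable, "-c", child],
        env=env, capture_output=True, text=True, check=True,
    )
    errlines = res.stderr.splitlines()
    # ARROW_USE_GLOG=OFF -> one warning line; =ON -> glog warning comes first
    assert len(errlines) in (1, 2), errlines
    if len(errlines) == 2:
        assert "InitGoogleLogging()" in errlines[0]
    assert f"Unsupported backend '{name}'" in errlines[-1]


if __name__ == "__main__":
    probe_memory_pool_backend("nonexistent")
```

With the patch applied, the `(test_memory and test_env_var)` entry no longer needs to appear in `tests_to_skip`, so the deselection expression handed to `pytest -rfEs -k "not (...)"` stops excluding those tests.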