From bab104459b782b93825d341b04e0d1111ac17965 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Mon, 11 Sep 2017 18:42:28 -0700 Subject: [PATCH 01/21] Fix upper-bound bin error for auto-ranged data Fix #457 --- datashader/glyphs.py | 10 ++++++---- datashader/tests/test_glyphs.py | 10 ++++++++-- datashader/tests/test_pandas.py | 15 +++++++++++++++ 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/datashader/glyphs.py b/datashader/glyphs.py index fb9cb1f4e..b0184af49 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -40,7 +40,7 @@ def _compute_x_bounds(xs): maxval = x if np.isnan(minval) or np.isnan(maxval): raise ValueError('All x coordinates are NaN.') - return minval, maxval + return minval, maxval + np.spacing(maxval) @staticmethod @ngjit @@ -54,7 +54,7 @@ def _compute_y_bounds(ys): maxval = y if np.isnan(minval) or np.isnan(maxval): raise ValueError('All y coordinates are NaN.') - return minval, maxval + return minval, maxval + np.spacing(maxval) @memoize def _compute_x_bounds_dask(self, df): @@ -62,7 +62,8 @@ def _compute_x_bounds_dask(self, df): ``df`` is immutable/hashable (a Dask dataframe). """ xs = df[self.x].values - return np.nanmin(xs), np.nanmax(xs) + minval, maxval = np.nanmin(xs), np.nanmax(xs) + return minval, maxval + np.spacing(maxval) @memoize def _compute_y_bounds_dask(self, df): @@ -70,7 +71,8 @@ def _compute_y_bounds_dask(self, df): ``df`` is immutable/hashable (a Dask dataframe). """ ys = df[self.y].values - return np.nanmin(ys), np.nanmax(ys) + minval, maxval = np.nanmin(ys), np.nanmax(ys) + return minval, maxval + np.spacing(maxval) class Point(_PointLike): diff --git a/datashader/tests/test_glyphs.py b/datashader/tests/test_glyphs.py index 087e6d3ea..b99c746c9 100644 --- a/datashader/tests/test_glyphs.py +++ b/datashader/tests/test_glyphs.py @@ -10,8 +10,14 @@ def test_point_bounds_check(): df = pd.DataFrame({'x': [1, 2, 3], 'y': [5, 6, 7]}) p = Point('x', 'y') - assert p._compute_x_bounds(df['x'].values) == (1, 3) - assert p._compute_y_bounds(df['y'].values) == (5, 7) + + xmin, xmax = p._compute_x_bounds(df['x'].values) + assert xmin == 1 + assert xmax > 3 and np.isclose(xmax, 3) + + ymin, ymax = p._compute_y_bounds(df['y'].values) + assert ymin == 5 + assert ymax > 7 and np.isclose(ymax, 7) def test_point_validate(): diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index 413e2d4ff..ebdaa1dd0 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -146,6 +146,21 @@ def test_multiple_aggregates(): assert_eq(agg.i32_count, f(np.array([[5, 5], [5, 5]], dtype='i4'))) +def test_auto_range_points(): + df = pd.DataFrame({'time': [1, 2, 3], + 'x': [1.0, 1.1, 1.2], + 'y': [1.0, 1.1, 1.2]}) + cvs = ds.Canvas(plot_width=15, plot_height=15) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + + sol = np.zeros((15, 15), int) + sol[0, 0] = 1 + sol[7, 7] = 1 + sol[14, 14] = 1 + + np.testing.assert_equal(agg.data, sol) + + def test_log_axis_points(): # Upper bound for scale/index of x-axis start, end = map(np.log10, (1, 11)) From 2131de4684a37d46ac092e8baed89f6ee5500946 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Fri, 15 Sep 2017 11:15:10 -0700 Subject: [PATCH 02/21] Clean up docs --- datashader/core.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/datashader/core.py b/datashader/core.py index d71617ff6..582df3d03 100644 --- a/datashader/core.py +++ b/datashader/core.py @@ -60,8 +60,6 @@ def compute_scale_and_translate(self, range, n): Returns ------- s, t : floats - Parameters 
represe - """ start, end = map(self.mapper, range) s = n/(end - start) From 80829e92acd287f2c242186075cf588644fd4faf Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Mon, 25 Sep 2017 23:13:52 -0700 Subject: [PATCH 03/21] Add more comprehensive tests --- datashader/tests/test_dask.py | 62 +++++++++++++++++++++++++++++++++ datashader/tests/test_pandas.py | 61 +++++++++++++++++++++++++++----- 2 files changed, 115 insertions(+), 8 deletions(-) diff --git a/datashader/tests/test_dask.py b/datashader/tests/test_dask.py index b3fddc562..f6a862aec 100644 --- a/datashader/tests/test_dask.py +++ b/datashader/tests/test_dask.py @@ -1,3 +1,5 @@ +import itertools + from dask.local import get_sync from dask.context import set_options import dask.dataframe as dd @@ -41,6 +43,13 @@ def assert_eq(agg, b): assert agg.equals(b) +def floats(n): + """Returns contiguous list of floats from initial point""" + while True: + yield n + n = n + np.spacing(n) + + def test_count(): out = xr.DataArray(np.array([[5, 5], [5, 5]], dtype='i4'), coords=coords, dims=dims) @@ -151,6 +160,59 @@ def test_multiple_aggregates(): assert_eq(agg.i32_count, f(np.array([[5, 5], [5, 5]], dtype='i4'))) +def test_auto_range_points(): + n = 10 + fs = list(itertools.islice(floats(1.0), n)) + df = pd.DataFrame({'time': np.arange(n), + 'x': fs, + 'y': fs}) + ddf = dd.from_pandas(df, npartitions=3) + + # Expect continuous left-right diagonal + cvs = ds.Canvas(plot_width=n, plot_height=n) + agg = cvs.points(ddf, 'x', 'y', ds.count('time')) + sol = np.zeros((n, n), int) + np.fill_diagonal(sol, 1) + np.testing.assert_equal(agg.data, sol) + + # Expect continuous left-right diagonal w/ hole in middle + cvs = ds.Canvas(plot_width=n+1, plot_height=n+1) + agg = cvs.points(ddf, 'x', 'y', ds.count('time')) + sol = np.zeros((n+1, n+1), int) + np.fill_diagonal(sol, 1) + sol[5, 5] = 0 + # For 32-bit or 64-bit floats, the hole will be in the middle due to + # rounding errors. The hole will be in the lower-right corner for + # 128-bit float or arbitrary precision float. 
+ # sol[n, n] = 0 + np.testing.assert_equal(agg.data, sol) + + n = 4 + fs = list(itertools.islice(floats(1.0), n)) + df = pd.DataFrame({'time': np.arange(n), + 'x': fs, + 'y': fs}) + ddf = dd.from_pandas(df, npartitions=3) + + # Expect alternating left-right diagonal + cvs = ds.Canvas(plot_width=2*n, plot_height=2*n) + agg = cvs.points(ddf, 'x', 'y', ds.count('time')) + sol = np.zeros((2*n, 2*n), int) + np.fill_diagonal(sol, 1) + sol[[range(1, 2*n, 2)]] = 0 + np.testing.assert_equal(agg.data, sol) + + # Expect alternating left-right diagonal with hole in lower-right + # corner + cvs = ds.Canvas(plot_width=2*n+1, plot_height=2*n+1) + agg = cvs.points(ddf, 'x', 'y', ds.count('time')) + sol = np.zeros((2*n+1, 2*n+1), int) + np.fill_diagonal(sol, 1) + sol[[range(1, 2*n+1, 2)]] = 0 + sol[2*n, 2*n] = 0 + np.testing.assert_equal(agg.data, sol) + + def test_log_axis_points(): # Upper bound for scale/index of x-axis start, end = map(np.log10, (1, 11)) diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index ebdaa1dd0..666d68f77 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -1,3 +1,5 @@ +import itertools + import numpy as np import pandas as pd import xarray as xr @@ -36,6 +38,13 @@ def assert_eq(agg, b): assert agg.equals(b) +def floats(n): + """Returns contiguous list of floats from initial point""" + while True: + yield n + n = n + np.spacing(n) + + def test_count(): out = xr.DataArray(np.array([[5, 5], [5, 5]], dtype='i4'), coords=coords, dims=dims) @@ -147,17 +156,53 @@ def test_multiple_aggregates(): def test_auto_range_points(): - df = pd.DataFrame({'time': [1, 2, 3], - 'x': [1.0, 1.1, 1.2], - 'y': [1.0, 1.1, 1.2]}) - cvs = ds.Canvas(plot_width=15, plot_height=15) + n = 10 + fs = list(itertools.islice(floats(1.0), n)) + df = pd.DataFrame({'time': np.arange(n), + 'x': fs, + 'y': fs}) + + # Expect continuous left-right diagonal + cvs = ds.Canvas(plot_width=n, plot_height=n) agg = cvs.points(df, 'x', 'y', ds.count('time')) + sol = np.zeros((n, n), int) + np.fill_diagonal(sol, 1) + np.testing.assert_equal(agg.data, sol) - sol = np.zeros((15, 15), int) - sol[0, 0] = 1 - sol[7, 7] = 1 - sol[14, 14] = 1 + # Expect continuous left-right diagonal w/ hole in middle + cvs = ds.Canvas(plot_width=n+1, plot_height=n+1) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + sol = np.zeros((n+1, n+1), int) + np.fill_diagonal(sol, 1) + sol[5, 5] = 0 + # For 32-bit or 64-bit floats, the hole will be in the middle due to + # rounding errors. The hole will be in the lower-right corner for + # 128-bit float or arbitrary precision float. 
+ # sol[n, n] = 0 + np.testing.assert_equal(agg.data, sol) + n = 4 + fs = list(itertools.islice(floats(1.0), n)) + df = pd.DataFrame({'time': np.arange(n), + 'x': fs, + 'y': fs}) + + # Expect alternating left-right diagonal + cvs = ds.Canvas(plot_width=2*n, plot_height=2*n) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + sol = np.zeros((2*n, 2*n), int) + np.fill_diagonal(sol, 1) + sol[[range(1, 2*n, 2)]] = 0 + np.testing.assert_equal(agg.data, sol) + + # Expect alternating left-right diagonal with hole in lower-right + # corner + cvs = ds.Canvas(plot_width=2*n+1, plot_height=2*n+1) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + sol = np.zeros((2*n+1, 2*n+1), int) + np.fill_diagonal(sol, 1) + sol[[range(1, 2*n+1, 2)]] = 0 + sol[2*n, 2*n] = 0 np.testing.assert_equal(agg.data, sol) From a0bab325c81367c30d4ded9270b3e67306c443f1 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Tue, 26 Sep 2017 00:20:58 -0700 Subject: [PATCH 04/21] Refactor bounds expansion into separate method --- datashader/dask.py | 6 ++++-- datashader/glyphs.py | 8 ++++---- datashader/pandas.py | 5 +++++ datashader/utils.py | 6 ++++++ 4 files changed, 19 insertions(+), 6 deletions(-) diff --git a/datashader/dask.py b/datashader/dask.py index a94e1e174..f5dea89e5 100644 --- a/datashader/dask.py +++ b/datashader/dask.py @@ -9,7 +9,7 @@ from .compatibility import apply from .compiler import compile_components from .glyphs import Glyph, Line -from .utils import Dispatcher +from .utils import Dispatcher, exclusive_range __all__ = () @@ -28,7 +28,9 @@ def shape_bounds_st_and_axis(df, canvas, glyph): x_range = canvas.x_range or glyph._compute_x_bounds_dask(df) y_range = canvas.y_range or glyph._compute_y_bounds_dask(df) x_min, x_max, y_min, y_max = bounds = compute(*(x_range + y_range)) - x_range, y_range = (x_min, x_max), (y_min, y_max) + x_range = exclusive_range((x_min, x_max)) + y_range = exclusive_range((y_min, y_max)) + width = canvas.plot_width height = canvas.plot_height diff --git a/datashader/glyphs.py b/datashader/glyphs.py index b0184af49..d5c3f5877 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -40,7 +40,7 @@ def _compute_x_bounds(xs): maxval = x if np.isnan(minval) or np.isnan(maxval): raise ValueError('All x coordinates are NaN.') - return minval, maxval + np.spacing(maxval) + return minval, maxval @staticmethod @ngjit @@ -54,7 +54,7 @@ def _compute_y_bounds(ys): maxval = y if np.isnan(minval) or np.isnan(maxval): raise ValueError('All y coordinates are NaN.') - return minval, maxval + np.spacing(maxval) + return minval, maxval @memoize def _compute_x_bounds_dask(self, df): @@ -63,7 +63,7 @@ def _compute_x_bounds_dask(self, df): """ xs = df[self.x].values minval, maxval = np.nanmin(xs), np.nanmax(xs) - return minval, maxval + np.spacing(maxval) + return minval, maxval @memoize def _compute_y_bounds_dask(self, df): @@ -72,7 +72,7 @@ def _compute_y_bounds_dask(self, df): """ ys = df[self.y].values minval, maxval = np.nanmin(ys), np.nanmax(ys) - return minval, maxval + np.spacing(maxval) + return minval, maxval class Point(_PointLike): diff --git a/datashader/pandas.py b/datashader/pandas.py index e13457ab9..04062b45b 100644 --- a/datashader/pandas.py +++ b/datashader/pandas.py @@ -4,6 +4,7 @@ from .core import bypixel from .compiler import compile_components +from .utils import exclusive_range __all__ = () @@ -17,6 +18,10 @@ def pandas_pipeline(df, schema, canvas, glyph, summary): x_range = canvas.x_range or glyph._compute_x_bounds(df[glyph.x].values) y_range = canvas.y_range or 
glyph._compute_y_bounds(df[glyph.y].values) + + x_range = exclusive_range(x_range) + y_range = exclusive_range(y_range) + width = canvas.plot_width height = canvas.plot_height diff --git a/datashader/utils.py b/datashader/utils.py index 4dd187e3e..283fe834a 100644 --- a/datashader/utils.py +++ b/datashader/utils.py @@ -352,3 +352,9 @@ def dshape_from_dask(df): categoricals_in_dtypes = np.vectorize(lambda dtype: dtype.name == 'category', otypes='?') def categorical_in_dtypes(dtype_arr): return categoricals_in_dtypes(dtype_arr).any() + + +def exclusive_range(bounds): + """Expand bounds to include upper bound value.""" + lower, upper = bounds + return (lower, upper + np.spacing(upper)) From 1d5b94dfc7cba2e90b04b194749caf2800a34d26 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Tue, 26 Sep 2017 18:05:07 -0700 Subject: [PATCH 05/21] Switch to exclusive range for all use cases This reverts to exclusive ranges both manual and auto for all glyphs (points/line) without introducing regressions of #318, #330, and #343. I refactored several tests to make xarray coordinate indices easier to read and more explicit. --- datashader/core.py | 2 +- datashader/dask.py | 6 +-- datashader/glyphs.py | 4 +- datashader/pandas.py | 4 -- datashader/tests/test_dask.py | 84 ++++++++++++++++----------------- datashader/tests/test_glyphs.py | 10 +--- datashader/tests/test_pandas.py | 76 ++++++++++++++--------------- datashader/utils.py | 6 --- 8 files changed, 88 insertions(+), 104 deletions(-) diff --git a/datashader/core.py b/datashader/core.py index 0ed31c5a0..8456d09f5 100644 --- a/datashader/core.py +++ b/datashader/core.py @@ -63,7 +63,7 @@ def compute_scale_and_translate(self, range, n): s, t : floats """ start, end = map(self.mapper, range) - s = n/(end - start) + s = (n-1)/(end - start) t = -start * s return s, t diff --git a/datashader/dask.py b/datashader/dask.py index f5dea89e5..3d72feeac 100644 --- a/datashader/dask.py +++ b/datashader/dask.py @@ -9,7 +9,7 @@ from .compatibility import apply from .compiler import compile_components from .glyphs import Glyph, Line -from .utils import Dispatcher, exclusive_range +from .utils import Dispatcher __all__ = () @@ -28,8 +28,8 @@ def shape_bounds_st_and_axis(df, canvas, glyph): x_range = canvas.x_range or glyph._compute_x_bounds_dask(df) y_range = canvas.y_range or glyph._compute_y_bounds_dask(df) x_min, x_max, y_min, y_max = bounds = compute(*(x_range + y_range)) - x_range = exclusive_range((x_min, x_max)) - y_range = exclusive_range((y_min, y_max)) + x_range = (x_min, x_max) + y_range = (y_min, y_max) width = canvas.plot_width height = canvas.plot_height diff --git a/datashader/glyphs.py b/datashader/glyphs.py index d5c3f5877..2f2771673 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -97,7 +97,7 @@ def _extend(vt, bounds, xs, ys, *aggs_and_cols): for i in range(xs.shape[0]): x = xs[i] y = ys[i] - if (xmin <= x < xmax) and (ymin <= y < ymax): + if (xmin <= x <= xmax) and (ymin <= y <= ymax): append(i, int(x_mapper(x) * sx + tx), int(y_mapper(y) * sy + ty), @@ -132,7 +132,7 @@ def extend(aggs, df, vt, bounds, plot_start=True): # Scale/transform float bounds to integer space and adjust for # exclusive upper bounds xmin, xmax, ymin, ymax = map_onto_pixel(vt, *bounds) - mapped_bounds = (xmin, xmax - 1, ymin, ymax - 1) + mapped_bounds = (xmin, xmax, ymin, ymax) xs = df[x_name].values ys = df[y_name].values diff --git a/datashader/pandas.py b/datashader/pandas.py index 04062b45b..5eb9335e4 100644 --- a/datashader/pandas.py +++ b/datashader/pandas.py 
@@ -4,7 +4,6 @@ from .core import bypixel from .compiler import compile_components -from .utils import exclusive_range __all__ = () @@ -19,9 +18,6 @@ def pandas_pipeline(df, schema, canvas, glyph, summary): x_range = canvas.x_range or glyph._compute_x_bounds(df[glyph.x].values) y_range = canvas.y_range or glyph._compute_y_bounds(df[glyph.y].values) - x_range = exclusive_range(x_range) - y_range = exclusive_range(y_range) - width = canvas.plot_width height = canvas.plot_height diff --git a/datashader/tests/test_dask.py b/datashader/tests/test_dask.py index f6a862aec..788f2a4ed 100644 --- a/datashader/tests/test_dask.py +++ b/datashader/tests/test_dask.py @@ -27,15 +27,17 @@ ddf = dd.from_pandas(df, npartitions=3) -c = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 2), y_range=(0, 2)) -c_logx = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 11), - y_range=(0, 2), x_axis_type='log') -c_logy = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 2), - y_range=(1, 11), y_axis_type='log') -c_logxy = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 11), - y_range=(1, 11), x_axis_type='log', y_axis_type='log') - -coords = [np.arange(2, dtype='f8')+0.5, np.arange(2, dtype='f8')+0.5] +c = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 1), y_range=(0, 1)) +c_logx = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 10), + y_range=(0, 1), x_axis_type='log') +c_logy = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 1), + y_range=(1, 10), y_axis_type='log') +c_logxy = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 10), + y_range=(1, 10), x_axis_type='log', y_axis_type='log') + +axis = ds.core.LinearAxis() +lincoords = axis.compute_index(axis.compute_scale_and_translate((0, 1), 2), 2) +coords = [lincoords, lincoords] dims = ['y', 'x'] @@ -65,12 +67,12 @@ def test_count(): def test_any(): out = xr.DataArray(np.array([[True, True], [True, True]]), coords=coords, dims=dims) - assert_eq(c.points(df, 'x', 'y', ds.any('i64')), out) - assert_eq(c.points(df, 'x', 'y', ds.any('f64')), out) - assert_eq(c.points(df, 'x', 'y', ds.any()), out) + assert_eq(c.points(ddf, 'x', 'y', ds.any('i64')), out) + assert_eq(c.points(ddf, 'x', 'y', ds.any('f64')), out) + assert_eq(c.points(ddf, 'x', 'y', ds.any()), out) out = xr.DataArray(np.array([[True, True], [True, False]]), coords=coords, dims=dims) - assert_eq(c.points(df, 'x', 'y', ds.any('empty_bin')), out) + assert_eq(c.points(ddf, 'x', 'y', ds.any('empty_bin')), out) def test_sum(): @@ -161,6 +163,9 @@ def test_multiple_aggregates(): def test_auto_range_points(): + # Since the following tests use contiguous values of 32-bit or + # 64-bit floats, we need to adjust the theoretical expected results + # if we were using a 128-bit float or arbitrary precision float. n = 10 fs = list(itertools.islice(floats(1.0), n)) df = pd.DataFrame({'time': np.arange(n), @@ -168,23 +173,17 @@ def test_auto_range_points(): 'y': fs}) ddf = dd.from_pandas(df, npartitions=3) - # Expect continuous left-right diagonal cvs = ds.Canvas(plot_width=n, plot_height=n) agg = cvs.points(ddf, 'x', 'y', ds.count('time')) sol = np.zeros((n, n), int) np.fill_diagonal(sol, 1) np.testing.assert_equal(agg.data, sol) - # Expect continuous left-right diagonal w/ hole in middle cvs = ds.Canvas(plot_width=n+1, plot_height=n+1) agg = cvs.points(ddf, 'x', 'y', ds.count('time')) sol = np.zeros((n+1, n+1), int) np.fill_diagonal(sol, 1) - sol[5, 5] = 0 - # For 32-bit or 64-bit floats, the hole will be in the middle due to - # rounding errors. 
The hole will be in the lower-right corner for - # 128-bit float or arbitrary precision float. - # sol[n, n] = 0 + sol[5, 5] = 0 # adjustment np.testing.assert_equal(agg.data, sol) n = 4 @@ -194,37 +193,35 @@ def test_auto_range_points(): 'y': fs}) ddf = dd.from_pandas(df, npartitions=3) - # Expect alternating left-right diagonal cvs = ds.Canvas(plot_width=2*n, plot_height=2*n) agg = cvs.points(ddf, 'x', 'y', ds.count('time')) sol = np.zeros((2*n, 2*n), int) np.fill_diagonal(sol, 1) sol[[range(1, 2*n, 2)]] = 0 + sol[6, 6] = 0 # adjustment np.testing.assert_equal(agg.data, sol) - # Expect alternating left-right diagonal with hole in lower-right - # corner cvs = ds.Canvas(plot_width=2*n+1, plot_height=2*n+1) agg = cvs.points(ddf, 'x', 'y', ds.count('time')) sol = np.zeros((2*n+1, 2*n+1), int) np.fill_diagonal(sol, 1) sol[[range(1, 2*n+1, 2)]] = 0 - sol[2*n, 2*n] = 0 + sol[4, 4] = 0 # adjustment np.testing.assert_equal(agg.data, sol) def test_log_axis_points(): - # Upper bound for scale/index of x-axis - start, end = map(np.log10, (1, 11)) - s = 2/(end - start) - t = -start * s - px = np.arange(2)+0.5 - logcoords = 10**((px-t)/s) + axis = ds.core.LogAxis() + logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) + + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((0, 1), 2), 2) + sol = np.array([[5, 5], [5, 5]], dtype='i4') - out = xr.DataArray(sol, coords=[np.array([0.5, 1.5]), logcoords], + out = xr.DataArray(sol, coords=[lincoords, logcoords], dims=['y', 'log_x']) assert_eq(c_logx.points(ddf, 'log_x', 'y', ds.count('i32')), out) - out = xr.DataArray(sol, coords=[logcoords, np.array([0.5, 1.5])], + out = xr.DataArray(sol, coords=[logcoords, lincoords], dims=['log_y', 'x']) assert_eq(c_logy.points(ddf, 'x', 'log_y', ds.count('i32')), out) out = xr.DataArray(sol, coords=[logcoords, logcoords], @@ -233,11 +230,14 @@ def test_log_axis_points(): def test_line(): + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((-3., 3.), 7), 7) + df = pd.DataFrame({'x': [4, 0, -4, -3, -2, -1.9, 0, 10, 10, 0, 4], 'y': [0, -4, 0, 1, 2, 2.1, 4, 20, 30, 4, 0]}) ddf = dd.from_pandas(df, npartitions=3) cvs = ds.Canvas(plot_width=7, plot_height=7, - x_range=(-3, 4), y_range=(-3, 4)) + x_range=(-3, 3), y_range=(-3, 3)) agg = cvs.line(ddf, 'x', 'y', ds.count()) sol = np.array([[0, 0, 1, 0, 1, 0, 0], [0, 1, 0, 0, 0, 1, 0], @@ -246,23 +246,23 @@ def test_line(): [1, 0, 0, 0, 0, 0, 1], [0, 2, 0, 0, 0, 1, 0], [0, 0, 1, 0, 1, 0, 0]], dtype='i4') - out = xr.DataArray(sol, coords=[np.arange(-3., 4.)+0.5, np.arange(-3., 4.)+0.5], + out = xr.DataArray(sol, coords=[lincoords, lincoords], dims=['y', 'x']) assert_eq(agg, out) def test_log_axis_line(): - # Upper bound for scale/index of x-axis - start, end = map(np.log10, (1, 11)) - s = 2/(end - start) - t = -start * s - px = np.arange(2)+0.5 - logcoords = 10**((px-t)/s) + axis = ds.core.LogAxis() + logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) + + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((0, 1), 2), 2) + sol = np.array([[5, 5], [5, 5]], dtype='i4') - out = xr.DataArray(sol, coords=[np.array([0.5, 1.5]), logcoords], + out = xr.DataArray(sol, coords=[lincoords, logcoords], dims=['y', 'log_x']) assert_eq(c_logx.line(ddf, 'log_x', 'y', ds.count('i32')), out) - out = xr.DataArray(sol, coords=[logcoords, np.array([0.5, 1.5])], + out = xr.DataArray(sol, coords=[logcoords, lincoords], 
dims=['log_y', 'x']) assert_eq(c_logy.line(ddf, 'x', 'log_y', ds.count('i32')), out) out = xr.DataArray(sol, coords=[logcoords, logcoords], diff --git a/datashader/tests/test_glyphs.py b/datashader/tests/test_glyphs.py index b99c746c9..087e6d3ea 100644 --- a/datashader/tests/test_glyphs.py +++ b/datashader/tests/test_glyphs.py @@ -10,14 +10,8 @@ def test_point_bounds_check(): df = pd.DataFrame({'x': [1, 2, 3], 'y': [5, 6, 7]}) p = Point('x', 'y') - - xmin, xmax = p._compute_x_bounds(df['x'].values) - assert xmin == 1 - assert xmax > 3 and np.isclose(xmax, 3) - - ymin, ymax = p._compute_y_bounds(df['y'].values) - assert ymin == 5 - assert ymax > 7 and np.isclose(ymax, 7) + assert p._compute_x_bounds(df['x'].values) == (1, 3) + assert p._compute_y_bounds(df['y'].values) == (5, 7) def test_point_validate(): diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index 666d68f77..ad48d8c9f 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -22,15 +22,17 @@ df.f32[2] = np.nan df.f64[2] = np.nan -c = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 2), y_range=(0, 2)) -c_logx = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 11), - y_range=(0, 2), x_axis_type='log') -c_logy = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 2), - y_range=(1, 11), y_axis_type='log') -c_logxy = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 11), - y_range=(1, 11), x_axis_type='log', y_axis_type='log') - -coords = [np.arange(2, dtype='f8')+0.5, np.arange(2, dtype='f8')+0.5] +c = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 1), y_range=(0, 1)) +c_logx = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 10), + y_range=(0, 1), x_axis_type='log') +c_logy = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 1), + y_range=(1, 10), y_axis_type='log') +c_logxy = ds.Canvas(plot_width=2, plot_height=2, x_range=(1, 10), + y_range=(1, 10), x_axis_type='log', y_axis_type='log') + +axis = ds.core.LinearAxis() +lincoords = axis.compute_index(axis.compute_scale_and_translate((0, 1), 2), 2) +coords = [lincoords, lincoords] dims = ['y', 'x'] @@ -156,29 +158,26 @@ def test_multiple_aggregates(): def test_auto_range_points(): + # Since the following tests use contiguous values of 32-bit or + # 64-bit floats, we need to adjust the theoretical expected results + # if we were using a 128-bit float or arbitrary precision float. n = 10 fs = list(itertools.islice(floats(1.0), n)) df = pd.DataFrame({'time': np.arange(n), 'x': fs, 'y': fs}) - # Expect continuous left-right diagonal cvs = ds.Canvas(plot_width=n, plot_height=n) agg = cvs.points(df, 'x', 'y', ds.count('time')) sol = np.zeros((n, n), int) np.fill_diagonal(sol, 1) np.testing.assert_equal(agg.data, sol) - # Expect continuous left-right diagonal w/ hole in middle cvs = ds.Canvas(plot_width=n+1, plot_height=n+1) agg = cvs.points(df, 'x', 'y', ds.count('time')) sol = np.zeros((n+1, n+1), int) np.fill_diagonal(sol, 1) - sol[5, 5] = 0 - # For 32-bit or 64-bit floats, the hole will be in the middle due to - # rounding errors. The hole will be in the lower-right corner for - # 128-bit float or arbitrary precision float. 
- # sol[n, n] = 0 + sol[5, 5] = 0 # adjustment np.testing.assert_equal(agg.data, sol) n = 4 @@ -187,37 +186,35 @@ def test_auto_range_points(): 'x': fs, 'y': fs}) - # Expect alternating left-right diagonal cvs = ds.Canvas(plot_width=2*n, plot_height=2*n) agg = cvs.points(df, 'x', 'y', ds.count('time')) sol = np.zeros((2*n, 2*n), int) np.fill_diagonal(sol, 1) sol[[range(1, 2*n, 2)]] = 0 + sol[6, 6] = 0 # adjustment np.testing.assert_equal(agg.data, sol) - # Expect alternating left-right diagonal with hole in lower-right - # corner cvs = ds.Canvas(plot_width=2*n+1, plot_height=2*n+1) agg = cvs.points(df, 'x', 'y', ds.count('time')) sol = np.zeros((2*n+1, 2*n+1), int) np.fill_diagonal(sol, 1) sol[[range(1, 2*n+1, 2)]] = 0 - sol[2*n, 2*n] = 0 + sol[4, 4] = 0 # adjustment np.testing.assert_equal(agg.data, sol) def test_log_axis_points(): - # Upper bound for scale/index of x-axis - start, end = map(np.log10, (1, 11)) - s = 2/(end - start) - t = -start * s - px = np.arange(2)+0.5 - logcoords = 10**((px-t)/s) + axis = ds.core.LogAxis() + logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) + + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((0, 1), 2), 2) + sol = np.array([[5, 5], [5, 5]], dtype='i4') - out = xr.DataArray(sol, coords=[np.array([0.5, 1.5]), logcoords], + out = xr.DataArray(sol, coords=[lincoords, logcoords], dims=['y', 'log_x']) assert_eq(c_logx.points(df, 'log_x', 'y', ds.count('i32')), out) - out = xr.DataArray(sol, coords=[logcoords, np.array([0.5, 1.5])], + out = xr.DataArray(sol, coords=[logcoords, lincoords], dims=['log_y', 'x']) assert_eq(c_logy.points(df, 'x', 'log_y', ds.count('i32')), out) out = xr.DataArray(sol, coords=[logcoords, logcoords], @@ -226,10 +223,13 @@ def test_log_axis_points(): def test_line(): + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((-3., 3.), 7), 7) + df = pd.DataFrame({'x': [4, 0, -4, -3, -2, -1.9, 0, 10, 10, 0, 4], 'y': [0, -4, 0, 1, 2, 2.1, 4, 20, 30, 4, 0]}) cvs = ds.Canvas(plot_width=7, plot_height=7, - x_range=(-3, 4), y_range=(-3, 4)) + x_range=(-3, 3), y_range=(-3, 3)) agg = cvs.line(df, 'x', 'y', ds.count()) sol = np.array([[0, 0, 1, 0, 1, 0, 0], [0, 1, 0, 0, 0, 1, 0], @@ -238,23 +238,23 @@ def test_line(): [1, 0, 0, 0, 0, 0, 1], [0, 2, 0, 0, 0, 1, 0], [0, 0, 1, 0, 1, 0, 0]], dtype='i4') - out = xr.DataArray(sol, coords=[np.arange(-3., 4.)+0.5, np.arange(-3., 4.)+0.5], + out = xr.DataArray(sol, coords=[lincoords, lincoords], dims=['y', 'x']) assert_eq(agg, out) def test_log_axis_line(): - # Upper bound for scale/index of x-axis - start, end = map(np.log10, (1, 11)) - s = 2/(end - start) - t = -start * s - px = np.arange(2)+0.5 - logcoords = 10**((px-t)/s) + axis = ds.core.LogAxis() + logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) + + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((0, 1), 2), 2) + sol = np.array([[5, 5], [5, 5]], dtype='i4') - out = xr.DataArray(sol, coords=[np.array([0.5, 1.5]), logcoords], + out = xr.DataArray(sol, coords=[lincoords, logcoords], dims=['y', 'log_x']) assert_eq(c_logx.line(df, 'log_x', 'y', ds.count('i32')), out) - out = xr.DataArray(sol, coords=[logcoords, np.array([0.5, 1.5])], + out = xr.DataArray(sol, coords=[logcoords, lincoords], dims=['log_y', 'x']) assert_eq(c_logy.line(df, 'x', 'log_y', ds.count('i32')), out) out = xr.DataArray(sol, coords=[logcoords, logcoords], diff --git a/datashader/utils.py 
b/datashader/utils.py index 283fe834a..4dd187e3e 100644 --- a/datashader/utils.py +++ b/datashader/utils.py @@ -352,9 +352,3 @@ def dshape_from_dask(df): categoricals_in_dtypes = np.vectorize(lambda dtype: dtype.name == 'category', otypes='?') def categorical_in_dtypes(dtype_arr): return categoricals_in_dtypes(dtype_arr).any() - - -def exclusive_range(bounds): - """Expand bounds to include upper bound value.""" - lower, upper = bounds - return (lower, upper + np.spacing(upper)) From af57cbcde3f0d9b97d670cab55d8a52a4414bc1d Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Tue, 26 Sep 2017 18:14:14 -0700 Subject: [PATCH 06/21] Revert whitespace updates --- datashader/dask.py | 3 +-- datashader/glyphs.py | 6 ++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/datashader/dask.py b/datashader/dask.py index 3d72feeac..776f45fd9 100644 --- a/datashader/dask.py +++ b/datashader/dask.py @@ -28,8 +28,7 @@ def shape_bounds_st_and_axis(df, canvas, glyph): x_range = canvas.x_range or glyph._compute_x_bounds_dask(df) y_range = canvas.y_range or glyph._compute_y_bounds_dask(df) x_min, x_max, y_min, y_max = bounds = compute(*(x_range + y_range)) - x_range = (x_min, x_max) - y_range = (y_min, y_max) + x_range, y_range = (x_min, x_max), (y_min, y_max) width = canvas.plot_width height = canvas.plot_height diff --git a/datashader/glyphs.py b/datashader/glyphs.py index 2f2771673..cc3017e4b 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -62,8 +62,7 @@ def _compute_x_bounds_dask(self, df): ``df`` is immutable/hashable (a Dask dataframe). """ xs = df[self.x].values - minval, maxval = np.nanmin(xs), np.nanmax(xs) - return minval, maxval + return np.nanmin(xs), np.nanmax(xs) @memoize def _compute_y_bounds_dask(self, df): @@ -71,8 +70,7 @@ def _compute_y_bounds_dask(self, df): ``df`` is immutable/hashable (a Dask dataframe). 
""" ys = df[self.y].values - minval, maxval = np.nanmin(ys), np.nanmax(ys) - return minval, maxval + return np.nanmin(ys), np.nanmax(ys) class Point(_PointLike): From b323b1202b297a38632f833a736975f776739511 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Tue, 26 Sep 2017 21:17:04 -0700 Subject: [PATCH 07/21] Add auto range tests for line glyph --- datashader/tests/test_dask.py | 19 +++++++++++++++++++ datashader/tests/test_pandas.py | 18 ++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/datashader/tests/test_dask.py b/datashader/tests/test_dask.py index 788f2a4ed..0c54ca31a 100644 --- a/datashader/tests/test_dask.py +++ b/datashader/tests/test_dask.py @@ -268,3 +268,22 @@ def test_log_axis_line(): out = xr.DataArray(sol, coords=[logcoords, logcoords], dims=['log_y', 'log_x']) assert_eq(c_logxy.line(ddf, 'log_x', 'log_y', ds.count('i32')), out) + + +def test_auto_range_line(): + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((-10., 10.), 5), 5) + + df = pd.DataFrame({'x': [-10, 0, 10, 0, -10], + 'y': [ 0, 10, 0, -10, 0]}) + ddf = dd.from_pandas(df, npartitions=3) + cvs = ds.Canvas(plot_width=5, plot_height=5) + agg = cvs.line(ddf, 'x', 'y', ds.count()) + sol = np.array([[0, 0, 1, 0, 0], + [0, 1, 0, 1, 0], + [2, 0, 0, 0, 1], + [0, 1, 0, 1, 0], + [0, 0, 1, 0, 0]], dtype='i4') + out = xr.DataArray(sol, coords=[lincoords, lincoords], + dims=['y', 'x']) + assert_eq(agg, out) diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index ad48d8c9f..936eb84ac 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -260,3 +260,21 @@ def test_log_axis_line(): out = xr.DataArray(sol, coords=[logcoords, logcoords], dims=['log_y', 'log_x']) assert_eq(c_logxy.line(df, 'log_x', 'log_y', ds.count('i32')), out) + + +def test_auto_range_line(): + axis = ds.core.LinearAxis() + lincoords = axis.compute_index(axis.compute_scale_and_translate((-10., 10.), 5), 5) + + df = pd.DataFrame({'x': [-10, 0, 10, 0, -10], + 'y': [ 0, 10, 0, -10, 0]}) + cvs = ds.Canvas(plot_width=5, plot_height=5) + agg = cvs.line(df, 'x', 'y', ds.count()) + sol = np.array([[0, 0, 1, 0, 0], + [0, 1, 0, 1, 0], + [2, 0, 0, 0, 1], + [0, 1, 0, 1, 0], + [0, 0, 1, 0, 0]], dtype='i4') + out = xr.DataArray(sol, coords=[lincoords, lincoords], + dims=['y', 'x']) + assert_eq(agg, out) From 3ba7ce9cc08ef0600ba9e6e124f2fc48a67e84b4 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Wed, 27 Sep 2017 13:52:10 -0700 Subject: [PATCH 08/21] Add tests for uniform distribution of points --- datashader/tests/test_dask.py | 12 ++++++++++++ datashader/tests/test_pandas.py | 13 +++++++++++++ 2 files changed, 25 insertions(+) diff --git a/datashader/tests/test_dask.py b/datashader/tests/test_dask.py index 0c54ca31a..1b650ac84 100644 --- a/datashader/tests/test_dask.py +++ b/datashader/tests/test_dask.py @@ -210,6 +210,18 @@ def test_auto_range_points(): np.testing.assert_equal(agg.data, sol) +def test_uniform_points(): + n = 101 + df = pd.DataFrame({'time': np.ones(2*n, dtype='i4'), + 'x': np.concatenate((np.arange(n, dtype='f8'), + np.arange(n, dtype='f8'))), + 'y': np.concatenate(([0.] * n, [1.] 
* n))}) + + cvs = ds.Canvas(plot_width=10, plot_height=2, y_range=(0, 1)) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + sol = np.array([[10] * 9 + [11], [10] * 9 + [11]], dtype='i4') + np.testing.assert_equal(agg.data, sol) + def test_log_axis_points(): axis = ds.core.LogAxis() logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index 936eb84ac..41c5b2eaa 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -203,6 +203,19 @@ def test_auto_range_points(): np.testing.assert_equal(agg.data, sol) +def test_uniform_points(): + n = 101 + df = pd.DataFrame({'time': np.ones(2*n, dtype='i4'), + 'x': np.concatenate((np.arange(n, dtype='f8'), + np.arange(n, dtype='f8'))), + 'y': np.concatenate(([0.] * n, [1.] * n))}) + + cvs = ds.Canvas(plot_width=10, plot_height=2, y_range=(0, 1)) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + sol = np.array([[10] * 9 + [11], [10] * 9 + [11]], dtype='i4') + np.testing.assert_equal(agg.data, sol) + + def test_log_axis_points(): axis = ds.core.LogAxis() logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) From 245ae5be75abc2fd32f7eb0c5817e80dc3c8a2e1 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Wed, 27 Sep 2017 17:57:46 -0700 Subject: [PATCH 09/21] Revert exclusive range adjustments --- datashader/core.py | 2 +- datashader/glyphs.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datashader/core.py b/datashader/core.py index 8456d09f5..0ed31c5a0 100644 --- a/datashader/core.py +++ b/datashader/core.py @@ -63,7 +63,7 @@ def compute_scale_and_translate(self, range, n): s, t : floats """ start, end = map(self.mapper, range) - s = (n-1)/(end - start) + s = n/(end - start) t = -start * s return s, t diff --git a/datashader/glyphs.py b/datashader/glyphs.py index cc3017e4b..fb9cb1f4e 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -95,7 +95,7 @@ def _extend(vt, bounds, xs, ys, *aggs_and_cols): for i in range(xs.shape[0]): x = xs[i] y = ys[i] - if (xmin <= x <= xmax) and (ymin <= y <= ymax): + if (xmin <= x < xmax) and (ymin <= y < ymax): append(i, int(x_mapper(x) * sx + tx), int(y_mapper(y) * sy + ty), @@ -130,7 +130,7 @@ def extend(aggs, df, vt, bounds, plot_start=True): # Scale/transform float bounds to integer space and adjust for # exclusive upper bounds xmin, xmax, ymin, ymax = map_onto_pixel(vt, *bounds) - mapped_bounds = (xmin, xmax, ymin, ymax) + mapped_bounds = (xmin, xmax - 1, ymin, ymax - 1) xs = df[x_name].values ys = df[y_name].values From eb3051795998cbdfe9965e3afa4ab6ccebec7ba4 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Thu, 28 Sep 2017 14:53:47 -0700 Subject: [PATCH 10/21] Update point glyph to use fully inclusive range --- datashader/glyphs.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/datashader/glyphs.py b/datashader/glyphs.py index fb9cb1f4e..71aad8df6 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -92,14 +92,21 @@ def _build_extend(self, x_mapper, y_mapper, info, append): def _extend(vt, bounds, xs, ys, *aggs_and_cols): sx, tx, sy, ty = vt xmin, xmax, ymin, ymax = bounds + + def map_onto_pixel(x, y): + xx, yy = x_mapper(x) * sx + tx, y_mapper(y) * sy + ty + if x == xmax: + xx -= np.spacing(xx) + if y == ymax: + yy -= np.spacing(yy) + return int(xx), int(yy) + for i in range(xs.shape[0]): x = xs[i] y = ys[i] - if (xmin <= x < xmax) and (ymin <= y < ymax): - append(i, - 
int(x_mapper(x) * sx + tx), - int(y_mapper(y) * sy + ty), - *aggs_and_cols) + if (xmin <= x <= xmax) and (ymin <= y <= ymax): + xi, yi = map_onto_pixel(x, y) + append(i, xi, yi, *aggs_and_cols) def extend(aggs, df, vt, bounds): xs = df[x_name].values From 5e369159b7db810da30d0300b9f119b6392811d4 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Thu, 28 Sep 2017 14:57:00 -0700 Subject: [PATCH 11/21] Update line glyph to use fully inclusive range I also fixed extend_line to only calculate with floats until drawing the line if accepted. We were previously juggling floats and their associated integer values. This caused incorrect mapping to the grid. --- datashader/glyphs.py | 81 +++++++++++++++++++++----------------------- 1 file changed, 38 insertions(+), 43 deletions(-) diff --git a/datashader/glyphs.py b/datashader/glyphs.py index 71aad8df6..c39c0a470 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -134,16 +134,10 @@ def _build_extend(self, x_mapper, y_mapper, info, append): y_name = self.y def extend(aggs, df, vt, bounds, plot_start=True): - # Scale/transform float bounds to integer space and adjust for - # exclusive upper bounds - xmin, xmax, ymin, ymax = map_onto_pixel(vt, *bounds) - mapped_bounds = (xmin, xmax - 1, ymin, ymax - 1) - xs = df[x_name].values ys = df[y_name].values cols = aggs + info(df) - - extend_line(vt, bounds, mapped_bounds, xs, ys, plot_start, *cols) + extend_line(vt, bounds, xs, ys, plot_start, *cols) return extend @@ -176,13 +170,16 @@ def _compute_outcode(x, y, xmin, xmax, ymin, ymax): def _build_map_onto_pixel(x_mapper, y_mapper): @ngjit - def map_onto_pixel(vt, x0, x1, y0, y1): + def map_onto_pixel(vt, bounds, x, y): """Map points onto pixel grid""" sx, tx, sy, ty = vt - return (int(x_mapper(x0) * sx + tx), - int(x_mapper(x1) * sx + tx), - int(y_mapper(y0) * sy + ty), - int(y_mapper(y1) * sy + ty)) + _, xmax, _, ymax = bounds + xx, yy = x_mapper(x) * sx + tx, y_mapper(y) * sy + ty + if x == xmax: + xx -= np.spacing(xx) + if y == ymax: + yy -= np.spacing(yy) + return int(xx), int(yy) return map_onto_pixel @@ -241,9 +238,9 @@ def draw_line(x0i, y0i, x1i, y1i, i, plot_start, clipped, *aggs_and_cols): def _build_extend_line(draw_line, map_onto_pixel): @ngjit - def extend_line(vt, bounds, mapped_bounds, xs, ys, plot_start, *aggs_and_cols): + def extend_line(vt, bounds, xs, ys, plot_start, *aggs_and_cols): """Aggregate along a line formed by ``xs`` and ``ys``""" - xmin, xmax, ymin, ymax = mapped_bounds + xmin, xmax, ymin, ymax = bounds nrows = xs.shape[0] i = 0 while i < nrows - 1: @@ -259,11 +256,9 @@ def extend_line(vt, bounds, mapped_bounds, xs, ys, plot_start, *aggs_and_cols): i += 1 continue - x0i, x1i, y0i, y1i = map_onto_pixel(vt, x0, x1, y0, y1) - # Use Cohen-Sutherland to clip the segment to a bounding box - outcode0 = _compute_outcode(x0i, y0i, xmin, xmax, ymin, ymax) - outcode1 = _compute_outcode(x1i, y1i, xmin, xmax, ymin, ymax) + outcode0 = _compute_outcode(x0, y0, xmin, xmax, ymin, ymax) + outcode1 = _compute_outcode(x1, y1, xmin, xmax, ymin, ymax) accept = False clipped = False @@ -275,34 +270,34 @@ def extend_line(vt, bounds, mapped_bounds, xs, ys, plot_start, *aggs_and_cols): elif outcode0 & outcode1: plot_start = True break + + clipped = True + outcode_out = outcode0 if outcode0 else outcode1 + if outcode_out & TOP: + x = x0 + (x1 - x0) * (ymax - y0) / (y1 - y0) + y = ymax + elif outcode_out & BOTTOM: + x = x0 + (x1 - x0) * (ymin - y0) / (y1 - y0) + y = ymin + elif outcode_out & RIGHT: + y = y0 + (y1 - y0) * (xmax - x0) / (x1 - x0) + 
x = xmax + elif outcode_out & LEFT: + y = y0 + (y1 - y0) * (xmin - x0) / (x1 - x0) + x = xmin + + if outcode_out == outcode0: + x0, y0 = x, y + outcode0 = _compute_outcode(x0, y0, xmin, xmax, ymin, ymax) + # If x0i is clipped, we need to plot the new start + plot_start = True else: - clipped = True - outcode_out = outcode0 if outcode0 else outcode1 - if outcode_out & TOP: - x = x0i + int((x1i - x0i) * (ymax - y0i) / (y1i - y0i)) - y = ymax - elif outcode_out & BOTTOM: - x = x0i + int((x1i - x0i) * (ymin - y0i) / (y1i - y0i)) - y = ymin - elif outcode_out & RIGHT: - y = y0i + int((y1i - y0i) * (xmax - x0i) / (x1i - x0i)) - x = xmax - elif outcode_out & LEFT: - y = y0i + int((y1i - y0i) * (xmin - x0i) / (x1i - x0i)) - x = xmin - - if outcode_out == outcode0: - x0i, y0i = x, y - outcode0 = _compute_outcode(x0i, y0i, xmin, xmax, - ymin, ymax) - # If x0i is clipped, we need to plot the new start - plot_start = True - else: - x1i, y1i = x, y - outcode1 = _compute_outcode(x1i, y1i, xmin, xmax, - ymin, ymax) + x1, y1 = x, y + outcode1 = _compute_outcode(x1, y1, xmin, xmax, ymin, ymax) if accept: + x0i, y0i = map_onto_pixel(vt, bounds, x0, y0) + x1i, y1i = map_onto_pixel(vt, bounds, x1, y1) draw_line(x0i, y0i, x1i, y1i, i, plot_start, clipped, *aggs_and_cols) plot_start = False i += 1 From 6051cd26c3007807b94bdd7e3bde17d144dc9cf3 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Thu, 28 Sep 2017 14:59:04 -0700 Subject: [PATCH 12/21] Update docs --- datashader/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datashader/core.py b/datashader/core.py index 0ed31c5a0..12395b056 100644 --- a/datashader/core.py +++ b/datashader/core.py @@ -54,7 +54,7 @@ def compute_scale_and_translate(self, range, n): ---------- range : tuple A tuple representing the range ``[min, max]`` along the axis, in - data space. min is inclusive and max is exclusive. + data space. both min and max are inclusive. n : int The number of bins along the axis. From 6ac43a661bc7aec781bb988060a8d4a8b0207983 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Thu, 28 Sep 2017 14:59:19 -0700 Subject: [PATCH 13/21] Update tests --- datashader/tests/test_dask.py | 28 +++++++++++++--------------- datashader/tests/test_pandas.py | 29 +++++++++++++---------------- 2 files changed, 26 insertions(+), 31 deletions(-) diff --git a/datashader/tests/test_dask.py b/datashader/tests/test_dask.py index 1b650ac84..501550eb9 100644 --- a/datashader/tests/test_dask.py +++ b/datashader/tests/test_dask.py @@ -163,14 +163,11 @@ def test_multiple_aggregates(): def test_auto_range_points(): - # Since the following tests use contiguous values of 32-bit or - # 64-bit floats, we need to adjust the theoretical expected results - # if we were using a 128-bit float or arbitrary precision float. 
n = 10 - fs = list(itertools.islice(floats(1.0), n)) + data = np.arange(n, dtype='i4') df = pd.DataFrame({'time': np.arange(n), - 'x': fs, - 'y': fs}) + 'x': data, + 'y': data}) ddf = dd.from_pandas(df, npartitions=3) cvs = ds.Canvas(plot_width=n, plot_height=n) @@ -183,30 +180,31 @@ def test_auto_range_points(): agg = cvs.points(ddf, 'x', 'y', ds.count('time')) sol = np.zeros((n+1, n+1), int) np.fill_diagonal(sol, 1) - sol[5, 5] = 0 # adjustment + sol[5, 5] = 0 np.testing.assert_equal(agg.data, sol) n = 4 - fs = list(itertools.islice(floats(1.0), n)) + data = np.arange(n, dtype='i4') df = pd.DataFrame({'time': np.arange(n), - 'x': fs, - 'y': fs}) + 'x': data, + 'y': data}) ddf = dd.from_pandas(df, npartitions=3) cvs = ds.Canvas(plot_width=2*n, plot_height=2*n) agg = cvs.points(ddf, 'x', 'y', ds.count('time')) sol = np.zeros((2*n, 2*n), int) np.fill_diagonal(sol, 1) - sol[[range(1, 2*n, 2)]] = 0 - sol[6, 6] = 0 # adjustment + sol[[range(1, 4, 2)]] = 0 + sol[[range(4, 8, 2)]] = 0 np.testing.assert_equal(agg.data, sol) cvs = ds.Canvas(plot_width=2*n+1, plot_height=2*n+1) agg = cvs.points(ddf, 'x', 'y', ds.count('time')) sol = np.zeros((2*n+1, 2*n+1), int) - np.fill_diagonal(sol, 1) - sol[[range(1, 2*n+1, 2)]] = 0 - sol[4, 4] = 0 # adjustment + sol[0, 0] = 1 + sol[3, 3] = 1 + sol[6, 6] = 1 + sol[8, 8] = 1 np.testing.assert_equal(agg.data, sol) diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index 41c5b2eaa..af671178d 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -7,7 +7,6 @@ import datashader as ds - df = pd.DataFrame({'x': np.array(([0.] * 10 + [1] * 10)), 'y': np.array(([0.] * 5 + [1] * 5 + [0] * 5 + [1] * 5)), 'log_x': np.array(([1.] * 10 + [10] * 10)), @@ -158,14 +157,11 @@ def test_multiple_aggregates(): def test_auto_range_points(): - # Since the following tests use contiguous values of 32-bit or - # 64-bit floats, we need to adjust the theoretical expected results - # if we were using a 128-bit float or arbitrary precision float. 
n = 10 - fs = list(itertools.islice(floats(1.0), n)) + data = np.arange(n, dtype='i4') df = pd.DataFrame({'time': np.arange(n), - 'x': fs, - 'y': fs}) + 'x': data, + 'y': data}) cvs = ds.Canvas(plot_width=n, plot_height=n) agg = cvs.points(df, 'x', 'y', ds.count('time')) @@ -177,29 +173,30 @@ def test_auto_range_points(): agg = cvs.points(df, 'x', 'y', ds.count('time')) sol = np.zeros((n+1, n+1), int) np.fill_diagonal(sol, 1) - sol[5, 5] = 0 # adjustment + sol[5, 5] = 0 np.testing.assert_equal(agg.data, sol) n = 4 - fs = list(itertools.islice(floats(1.0), n)) + data = np.arange(n, dtype='i4') df = pd.DataFrame({'time': np.arange(n), - 'x': fs, - 'y': fs}) + 'x': data, + 'y': data}) cvs = ds.Canvas(plot_width=2*n, plot_height=2*n) agg = cvs.points(df, 'x', 'y', ds.count('time')) sol = np.zeros((2*n, 2*n), int) np.fill_diagonal(sol, 1) - sol[[range(1, 2*n, 2)]] = 0 - sol[6, 6] = 0 # adjustment + sol[[range(1, 4, 2)]] = 0 + sol[[range(4, 8, 2)]] = 0 np.testing.assert_equal(agg.data, sol) cvs = ds.Canvas(plot_width=2*n+1, plot_height=2*n+1) agg = cvs.points(df, 'x', 'y', ds.count('time')) sol = np.zeros((2*n+1, 2*n+1), int) - np.fill_diagonal(sol, 1) - sol[[range(1, 2*n+1, 2)]] = 0 - sol[4, 4] = 0 # adjustment + sol[0, 0] = 1 + sol[3, 3] = 1 + sol[6, 6] = 1 + sol[8, 8] = 1 np.testing.assert_equal(agg.data, sol) From 54ac722e12a9489cb4c014803af3112406460833 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Thu, 28 Sep 2017 16:56:29 -0700 Subject: [PATCH 14/21] Update comment --- datashader/glyphs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datashader/glyphs.py b/datashader/glyphs.py index c39c0a470..cac18f92d 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -289,7 +289,7 @@ def extend_line(vt, bounds, xs, ys, plot_start, *aggs_and_cols): if outcode_out == outcode0: x0, y0 = x, y outcode0 = _compute_outcode(x0, y0, xmin, xmax, ymin, ymax) - # If x0i is clipped, we need to plot the new start + # If x0 is clipped, we need to plot the new start plot_start = True else: x1, y1 = x, y From 343953c95331caaf95f9cfdbb319c69e7ffe1af1 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Thu, 28 Sep 2017 16:57:15 -0700 Subject: [PATCH 15/21] Fix glyph tests --- datashader/tests/test_glyphs.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/datashader/tests/test_glyphs.py b/datashader/tests/test_glyphs.py index 087e6d3ea..0eb97665c 100644 --- a/datashader/tests/test_glyphs.py +++ b/datashader/tests/test_glyphs.py @@ -37,8 +37,6 @@ def new_agg(): bounds = (-3, 1, -3, 1) vt = (1., 3., 1., 3.) 
-xmin, xmax, ymin, ymax = map_onto_pixel(vt, *bounds) -mbounds = (xmin, xmax - 1, ymin, ymax - 1) def test_draw_line(): @@ -133,23 +131,23 @@ def test_extend_lines(): [0, 1, 0, 1, 0], [0, 0, 0, 0, 0]]) agg = new_agg() - extend_line(vt, bounds, mbounds, xs, ys, False, agg) + extend_line(vt, bounds, xs, ys, False, agg) np.testing.assert_equal(agg, out) # plot_start = True out[2, 3] += 1 agg = new_agg() - extend_line(vt, bounds, mbounds, xs, ys, True, agg) + extend_line(vt, bounds, xs, ys, True, agg) np.testing.assert_equal(agg, out) xs = np.array([2, 1, 0, -1, -4, -1, -100, -1, 2]) ys = np.array([-1, -2, -3, -4, -1, 2, 100, 2, -1]) out = np.array([[0, 1, 0, 1, 0], - [1, 0, 0, 0, 0], + [1, 0, 0, 1, 0], [0, 0, 0, 0, 0], - [1, 0, 0, 0, 0], + [1, 1, 0, 1, 0], [0, 0, 0, 0, 0]]) agg = new_agg() - extend_line(vt, bounds, mbounds, xs, ys, True, agg) + extend_line(vt, bounds, xs, ys, True, agg) np.testing.assert_equal(agg, out) @@ -157,7 +155,7 @@ def test_extend_lines_all_out_of_bounds(): xs = np.array([-100, -200, -100]) ys = np.array([0, 0, 1]) agg = new_agg() - extend_line(vt, bounds, mbounds, xs, ys, True, agg) + extend_line(vt, bounds, xs, ys, True, agg) assert agg.sum() == 0 @@ -165,6 +163,6 @@ def test_extend_lines_nan(): xs = np.array([-3, -2, np.nan, 0, 1]) ys = np.array([-3, -2, np.nan, 0, 1]) agg = new_agg() - extend_line(vt, bounds, mbounds, xs, ys, True, agg) - out = np.diag([1, 1, 0, 1, 0]) + extend_line(vt, bounds, xs, ys, True, agg) + out = np.diag([1, 1, 0, 2, 0]) np.testing.assert_equal(agg, out) From 025ab072085c0362638e82eb5d2b39aecd21daa3 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Thu, 28 Sep 2017 17:14:02 -0700 Subject: [PATCH 16/21] Remove unused module --- datashader/tests/test_dask.py | 2 -- datashader/tests/test_pandas.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/datashader/tests/test_dask.py b/datashader/tests/test_dask.py index 501550eb9..300920a45 100644 --- a/datashader/tests/test_dask.py +++ b/datashader/tests/test_dask.py @@ -1,5 +1,3 @@ -import itertools - from dask.local import get_sync from dask.context import set_options import dask.dataframe as dd diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index af671178d..22f37fc8d 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -1,5 +1,3 @@ -import itertools - import numpy as np import pandas as pd import xarray as xr From 8df659946929d6b9a27c4791ddd41019b4fd23d7 Mon Sep 17 00:00:00 2001 From: "James A. Bednar" Date: Fri, 29 Sep 2017 12:26:35 -0500 Subject: [PATCH 17/21] Fixed typo --- datashader/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datashader/core.py b/datashader/core.py index 12395b056..b919aaa76 100644 --- a/datashader/core.py +++ b/datashader/core.py @@ -54,7 +54,7 @@ def compute_scale_and_translate(self, range, n): ---------- range : tuple A tuple representing the range ``[min, max]`` along the axis, in - data space. both min and max are inclusive. + data space. Both min and max are inclusive. n : int The number of bins along the axis. 
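
For reference, the inclusive-bounds behaviour these commits converge on can be summarised
in a few lines of standalone Python. This is only a sketch: it assumes the linear axis
(identity mapper), and the helper names scale_translate and to_pixel are illustrative
rather than datashader's own API.

    def scale_translate(lo, hi, n):
        # Same arithmetic as compute_scale_and_translate after the revert:
        # n bins spanning the inclusive data range [lo, hi].
        s = n / (hi - lo)
        t = -lo * s
        return s, t

    def to_pixel(x, lo, hi, s, t):
        # A value landing exactly on the upper bound is folded into the
        # last bin instead of being cropped off the grid.
        i = int(x * s + t)
        return i - 1 if x == hi else i

    s, t = scale_translate(0.0, 1.0, 2)        # two bins over [0, 1]
    assert to_pixel(0.0, 0.0, 1.0, s, t) == 0  # lower bound -> first bin
    assert to_pixel(0.5, 0.0, 1.0, s, t) == 1  # interior value -> second bin
    assert to_pixel(1.0, 0.0, 1.0, s, t) == 1  # upper bound kept, not dropped
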
From 90aa4cd501aef8e05622b30cd1b717181c2b6175 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Fri, 29 Sep 2017 17:31:08 -0700 Subject: [PATCH 18/21] Add more tests for uniform points --- datashader/tests/test_dask.py | 24 ++++++++++++++++++++++++ datashader/tests/test_pandas.py | 23 +++++++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/datashader/tests/test_dask.py b/datashader/tests/test_dask.py index 300920a45..461327c4b 100644 --- a/datashader/tests/test_dask.py +++ b/datashader/tests/test_dask.py @@ -7,6 +7,8 @@ import datashader as ds +import pytest + set_options(get=get_sync) df = pd.DataFrame({'x': np.array(([0.] * 10 + [1] * 10)), @@ -218,6 +220,28 @@ def test_uniform_points(): sol = np.array([[10] * 9 + [11], [10] * 9 + [11]], dtype='i4') np.testing.assert_equal(agg.data, sol) + +@pytest.mark.parametrize('high', [9, 10, 99, 100]) +@pytest.mark.parametrize('low', [0]) +def test_uniform_diagonal_points(low, high): + bounds = (low, high) + x_range, y_range = bounds, bounds + + width = x_range[1] - x_range[0] + height = y_range[1] - y_range[0] + n = width * height + df = pd.DataFrame({'time': np.ones(n, dtype='i4'), + 'x': np.array([np.arange(*x_range, dtype='f8')] * width).flatten(), + 'y': np.array([np.arange(*y_range, dtype='f8')] * height).flatten()}) + + cvs = ds.Canvas(plot_width=2, plot_height=2, x_range=x_range, y_range=y_range) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + + diagonal = agg.data.diagonal(0) + assert sum(diagonal) == n + assert abs(bounds[1] - bounds[0]) % 2 == abs(diagonal[1] / high - diagonal[0] / high) + + def test_log_axis_points(): axis = ds.core.LogAxis() logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py index 22f37fc8d..313114639 100644 --- a/datashader/tests/test_pandas.py +++ b/datashader/tests/test_pandas.py @@ -4,6 +4,8 @@ import datashader as ds +import pytest + df = pd.DataFrame({'x': np.array(([0.] * 10 + [1] * 10)), 'y': np.array(([0.] 
* 5 + [1] * 5 + [0] * 5 + [1] * 5)), @@ -211,6 +213,27 @@ def test_uniform_points(): np.testing.assert_equal(agg.data, sol) +@pytest.mark.parametrize('high', [9, 10, 99, 100]) +@pytest.mark.parametrize('low', [0]) +def test_uniform_diagonal_points(low, high): + bounds = (low, high) + x_range, y_range = bounds, bounds + + width = x_range[1] - x_range[0] + height = y_range[1] - y_range[0] + n = width * height + df = pd.DataFrame({'time': np.ones(n, dtype='i4'), + 'x': np.array([np.arange(*x_range, dtype='f8')] * width).flatten(), + 'y': np.array([np.arange(*y_range, dtype='f8')] * height).flatten()}) + + cvs = ds.Canvas(plot_width=2, plot_height=2, x_range=x_range, y_range=y_range) + agg = cvs.points(df, 'x', 'y', ds.count('time')) + + diagonal = agg.data.diagonal(0) + assert sum(diagonal) == n + assert abs(bounds[1] - bounds[0]) % 2 == abs(diagonal[1] / high - diagonal[0] / high) + + def test_log_axis_points(): axis = ds.core.LogAxis() logcoords = axis.compute_index(axis.compute_scale_and_translate((1, 10), 2), 2) From 8c8b9f6771c02401cc2ea3c90fbbe1b6e5bb29e4 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Fri, 29 Sep 2017 17:31:25 -0700 Subject: [PATCH 19/21] Simplify mapping to pixels --- datashader/glyphs.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/datashader/glyphs.py b/datashader/glyphs.py index cac18f92d..5462e3020 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -94,12 +94,10 @@ def _extend(vt, bounds, xs, ys, *aggs_and_cols): xmin, xmax, ymin, ymax = bounds def map_onto_pixel(x, y): - xx, yy = x_mapper(x) * sx + tx, y_mapper(y) * sy + ty - if x == xmax: - xx -= np.spacing(xx) - if y == ymax: - yy -= np.spacing(yy) - return int(xx), int(yy) + xx = int(x_mapper(x) * sx + tx) + yy = int(y_mapper(y) * sy + ty) + return (xx - 1 if x == xmax else xx, + yy - 1 if y == ymax else yy) for i in range(xs.shape[0]): x = xs[i] @@ -174,12 +172,10 @@ def map_onto_pixel(vt, bounds, x, y): """Map points onto pixel grid""" sx, tx, sy, ty = vt _, xmax, _, ymax = bounds - xx, yy = x_mapper(x) * sx + tx, y_mapper(y) * sy + ty - if x == xmax: - xx -= np.spacing(xx) - if y == ymax: - yy -= np.spacing(yy) - return int(xx), int(yy) + xx = int(x_mapper(x) * sx + tx) + yy = int(y_mapper(y) * sy + ty) + return (xx - 1 if x == xmax else xx, + yy - 1 if y == ymax else yy) return map_onto_pixel From 72f6851e5b47636634de6cd7f30012783f12683a Mon Sep 17 00:00:00 2001 From: "James A. Bednar" Date: Sat, 30 Sep 2017 06:55:14 -0500 Subject: [PATCH 20/21] Added comment --- datashader/glyphs.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/datashader/glyphs.py b/datashader/glyphs.py index 5462e3020..ea26a6638 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -77,6 +77,8 @@ class Point(_PointLike): """A point, with center at ``x`` and ``y``. Points map each record to a single bin. + Points falling exactly on the upper bounds treated as are a special case, + mapping into the previous bin rather than being cropped off. 
Parameters ---------- @@ -96,6 +98,7 @@ def _extend(vt, bounds, xs, ys, *aggs_and_cols): def map_onto_pixel(x, y): xx = int(x_mapper(x) * sx + tx) yy = int(y_mapper(y) * sy + ty) + # Points falling on upper bound are mapped into previous bin return (xx - 1 if x == xmax else xx, yy - 1 if y == ymax else yy) @@ -174,6 +177,7 @@ def map_onto_pixel(vt, bounds, x, y): _, xmax, _, ymax = bounds xx = int(x_mapper(x) * sx + tx) yy = int(y_mapper(y) * sy + ty) + # Points falling on upper bound are mapped into previous bin return (xx - 1 if x == xmax else xx, yy - 1 if y == ymax else yy) From 43bf9bc904cea2f8e85d69c7ae8ea5cb80f0a2e5 Mon Sep 17 00:00:00 2001 From: "James A. Bednar" Date: Sat, 30 Sep 2017 06:56:03 -0500 Subject: [PATCH 21/21] Fixed typo in comment --- datashader/glyphs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datashader/glyphs.py b/datashader/glyphs.py index ea26a6638..bb1a3c517 100644 --- a/datashader/glyphs.py +++ b/datashader/glyphs.py @@ -77,7 +77,7 @@ class Point(_PointLike): """A point, with center at ``x`` and ``y``. Points map each record to a single bin. - Points falling exactly on the upper bounds treated as are a special case, + Points falling exactly on the upper bounds are treated as a special case, mapping into the previous bin rather than being cropped off. Parameters
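
Taken together, the series makes auto-ranged aggregation keep the point sitting on the
maximum of the data range instead of silently cropping it. A short usage sketch of the
behaviour the updated test_auto_range_points locks in (illustrative values, pandas only):

    import numpy as np
    import pandas as pd
    import datashader as ds

    n = 10
    data = np.arange(n, dtype='i4')
    df = pd.DataFrame({'time': np.arange(n), 'x': data, 'y': data})

    # No x_range/y_range given, so the canvas auto-ranges from the data.
    cvs = ds.Canvas(plot_width=n, plot_height=n)
    agg = cvs.points(df, 'x', 'y', ds.count('time'))

    # With fully inclusive bounds the last point lands in the last bin,
    # giving an unbroken main diagonal rather than a cropped corner.
    sol = np.zeros((n, n), int)
    np.fill_diagonal(sol, 1)
    np.testing.assert_equal(agg.data, sol)
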