diff --git a/pygeofilter/parsers/cql2_json/__init__.py b/pygeofilter/parsers/cql2_json/__init__.py new file mode 100644 index 0000000..e5afb0e --- /dev/null +++ b/pygeofilter/parsers/cql2_json/__init__.py @@ -0,0 +1,3 @@ +from .parser import parse + +__all__ = ['parse'] diff --git a/pygeofilter/parsers/cql2_json/parser.py b/pygeofilter/parsers/cql2_json/parser.py new file mode 100644 index 0000000..a168261 --- /dev/null +++ b/pygeofilter/parsers/cql2_json/parser.py @@ -0,0 +1,237 @@ +# ------------------------------------------------------------------------------ +# +# Project: pygeofilter +# Authors: Fabian Schindler +# +# ------------------------------------------------------------------------------ +# Copyright (C) 2021 EOX IT Services GmbH +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies of this Software or works derived from this Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# ------------------------------------------------------------------------------ + +from typing import Union +import json + +from ... import ast +from ... 
import values +from ... util import parse_datetime, parse_date, parse_duration + +# https://github.com/opengeospatial/ogcapi-features/tree/master/cql2 + + +COMPARISON_MAP = { + 'eq': ast.Equal, + '=': ast.Equal, + 'ne': ast.NotEqual, + '!=': ast.NotEqual, + 'lt': ast.LessThan, + '<': ast.LessThan, + 'lte': ast.LessEqual, + '<=': ast.LessEqual, + 'gt': ast.GreaterThan, + '>': ast.GreaterThan, + 'gte': ast.GreaterEqual, + '>=': ast.GreaterEqual, +} + +SPATIAL_PREDICATES_MAP = { + 's_intersects': ast.GeometryIntersects, + 's_equals': ast.GeometryEquals, + 's_disjoint': ast.GeometryDisjoint, + 's_touches': ast.GeometryTouches, + 's_within': ast.GeometryWithin, + 's_overlaps': ast.GeometryOverlaps, + 's_crosses': ast.GeometryCrosses, + 's_contains': ast.GeometryContains, +} + +TEMPORAL_PREDICATES_MAP = { + 't_before': ast.TimeBefore, + 't_after': ast.TimeAfter, + 't_meets': ast.TimeMeets, + 't_metby': ast.TimeMetBy, + 't_overlaps': ast.TimeOverlaps, + 't_overlappedby': ast.TimeOverlappedBy, + 't_begins': ast.TimeBegins, + 't_begunby': ast.TimeBegunBy, + 't_during': ast.TimeDuring, + 't_contains': ast.TimeContains, + 't_ends': ast.TimeEnds, + 't_endedby': ast.TimeEndedBy, + 't_equals': ast.TimeEquals, +} + + +ARRAY_PREDICATES_MAP = { + 'a_equals': ast.ArrayEquals, + 'a_contains': ast.ArrayContains, + 'a_containedBy': ast.ArrayContainedBy, + 'a_overlaps': ast.ArrayOverlaps, +} + +ARITHMETIC_MAP = { + '+': ast.Add, + '-': ast.Sub, + '*': ast.Mul, + '/': ast.Div, +} + + +def walk_cql_json(node: dict) -> ast.Node: + if isinstance(node, (str, float, int, bool)): + return node + + if isinstance(node, list): + return [ + walk_cql_json(sub_node) + for sub_node in node + ] + + if not isinstance(node, dict): + raise ValueError(f'Invalid type {type(node)}') + + # check if we are dealing with a geometry + if 'type' in node and 'coordinates' in node: + # TODO: test if node is actually valid + return values.Geometry(node) + + elif 'bbox' in node: + return 
values.Envelope(*node['bbox'])
+
+    elif 'date' in node:
+        return parse_date(node['date'])
+
+    elif 'timestamp' in node:
+        return parse_datetime(node['timestamp'])
+
+    elif 'interval' in node:
+        parsed = []
+        for value in node['interval']:
+
+            if value == '..':
+                parsed.append(None)
+                continue
+            try:
+                parsed.append(
+                    parse_date(value)
+                )
+            except ValueError:
+                try:
+                    parsed.append(parse_duration(value))
+                except ValueError:
+                    parsed.append(parse_datetime(value))
+
+        return values.Interval(*parsed)
+
+    # decode all other nodes
+    for name, value in node.items():
+        if name in ('and', 'or'):
+            sub_items = walk_cql_json(value)
+            last = sub_items[0]
+            for sub_item in sub_items[1:]:
+                last = (ast.And if name == 'and' else ast.Or)(
+                    last,
+                    sub_item,
+                )
+            return last
+
+        elif name == 'not':
+            # allow both arrays and objects, the standard is ambiguous in
+            # that regard
+            if isinstance(value, list):
+                value = value[0]
+            return ast.Not(walk_cql_json(value))
+
+        elif name in COMPARISON_MAP:
+            return COMPARISON_MAP[name](
+                walk_cql_json(value[0]),
+                walk_cql_json(value[1]),
+            )
+
+        elif name == 'between':
+            return ast.Between(
+                walk_cql_json(value['value']),
+                walk_cql_json(value['lower']),
+                walk_cql_json(value['upper']),
+                not_=False,
+            )
+
+        elif name == 'like':
+            return ast.Like(
+                walk_cql_json(value[0]),
+                value[1],
+                nocase=False,
+                wildcard='%',
+                singlechar='.',
+                escapechar='\\',
+                not_=False,
+            )
+
+        elif name == 'in':
+            return ast.In(
+                walk_cql_json(value['value']),
+                walk_cql_json(value['list']),
+                not_=False,
+            )
+
+        elif name == 'isNull':
+            return ast.IsNull(
+                walk_cql_json(value),
+                not_=False,
+            )
+
+        elif name in SPATIAL_PREDICATES_MAP:
+            return SPATIAL_PREDICATES_MAP[name](
+                walk_cql_json(value[0]),
+                walk_cql_json(value[1]),
+            )
+
+        elif name in TEMPORAL_PREDICATES_MAP:
+            return TEMPORAL_PREDICATES_MAP[name](
+                walk_cql_json(value[0]),
+                walk_cql_json(value[1]),
+            )
+
+        elif name in ARRAY_PREDICATES_MAP:
+            return 
ARRAY_PREDICATES_MAP[name]( + walk_cql_json(value[0]), + walk_cql_json(value[1]), + ) + + elif name in ARITHMETIC_MAP: + return ARITHMETIC_MAP[name]( + walk_cql_json(value[0]), + walk_cql_json(value[1]), + ) + + elif name == 'property': + return ast.Attribute(value) + + elif name == 'function': + return ast.Function( + value['name'], + walk_cql_json(value['arguments']), + ) + + +def parse(cql: Union[str, dict]) -> ast.Node: + if isinstance(cql, str): + cql = json.loads(cql) + + return walk_cql_json(cql) diff --git a/pygeofilter/util.py b/pygeofilter/util.py index f43fc79..5411b29 100644 --- a/pygeofilter/util.py +++ b/pygeofilter/util.py @@ -26,7 +26,7 @@ # ------------------------------------------------------------------------------ import re -from datetime import timedelta +from datetime import timedelta, date from dateparser import parse as parse_datetime __all__ = [ @@ -73,6 +73,16 @@ def parse_duration(value: str) -> timedelta: return sign * timedelta(days, fsec) +def parse_date(value: str) -> date: + """ Backport for `fromisoformat` for dates in Python 3.6 + """ + + if hasattr(date, 'fromisoformat'): + return date.fromisoformat(value) + + return date(*(int(part) for part in value.split('-'))) + + def like_pattern_to_re_pattern(like, wildcard, single_char, escape_char): x_wildcard = re.escape(wildcard) x_single_char = re.escape(single_char) diff --git a/setup.py b/setup.py index 6209b73..bcc8331 100644 --- a/setup.py +++ b/setup.py @@ -62,7 +62,7 @@ include_package_data=True, install_requires=[ "dateparser", - "lark", + "lark<1.0", "pygeoif", "dataclasses;python_version<'3.7'", ] if not on_rtd else [], diff --git a/tests/parsers/cql2_json/__init__.py b/tests/parsers/cql2_json/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/parsers/cql2_json/test_parser.py b/tests/parsers/cql2_json/test_parser.py new file mode 100644 index 0000000..8c550e9 --- /dev/null +++ b/tests/parsers/cql2_json/test_parser.py @@ -0,0 +1,857 @@ +# 
------------------------------------------------------------------------------ +# +# Project: pygeofilter +# Authors: Fabian Schindler +# +# ------------------------------------------------------------------------------ +# Copyright (C) 2019 EOX IT Services GmbH +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies of this Software or works derived from this Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# ------------------------------------------------------------------------------ + +import json +from datetime import datetime, timedelta + +from dateparser.timezone_parser import StaticTzInfo +from pygeoif import geometry + +from pygeofilter.parsers.cql2_json import parse +from pygeofilter import ast +from pygeofilter import values + + +def normalize_geom(geometry): + if hasattr(geometry, '__geo_interface__'): + geometry = geometry.__geo_interface__ + return json.loads(json.dumps(geometry)) + + +def test_attribute_eq_literal(): + result = parse('{ "eq": [{ "property": "attr" }, "A"]}') + assert result == ast.Equal( + ast.Attribute('attr'), + 'A', + ) + + +def test_attribute_lt_literal(): + result = parse('{ "lt": [{ "property": "attr" }, 5]}') + assert result == ast.LessThan( + ast.Attribute('attr'), + 5.0, + ) + + +def test_attribute_lte_literal(): + result = parse('{ "lte": [{ "property": "attr" }, 5]}') + assert result == ast.LessEqual( + ast.Attribute('attr'), + 5.0, + ) + + +def test_attribute_gt_literal(): + result = parse('{ "gt": [{ "property": "attr" }, 5]}') + assert result == ast.GreaterThan( + ast.Attribute('attr'), + 5.0, + ) + + +def test_attribute_gte_literal(): + result = parse('{ "gte": [{ "property": "attr" }, 5]}') + assert result == ast.GreaterEqual( + ast.Attribute('attr'), + 5.0, + ) + + +# def test_attribute_ne_literal(): +# result = parse('attr <> 5') +# assert result == ast.ComparisonPredicateNode( +# ast.Attribute('attr'), +# 5, +# ast.ComparisonOp('<>'), +# ) + + +def test_attribute_between(): + result = parse({ + "between": { + "value": { + "property": "attr" + }, + "lower": 2, + "upper": 5, + } + }) + assert result == ast.Between( + ast.Attribute('attr'), + 2, + 5, + False, + ) + + +# def test_attribute_not_between(): +# result = parse('attr NOT BETWEEN 2 AND 5') +# assert result == ast.BetweenPredicateNode( +# ast.Attribute('attr'), +# 2, +# 5, +# True, +# ) + + +def test_attribute_between_negative_positive(): + result = parse({ + 
"between": { + "value": { + "property": "attr" + }, + "lower": -1, + "upper": 1, + } + }) + assert result == ast.Between( + ast.Attribute('attr'), + -1, + 1, + False, + ) + + +def test_string_like(): + result = parse({ + "like": [ + {"property": "attr"}, + "some%", + ] + }) + assert result == ast.Like( + ast.Attribute('attr'), + 'some%', + nocase=False, + not_=False, + wildcard='%', + singlechar='.', + escapechar='\\', + ) + +# def test_string_not_like(): +# result = parse('attr NOT LIKE "some%"') +# assert result == ast.LikePredicateNode( +# ast.Attribute('attr'), +# 'some%', +# nocase=False, +# not_=True, +# wildcard='%', +# singlechar='.', +# escapechar=None, +# ) + + +# def test_string_not_ilike(): +# result = parse('attr NOT ILIKE "some%"') +# assert result == ast.LikePredicateNode( +# ast.Attribute('attr'), +# 'some%', +# nocase=True, +# not_=True, +# wildcard='%', +# singlechar='.', +# escapechar=None, +# ) + + +def test_attribute_in_list(): + result = parse({ + "in": { + "value": {"property": "attr"}, + "list": [1, 2, 3, 4], + } + }) + assert result == ast.In( + ast.Attribute('attr'), [ + 1, + 2, + 3, + 4, + ], + False + ) + + +# def test_attribute_not_in_list(): +# result = parse('attr NOT IN ("A", "B", \'C\', \'D\')') +# assert result == ast.InPredicateNode( +# ast.Attribute('attr'), [ +# "A", +# "B", +# "C", +# "D", +# ], +# True +# ) + + +def test_attribute_is_null(): + result = parse({ + "isNull": {"property": "attr"} + }) + assert result == ast.IsNull( + ast.Attribute('attr'), False + ) + + +# def test_attribute_is_not_null(): +# result = parse('attr IS NOT NULL') +# assert result == ast.NullPredicateNode( +# ast.Attribute('attr'), True +# ) + +# # Temporal predicate + + +def test_attribute_before(): + result = parse({ + "t_before": [ + {"property": "attr"}, + {"timestamp": "2000-01-01T00:00:01Z"}, + ] + }) + assert result == ast.TimeBefore( + ast.Attribute('attr'), + datetime( + 2000, 1, 1, 0, 0, 1, + tzinfo=StaticTzInfo('Z', timedelta(0)) + ), + ) + 
+ +def test_attribute_after_dt_dt(): + result = parse({ + "t_after": [ + {"property": "attr"}, + {"interval": ["2000-01-01T00:00:00Z", "2000-01-01T00:00:01Z"]} + ] + }) + + assert result == ast.TimeAfter( + ast.Attribute('attr'), + values.Interval( + datetime( + 2000, 1, 1, 0, 0, 0, + tzinfo=StaticTzInfo('Z', timedelta(0)) + ), + datetime( + 2000, 1, 1, 0, 0, 1, + tzinfo=StaticTzInfo('Z', timedelta(0)) + ), + ), + ) + + +def test_meets_dt_dr(): + result = parse({ + "t_meets": [ + {"property": "attr"}, + {"interval": ["2000-01-01T00:00:00Z", "PT4S"]} + ] + }) + assert result == ast.TimeMeets( + ast.Attribute('attr'), + values.Interval( + datetime( + 2000, 1, 1, 0, 0, 0, + tzinfo=StaticTzInfo('Z', timedelta(0)) + ), + timedelta(seconds=4), + ), + ) + + +def test_attribute_metby_dr_dt(): + result = parse({ + "t_metby": [ + {"property": "attr"}, + {"interval": ["PT4S", "2000-01-01T00:00:03Z"]} + ] + }) + assert result == ast.TimeMetBy( + ast.Attribute('attr'), + values.Interval( + timedelta(seconds=4), + datetime( + 2000, 1, 1, 0, 0, 3, + tzinfo=StaticTzInfo('Z', timedelta(0)) + ), + ), + ) + + +def test_attribute_toverlaps_open_dt(): + result = parse({ + "t_overlaps": [ + {"property": "attr"}, + {"interval": ["..", "2000-01-01T00:00:03Z"]} + ] + }) + assert result == ast.TimeOverlaps( + ast.Attribute('attr'), + values.Interval( + None, + datetime( + 2000, 1, 1, 0, 0, 3, + tzinfo=StaticTzInfo('Z', timedelta(0)) + ), + ), + ) + + +def test_attribute_overlappedby_dt_open(): + result = parse({ + "t_overlappedby": [ + {"property": "attr"}, + {"interval": ["2000-01-01T00:00:03Z", ".."]} + ] + }) + assert result == ast.TimeOverlappedBy( + ast.Attribute('attr'), + values.Interval( + datetime( + 2000, 1, 1, 0, 0, 3, + tzinfo=StaticTzInfo('Z', timedelta(0)) + ), + None, + ), + ) + + +# Array predicate + + +def test_attribute_aequals(): + result = parse({ + "a_equals": [ + {"property": "arrayattr"}, + [1, 2, 3] + ] + }) + assert result == ast.ArrayEquals( + 
ast.Attribute('arrayattr'), + [1, 2, 3], + ) + + +def test_attribute_aoverlaps(): + result = parse({ + "a_overlaps": [ + {"property": "arrayattr"}, + [1, 2, 3] + ] + }) + assert result == ast.ArrayOverlaps( + ast.Attribute('arrayattr'), + [1, 2, 3], + ) + + +def test_attribute_acontains(): + result = parse({ + "a_contains": [ + {"property": "arrayattr"}, + [1, 2, 3] + ] + }) + assert result == ast.ArrayContains( + ast.Attribute('arrayattr'), + [1, 2, 3], + ) + + +def test_attribute_acontainedby(): + result = parse({ + "a_containedBy": [ + {"property": "arrayattr"}, + [1, 2, 3] + ] + }) + assert result == ast.ArrayContainedBy( + ast.Attribute('arrayattr'), + [1, 2, 3], + ) + + +# Spatial predicate + + +def test_intersects_attr_point(): + result = parse({ + "s_intersects": [ + {"property": "geometry"}, + { + "type": "Point", + "coordinates": [1, 1], + } + ] + }) + assert result == ast.GeometryIntersects( + ast.Attribute('geometry'), + values.Geometry( + normalize_geom( + geometry.Point(1, 1).__geo_interface__ + ) + ), + ) + + +def test_disjoint_linestring_attr(): + result = parse({ + "s_disjoint": [ + { + "type": "LineString", + "coordinates": [[1, 1], [2, 2]], + "bbox": [1.0, 1.0, 2.0, 2.0] + }, + {"property": "geometry"}, + ] + }) + assert result == ast.GeometryDisjoint( + values.Geometry( + normalize_geom( + geometry.LineString([(1, 1), (2, 2)]).__geo_interface__ + ), + ), + ast.Attribute('geometry'), + ) + + +def test_contains_attr_polygon(): + result = parse({ + "s_contains": [ + {"property": "geometry"}, + { + "type": "Polygon", + "coordinates": [ + [[1, 1], [2, 2], [0, 3], [1, 1]] + ], + 'bbox': [0.0, 1.0, 2.0, 3.0] + }, + ] + }) + assert result == ast.GeometryContains( + ast.Attribute('geometry'), + values.Geometry( + normalize_geom( + geometry.Polygon( + [(1, 1), (2, 2), (0, 3), (1, 1)] + ).__geo_interface__ + ), + ), + ) + + +def test_within_multipolygon_attr(): + result = parse({ + "s_within": [ + { + "type": "MultiPolygon", + "coordinates": [ + [[[1, 1], 
[2, 2], [0, 3], [1, 1]]] + ], + 'bbox': [0.0, 1.0, 2.0, 3.0] + }, + {"property": "geometry"}, + ] + }) + assert result == ast.GeometryWithin( + values.Geometry( + normalize_geom( + geometry.MultiPolygon([ + geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)]) + ]).__geo_interface__ + ), + ), + ast.Attribute('geometry'), + ) + + +def test_touches_attr_multilinestring(): + result = parse({ + "s_touches": [ + {"property": "geometry"}, + { + "type": "MultiLineString", + "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], + "bbox": [0.0, 1.0, 2.0, 3.0] + }, + ] + }) + assert result == ast.GeometryTouches( + ast.Attribute('geometry'), + values.Geometry( + normalize_geom( + geometry.MultiLineString([ + geometry.LineString([(1, 1), (2, 2)]), + geometry.LineString([(0, 3), (1, 1)]), + ]).__geo_interface__ + ), + ), + ) + + +def test_crosses_attr_multilinestring(): + result = parse({ + "s_crosses": [ + {"property": "geometry"}, + { + "type": "MultiLineString", + "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], + "bbox": [0.0, 1.0, 2.0, 3.0] + }, + ] + }) + assert result == ast.GeometryCrosses( + ast.Attribute('geometry'), + values.Geometry( + normalize_geom( + geometry.MultiLineString([ + geometry.LineString([(1, 1), (2, 2)]), + geometry.LineString([(0, 3), (1, 1)]), + ]).__geo_interface__ + ) + ), + ) + + +def test_overlaps_attr_multilinestring(): + result = parse({ + "s_overlaps": [ + {"property": "geometry"}, + { + "type": "MultiLineString", + "coordinates": [[[1, 1], [2, 2]], [[0, 3], [1, 1]]], + "bbox": [0.0, 1.0, 2.0, 3.0] + }, + ] + }) + assert result == ast.GeometryOverlaps( + ast.Attribute('geometry'), + values.Geometry( + normalize_geom( + geometry.MultiLineString([ + geometry.LineString([(1, 1), (2, 2)]), + geometry.LineString([(0, 3), (1, 1)]), + ]).__geo_interface__ + ), + ), + ) + + +# POINT(1 1) +# LINESTRING(1 1,2 2) +# MULTIPOLYGON(((1 1,2 2,0 3,1 1)) +# MULTILINESTRING((1 1,2 2),(0 3,1 1)) +# POLYGON((1 1,2 2,0 3,1 1)) + +# def 
test_equals_attr_geometrycollection():
+#     result = parse('OVERLAPS(geometry, )')
+#     assert result == ast.SpatialPredicateNode(
+#         ast.Attribute('geometry'),
+#         ast.LiteralExpression(
+#             geometry.MultiLineString([
+#                 geometry.LineString([(1, 1), (2, 2)]),
+#                 geometry.LineString([(0, 3), (1, 1)]),
+#             ])
+#         ),
+#         'OVERLAPS'
+#     )
+
+
+# relate
+
+# def test_relate_attr_polygon():
+#     result = parse('RELATE(geometry, POLYGON((1 1,2 2,0 3,1 1)),
+#     "1*T***T**")')
+#     assert result == ast.SpatialPatternPredicateNode(
+#         ast.Attribute('geometry'),
+#         ast.LiteralExpression(
+#             geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)])
+#         ),
+#         pattern='1*T***T**',
+#     )
+
+
+# # dwithin/beyond
+
+# def test_dwithin_attr_polygon():
+#     result = parse('DWITHIN(geometry, POLYGON((1 1,2 2,0 3,1 1)), 5, feet)')
+#     print(get_repr(result))
+#     assert result == ast.SpatialDistancePredicateNode(
+#         ast.Attribute('geometry'),
+#         ast.LiteralExpression(
+#             geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)])
+#         ),
+#         ast.SpatialDistanceOp('DWITHIN'),
+#         distance=5,
+#         units='feet',
+#     )
+
+
+# def test_beyond_attr_polygon():
+#     result = parse(
+#         'BEYOND(geometry, POLYGON((1 1,2 2,0 3,1 1)), 5, nautical miles)'
+#     )
+#     print(get_repr(result))
+#     assert result == ast.SpatialDistancePredicateNode(
+#         ast.Attribute('geometry'),
+#         ast.LiteralExpression(
+#             geometry.Polygon([(1, 1), (2, 2), (0, 3), (1, 1)])
+#         ),
+#         ast.SpatialDistanceOp('BEYOND'),
+#         distance=5,
+#         units='nautical miles',
+#     )
+
+
+# BBox predicate
+
+
+# def test_bbox_simple():
+#     result = parse('BBOX(geometry, 1, 2, 3, 4)')
+#     assert result == ast.BBoxPredicateNode(
+#         ast.Attribute('geometry'),
+#         ast.LiteralExpression(1),
+#         ast.LiteralExpression(2),
+#         ast.LiteralExpression(3),
+#         ast.LiteralExpression(4),
+#     )
+
+
+# def test_bbox_crs():
+#     result = parse('BBOX(geometry, 1, 2, 3, 4, "EPSG:3875")')
+#     assert result == ast.BBoxPredicateNode(
+#         ast.Attribute('geometry'),
+#         ast.LiteralExpression(1),
+#         ast.LiteralExpression(2),
+#         
ast.LiteralExpression(3), +# ast.LiteralExpression(4), +# 'EPSG:3875', +# ) + + +def test_attribute_arithmetic_add(): + result = parse({ + "eq": [ + {"property": "attr"}, + {"+": [5, 2]} + ] + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Add( + 5, + 2, + ), + ) + + +def test_attribute_arithmetic_sub(): + result = parse({ + "eq": [ + {"property": "attr"}, + {"-": [5, 2]} + ] + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Sub( + 5, + 2, + ), + ) + + +def test_attribute_arithmetic_mul(): + result = parse({ + "eq": [ + {"property": "attr"}, + {"*": [5, 2]} + ] + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Mul( + 5, + 2, + ), + ) + + +def test_attribute_arithmetic_div(): + result = parse({ + "eq": [ + {"property": "attr"}, + {"/": [5, 2]} + ] + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Div( + 5, + 2, + ), + ) + + +def test_attribute_arithmetic_add_mul(): + result = parse({ + "eq": [ + {"property": "attr"}, + {"+": [ + 3, + {"*": [5, 2]}, + ]}, + ], + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Add( + 3, + ast.Mul( + 5, + 2, + ), + ), + ) + + +def test_attribute_arithmetic_div_sub(): + result = parse({ + "eq": [ + {"property": "attr"}, + {"-": [ + {"/": [3, 5]}, + 2, + ]}, + ], + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Sub( + ast.Div( + 3, + 5, + ), + 2, + ), + ) + + +def test_attribute_arithmetic_div_sub_bracketted(): + result = parse({ + "eq": [ + {"property": "attr"}, + {"/": [ + 3, + {"-": [5, 2]}, + ]}, + ], + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Div( + 3, + ast.Sub( + 5, + 2, + ), + ), + ) + +# test function expression parsing + + +def test_function_no_arg(): + result = parse({ + "eq": [ + {"property": "attr"}, + { + "function": { + "name": "myfunc", + "arguments": [] + } + } + ] + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Function( + 'myfunc', [ + ] + ), + ) + + +def test_function_single_arg(): 
+ result = parse({ + "eq": [ + {"property": "attr"}, + { + "function": { + "name": "myfunc", + "arguments": [1] + } + } + ] + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Function( + 'myfunc', + [1], + ), + ) + + +def test_function_attr_string_arg(): + result = parse({ + "eq": [ + {"property": "attr"}, + { + "function": { + "name": "myfunc", + "arguments": [ + {"property": "other_attr"}, + "abc" + ] + } + } + ] + }) + assert result == ast.Equal( + ast.Attribute('attr'), + ast.Function( + 'myfunc', [ + ast.Attribute('other_attr'), + "abc", + ] + ), + )