From f39352523734d0f0008916f67f61e0ba667ef2af Mon Sep 17 00:00:00 2001
From: Ondrej Pesek
Date: Mon, 22 Jan 2024 12:17:04 +0100
Subject: [PATCH 1/7] g.gui.gmodeler: fix relationship with history pane (#3337)

* missing giface was causing the entire gmodeler to crash. Fixes #3336.
---
 gui/wxpython/gmodeler/frame.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gui/wxpython/gmodeler/frame.py b/gui/wxpython/gmodeler/frame.py
index 0e5a6f010c5..0ca4db7d3dc 100644
--- a/gui/wxpython/gmodeler/frame.py
+++ b/gui/wxpython/gmodeler/frame.py
@@ -140,7 +140,7 @@ def __init__(
 
         self.pythonPanel = PythonPanel(parent=self)
 
-        self._gconsole = GConsole(guiparent=self)
+        self._gconsole = GConsole(guiparent=self, giface=giface)
         self.goutput = GConsoleWindow(
             parent=self, giface=giface, gconsole=self._gconsole
         )

From d5ed511759610467cc60c3b05f4ab1af8ae871ee Mon Sep 17 00:00:00 2001
From: Sharan Jamanani <54804304+Sharansrj567@users.noreply.github.com>
Date: Mon, 22 Jan 2024 07:55:20 -0500
Subject: [PATCH 2/7] libdisplay: simplify cairo driver selection (#3357)

Addresses a Cppcheck "duplicateExpression" warning.
---
 lib/display/r_raster.c | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/lib/display/r_raster.c b/lib/display/r_raster.c
index 01b877ef9dd..381fbf640e8 100644
--- a/lib/display/r_raster.c
+++ b/lib/display/r_raster.c
@@ -138,10 +138,9 @@ int D_open_driver(void)
     drv = (p && G_strcasecmp(p, "png") == 0)    ? PNG_Driver()
           : (p && G_strcasecmp(p, "ps") == 0)   ? PS_Driver()
           : (p && G_strcasecmp(p, "html") == 0) ? HTML_Driver()
-          :
+                                                :
 #ifdef USE_CAIRO
-          (p && G_strcasecmp(p, "cairo") == 0) ? Cairo_Driver()
-                                               : Cairo_Driver();
+          Cairo_Driver();
 #else
           PNG_Driver();
 #endif

From cfc987854f21a84ccfc8b65b2716922e5d8dc0eb Mon Sep 17 00:00:00 2001
From: Ondrej Pesek
Date: Mon, 22 Jan 2024 17:14:14 +0100
Subject: [PATCH 3/7] db.execute: fix duplicated word in docs (#3370)

---
 db/db.execute/db.execute.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/db/db.execute/db.execute.html b/db/db.execute/db.execute.html
index 59312aadb4c..8700258b404 100644
--- a/db/db.execute/db.execute.html
+++ b/db/db.execute/db.execute.html
@@ -12,7 +12,7 @@
 <p>
 
 <h2>NOTES</h2>
 
 If parameters for database connection are already set with
 <a href="db.connect.html">db.connect</a>, they are taken as default values and
 do not need to be specified each time.
 
-If you have a large number of SQL commands to process, it is much much
+If you have a large number of SQL commands to process, it is much
 faster to place all the SQL statements into a text file and use
 <em>input</em> file parameter than it is to process each statement
 individually in a loop. If multiple instruction lines are given, each

From e07531fae05e883222b9b7b07bddd23d788151af Mon Sep 17 00:00:00 2001
From: Martin Landa
Date: Mon, 22 Jan 2024 19:18:07 +0100
Subject: [PATCH 4/7] Graphical Modeler: improve GetNewShapePos() (#3371)

---
 gui/wxpython/gmodeler/frame.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/gui/wxpython/gmodeler/frame.py b/gui/wxpython/gmodeler/frame.py
index 0ca4db7d3dc..a3aa2ca2399 100644
--- a/gui/wxpython/gmodeler/frame.py
+++ b/gui/wxpython/gmodeler/frame.py
@@ -1054,11 +1054,12 @@ def GetOptData(self, dcmd, layer, params, propwin):
         # arrange data items
         if data_items:
             dc = wx.ClientDC(self.canvas)
-            p = 360 / len(data_items)
-            r = 200
+            p = 180 / (len(data_items) - 1) if len(data_items) > 1 else 0
+            rx = 200
+            ry = 100
             alpha = 270 * (math.pi / 180)
             for data in data_items:
-                data.Move(dc, x + r * math.sin(alpha), y + r * math.cos(alpha))
+                data.Move(dc, x + rx * math.sin(alpha), y + ry * math.cos(alpha))
                 alpha += p * (math.pi / 180)
                 data.Show(True)
 
@@ -1349,14 +1350,13 @@ def GetNewShapePos(self, yoffset=50):
 
         :return: x,y
         """
-        diagram = self.GetDiagram()
-        if diagram.GetShapeList():
-            last = diagram.GetShapeList()[-1]
-            y = last.GetY() + last.GetBoundingBoxMin()[1]
-        else:
-            y = 20
+        ymax = 20
+        for item in self.GetDiagram().GetShapeList():
+            y = item.GetY() + item.GetBoundingBoxMin()[1]
+            if y > ymax:
+                ymax = y
 
-        return (self.GetSize()[0] // 2, y + yoffset)
+        return (self.GetSize()[0] // 2, ymax + yoffset)
 
     def GetShapesSelected(self):
         """Get list of selected shapes"""

From cbef14ad4a223b5c272b5896000743c157863e1d Mon Sep 17 00:00:00 2001
From: Martin Landa
Date: Mon, 22 Jan 2024 19:21:50 +0100
Subject: [PATCH 5/7] SearchModuleWidget: show module description also for first match (#3372)

---
 gui/wxpython/gui_core/widgets.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/gui/wxpython/gui_core/widgets.py b/gui/wxpython/gui_core/widgets.py
index 774b489ed27..3ced80291ce 100644
--- a/gui/wxpython/gui_core/widgets.py
+++ b/gui/wxpython/gui_core/widgets.py
@@ -1250,7 +1250,7 @@ def __init__(self, parent, model, showChoice=True, showTip=False, **kwargs):
 
         if self._showTip:
             self._searchTip = StaticWrapText(
-                parent=self, id=wx.ID_ANY, label="Choose a tool", size=(-1, 35)
+                parent=self, id=wx.ID_ANY, label="Choose a tool", size=(-1, 40)
             )
 
         if self._showChoice:
@@ -1315,9 +1315,9 @@ def OnSearchModule(self, event):
             self._searchChoice.SetSelection(0)
             self.OnSelectModule()
 
-        label = _("%d tools match") % len(commands)
+        label = _("{} tools matched").format(len(commands))
         if self._showTip:
-            self._searchTip.SetLabel(label)
+            self._searchTip.SetLabel(self._searchTip.GetLabel() + " [{}]".format(label))
 
         self.showNotification.emit(message=label)

From 0c73ced9e7099cad88ceada12874f95f65dbdfa5 Mon Sep 17 00:00:00 2001
From: Stefan Blumentrath
Date: Tue, 23 Jan 2024 23:26:30 +0100
Subject: [PATCH 6/7] temporal: Update STDS metadata from DB (#3350)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* compute granularity from time tuples

* update metadata from DB

* black

* flake8

* more flake8

* address test-failures

* address test-failures

* address more test-failures

* handle empty maps list

* handle empty maps list

* fix temporal relation

remove unnecessary loop

* add docs, function to get function

* add docs, function to get function

* dont check data structure in loop

* check empty list

* remove DB version check

* implement suggstions from review

* Update python/grass/temporal/abstract_space_time_dataset.py

Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com>

* implement suggstions from review

* reduce looping

* order time units in dict

* Update python/grass/temporal/temporal_granularity.py

Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com>

* Update python/grass/temporal/temporal_granularity.py

Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com>

* Update python/grass/temporal/temporal_granularity.py

Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com>

* Update python/grass/temporal/temporal_granularity.py

Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com>

* Update python/grass/temporal/temporal_granularity.py

Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com>

* use sets of granularity units

* handle max_days, typos

---------

Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com>
---
 .../temporal/abstract_space_time_dataset.py   | 155 +++----
 python/grass/temporal/metadata.py             |   3 -
 python/grass/temporal/temporal_granularity.py | 417 +++++++++---------
 3 files changed, 289 insertions(+), 286 deletions(-)

diff --git a/python/grass/temporal/abstract_space_time_dataset.py b/python/grass/temporal/abstract_space_time_dataset.py
index 2e46f06a6d3..a193a1a926d 100644
--- a/python/grass/temporal/abstract_space_time_dataset.py
+++ b/python/grass/temporal/abstract_space_time_dataset.py
@@ -2,7 +2,7 @@
 The abstract_space_time_dataset module provides the AbstractSpaceTimeDataset
 class that is the base class for all space time datasets.
 
-(C) 2011-2013 by the GRASS Development Team
+(C) 2011-2024 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS for details.
 
@@ -18,15 +18,18 @@ class that is the base class for all space time datasets.
 from .core import (
     init_dbif,
     get_sql_template_path,
-    get_tgis_metadata,
     get_current_mapset,
     get_tgis_db_version_from_metadata,
 )
-from .abstract_dataset import AbstractDataset, AbstractDatasetComparisonKeyStartTime
+from .abstract_dataset import (
+    AbstractDataset,
+    AbstractDatasetComparisonKeyStartTime,
+)
 from .temporal_granularity import (
     check_granularity_string,
     compute_absolute_time_granularity,
     compute_relative_time_granularity,
+    get_time_tuple_function,
 )
 from .spatio_temporal_relationships import (
     count_temporal_topology_relationships,
@@ -441,7 +444,8 @@ def get_map_time(self):
     def count_temporal_types(self, maps=None, dbif=None):
         """Return the temporal type of the registered maps as dictionary
 
-        The map list must be ordered by start time
+        The map list can be a list of AbstractDataset or database rows.
+        It must be ordered by start time
 
         The temporal type can be:
 
@@ -449,37 +453,29 @@ def count_temporal_types(self, maps=None, dbif=None):
         - interval -> start and end time
         - invalid -> No valid time point or interval found
 
-        :param maps: A sorted (start_time) list of AbstractDataset objects
+        :param maps: A sorted (start_time) list of AbstractDataset objects or database rows
         :param dbif: The database interface to be used
         """
+        tcount = {"point": 0, "interval": 0, "invalid": 0}
+
         if maps is None:
-            maps = self.get_registered_maps_as_objects(
-                where=None, order="start_time", dbif=dbif
-            )
+            maps = self.get_registered_maps(where=None, order="start_time", dbif=dbif)
 
-        time_invalid = 0
-        time_point = 0
-        time_interval = 0
+        if not maps:
+            return tcount
 
-        tcount = {}
-        for i in range(len(maps)):
-            # Check for point and interval data
-            if maps[i].is_time_absolute():
-                start, end = maps[i].get_absolute_time()
-            if maps[i].is_time_relative():
-                start, end, unit = maps[i].get_relative_time()
+        get_time_tuple = get_time_tuple_function(maps)
+        for map_reference in maps:
+            # Check for point and interval data
+            start, end = get_time_tuple(map_reference)
 
             if start is not None and end is not None:
-                time_interval += 1
+                tcount["interval"] += 1
             elif start is not None and end is None:
-                time_point += 1
+                tcount["point"] += 1
             else:
-                time_invalid += 1
-
-        tcount["point"] = time_point
-        tcount["interval"] = time_interval
-        tcount["invalid"] = time_invalid
+                tcount["invalid"] += 1
 
         return tcount
 
@@ -1517,79 +1513,66 @@ def get_registered_maps_as_objects(
             "contains": maps that contain (fully cover) the provided spatial extent
 
         :return: The ordered map object list,
-                 In case nothing found None is returned
+                 In case nothing is found, an empty list is returned
         """
 
         dbif, connection_state_changed = init_dbif(dbif)
 
         obj_list = []
 
-        # Older temporal databases have no bottom and top columns
-        # in their views so we need a work around to set the full
-        # spatial extent as well
-
-        rows = get_tgis_metadata(dbif)
-        db_version = 0
-
-        if rows:
-            for row in rows:
-                if row["key"] == "tgis_db_version":
-                    db_version = int(float(row["value"]))
-
         # use all columns
         rows = self.get_registered_maps(
             None, where, order, dbif, spatial_extent, spatial_relation
        )
 
-        if rows is not None:
-            has_bt_columns = False
-            has_semantic_label = False
-            first_row = True
-            for row in rows:
-                if first_row:
-                    first_row = False
-                    # check keys in first row
-                    # note that 'if "bottom" in row' does not work
-                    # because row is not a dict but some db backend object
-                    if "bottom" in row.keys() and "top" in row.keys():
-                        has_bt_columns = True
-                    if "semantic_label" in row.keys():
-                        has_semantic_label = True
-
-                map = self.get_new_map_instance(row["id"])
-                # time
-                if self.is_time_absolute():
-                    map.set_absolute_time(row["start_time"], row["end_time"])
-                elif self.is_time_relative():
-                    map.set_relative_time(
-                        row["start_time"],
-                        row["end_time"],
-                        self.get_relative_time_unit(),
-                    )
-                # space
-                # The fast way
-                if has_bt_columns:
-                    map.set_spatial_extent_from_values(
-                        west=row["west"],
-                        east=row["east"],
-                        south=row["south"],
-                        top=row["top"],
-                        north=row["north"],
-                        bottom=row["bottom"],
-                    )
-                # The slow work around
-                else:
-                    map.spatial_extent.select(dbif)
+        if rows:
+            # Older temporal databases have no bottom and top columns
+            # in their views so we need a work around to set the full
+            # spatial extent as well
+
+            # check keys in first row
+            # note that 'if "bottom" in row' does not work
+            # because row is not a dict but some db backend object
+            has_bt_columns = "bottom" in rows[0].keys()
+            has_semantic_label = "semantic_label" in rows[0].keys()
+        else:
+            return obj_list
 
-                # labels
-                if (
-                    has_semantic_label
-                    and row["semantic_label"] is not None
-                    and row["semantic_label"] != "None"
-                ):
-                    map.metadata.set_semantic_label(row["semantic_label"])
+        for row in rows:
+            map = self.get_new_map_instance(row["id"])
+            # time
+            if self.is_time_absolute():
+                map.set_absolute_time(row["start_time"], row["end_time"])
+            elif self.is_time_relative():
+                map.set_relative_time(
+                    row["start_time"],
+                    row["end_time"],
+                    self.get_relative_time_unit(),
+                )
+            # space
+            # The fast way
+            if has_bt_columns:
+                map.set_spatial_extent_from_values(
+                    west=row["west"],
+                    east=row["east"],
+                    south=row["south"],
+                    top=row["top"],
+                    north=row["north"],
+                    bottom=row["bottom"],
+                )
+            # The slow work around
+            else:
+                map.spatial_extent.select(dbif)
+
+            # labels
+            if (
+                has_semantic_label
+                and row["semantic_label"] is not None
+                and row["semantic_label"] != "None"
+            ):
+                map.metadata.set_semantic_label(row["semantic_label"])
 
-                obj_list.append(copy.copy(map))
+            obj_list.append(copy.copy(map))
 
         if connection_state_changed:
             dbif.close()
@@ -2872,7 +2855,7 @@ def update_from_registered_maps(self, dbif=None):
             dbif.execute_transaction(sql, mapset=self.base.mapset)
 
         # Count the temporal map types
-        maps = self.get_registered_maps_as_objects(dbif=dbif)
+        maps = self.get_registered_maps(order="start_time", dbif=dbif)
         tlist = self.count_temporal_types(maps)
 
         if tlist["interval"] > 0 and tlist["point"] == 0 and tlist["invalid"] == 0:
diff --git a/python/grass/temporal/metadata.py b/python/grass/temporal/metadata.py
index 84e7633e833..b9bd7adcc41 100644
--- a/python/grass/temporal/metadata.py
+++ b/python/grass/temporal/metadata.py
@@ -352,9 +352,6 @@ def __init__(
             max,
         )
 
-        if get_tgis_db_version_from_metadata() > 2:
-            self.set_semantic_label(semantic_label)
-
     def set_semantic_label(self, semantic_label):
         """Set the semantic label identifier"""
         self.D["semantic_label"] = semantic_label
diff --git a/python/grass/temporal/temporal_granularity.py b/python/grass/temporal/temporal_granularity.py
index 8c646af2ec7..df7eb949b0f 100644
--- a/python/grass/temporal/temporal_granularity.py
+++ b/python/grass/temporal/temporal_granularity.py
@@ -10,7 +10,7 @@
     tgis.compute_relative_time_granularity(maps)
 
-(C) 2012-2013 by the GRASS Development Team
+(C) 2012-2024 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS for details.
 
 :authors: Soeren Gebbert
 """
 from .datetime_math import compute_datetime_delta
+from .abstract_map_dataset import AbstractMapDataset
 from functools import reduce
 from collections import OrderedDict
 import ast
@@ -111,6 +112,81 @@ def check_granularity_string(granularity, temporal_type):
 ###############################################################################
 
 
+def get_time_tuple_function(maps):
+    """Helper function to return the appropriate function to get
+    time tuple from either TGIS DB rows or AbstractDataset object
+
+    :param maps: a list of AbstractDataset objects or database rows
+    :return: A function
+
+    .. code-block:: python
+
+        >>> from grass.temporal.abstract_map_dataset import AbstractMapDataset
+        >>> maps = AbstractMapDataset()
+        >>> get_time_tuple_function(maps).__name__
+        '_get_map_time_tuple'
+
+    """
+
+    def _get_map_time_tuple(map_object):
+        """Sub-function to return time tuple
+        from AbstractDataset object"""
+        if map_object.is_time_absolute():
+            time_tuple = map_object.get_absolute_time()
+        if map_object.is_time_relative():
+            time_tuple = map_object.get_relative_time()
+        return time_tuple[0:2]
+
+    def _get_row_time_tuple(db_table_row):
+        """Sub-function to return time tuple
+        from database row"""
+        return db_table_row["start_time"], db_table_row["end_time"]
+
+    # Check if input is list of MapDataset objects or SQLite rows
+    if issubclass(maps[0].__class__, AbstractMapDataset):
+        return _get_map_time_tuple
+    else:
+        return _get_row_time_tuple
+
+
+def _is_after(start, start1, end1):
+    """Helper function that checks if start timestamp is
+    temporally after the start1 and end1, where start1 and end1
+    represent a temporal extent.
+
+    :param start: datetime object to check if it is after start1 and end1
+    :param start1: datetime object for comparison
+    :param end1: datetime object (>= start1) or None for comparison
+    :return: bool
+
+    .. code-block:: python
+
+        >>> from datetime import datetime
+        >>> start = datetime(2024, 1, 1)
+        >>> start1 = datetime(2023, 12, 12)
+        >>> end1 = None
+        >>> _is_after(start, start1, end1)
+        True
+
+        >>> start = datetime(2023, 12, 14)
+        >>> start1 = datetime(2023, 12, 12)
+        >>> end1 = datetime(2023, 12, 24)
+        >>> _is_after(start, start1, end1)
+        False
+
+    """
+    if end1 is None:
+        if start > start1:
+            return True
+        else:
+            return False
+
+    if start > end1:
+        return True
+    else:
+        return False
+
+
 def compute_relative_time_granularity(maps):
     """Compute the relative time granularity
 
@@ -221,39 +297,42 @@ def compute_relative_time_granularity(maps):
 
     """
 
+    if not maps:
+        return None
+
+    get_time_tuple = get_time_tuple_function(maps)
+
     # The interval time must be scaled to days resolution
     granularity = None
-    delta = []
+    delta = set()
+    previous_start, previous_end = get_time_tuple(maps[0])
+
     # First we compute the timedelta of the intervals
-    for map in maps:
-        start, end = map.get_temporal_extent_as_tuple()
+    for stds_map in maps:
+        start, end = get_time_tuple(stds_map)
         if (start == 0 or start) and end:
             t = abs(end - start)
-            delta.append(int(t))
-
-    # Compute the timedelta of the gaps
-    for i in range(len(maps)):
-        if i < len(maps) - 1:
-            relation = maps[i + 1].temporal_relation(maps[i])
-            if relation == "after":
-                start1, end1 = maps[i].get_temporal_extent_as_tuple()
-                start2, end2 = maps[i + 1].get_temporal_extent_as_tuple()
-                # Gaps are between intervals, intervals and
-                # points, points and points
-                if end1 and start2:
-                    t = abs(end1 - start2)
-                    delta.append(int(t))
-                if not end1 and start2:
-                    t = abs(start1 - start2)
-                    delta.append(int(t))
-
-    delta.sort()
-    ulist = list(set(delta))
-    if len(ulist) > 1:
+            delta.add(int(t))
+
+        # Compute the timedelta of the gaps
+        if _is_after(start, previous_start, previous_end):
+            # Gaps are between intervals, intervals and
+            # points, points and points
+            # start time is required in TGIS and expected to be present
+            if previous_end:
+                # Gap between previous end and current start
+                t = abs(start - previous_end)
+                delta.add(int(t))
+            else:
+                # Gap between previous start and current start
+                t = abs(start - previous_start)
+                delta.add(int(t))
+        previous_start, previous_end = start, end
+
+    if len(delta) > 1:
         # Find greatest common divisor
-        granularity = gcd_list(ulist)
-    elif len(ulist) == 1:
-        granularity = ulist[0]
+        granularity = gcd_list(delta)
+    elif len(delta) == 1:
+        granularity = delta.pop()
     else:
         granularity = 0
 
@@ -273,7 +352,7 @@ def compute_absolute_time_granularity(maps):
     The computed granularity is returned as number of seconds or minutes
     or hours or days or months or years.
 
-    :param maps: a ordered by start_time list of map objects
+    :param maps: a ordered by start_time list of map objects or database rows
     :return: The temporal topology as string "integer unit"
 
     .. code-block:: python
 
@@ -344,164 +423,108 @@ def compute_absolute_time_granularity(maps):
 
     """
 
-    has_seconds = False
-    has_minutes = False
-    has_hours = False
-    has_days = False
-    has_months = False
-    has_years = False
-
-    use_seconds = False
-    use_minutes = False
-    use_hours = False
-    use_days = False
-    use_months = False
-    use_years = False
-
-    delta = []
-    datetime_delta = []
+    # Create a granularity dict with time units of increasing length
+    # that covers all possible keys in the result of compute_datetime_delta
+    # The order of the keys is important so that loops over the dictionary
+    # can be aborted as soon as a non-zero value is encountered
+    granularity_units = {
+        "second": set(),
+        "minute": set(),
+        "hour": set(),
+        "max_days": set(),
+        "day": set(),
+        "month": set(),
+        "year": set(),
+    }
+
+    get_time_tuple = get_time_tuple_function(maps)
+
+    previous_start, previous_end = get_time_tuple(maps[0])
+
     # First we compute the timedelta of the intervals
-    for map in maps:
-        start, end = map.get_temporal_extent_as_tuple()
-        if start and end:
-            delta.append(end - start)
-            datetime_delta.append(compute_datetime_delta(start, end))
-
-    # Compute the timedelta of the gaps
-    for i in range(len(maps)):
-        if i < len(maps) - 1:
-            relation = maps[i + 1].temporal_relation(maps[i])
-            if relation == "after":
-                start1, end1 = maps[i].get_temporal_extent_as_tuple()
-                start2, end2 = maps[i + 1].get_temporal_extent_as_tuple()
-                # Gaps are between intervals, intervals and
-                # points, points and points
-                if end1 and start2:
-                    delta.append(end1 - start2)
-                    datetime_delta.append(compute_datetime_delta(end1, start2))
-                if not end1 and start2:
-                    delta.append(start2 - start1)
-                    datetime_delta.append(compute_datetime_delta(start1, start2))
-    # Check what changed
-    dlist = []
-    for d in datetime_delta:
-        if "second" in d and d["second"] > 0:
-            has_seconds = True
-            # print "has second"
-        if "minute" in d and d["minute"] > 0:
-            has_minutes = True
-            # print "has minute"
-        if "hour" in d and d["hour"] > 0:
-            has_hours = True
-            # print "has hour"
-        if "day" in d and d["day"] > 0:
-            has_days = True
-            # print "has day"
-        if "month" in d and d["month"] > 0:
-            has_months = True
-            # print "has month"
-        if "year" in d and d["year"] > 0:
-            has_years = True
-            # print "has year"
+    for stds_map in maps:
+        start, end = get_time_tuple(stds_map)
+        # start time is required in TGIS and expected to be present
+        if end:
+            map_datetime_delta = compute_datetime_delta(start, end)
+            for time_unit in granularity_units:
+                if (
+                    time_unit in map_datetime_delta
+                    and map_datetime_delta[time_unit] > 0
+                ):
+                    granularity_units[time_unit].add(map_datetime_delta[time_unit])
+                    if time_unit != "max_days":
+                        break
+        # Compute the timedelta of the gaps
+        if _is_after(start, previous_start, previous_end):
+            # Gaps are between intervals, intervals and
+            # points, points and points
+            # start time is required in TGIS and expected to be present
+            if previous_end:
+                gap_datetime_delta = compute_datetime_delta(previous_end, start)
+            else:
+                gap_datetime_delta = compute_datetime_delta(previous_start, start)
+            # Add to the set of the smallest granularity in the granularity_units dict
+            for time_unit in granularity_units:
+                if (
+                    time_unit in gap_datetime_delta
+                    and gap_datetime_delta[time_unit] > 0
+                ):
+                    granularity_units[time_unit].add(gap_datetime_delta[time_unit])
+                    if time_unit != "max_days":
+                        break
+        # Keep the temporal extent to compare to the following/next map
+        previous_start, previous_end = start, end
 
     # Create a list with a single time unit only
-    if has_seconds:
-        for d in datetime_delta:
-            if "second" in d and d["second"] > 0:
-                dlist.append(d["second"])
-            elif "minute" in d and d["minute"] > 0:
-                dlist.append(d["minute"] * 60)
-            elif "hour" in d and d["hour"] > 0:
-                dlist.append(d["hour"] * 3600)
-            elif "day" in d and d["day"] > 0:
-                dlist.append(d["day"] * 24 * 3600)
-            else:
-                dlist.append(d["max_days"] * 24 * 3600)
-        use_seconds = True
-    elif has_minutes:
-        for d in datetime_delta:
-            if "minute" in d and d["minute"] > 0:
-                dlist.append(d["minute"])
-            elif "hour" in d and d["hour"] > 0:
-                dlist.append(d["hour"] * 60)
-            elif "day" in d:
-                dlist.append(d["day"] * 24 * 60)
-            else:
-                dlist.append(d["max_days"] * 24 * 60)
-        use_minutes = True
-    elif has_hours:
-        for d in datetime_delta:
-            if "hour" in d and d["hour"] > 0:
-                dlist.append(d["hour"])
-            elif "day" in d and d["day"] > 0:
-                dlist.append(d["day"] * 24)
-            else:
-                dlist.append(d["max_days"] * 24)
-        use_hours = True
-    elif has_days:
-        for d in datetime_delta:
-            if "day" in d and d["day"] > 0:
-                dlist.append(d["day"])
-            else:
-                dlist.append(d["max_days"])
-        use_days = True
-    elif has_months:
-        for d in datetime_delta:
-            if "month" in d and d["month"] > 0:
-                dlist.append(d["month"])
-            elif "year" in d and d["year"] > 0:
-                dlist.append(d["year"] * 12)
-        use_months = True
-    elif has_years:
-        for d in datetime_delta:
-            if "year" in d:
-                dlist.append(d["year"])
-        use_years = True
-
-    dlist.sort()
-    ulist = list(set(dlist))
-
-    if len(ulist) == 0:
+    dlist = set()
+    assigned_time_unit = None
+    time_unit_multipliers = {
+        "second": {"minute": 60, "hour": 3600, "day": 24 * 3600, "max_days": 24 * 3600},
+        "minute": {"hour": 60, "day": 24 * 60, "max_days": 24 * 60},
+        "hour": {"day": 24, "max_days": 24},
+        "day": {"max_days": 1},
+        "month": {"year": 12},
+    }
+
+    for time_unit, granularity_set in granularity_units.items():
+        # The smallest granularity unit is used so as soon as a non-zero
+        # value / set is encountered, the loop can be aborted
+        if granularity_set:
+            # Skip max_days
+            if time_unit == "max_days":
+                continue
+            assigned_time_unit = time_unit
+            break
+
+    if assigned_time_unit is None:
         return None
 
-    if len(ulist) > 1:
+    dlist.update(granularity_units[assigned_time_unit])
+    if assigned_time_unit in time_unit_multipliers:
+        for unit, unit_factor in time_unit_multipliers[assigned_time_unit].items():
+            if granularity_units[unit]:
+                dlist.update(
+                    {time_value * unit_factor for time_value in granularity_units[unit]}
+                )
+
+    if not dlist:
+        return None
+
+    if len(dlist) > 1:
         # Find greatest common divisor
-        granularity = gcd_list(ulist)
+        granularity = gcd_list(dlist)
     else:
-        granularity = ulist[0]
+        granularity = dlist.pop()
 
-    if use_seconds:
-        if granularity == 1:
-            return "%i second" % granularity
-        else:
-            return "%i seconds" % granularity
-    elif use_minutes:
-        if granularity == 1:
-            return "%i minute" % granularity
-        else:
-            return "%i minutes" % granularity
-    elif use_hours:
-        if granularity == 1:
-            return "%i hour" % granularity
-        else:
-            return "%i hours" % granularity
-    elif use_days:
-        if granularity == 1:
-            return "%i day" % granularity
-        else:
-            return "%i days" % granularity
-    elif use_months:
-        if granularity == 1:
-            return "%i month" % granularity
-        else:
-            return "%i months" % granularity
-    elif use_years:
-        if granularity == 1:
-            return "%i year" % granularity
-        else:
-            return "%i years" % granularity
+    if granularity is None:
+        return None
 
-    return None
+    plural = ""
+    if granularity > 1:
+        plural = "s"
+
+    return f"{granularity} {assigned_time_unit}{plural}"
 
 
 ###############################################################################
 
@@ -939,9 +962,9 @@ def compute_common_absolute_time_granularity_simple(gran_list):
             gran = "second"
             if num > 1:
                 gran += "s"
-            return "%i %s" % (num, gran)
+            return f"{num} {gran}"
 
-    elif has_minutes:
+    if has_minutes:
         if has_hours:
             hours.sort()
             minutes.append(hours[0] * 60)
@@ -962,9 +985,9 @@ def compute_common_absolute_time_granularity_simple(gran_list):
             gran = "minute"
             if num > 1:
                 gran += "s"
-            return "%i %s" % (num, gran)
+            return f"{num} {gran}"
 
-    elif has_hours:
+    if has_hours:
         if has_days:
             days.sort()
             hours.append(days[0] * 24)
@@ -982,9 +1005,9 @@ def compute_common_absolute_time_granularity_simple(gran_list):
             gran = "hour"
             if num > 1:
                 gran += "s"
-            return "%i %s" % (num, gran)
+            return f"{num} {gran}"
 
-    elif has_days:
+    if has_days:
         if has_months:
             months.sort()
             days.append(months[0] * 28)
@@ -999,9 +1022,9 @@ def compute_common_absolute_time_granularity_simple(gran_list):
             gran = "day"
             if num > 1:
                 gran += "s"
-            return "%i %s" % (num, gran)
+            return f"{num} {gran}"
 
-    elif has_months:
+    if has_months:
         if has_years:
             years.sort()
             months.append(years[0] * 12)
@@ -1009,14 +1032,14 @@ def compute_common_absolute_time_granularity_simple(gran_list):
             gran = "month"
             if num > 1:
                 gran += "s"
-            return "%i %s" % (num, gran)
+            return f"{num} {gran}"
 
-    elif has_years:
+    if has_years:
         num = gcd_list(years)
         gran = "year"
         if num > 1:
             gran += "s"
-        return "%i %s" % (num, gran)
+        return f"{num} {gran}"
 
 
 #######################################################################
 
@@ -1055,8 +1078,8 @@ def gran_singular_unit(gran):
         print(
             _(
                 "Output granularity seems not to be valid. Please use "
-                "one of the following values : {gr}".format(gr=lists)
-            )
+                "one of the following values : {gr}"
+            ).format(gr=lists)
         )
         return False
     else:
@@ -1094,14 +1117,14 @@ def gran_plural_unit(gran):
         if unit in PLURAL_GRAN:
             return unit
         elif unit in SINGULAR_GRAN:
-            return "{gr}s".format(gr=unit)
+            return f"{unit}s"
         else:
-            lists = "{gr}".format(gr=SUPPORTED_GRAN).replace("[", "").replace("]", "")
+            lists = ", ".join(SUPPORTED_GRAN)
             print(
                 _(
                     "Output granularity seems not to be valid. Please use "
-                    "one of the following values : {gr}".format(gr=lists)
-                )
+                    "one of the following values : {gr}"
+                ).format(gr=lists)
             )
     else:
        print(_("Invalid absolute granularity"))
@@ -1156,9 +1179,9 @@ def _return(output, tounit, shell):
             return output
         else:
             if output == 1:
-                return "{val} {unit}".format(val=output, unit=tounit)
+                return f"{output} {tounit}"
             else:
-                return "{val} {unit}s".format(val=output, unit=tounit)
+                return f"{output} {tounit}s"
 
     # TODO check the leap second
     if check_granularity_string(from_gran, "absolute"):

From e276098175b739a2be7bd6edb4fa2da516060276 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Edouard=20Choini=C3=A8re?= <27212526+echoix@users.noreply.github.com>
Date: Tue, 23 Jan 2024 19:32:01 -0500
Subject: [PATCH 7/7] python: Add extract filter for tarfile.extractall (#3340)

---
 python/grass/temporal/stds_import.py | 13 ++++++++++++-
 python/grass/utils/download.py       | 16 +++++++++++++++-
 scripts/g.extension/g.extension.py   | 22 +++++++++++++++++++---
 scripts/r.unpack/r.unpack.py         | 13 ++++++++++++-
 scripts/v.unpack/v.unpack.py         | 13 ++++++++++++-
 5 files changed, 70 insertions(+), 7 deletions(-)

diff --git a/python/grass/temporal/stds_import.py b/python/grass/temporal/stds_import.py
index b7f680e7936..35f768c582c 100644
--- a/python/grass/temporal/stds_import.py
+++ b/python/grass/temporal/stds_import.py
@@ -274,7 +274,18 @@ def import_stds(
             gscript.fatal(_("Unable to find projection file <%s>") % proj_file_name)
 
     msgr.message(_("Extracting data..."))
-    tar.extractall(path=directory)
+    # Extraction filters were added in Python 3.12,
+    # and backported to 3.8.17, 3.9.17, 3.10.12, and 3.11.4
+    # See https://docs.python.org/3.12/library/tarfile.html#tarfile-extraction-filter
+    # and https://peps.python.org/pep-0706/
+    # In Python 3.12, using `filter=None` triggers a DeprecationWarning,
+    # and in Python 3.14, `filter='data'` will be the default
+    if hasattr(tarfile, "data_filter"):
+        tar.extractall(path=directory, filter="data")
+    else:
+        # Remove this when no longer needed
+        gscript.warning(_("Extracting may be unsafe; consider updating Python"))
+        tar.extractall(path=directory)
     tar.close()
 
     # We use a new list file name for map registration
diff --git a/python/grass/utils/download.py b/python/grass/utils/download.py
index 170cc75e618..f6b734b7291 100644
--- a/python/grass/utils/download.py
+++ b/python/grass/utils/download.py
@@ -52,7 +52,21 @@ def extract_tar(name, directory, tmpdir):
     tar = tarfile.open(name)
     extract_dir = os.path.join(tmpdir, "extract_dir")
     os.mkdir(extract_dir)
-    tar.extractall(path=extract_dir)
+
+    # Extraction filters were added in Python 3.12,
+    # and backported to 3.8.17, 3.9.17, 3.10.12, and 3.11.4
+    # See
+    # https://docs.python.org/3.12/library/tarfile.html#tarfile-extraction-filter
+    # and https://peps.python.org/pep-0706/
+    # In Python 3.12, using `filter=None` triggers a DeprecationWarning,
+    # and in Python 3.14, `filter='data'` will be the default
+    if hasattr(tarfile, "data_filter"):
+        tar.extractall(path=extract_dir, filter="data")
+    else:
+        # Remove this when no longer needed
+        debug(_("Extracting may be unsafe; consider updating Python"))
+        tar.extractall(path=extract_dir)
+
     files = os.listdir(extract_dir)
     _move_extracted_files(
         extract_dir=extract_dir, target_dir=directory, files=files
diff --git a/scripts/g.extension/g.extension.py b/scripts/g.extension/g.extension.py
index 132897a6771..36362928c1d 100644
--- a/scripts/g.extension/g.extension.py
+++ b/scripts/g.extension/g.extension.py
@@ -1839,18 +1839,34 @@ def extract_tar(name, directory, tmpdir):
         " tmpdir={tmpdir})".format(name=name, directory=directory, tmpdir=tmpdir),
         3,
     )
-    try:
-        import tarfile  # we don't need it anywhere else
+    import tarfile
 
+    try:
         tar = tarfile.open(name)
         extract_dir = os.path.join(tmpdir, "extract_dir")
         os.mkdir(extract_dir)
-        tar.extractall(path=extract_dir)
+
+        # Extraction filters were added in Python 3.12,
+        # and backported to 3.8.17, 3.9.17, 3.10.12, and 3.11.4
+        # See
+        # https://docs.python.org/3.12/library/tarfile.html#tarfile-extraction-filter
+        # and https://peps.python.org/pep-0706/
+        # In Python 3.12, using `filter=None` triggers a DeprecationWarning,
+        # and in Python 3.14, `filter='data'` will be the default
+        if hasattr(tarfile, "data_filter"):
+            tar.extractall(path=extract_dir, filter="data")
+        else:
+            # Remove this when no longer needed
+            gs.warning(_("Extracting may be unsafe; consider updating Python"))
+            tar.extractall(path=extract_dir)
+
         files = os.listdir(extract_dir)
         move_extracted_files(extract_dir=extract_dir, target_dir=directory, files=files)
     except tarfile.TarError as error:
         gs.fatal(_("Archive file is unreadable: {0}").format(error))
 
+    del tarfile  # we don't need it anywhere else
+
 
 extract_tar.supported_formats = ["tar.gz", "gz", "bz2", "tar", "gzip", "targz"]
diff --git a/scripts/r.unpack/r.unpack.py b/scripts/r.unpack/r.unpack.py
index 2f606fe113f..8ec8cdcba48 100644
--- a/scripts/r.unpack/r.unpack.py
+++ b/scripts/r.unpack/r.unpack.py
@@ -110,7 +110,18 @@ def main():
         )
 
     # extract data
-    tar.extractall()
+    # Extraction filters were added in Python 3.12,
+    # and backported to 3.8.17, 3.9.17, 3.10.12, and 3.11.4
+    # See https://docs.python.org/3.12/library/tarfile.html#tarfile-extraction-filter
+    # and https://peps.python.org/pep-0706/
+    # In Python 3.12, using `filter=None` triggers a DeprecationWarning,
+    # and in Python 3.14, `filter='data'` will be the default
+    if hasattr(tarfile, "data_filter"):
+        tar.extractall(filter="data")
+    else:
+        # Remove this when no longer needed
+        grass.warning(_("Extracting may be unsafe; consider updating Python"))
+        tar.extractall()
     tar.close()
 
     os.chdir(data_names[0])
diff --git a/scripts/v.unpack/v.unpack.py b/scripts/v.unpack/v.unpack.py
index 640b7699a8a..c55432316ce 100644
--- a/scripts/v.unpack/v.unpack.py
+++ b/scripts/v.unpack/v.unpack.py
@@ -120,7 +120,18 @@ def main():
         shutil.rmtree(new_dir, True)
 
     # extract data
-    tar.extractall()
+    # Extraction filters were added in Python 3.12,
+    # and backported to 3.8.17, 3.9.17, 3.10.12, and 3.11.4
+    # See https://docs.python.org/3.12/library/tarfile.html#tarfile-extraction-filter
+    # and https://peps.python.org/pep-0706/
+    # In Python 3.12, using `filter=None` triggers a DeprecationWarning,
+    # and in Python 3.14, `filter='data'` will be the default
+    if hasattr(tarfile, "data_filter"):
+        tar.extractall(filter="data")
+    else:
+        # Remove this when no longer needed
+        grass.warning(_("Extracting may be unsafe; consider updating Python"))
+        tar.extractall()
     tar.close()
 
     if os.path.exists(os.path.join(data_name, "coor")):
         pass
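
The granularity refactoring in PATCH 6 reduces both compute functions to the
same core idea: collect the lengths of all intervals and of the gaps between
consecutive maps into a set, then take the greatest common divisor of that
set. A minimal sketch of the relative-time case, assuming plain (start, end)
integer tuples sorted by start time in place of GRASS map objects or database
rows (end may be None for time instants; the function name and sample data
are made up for illustration):

    from functools import reduce
    from math import gcd

    def relative_granularity(time_tuples):
        # Collect interval lengths and gap lengths, mirroring the
        # delta set built in compute_relative_time_granularity()
        deltas = set()
        previous_start, previous_end = time_tuples[0]
        for start, end in time_tuples:
            if end is not None:
                deltas.add(abs(end - start))  # interval length
            # A gap exists when this map starts after the previous extent,
            # which is what the _is_after() helper checks in the patch
            anchor = previous_end if previous_end is not None else previous_start
            if start > anchor:
                deltas.add(start - anchor)  # gap length
            previous_start, previous_end = start, end
        # The granularity is the GCD of all observed deltas
        return reduce(gcd, deltas) if deltas else None

    # Intervals of length 2 and 4 plus a gap of 4 yield a granularity of 2
    print(relative_granularity([(0, 2), (2, 6), (10, 14)]))  # 2

Tracking only the previous extent while iterating once is what lets the patch
drop the old pairwise loop over `maps[i]` and `maps[i + 1]`.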
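PATCH 7 applies one compatibility shim in five places. The shim can be
exercised standalone; a sketch using only the standard library, where the
function name and the archive path "maps.tar" are made-up examples:

    import tarfile

    def safe_extract(archive_path, target_dir):
        # tarfile.data_filter exists on Python 3.12+ and on the 3.8.17,
        # 3.9.17, 3.10.12, and 3.11.4 backports (PEP 706), so probing for
        # the attribute covers backported interpreters that a plain
        # version-number comparison would miss
        with tarfile.open(archive_path) as tar:
            if hasattr(tarfile, "data_filter"):
                # The 'data' filter rejects absolute paths, path
                # traversal outside target_dir, and special files
                tar.extractall(path=target_dir, filter="data")
            else:
                # Older Python: same unfiltered behavior as before the patch
                tar.extractall(path=target_dir)

    safe_extract("maps.tar", "extract_dir")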