diff --git a/src/taipy/core/_entity/_migrate/_utils.py b/src/taipy/core/_entity/_migrate/_utils.py
index fb7e2aaa..b53165a9 100644
--- a/src/taipy/core/_entity/_migrate/_utils.py
+++ b/src/taipy/core/_entity/_migrate/_utils.py
@@ -190,6 +190,9 @@ def __migrate_datanode(datanode: Dict) -> Dict:
     # Update Scope enum after Pipeline removal
     datanode["scope"] = __update_scope(datanode["scope"])

+    # Move the name attribute to the properties dictionary
+    datanode["data_node_properties"]["name"] = datanode.pop("name", None)
+
     if "last_edit_date" not in datanode:
         datanode["last_edit_date"] = datanode.get("last_edition_date")
     if "last_edition_date" in datanode:
diff --git a/src/taipy/core/data/_abstract_sql.py b/src/taipy/core/data/_abstract_sql.py
index 99fecfa3..f024c9ae 100644
--- a/src/taipy/core/data/_abstract_sql.py
+++ b/src/taipy/core/data/_abstract_sql.py
@@ -87,7 +87,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -111,7 +110,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/_data_converter.py b/src/taipy/core/data/_data_converter.py
index 942c02d4..596b7df5 100644
--- a/src/taipy/core/data/_data_converter.py
+++ b/src/taipy/core/data/_data_converter.py
@@ -144,7 +144,6 @@ def _entity_to_model(cls, data_node: DataNode) -> _DataNodeModel:
             data_node.config_id,
             data_node._scope,
             data_node.storage_type(),
-            properties.pop("name", None) or data_node._name,
             data_node.owner_id,
             list(data_node._parent_ids),
             data_node._last_edit_date.isoformat() if data_node._last_edit_date else None,
@@ -287,7 +286,6 @@ def _model_to_entity(cls, model: _DataNodeModel) -> DataNode:
             config_id=model.config_id,
             scope=model.scope,
             id=model.id,
-            name=model.name,
             owner_id=model.owner_id,
             parent_ids=set(model.parent_ids),
             last_edit_date=datetime.fromisoformat(model.last_edit_date) if model.last_edit_date else None,
diff --git a/src/taipy/core/data/_data_manager.py b/src/taipy/core/data/_data_manager.py
index 2de385c7..38e95c3d 100644
--- a/src/taipy/core/data/_data_manager.py
+++ b/src/taipy/core/data/_data_manager.py
@@ -96,7 +96,6 @@ def __create(
         return cls.__DATA_NODE_CLASS_MAP[storage_type](
             config_id=data_node_config.id,
             scope=data_node_config.scope or DataNodeConfig._DEFAULT_SCOPE,
-            name=props.pop(cls.__NAME_KEY, None),
             validity_period=data_node_config.validity_period,
             owner_id=owner_id,
             parent_ids=parent_ids,
diff --git a/src/taipy/core/data/_data_model.py b/src/taipy/core/data/_data_model.py
index 0c4cdb6d..a271f512 100644
--- a/src/taipy/core/data/_data_model.py
+++ b/src/taipy/core/data/_data_model.py
@@ -31,7 +31,6 @@ class _DataNodeModel(_BaseModel):
         Column("config_id", String),
         Column("scope", Enum(Scope)),
         Column("storage_type", String),
-        Column("name", String),
         Column("owner_id", String),
         Column("parent_ids", JSON),
         Column("last_edit_date", String),
@@ -50,7 +49,6 @@ class _DataNodeModel(_BaseModel):
     config_id: str
     scope: Scope
     storage_type: str
-    name: Optional[str]
     owner_id: Optional[str]
     parent_ids: List[str]
     last_edit_date: Optional[str]
@@ -71,7 +69,6 @@ def from_dict(data: Dict[str, Any]):
             config_id=data["config_id"],
             scope=Scope._from_repr(data["scope"]),
             storage_type=data["storage_type"],
-            name=data.get("name"),
             owner_id=data.get("owner_id"),
             parent_ids=data.get("parent_ids", []),
             last_edit_date=data.get("last_edit_date"),
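Taken together, these hunks remove `name` from the data node model, converter, and manager, and the migration step rewrites already-serialized entities so that `name` lives under `data_node_properties`. Below is a minimal sketch of that rewrite applied to a hypothetical old-format dictionary; the field values are illustrative only and not taken from the repository.

    # Hypothetical pre-migration payload: "name" still sits at the top level.
    old_datanode = {
        "id": "DATANODE_example_1234",
        "config_id": "example",
        "storage_type": "pickle",
        "name": "My data node",
        "data_node_properties": {"is_generated": True},
    }

    # Same operation as in __migrate_datanode above: pop the top-level key
    # and store it under "data_node_properties".
    old_datanode["data_node_properties"]["name"] = old_datanode.pop("name", None)

    assert "name" not in old_datanode
    assert old_datanode["data_node_properties"]["name"] == "My data node"
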
diff --git a/src/taipy/core/data/csv.py b/src/taipy/core/data/csv.py
index 98d764b1..8715d1fc 100644
--- a/src/taipy/core/data/csv.py
+++ b/src/taipy/core/data/csv.py
@@ -39,7 +39,6 @@ class CSVDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
             Python identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or `None`.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
         last_edit_date (datetime): The date and time of the last modification.
@@ -84,7 +83,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -115,7 +113,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/data_node.py b/src/taipy/core/data/data_node.py
index 6b45f92c..8a98160d 100644
--- a/src/taipy/core/data/data_node.py
+++ b/src/taipy/core/data/data_node.py
@@ -95,7 +95,6 @@ def __init__(
         config_id,
         scope: Scope = Scope(Scope.SCENARIO),
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -113,7 +112,6 @@ def __init__(
         self._parent_ids = parent_ids or set()
         self._scope = scope
         self._last_edit_date = last_edit_date
-        self._name = name
         self._edit_in_progress = edit_in_progress
         self._version = version or _VersionManagerFactory._build_manager()._get_latest_version()
         self._validity_period = validity_period
@@ -200,14 +198,12 @@ def expiration_date(self) -> datetime:
         return last_edit_date + validity_period if validity_period else last_edit_date

     @property  # type: ignore
-    @_self_reload(_MANAGER_NAME)
-    def name(self):
-        return self._name
+    def name(self) -> Optional[str]:
+        return self.properties.get("name")

     @name.setter  # type: ignore
-    @_self_setter(_MANAGER_NAME)
     def name(self, val):
-        self._name = val
+        self.properties["name"] = val

     @property
     def version(self):
diff --git a/src/taipy/core/data/excel.py b/src/taipy/core/data/excel.py
index c2912dad..e58aa925 100644
--- a/src/taipy/core/data/excel.py
+++ b/src/taipy/core/data/excel.py
@@ -43,7 +43,6 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
             identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or `None`.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
@@ -89,7 +88,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -120,7 +118,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
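The `data_node.py` hunk above is the behavioural core of the change: `name` stops being a dedicated attribute with its own reload/set decorators and becomes an ordinary entry in the properties dictionary. A rough, self-contained sketch of the resulting accessor semantics, using a simplified stand-in class rather than the real `DataNode`:

    from typing import Any, Dict, Optional

    class _NameViaProperties:
        """Simplified stand-in used only to illustrate the new accessor semantics."""

        def __init__(self, properties: Optional[Dict[str, Any]] = None) -> None:
            self._properties: Dict[str, Any] = dict(properties or {})

        @property
        def properties(self) -> Dict[str, Any]:
            return self._properties

        @property
        def name(self) -> Optional[str]:
            # Reading the name is now just a dictionary lookup.
            return self.properties.get("name")

        @name.setter
        def name(self, val: str) -> None:
            # Writing the name stores it as a regular property.
            self.properties["name"] = val

    dn = _NameViaProperties({"name": "super name"})
    assert dn.name == "super name"
    dn.name = "renamed"
    assert dn.properties["name"] == "renamed"
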
diff --git a/src/taipy/core/data/generic.py b/src/taipy/core/data/generic.py
index 47236269..2983b26d 100644
--- a/src/taipy/core/data/generic.py
+++ b/src/taipy/core/data/generic.py
@@ -30,7 +30,6 @@ class GenericDataNode(DataNode):
             identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of the data node.
-        name (str): A user-readable name of the data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or `None`.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
@@ -67,7 +66,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
        owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -99,7 +97,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/in_memory.py b/src/taipy/core/data/in_memory.py
index 91dd5f5a..8b603066 100644
--- a/src/taipy/core/data/in_memory.py
+++ b/src/taipy/core/data/in_memory.py
@@ -34,7 +34,6 @@ class InMemoryDataNode(DataNode):
             identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or `None`.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
@@ -66,7 +65,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -85,7 +83,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/json.py b/src/taipy/core/data/json.py
index 800fbee9..56c0a4eb 100644
--- a/src/taipy/core/data/json.py
+++ b/src/taipy/core/data/json.py
@@ -36,7 +36,6 @@ class JSONDataNode(DataNode, _AbstractFileDataNode):
             Python identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or `None`.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
         last_edit_date (datetime): The date and time of the last modification.
@@ -77,7 +76,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -101,7 +99,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/mongo.py b/src/taipy/core/data/mongo.py
index cc2aca84..769c609e 100644
--- a/src/taipy/core/data/mongo.py
+++ b/src/taipy/core/data/mongo.py
@@ -30,7 +30,6 @@ class MongoCollectionDataNode(DataNode):
             identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or None.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
@@ -88,7 +87,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -114,7 +112,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/parquet.py b/src/taipy/core/data/parquet.py
index 87f60251..053ebdb4 100644
--- a/src/taipy/core/data/parquet.py
+++ b/src/taipy/core/data/parquet.py
@@ -39,7 +39,6 @@ class ParquetDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode)
             Python identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or `None`.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
         last_edit_date (datetime): The date and time of the last modification.
@@ -99,7 +98,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -151,7 +149,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/pickle.py b/src/taipy/core/data/pickle.py
index 7b042583..685cdba3 100644
--- a/src/taipy/core/data/pickle.py
+++ b/src/taipy/core/data/pickle.py
@@ -34,7 +34,6 @@ class PickleDataNode(DataNode, _AbstractFileDataNode):
             identifer.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or `None`.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
@@ -71,7 +70,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -95,7 +93,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/sql.py b/src/taipy/core/data/sql.py
index 2a6314dc..3f368de4 100644
--- a/src/taipy/core/data/sql.py
+++ b/src/taipy/core/data/sql.py
@@ -30,7 +30,6 @@ class SQLDataNode(_AbstractSQLDataNode):
             identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or None.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
@@ -79,7 +78,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -102,7 +100,6 @@ def __init__(
             config_id,
             scope,
             id,
-            name,
             owner_id,
             parent_ids,
             last_edit_date,
diff --git a/src/taipy/core/data/sql_table.py b/src/taipy/core/data/sql_table.py
index ebe5209f..d2fa86a1 100644
--- a/src/taipy/core/data/sql_table.py
+++ b/src/taipy/core/data/sql_table.py
@@ -33,7 +33,6 @@ class SQLTableDataNode(_AbstractSQLDataNode):
             identifier.
         scope (Scope^): The scope of this data node.
         id (str): The unique identifier of this data node.
-        name (str): A user-readable name of this data node.
         owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or None.
         parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
@@ -78,7 +77,6 @@ def __init__(
         config_id: str,
         scope: Scope,
         id: Optional[DataNodeId] = None,
-        name: Optional[str] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,
         last_edit_date: Optional[datetime] = None,
@@ -98,7 +96,6 @@ def __init__(
             config_id,
             scope,
             id=id,
-            name=name,
             owner_id=owner_id,
             parent_ids=parent_ids,
             last_edit_date=last_edit_date,
diff --git a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_cleaned_dataset_0e715489-98ee-4a06-9b38-21409ed04d03.json b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_cleaned_dataset_0e715489-98ee-4a06-9b38-21409ed04d03.json
index 0a3004a4..eca2f099 100644
--- a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_cleaned_dataset_0e715489-98ee-4a06-9b38-21409ed04d03.json
+++ b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_cleaned_dataset_0e715489-98ee-4a06-9b38-21409ed04d03.json
@@ -3,7 +3,6 @@
 "config_id": "cleaned_dataset",
 "scope": "",
 "storage_type": "parquet",
-"name": null,
 "owner_id": null,
 "parent_ids": [
 "TASK_predict_cf8b3c83-5ea7-40eb-b8af-d5240852ff5b",
@@ -29,6 +28,7 @@
 "compression": "snappy",
 "exposed_type": "pandas",
 "read_kwargs": {},
-"write_kwargs": {}
+"write_kwargs": {},
+"name": null
 }
}
\ No newline at end of file
diff --git a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_day_09c01a64-a998-4ba6-ac24-43e02db535a3.json b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_day_09c01a64-a998-4ba6-ac24-43e02db535a3.json
index 02fecaba..1f7a2f91 100644
--- a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_day_09c01a64-a998-4ba6-ac24-43e02db535a3.json
+++ b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_day_09c01a64-a998-4ba6-ac24-43e02db535a3.json
@@ -3,7 +3,6 @@
 "config_id": "day",
 "scope": "",
 "storage_type": "pickle",
-"name": null,
 "last_edit_date": "2023-10-08T21:12:32.977796",
 "edits": [
 {
@@ -18,7 +17,8 @@
 "validity_seconds": null,
 "edit_in_progress": false,
 "data_node_properties": {
-"is_generated": true
+"is_generated": true,
+"name": null
 },
 "parent_ids": [
 "TASK_evaluate_57cbea3b-332e-47ac-a468-472e4885eb82",
diff --git a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_evaluation_7173c0d1-2b84-486f-af76-82788f957c3b.json b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_evaluation_7173c0d1-2b84-486f-af76-82788f957c3b.json
index d59a5553..6bc838a2 100644
--- a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_evaluation_7173c0d1-2b84-486f-af76-82788f957c3b.json
+++ b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_evaluation_7173c0d1-2b84-486f-af76-82788f957c3b.json
@@ -3,7 +3,6 @@
 "config_id": "evaluation",
 "scope": "",
 "storage_type": "pickle",
-"name": null,
 "owner_id": "SCENARIO_scenario_62d29866-610e-4173-bd22-6a0555e80ff4",
 "parent_ids": [
 "TASK_evaluate_57cbea3b-332e-47ac-a468-472e4885eb82"
@@ -20,6 +19,7 @@
 "validity_seconds": null,
 "edit_in_progress": false,
 "data_node_properties": {
-"is_generated": true
+"is_generated": true,
+"name": null
 }
}
\ No newline at end of file
diff --git a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_initial_dataset_7d571742-92a5-457f-aef2-5d2eff3a3e99.json b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_initial_dataset_7d571742-92a5-457f-aef2-5d2eff3a3e99.json
index 6b91f605..7f37445a 100644
--- a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_initial_dataset_7d571742-92a5-457f-aef2-5d2eff3a3e99.json
+++ b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_initial_dataset_7d571742-92a5-457f-aef2-5d2eff3a3e99.json
@@ -3,7 +3,6 @@
 "config_id": "initial_dataset",
 "scope": "",
 "storage_type": "csv",
-"name": null,
 "owner_id": null,
 "parent_ids": [
 "TASK_clean_data_2b17fa43-6746-45d6-b22d-0f5b09b9048a"
@@ -17,6 +16,7 @@
 "data_node_properties": {
 "path": "data/dataset.csv",
 "has_header": true,
-"exposed_type": "pandas"
+"exposed_type": "pandas",
+"name": null
 }
}
\ No newline at end of file
diff --git a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_predictions_04a03760-c963-46cc-bd24-d798f5821cef.json b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_predictions_04a03760-c963-46cc-bd24-d798f5821cef.json
index 6c404a59..22275ad3 100644
--- a/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_predictions_04a03760-c963-46cc-bd24-d798f5821cef.json
+++ b/tests/core/_entity/data_sample_migrated/data_nodes/DATANODE_predictions_04a03760-c963-46cc-bd24-d798f5821cef.json
@@ -3,7 +3,6 @@
 "config_id": "predictions",
 "scope": "",
 "storage_type": "pickle",
-"name": null,
 "owner_id": "SCENARIO_scenario_62d29866-610e-4173-bd22-6a0555e80ff4",
 "parent_ids": [
 "TASK_predict_cf8b3c83-5ea7-40eb-b8af-d5240852ff5b",
@@ -21,6 +20,7 @@
 "validity_seconds": null,
 "edit_in_progress": false,
 "data_node_properties": {
-"is_generated": true
+"is_generated": true,
+"name": null
 }
}
\ No newline at end of file
diff --git a/tests/core/data/test_csv_data_node.py b/tests/core/data/test_csv_data_node.py
index b204c42e..315c86dd 100644
--- a/tests/core/data/test_csv_data_node.py
+++ b/tests/core/data/test_csv_data_node.py
@@ -49,7 +49,9 @@ def __init__(self, id, integer, text):
 class TestCSVDataNode:
     def test_create(self):
         path = "data/node/path"
-        dn = CSVDataNode("foo_bar", Scope.SCENARIO, name="super name", properties={"path": path, "has_header": False})
+        dn = CSVDataNode(
+            "foo_bar", Scope.SCENARIO, properties={"path": path, "has_header": False, "name": "super name"}
+        )
         assert isinstance(dn, CSVDataNode)
         assert dn.storage_type() == "csv"
         assert dn.config_id == "foo_bar"
@@ -66,7 +68,7 @@ def test_create(self):

         with pytest.raises(InvalidConfigurationId):
             dn = CSVDataNode(
-                "foo bar", Scope.SCENARIO, name="super name", properties={"path": path, "has_header": False}
+                "foo bar", Scope.SCENARIO, properties={"path": path, "has_header": False, "name": "super name"}
             )

     def test_get_user_properties(self, csv_file):
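As the test updates show, call sites that used to pass `name=` to a data node constructor now put the name inside `properties`. A hedged before/after sketch of that change; the import paths follow the test modules' internal layout and the path value is a placeholder:

    from src.taipy.core.data.csv import CSVDataNode
    from taipy.config.common.scope import Scope

    # Before: name was a dedicated constructor argument (no longer accepted).
    # dn = CSVDataNode("foo_bar", Scope.SCENARIO, name="super name",
    #                  properties={"path": "data/node/path", "has_header": False})

    # After: the user-readable name travels with the other properties.
    dn = CSVDataNode(
        "foo_bar",
        Scope.SCENARIO,
        properties={"path": "data/node/path", "has_header": False, "name": "super name"},
    )
    assert dn.name == "super name"
    assert dn.properties["name"] == "super name"
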
diff --git a/tests/core/data/test_data_node.py b/tests/core/data/test_data_node.py
index c4c9e155..0b18b99b 100644
--- a/tests/core/data/test_data_node.py
+++ b/tests/core/data/test_data_node.py
@@ -67,13 +67,13 @@ def test_create(self):
             "foo_bar",
             Scope.SCENARIO,
             DataNodeId("an_id"),
-            "a name",
             "a_scenario_id",
             {"a_parent_id"},
             a_date,
             [dict(job_id="a_job_id")],
             edit_in_progress=False,
             prop="erty",
+            name="a name",
         )
         assert dn.config_id == "foo_bar"
         assert dn.scope == Scope.SCENARIO
@@ -84,8 +84,8 @@ def test_create(self):
         assert dn.last_edit_date == a_date
         assert dn.job_ids == ["a_job_id"]
         assert dn.is_ready_for_reading
-        assert len(dn.properties) == 1
-        assert dn.properties["prop"] == "erty"
+        assert len(dn.properties) == 2
+        assert dn.properties == {"prop": "erty", "name": "a name"}

         with pytest.raises(InvalidConfigurationId):
             DataNode("foo bar")
@@ -411,13 +411,15 @@ def test_auto_set_and_reload(self, current_datetime):
             "foo",
             scope=Scope.GLOBAL,
             id=DataNodeId("an_id"),
-            name="foo",
             owner_id=None,
             parent_ids=None,
             last_edit_date=current_datetime,
             edits=[dict(job_id="a_job_id")],
             edit_in_progress=False,
             validity_period=None,
+            properties={
+                "name": "foo",
+            },
         )

         dm = _DataManager()
@@ -497,14 +499,14 @@
         assert dn_2.validity_period == time_period_2

         # auto set & reload on properties attribute
-        assert dn_1.properties == {}
-        assert dn_2.properties == {}
+        assert dn_1.properties == {"name": "def"}
+        assert dn_2.properties == {"name": "def"}
         dn_1._properties["qux"] = 4
         assert dn_1.properties["qux"] == 4
         assert dn_2.properties["qux"] == 4

-        assert dn_1.properties == {"qux": 4}
-        assert dn_2.properties == {"qux": 4}
+        assert dn_1.properties == {"qux": 4, "name": "def"}
+        assert dn_2.properties == {"qux": 4, "name": "def"}
         dn_2._properties["qux"] = 5
         assert dn_1.properties["qux"] == 5
         assert dn_2.properties["qux"] == 5
@@ -512,11 +514,13 @@
         dn_1.properties["temp_key_1"] = "temp_value_1"
         dn_1.properties["temp_key_2"] = "temp_value_2"
         assert dn_1.properties == {
+            "name": "def",
             "qux": 5,
             "temp_key_1": "temp_value_1",
             "temp_key_2": "temp_value_2",
         }
         assert dn_2.properties == {
+            "name": "def",
             "qux": 5,
             "temp_key_1": "temp_value_1",
             "temp_key_2": "temp_value_2",
@@ -525,28 +529,60 @@
         assert "temp_key_1" not in dn_1.properties.keys()
         assert "temp_key_1" not in dn_1.properties.keys()
         assert dn_1.properties == {
+            "name": "def",
             "qux": 5,
             "temp_key_2": "temp_value_2",
         }
         assert dn_2.properties == {
+            "name": "def",
             "qux": 5,
             "temp_key_2": "temp_value_2",
         }
         dn_2.properties.pop("temp_key_2")
-        assert dn_1.properties == {"qux": 5}
-        assert dn_2.properties == {"qux": 5}
+        assert dn_1.properties == {
+            "qux": 5,
+            "name": "def",
+        }
+        assert dn_2.properties == {
+            "qux": 5,
+            "name": "def",
+        }
         assert "temp_key_2" not in dn_1.properties.keys()
         assert "temp_key_2" not in dn_2.properties.keys()

         dn_1.properties["temp_key_3"] = 0
-        assert dn_1.properties == {"qux": 5, "temp_key_3": 0}
-        assert dn_2.properties == {"qux": 5, "temp_key_3": 0}
+        assert dn_1.properties == {
+            "qux": 5,
+            "temp_key_3": 0,
+            "name": "def",
+        }
+        assert dn_2.properties == {
+            "qux": 5,
+            "temp_key_3": 0,
+            "name": "def",
+        }
         dn_1.properties.update({"temp_key_3": 1})
-        assert dn_1.properties == {"qux": 5, "temp_key_3": 1}
-        assert dn_2.properties == {"qux": 5, "temp_key_3": 1}
+        assert dn_1.properties == {
+            "qux": 5,
+            "temp_key_3": 1,
+            "name": "def",
+        }
+        assert dn_2.properties == {
+            "qux": 5,
+            "temp_key_3": 1,
+            "name": "def",
+        }
         dn_1.properties.update(dict())
-        assert dn_1.properties == {"qux": 5, "temp_key_3": 1}
-        assert dn_2.properties == {"qux": 5, "temp_key_3": 1}
+        assert dn_1.properties == {
+            "qux": 5,
+            "temp_key_3": 1,
+            "name": "def",
+        }
+        assert dn_2.properties == {
+            "qux": 5,
+            "temp_key_3": 1,
+            "name": "def",
+        }

         dn_1.properties["temp_key_4"] = 0
         dn_1.properties["temp_key_5"] = 0
@@ -676,13 +712,13 @@ def test_label(self):
             "foo_bar",
             Scope.SCENARIO,
             DataNodeId("an_id"),
-            "a name",
             "a_scenario_id",
             {"a_parent_id"},
             a_date,
             [dict(job_id="a_job_id")],
             edit_in_progress=False,
             prop="erty",
+            name="a name",
         )

         with mock.patch("src.taipy.core.get") as get_mck:
@@ -702,13 +738,13 @@ def test_explicit_label(self):
             "foo_bar",
             Scope.SCENARIO,
DataNodeId("an_id"), - "a name", "a_scenario_id", {"a_parent_id"}, a_date, [dict(job_id="a_job_id")], edit_in_progress=False, label="a label", + name="a name", ) assert dn.get_label() == "a label" assert dn.get_simple_label() == "a label" diff --git a/tests/core/data/test_data_repositories.py b/tests/core/data/test_data_repositories.py index a7122c6e..ca71a8e7 100644 --- a/tests/core/data/test_data_repositories.py +++ b/tests/core/data/test_data_repositories.py @@ -52,9 +52,9 @@ def test_load_all_with_filters(self, data_node, repo, init_sql_repo): for i in range(10): data_node.id = DataNodeId(f"data_node-{i}") - data_node.name = f"data_node-{i}" + data_node.owner_id = f"task-{i}" repository._save(data_node) - objs = repository._load_all(filters=[{"name": "data_node-2"}]) + objs = repository._load_all(filters=[{"owner_id": "task-2"}]) assert len(objs) == 1 @@ -119,20 +119,20 @@ def test_search(self, data_node, repo, init_sql_repo): for i in range(10): data_node.id = DataNodeId(f"data_node-{i}") - data_node.name = f"data_node-{i}" + data_node.owner_id = f"task-{i}" repository._save(data_node) assert len(repository._load_all()) == 10 - objs = repository._search("name", "data_node-2") + objs = repository._search("owner_id", "task-2") assert len(objs) == 1 assert isinstance(objs[0], DataNode) - objs = repository._search("name", "data_node-2", filters=[{"version": "random_version_number"}]) + objs = repository._search("owner_id", "task-2", filters=[{"version": "random_version_number"}]) assert len(objs) == 1 assert isinstance(objs[0], DataNode) - assert repository._search("name", "data_node-2", filters=[{"version": "non_existed_version"}]) == [] + assert repository._search("owner_id", "task-2", filters=[{"version": "non_existed_version"}]) == [] @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository]) def test_export(self, tmpdir, data_node, repo, init_sql_repo): diff --git a/tests/core/data/test_excel_data_node.py b/tests/core/data/test_excel_data_node.py index b2e958e3..110e6860 100644 --- a/tests/core/data/test_excel_data_node.py +++ b/tests/core/data/test_excel_data_node.py @@ -83,8 +83,7 @@ def test_create(self): dn = ExcelDataNode( "foo_bar", Scope.SCENARIO, - name="super name", - properties={"path": path, "has_header": False, "sheet_name": sheet_names}, + properties={"path": path, "has_header": False, "sheet_name": sheet_names, "name": "super name"}, ) assert isinstance(dn, ExcelDataNode) assert dn.storage_type() == "excel" diff --git a/tests/core/data/test_generic_data_node.py b/tests/core/data/test_generic_data_node.py index a0ca24d5..0175fa1c 100644 --- a/tests/core/data/test_generic_data_node.py +++ b/tests/core/data/test_generic_data_node.py @@ -54,7 +54,7 @@ class TestGenericDataNode: def test_create(self): dn = GenericDataNode( - "foo_bar", Scope.SCENARIO, name="super name", properties={"read_fct": read_fct, "write_fct": write_fct} + "foo_bar", Scope.SCENARIO, properties={"read_fct": read_fct, "write_fct": write_fct, "name": "super name"} ) assert isinstance(dn, GenericDataNode) assert dn.storage_type() == "generic" @@ -69,7 +69,9 @@ def test_create(self): assert dn.properties["read_fct"] == read_fct assert dn.properties["write_fct"] == write_fct - dn_1 = GenericDataNode("foo", Scope.SCENARIO, name="foo", properties={"read_fct": read_fct, "write_fct": None}) + dn_1 = GenericDataNode( + "foo", Scope.SCENARIO, properties={"read_fct": read_fct, "write_fct": None, "name": "foo"} + ) assert isinstance(dn, GenericDataNode) assert dn_1.storage_type() == "generic" assert 
         assert dn_1.config_id == "foo"
@@ -83,7 +85,9 @@ def test_create(self):
         assert dn_1.properties["read_fct"] == read_fct
         assert dn_1.properties["write_fct"] is None

-        dn_2 = GenericDataNode("xyz", Scope.SCENARIO, name="xyz", properties={"read_fct": None, "write_fct": write_fct})
+        dn_2 = GenericDataNode(
+            "xyz", Scope.SCENARIO, properties={"read_fct": None, "write_fct": write_fct, "name": "xyz"}
+        )
         assert isinstance(dn, GenericDataNode)
         assert dn_2.storage_type() == "generic"
         assert dn_2.config_id == "xyz"
@@ -97,7 +101,7 @@ def test_create(self):
         assert dn_2.properties["read_fct"] is None
         assert dn_2.properties["write_fct"] == write_fct

-        dn_3 = GenericDataNode("xyz", Scope.SCENARIO, name="xyz", properties={"read_fct": read_fct})
+        dn_3 = GenericDataNode("xyz", Scope.SCENARIO, properties={"read_fct": read_fct, "name": "xyz"})
         assert isinstance(dn, GenericDataNode)
         assert dn_3.storage_type() == "generic"
         assert dn_3.config_id == "xyz"
@@ -111,7 +115,7 @@ def test_create(self):
         assert dn_3.properties["read_fct"] == read_fct
         assert dn_3.properties["write_fct"] is None

-        dn_4 = GenericDataNode("xyz", Scope.SCENARIO, name="xyz", properties={"write_fct": write_fct})
+        dn_4 = GenericDataNode("xyz", Scope.SCENARIO, properties={"write_fct": write_fct, "name": "xyz"})
         assert isinstance(dn, GenericDataNode)
         assert dn_4.storage_type() == "generic"
         assert dn_4.config_id == "xyz"
diff --git a/tests/core/data/test_in_memory_data_node.py b/tests/core/data/test_in_memory_data_node.py
index 6b84c666..7d58a086 100644
--- a/tests/core/data/test_in_memory_data_node.py
+++ b/tests/core/data/test_in_memory_data_node.py
@@ -24,9 +24,8 @@ def test_create(self):
             "foobar_bazy",
             Scope.SCENARIO,
             DataNodeId("id_uio"),
-            "my name",
             "owner_id",
-            properties={"default_data": "In memory Data Node"},
+            properties={"default_data": "In memory Data Node", "name": "my name"},
         )
         assert isinstance(dn, InMemoryDataNode)
         assert dn.storage_type() == "in_memory"
diff --git a/tests/core/data/test_json_data_node.py b/tests/core/data/test_json_data_node.py
index c023601e..86733401 100644
--- a/tests/core/data/test_json_data_node.py
+++ b/tests/core/data/test_json_data_node.py
@@ -86,7 +86,7 @@ def object_hook(self, o):
 class TestJSONDataNode:
     def test_create(self):
         path = "data/node/path"
-        dn = JSONDataNode("foo_bar", Scope.SCENARIO, name="super name", properties={"default_path": path})
+        dn = JSONDataNode("foo_bar", Scope.SCENARIO, properties={"default_path": path, "name": "super name"})
         assert isinstance(dn, JSONDataNode)
         assert dn.storage_type() == "json"
         assert dn.config_id == "foo_bar"
@@ -101,7 +101,7 @@ def test_create(self):

         with pytest.raises(InvalidConfigurationId):
             dn = JSONDataNode(
-                "foo bar", Scope.SCENARIO, name="super name", properties={"default_path": path, "has_header": False}
+                "foo bar", Scope.SCENARIO, properties={"default_path": path, "has_header": False, "name": "super name"}
             )

     def test_get_user_properties(self, json_file):
diff --git a/tests/core/data/test_parquet_data_node.py b/tests/core/data/test_parquet_data_node.py
index d63a6cbe..d33b9b88 100644
--- a/tests/core/data/test_parquet_data_node.py
+++ b/tests/core/data/test_parquet_data_node.py
@@ -71,7 +71,7 @@ def test_create(self):
         path = "data/node/path"
         compression = "snappy"
         dn = ParquetDataNode(
-            "foo_bar", Scope.SCENARIO, name="super name", properties={"path": path, "compression": compression}
+            "foo_bar", Scope.SCENARIO, properties={"path": path, "compression": compression, "name": "super name"}
         )
         assert isinstance(dn, ParquetDataNode)
         assert dn.storage_type() == "parquet"
@@ -89,7 +89,7 @@ def test_create(self):
         assert dn.engine == "pyarrow"

         with pytest.raises(InvalidConfigurationId):
-            dn = ParquetDataNode("foo bar", Scope.SCENARIO, name="super name", properties={"path": path})
+            dn = ParquetDataNode("foo bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

     def test_get_user_properties(self, parquet_file_path):
         dn_1 = ParquetDataNode("dn_1", Scope.SCENARIO, properties={"path": parquet_file_path})
diff --git a/tests/core/notification/test_notifier.py b/tests/core/notification/test_notifier.py
index a0413c6a..fd632f06 100644
--- a/tests/core/notification/test_notifier.py
+++ b/tests/core/notification/test_notifier.py
@@ -346,7 +346,7 @@ def test_publish_event():
         task.properties.pop("debug")
         assert registration_queue.qsize() == 22

-        dn.name = "new datanode name"
+        dn.editor_id = "new editor id"
         assert registration_queue.qsize() == 23

         dn.properties["sorted"] = True
@@ -413,7 +413,7 @@
             "properties",
             "properties",
             "properties",
-            "name",
+            "editor_id",
             "properties",
             "properties",
             "properties",
@@ -506,7 +506,7 @@
         t.properties.update({"debug": True})
         assert registration_queue.qsize() == 0

-        d.name = "another new datanode name"
+        d.editor_id = "another new editor id"
         assert registration_queue.qsize() == 0

         d.properties["sorted"] = True
@@ -553,7 +553,7 @@
             "skippable",
             "properties",
             "properties",
-            "name",
+            "editor_id",
             "properties",
             "properties",
         ]