Skip to content

Commit

Permalink
add new node presets
Browse files Browse the repository at this point in the history
  • Loading branch information
Durman committed May 31, 2021
1 parent 3ff6111 commit b538db5
Show file tree
Hide file tree
Showing 5 changed files with 135 additions and 17 deletions.
4 changes: 2 additions & 2 deletions ui/development.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ def draw(self, context):
save = layout.operator(SvSaveSelected.bl_idname, text="Save current settings as node preset", icon='SOLO_ON')
save.id_tree = ntree.name
save.category = node.bl_idname
save.save_defaults = True


def idname_draw(self, context):
if not displaying_sverchok_nodes(context):
Expand Down Expand Up @@ -306,7 +306,7 @@ def idname_draw(self, context):
save = save_row.operator(SvSaveSelected.bl_idname, text="Save Node Preset", icon='SOLO_ON')
save.id_tree = ntree.name
save.category = node.bl_idname
save.save_defaults = True
save.is_node_preset = True
selected_nodes = [node for node in ntree.nodes if node.select]
save_row.enabled = len(selected_nodes) == 1

Expand Down
12 changes: 9 additions & 3 deletions ui/presets.py
Original file line number Diff line number Diff line change
Expand Up @@ -406,7 +406,7 @@ class SvSaveSelected(bpy.types.Operator):
preset_name: StringProperty(name="Name", description="Preset name")
id_tree: StringProperty()
category: StringProperty()
save_defaults : BoolProperty(default = False)
is_node_preset: BoolProperty()

def execute(self, context):
if not self.id_tree:
Expand All @@ -430,11 +430,16 @@ def execute(self, context):
self.report({'ERROR'}, msg)
return {'CANCELLED'}

layout_dict = JSONExporter.get_tree_structure(ng, True)
# the operator can save either a preset of a single node or a preset made from a bunch of selected nodes
if self.is_node_preset:
layout_dict = JSONExporter.get_node_structure(nodes[0])
else:
layout_dict = JSONExporter.get_tree_structure(ng, True)

preset = SvPreset(name=self.preset_name, category = self.category)
preset.make_add_operator()
destination_path = preset.path
json.dump(layout_dict, open(destination_path, 'w'), sort_keys=True, indent=2)
json.dump(layout_dict, open(destination_path, 'w'), indent=2)  # sorting keys is not expected by the exporter
msg = 'exported to: ' + destination_path
self.report({"INFO"}, msg)
info(msg)
Expand Down Expand Up @@ -898,6 +903,7 @@ def draw(self, context):
op = row.operator('node.sv_save_selected', text="Save Preset", icon='SOLO_ON')
op.id_tree = ntree.name
op.category = panel_props.category
op.is_node_preset = False

selected_nodes = [node for node in ntree.nodes if node.select]
can_save_preset = len(selected_nodes) > 0
Expand Down
7 changes: 6 additions & 1 deletion utils/sv_json_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from sverchok.utils.sv_node_utils import recursive_framed_location_finder
from sverchok.utils.handle_blender_data import BPYProperty
from sverchok.utils.sv_IO_monad_helpers import pack_monad
from sverchok.utils.sv_json_struct import FileStruct
from sverchok.utils.sv_json_struct import FileStruct, NodePresetFileStruct

if TYPE_CHECKING:
from sverchok.node_tree import SverchCustomTree, SverchCustomTreeNode
Expand All @@ -36,6 +36,11 @@ def get_tree_structure(tree: SverchCustomTree, use_selection=False) -> dict:
else:
return FileStruct().export_tree(tree, use_selection)

@staticmethod
def get_node_structure(node) -> dict:
    """Return a JSON-serializable structure of the given node's properties."""
    preset_struct = NodePresetFileStruct()
    return preset_struct.export(node)

@staticmethod
def _get_nodes_structure(nodes: List[SverchCustomTreeNode]) -> dict:
"""Generate structure of given nodes which can be saved into json format"""
Expand Down
10 changes: 8 additions & 2 deletions utils/sv_json_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from sverchok.utils.logging import info, warning, getLogger, logging
from sverchok.utils.handle_blender_data import BPYProperty, BPYNode
from sverchok.utils.sv_IO_monad_helpers import unpack_monad
from sverchok.utils.sv_json_struct import FileStruct
from sverchok.utils.sv_json_struct import FileStruct, NodePresetFileStruct

if TYPE_CHECKING:
from sverchok.node_tree import SverchCustomTree, SverchCustomTreeNode
Expand Down Expand Up @@ -63,7 +63,13 @@ def import_into_tree(self, tree: SverchCustomTree, print_log: bool = True):
build_update_list(tree)
process_tree(tree)

def import_node_settings(self, node: SverchCustomTreeNode): # todo should be done something for new importer
def import_node_settings(self, node: SverchCustomTreeNode):
    """Apply the settings stored in the imported structure to the given node."""
    if self.structure_version < 1.0:
        # legacy files (structure version below 1.0) use the old preset layout
        self._old_import_node_settings(node)
        return
    preset_struct = NodePresetFileStruct(logger=self._fails_log, structure=self._structure)
    preset_struct.build(node)

def _old_import_node_settings(self, node: SverchCustomTreeNode):
"""
It takes first node from file and apply its settings to given node
It is strange but it is how it was originally implemented
Expand Down
119 changes: 110 additions & 9 deletions utils/sv_json_struct.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,6 @@ def __init__(self, factories: List[Type[Struct]]):
if factory.type in self._factory_names:
factory_name = self._factory_names[factory.type]
setattr(self, factory_name, factory)
# probably we would never want using file structure from inside others
elif factory.type == StrTypes.FILE:
continue
else:
raise TypeError(f'Factory with type: {factory.type}'
f' is not among supported: {self._factory_names.keys()}')
Expand All @@ -67,7 +64,7 @@ def get_factory(self, struct_type: StrTypes) -> Type[Struct]:
raise TypeError(f'Given struct type: {struct_type} is not among supported {self._factory_names.keys()}')

@classmethod
def gram_from_module(cls) -> StructFactory:
def grab_from_module(cls) -> StructFactory:
"""Grab all factories in the module"""
factory_classes = []
module_classes = inspect.getmembers(sys.modules[__name__],
Expand All @@ -79,7 +76,6 @@ def gram_from_module(cls) -> StructFactory:


class StrTypes(Enum):
FILE = auto()
TREE = auto()
NODE = auto()
SOCK = auto()
Expand Down Expand Up @@ -150,8 +146,6 @@ def read_collection(self, collection: dict):


class FileStruct(Struct):
type = StrTypes.FILE

def __init__(self, name=None, logger: FailsLog = None, struct: dict = None):
self._struct: Dict[str, Any] = struct or {"export_version": str(self.version)}
self.logger: FailsLog = logger
Expand All @@ -165,7 +159,7 @@ def export_tree(self, tree, use_selection=False):
raise TypeError(f'Only exporting main trees is supported, {tree.bl_label} is given')

self._struct["main_tree"] = dict()
struct_factories = StructFactory.gram_from_module() # todo to args?
struct_factories = StructFactory.grab_from_module() # todo to args?
dependencies: List[Tuple[BPYPointers, str]] = []

# export main tree first
Expand Down Expand Up @@ -204,7 +198,7 @@ def build_into_tree(self, tree): # todo add protection from exporting inside no

with tree.throttle_update(): # todo it is required only for current update system can be deleted later??

factories = StructFactory.gram_from_module()
factories = StructFactory.grab_from_module()
imported_structs = OldNewNames()
trees_to_build = []
version, main_tree, data_blocks = self.read()
Expand Down Expand Up @@ -251,6 +245,113 @@ def _data_blocks_reader(self): # todo add logger?
yield StrTypes[struct_type_name], block_name, block_struct


class NodePresetFileStruct(Struct):
    """JSON structure of a single-node preset.

    The structure holds one node under the "node" key plus any dependency
    data blocks (e.g. group trees) under keys named after StrTypes members.
    """

    def __init__(self, name=None, logger=None, structure=None):
        # `name` is unused but kept for signature consistency with the other
        # Struct subclasses created by StructFactory
        default_struct = {
            "export_version": str(self.version),
            "node": dict(),
        }
        self.logger = logger
        self._struct = structure or default_struct

    def export(self, node):
        """Return a JSON-serializable dict describing `node` and its dependencies."""
        factories = StructFactory.grab_from_module()
        dependencies: List[Tuple[BPYPointers, str]] = []

        struct = factories.node(node.name, self.logger)
        self._struct["node"][node.name] = struct.export(node, factories, dependencies)

        # exporting the node can register dependency data blocks, which in turn
        # may register more of them, so drain the queue until it is empty
        while dependencies:
            block_type, block_name = dependencies.pop()
            struct_type = StrTypes.get_type(block_type)
            if struct_type.name not in self._struct:
                self._struct[struct_type.name] = dict()
            if block_name not in self._struct[struct_type.name]:
                factory = factories.get_factory(struct_type)
                data_block = block_type.collection[block_name]
                structure = factory(block_name, self.logger).export(data_block, factories, dependencies)
                self._struct[struct_type.name][block_name] = structure

        return self._struct

    def build(self, node):
        """Reset `node` to defaults and apply the preset stored in this structure.

        The node is removed and recreated (this resets all its properties), so
        the caller's reference becomes invalid; links of the old node are
        restored where the new sockets still match.
        """
        tree = node.id_data
        with tree.throttle_update(), tree.init_tree():  # todo throttle can be deleted later

            factories = StructFactory.grab_from_module()
            imported_structs = OldNewNames()
            version, data_blocks = self.read()  # version itself is not used during building
            trees_to_build = []

            # initialize trees (in case the preset is of a group node) and
            # build all other dependency data block types
            for struct_type, block_name, raw_struct in data_blocks:
                if struct_type == StrTypes.TREE:
                    tree_struct = factories.tree(block_name, self.logger, raw_struct)
                    data_block = bpy.data.node_groups.new(block_name, tree_struct.read_bl_type())
                    tree_struct.build_interface(data_block, factories, imported_structs)
                    imported_structs[(struct_type, '', block_name)] = data_block.name
                    trees_to_build.append(tree_struct)
                else:
                    # all data blocks except node trees
                    block_struct = factories.get_factory(struct_type)(block_name, self.logger, raw_struct)
                    block_struct.build(factories, imported_structs)

            # group trees can only be filled after all of them exist
            for tree_struct in trees_to_build:
                new_name = imported_structs[StrTypes.TREE, '', tree_struct.name]
                data_block = bpy.data.node_groups[new_name]
                tree_struct.build(data_block, factories, imported_structs)

            # removing the node below also removes its links, so save them first
            links = []
            for link in _ordered_links(tree):
                if link.from_node.name == node.name or link.to_node.name == node.name:
                    link_struct = factories.link(None, self.logger)
                    link_struct.export(link, factories, [])
                    links.append(link_struct)

            # recreate the node from scratch; this is needed to reset all its
            # properties to their defaults before applying the preset
            node_name, raw_struct = next(iter(self._struct["node"].items()))
            node_struct = factories.node(node_name, self.logger, raw_struct)
            location = node.location[:]  # copy; otherwise it is a reference into memory freed on removal
            tree.nodes.remove(node)
            node = tree.nodes.new(node_struct.read_bl_type())
            node.name = node_name
            node.select = True
            tree.nodes.active = node
            imported_structs[StrTypes.NODE, tree.name, node_name] = node.name

            # all nodes should be registered as if they were imported with new
            # names before linking; a distinct loop variable is used so that
            # `node` keeps pointing at the recreated node (the previous version
            # shadowed it and relied on the new node being last in the collection)
            for tree_node in tree.nodes:
                imported_structs[StrTypes.NODE, tree.name, tree_node.name] = tree_node.name

            # import the node and rebuild the links if possible
            node_struct.build(node, factories, imported_structs)
            node.location = location  # return to the initial position; has to happen after the node build
            for link_struct in links:
                try:
                    link_struct.build(tree, factories, imported_structs)
                except LookupError:  # the node seems to have different sockets now
                    pass
                # how this should work with group node links is not clear
                # because they are bound to identifiers of the group tree inputs/outputs;
                # for now breaking such links is considered desired behaviour

            node.process_node(bpy.context)

    def read(self):
        """Return (file version, iterator over dependency data blocks)."""
        with self.logger.add_fail("Reading version of the file"):
            version = float(self._struct["export_version"])

        return version, self._data_blocks_reader()

    def _data_blocks_reader(self):  # todo add logger?
        # yields only keys naming a StrTypes member, which skips
        # "export_version" and the "node" entry handled separately in build()
        struct_type: StrTypes
        for struct_type_name, structures in self._struct.items():
            if struct_type_name in (it.name for it in StrTypes):
                for block_name, block_struct in structures.items():
                    yield StrTypes[struct_type_name], block_name, block_struct


class TreeStruct(Struct):
type = StrTypes.TREE

Expand Down

0 comments on commit b538db5

Please sign in to comment.