Blender 4.2.1 #1136

Merged
31 commits, merged Sep 12, 2024
Commits
cfe62a0
feat(blender): Upgrade to 3.6.14
Griperis Jul 30, 2024
22a85de
chore(utility): Corrected typing
Griperis Jul 30, 2024
c7c54e3
feat(blender): upgrade to 4.0.2
Griperis Aug 1, 2024
e72a780
feat(blender): adjusted PrincipledBSDF input names to 4.0
Griperis Aug 1, 2024
847321e
feat(blender): use context.temp_override for operator overrides
Griperis Aug 2, 2024
ecca5cf
feat(blender): use the node_tree.interface API instead of .outputs an…
Griperis Aug 2, 2024
2dbf982
feat(blender): remove the use_legacy_obj_import
Griperis Aug 6, 2024
4221571
feat(blender): import ply using bpy.op.wm.ply_import
Griperis Aug 6, 2024
0430a92
feat(blender): bump version to 4.1.1
Griperis Aug 8, 2024
442c947
feat(blender): make pip packages setup work with blender 4.1 updated …
Griperis Aug 8, 2024
bb85745
feat(blender): use auto smooth geometry nodes
Griperis Aug 8, 2024
71bfebb
feat(blender): make depth, distance and segmentation output rgb in bl…
Griperis Aug 8, 2024
a66112a
feat(blender): use correct import in external/vhacd
Griperis Aug 8, 2024
5940449
fix(camera): use np.float32 instead of deprecated np.float
Griperis Aug 14, 2024
4c3ac3e
feat(render): add view transform as 'render' parameter
Griperis Aug 14, 2024
152100f
feat(blender): bump blender to 4.2.0
Griperis Aug 14, 2024
6817ed6
feat(blender): load auto smooth node group manually
Griperis Aug 19, 2024
4deeb74
fix(writer): Fixes matplotlib error on headless machines
cornerfarmer Aug 19, 2024
3c1be7b
feat(blender): assign the root collection after cleaning up data blocks
Griperis Aug 19, 2024
c24a8e5
fix(mesh): convert deg to rad when assigning to the shade smooth modi…
Griperis Aug 27, 2024
a37b4f0
feat(blender): bump to 4.2.1 LTS
Griperis Aug 27, 2024
4645722
fix(amass): Fixes subsurface shader
cornerfarmer Aug 27, 2024
f99fb32
fix(ObjectLoader): don't validate meshes when importing obj
Griperis Aug 29, 2024
4cacdbe
fix(ply): Removes incorrect custom split normals from bop/replica obj…
cornerfarmer Aug 29, 2024
b0e01b8
fix(material): Adjusts displacement scale to fit blender 4.2
cornerfarmer Sep 4, 2024
eeaf5ef
fix(loader): Allow overwriting validate_meshes in ObjLoader
cornerfarmer Sep 6, 2024
61e2cf7
fix(shapenet): Enables mesh validation per default for shapenet objects
cornerfarmer Sep 9, 2024
95ba9f3
fix(physics): Use surface area to compute center of mass per default
cornerfarmer Sep 9, 2024
593d344
doc(mesh): document 'angle' parameter in 'add_auto_smooth_modifier'
Griperis Sep 12, 2024
275141e
refactor(render): move manipulating 'view_transform' to 'set_output_f…
Griperis Sep 12, 2024
01c8a12
fix(mesh): Makes smooth modifier working on mac
cornerfarmer Sep 12, 2024
2 changes: 1 addition & 1 deletion blenderproc/external/vhacd/decompose.py
@@ -150,7 +150,7 @@ def convex_decomposition(obj: "MeshObject", temp_dir: str, vhacd_path: str, reso
else:
out_file_name = os.path.join(cache_dir, str(mesh_hash) + ".obj")

bpy.ops.import_scene.obj(filepath=out_file_name, axis_forward="Y", axis_up="Z")
bpy.ops.wm.obj_import(filepath=out_file_name, forward_axis="Y", up_axis="Z")
imported = bpy.context.selected_objects

# Name and transform the loaded parts
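
Note: Blender 4.0 removed the legacy Python OBJ importer, which is why the operator and its axis keywords change here. A minimal sketch of the migration, assuming a placeholder file path:

import bpy

# Blender <= 3.x (legacy importer, removed in 4.0):
#   bpy.ops.import_scene.obj(filepath="parts.obj", axis_forward="Y", axis_up="Z")
# Blender 4.x (new importer, renamed axis keywords):
bpy.ops.wm.obj_import(filepath="parts.obj", forward_axis="Y", up_axis="Z")
# Both importers leave the newly created objects selected
imported = bpy.context.selected_objects
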
2 changes: 1 addition & 1 deletion blenderproc/python/camera/LensDistortionUtility.py
@@ -246,7 +246,7 @@ def _internal_apply(input_image: np.ndarray) -> np.ndarray:
amount_of_output_channels = input_image.shape[2]
image_distorted = np.zeros((orig_res_y, orig_res_x, amount_of_output_channels))
used_dtpye = input_image.dtype
data = input_image.astype(np.float)
data = input_image.astype(np.float32)
# Forward mapping in order to distort the undistorted image coordinates
# and reshape the arrays into the image shape grid.
# The reference frame for coords is as in DLR CalDe etc. (the upper-left pixel center is at [0,0])
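
Note: the np.float alias was removed in NumPy 1.24 (it was only ever an alias for Python's float), so an explicit width is used instead. A tiny sketch on a placeholder array:

import numpy as np

input_image = np.zeros((480, 640, 3), dtype=np.uint8)  # placeholder image
data = input_image.astype(np.float32)  # np.float no longer exists in recent NumPy
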
8 changes: 5 additions & 3 deletions blenderproc/python/loader/AMASSLoader.py
@@ -285,13 +285,15 @@ def correct_materials(objects: List[MeshObject]):
skin_tone_fac = random.uniform(0.0, 1)
skin_tone_rgb = [value * skin_tone_fac for value in skin_tone_rgb]
principled_bsdf.inputs["Base Color"].default_value = mathutils.Vector([*skin_tone_rgb, 1.0])
principled_bsdf.inputs["Subsurface"].default_value = 0.2
principled_bsdf.inputs["Subsurface Color"].default_value = mathutils.Vector([*skin_tone_rgb, 1.0])

principled_bsdf.subsurface_method = "RANDOM_WALK_SKIN"
principled_bsdf.inputs["Subsurface Weight"].default_value = 1
principled_bsdf.inputs["Subsurface Scale"].default_value = 0.2
principled_bsdf.inputs["Subsurface Radius"].default_value = mathutils.Vector([1.0, 0.2, 0.1])
principled_bsdf.inputs["Subsurface IOR"].default_value = 2.5

# darker skin looks better when made less specular
principled_bsdf.inputs["Specular"].default_value = np.mean(skin_tone_rgb) / 255.0
principled_bsdf.inputs["Specular IOR Level"].default_value = np.mean(skin_tone_rgb) / 255.0

texture_nodes = material.get_nodes_with_type("ShaderNodeTexImage")
if texture_nodes and len(texture_nodes) > 1:
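
Note: Blender 4.0 reorganised the Principled BSDF sockets: "Subsurface" became "Subsurface Weight", "Subsurface Color" was folded into "Base Color", "Specular" became "Specular IOR Level", and a dedicated "RANDOM_WALK_SKIN" subsurface method was added. A minimal sketch on a freshly created material (node and value choices are illustrative only):

import bpy

mat = bpy.data.materials.new("skin_demo")
mat.use_nodes = True
bsdf = mat.node_tree.nodes["Principled BSDF"]  # default node of a new material

bsdf.subsurface_method = "RANDOM_WALK_SKIN"            # 4.x skin-specific mode
bsdf.inputs["Subsurface Weight"].default_value = 1.0   # was "Subsurface" before 4.0
bsdf.inputs["Subsurface Scale"].default_value = 0.2
bsdf.inputs["Subsurface Radius"].default_value = (1.0, 0.2, 0.1)
bsdf.inputs["Specular IOR Level"].default_value = 0.3  # was "Specular" before 4.0
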
4 changes: 4 additions & 0 deletions blenderproc/python/loader/BopLoader.py
@@ -347,6 +347,10 @@ def load_mesh(obj_id: int, model_p: dict, bop_dataset_name: str, scale: float =
# if the object was not previously loaded - load it, if duplication is allowed - duplicate it
duplicated = model_path in _BopLoader.CACHED_OBJECTS
objs = load_obj(model_path, cached_objects=_BopLoader.CACHED_OBJECTS)
# BOP objects come with incorrect custom split normals, so remove them
for obj in objs:
obj.clear_custom_splitnormals()

assert (
len(objs) == 1
), f"Loading object from '{model_path}' returned more than one mesh"
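
Note: clear_custom_splitnormals() is BlenderProc's MeshObject wrapper; the underlying cleanup is assumed to correspond roughly to Blender's own operator, sketched here on an imported object:

import bpy

obj = bpy.context.selected_objects[0]  # e.g. the mesh that was just imported
with bpy.context.temp_override(object=obj, active_object=obj):
    # Removes imported custom split normals so shading falls back to the mesh normals
    bpy.ops.mesh.customdata_custom_splitnormals_clear()
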
2 changes: 1 addition & 1 deletion blenderproc/python/loader/CCMaterialLoader.py
@@ -151,7 +151,7 @@ def create_material(new_mat: bpy.types.Material, base_image_path: str, ambient_o
base_color = MaterialLoaderUtility.add_base_color(nodes, links, base_image_path, principled_bsdf)
collection_of_texture_nodes.append(base_color)

principled_bsdf.inputs["Specular"].default_value = 0.333
principled_bsdf.inputs["Specular IOR Level"].default_value = 0.333

ao_node = MaterialLoaderUtility.add_ambient_occlusion(nodes, links, ambient_occlusion_image_path,
principled_bsdf, base_color)
4 changes: 2 additions & 2 deletions blenderproc/python/loader/Front3DLoader.py
@@ -368,11 +368,11 @@ def load_furniture_objs(data: dict, future_model_path: str, lamp_light_strength:

# Front3d .mtl files contain an emission color which makes the objects mistakenly emissive
# => Reset the emission color
principled_node.inputs["Emission"].default_value[:3] = [0, 0, 0]
principled_node.inputs["Emission Color"].default_value[:3] = [0, 0, 0]

# Front3d .mtl files use Tf incorrectly, they make all materials fully transmissive
# Revert that:
principled_node.inputs["Transmission"].default_value = 0
principled_node.inputs["Transmission Weight"].default_value = 0

# For each a texture node
image_node = mat.new_node('ShaderNodeTexImage')
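
Note: two more Principled BSDF sockets were renamed in Blender 4.0 and are patched here: "Emission" is now "Emission Color" and "Transmission" is now "Transmission Weight". A short sketch of the same reset on a placeholder material:

import bpy

mat = bpy.data.materials.new("front3d_demo")  # placeholder material
mat.use_nodes = True
principled = mat.node_tree.nodes["Principled BSDF"]

principled.inputs["Emission Color"].default_value[:3] = [0, 0, 0]  # was "Emission"
principled.inputs["Transmission Weight"].default_value = 0.0       # was "Transmission"
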
17 changes: 7 additions & 10 deletions blenderproc/python/loader/ObjectLoader.py
@@ -12,8 +12,7 @@
from blenderproc.python.material.MaterialLoaderUtility import create as create_material


def load_obj(filepath: str, cached_objects: Optional[Dict[str, List[MeshObject]]] = None,
use_legacy_obj_import: bool = False, **kwargs) -> List[MeshObject]:
def load_obj(filepath: str, cached_objects: Optional[Dict[str, List[MeshObject]]] = None, **kwargs) -> List[MeshObject]:
""" Import all objects for the given file and returns the loaded objects

In .obj files a list of objects can be saved in.
@@ -22,8 +21,6 @@ def load_obj(filepath: str, cached_objects: Optional[Dict[str, List[MeshObject]]
:param filepath: the filepath to the location where the data is stored
:param cached_objects: a dict of filepath to objects, which have been loaded before, to avoid reloading
(the dict is updated in this function)
:param use_legacy_obj_import: If this is true the old legacy obj importer in python is used. It is slower, but
it correctly imports the textures in the ShapeNet dataset.
:param kwargs: all other params are handed directly to the bpy loading fct. check the corresponding documentation
:return: The list of loaded mesh objects.
"""
@@ -43,11 +40,11 @@ def load_obj(filepath: str, cached_objects: Optional[Dict[str, List[MeshObject]]
# save all selected objects
previously_selected_objects = bpy.context.selected_objects
if filepath.endswith(".obj"):
# Set validate_meshes to False per default to be backwards compatible
if "validate_meshes" not in kwargs:
kwargs["validate_meshes"] = False
# load an .obj file:
if use_legacy_obj_import:
bpy.ops.import_scene.obj(filepath=filepath, **kwargs)
else:
bpy.ops.wm.obj_import(filepath=filepath, **kwargs)
bpy.ops.wm.obj_import(filepath=filepath, **kwargs)
elif filepath.endswith(".ply"):
PLY_TEXTURE_FILE_COMMENT = "comment TextureFile "
model_name = os.path.basename(filepath)
@@ -78,11 +75,11 @@ def load_obj(filepath: str, cached_objects: Optional[Dict[str, List[MeshObject]]
file.write(new_ply_file_content)

# Load .ply mesh
bpy.ops.import_mesh.ply(filepath=tmp_ply_file, **kwargs)
bpy.ops.wm.ply_import(filepath=tmp_ply_file, **kwargs)

else: # If no texture was given
# load a .ply mesh
bpy.ops.import_mesh.ply(filepath=filepath, **kwargs)
bpy.ops.wm.ply_import(filepath=filepath, **kwargs)
# Create default material
material = create_material('ply_material')
material.map_vertex_color()
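
Note: load_obj now always routes through the Blender 4.x importers. A hedged sketch of the two underlying calls with the defaults applied in this PR (file paths are placeholders):

import bpy

# .obj: meshes are no longer validated unless explicitly requested,
# since validation can break the texturing of e.g. ShapeNet models
bpy.ops.wm.obj_import(filepath="/tmp/model.obj", validate_meshes=False)

# .ply: bpy.ops.import_mesh.ply was removed in 4.0; the replacement lives under wm
bpy.ops.wm.ply_import(filepath="/tmp/model.ply")

loaded = bpy.context.selected_objects
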
3 changes: 3 additions & 0 deletions blenderproc/python/loader/ReplicaLoader.py
@@ -135,6 +135,9 @@ def load_replica(data_path: str, data_set_name: str, use_smooth_shading: bool =
"""
file_path = os.path.join(data_path, data_set_name, 'mesh.ply')
loaded_objects = load_obj(file_path)
# Replica comes with incorrect custom normals, so remove them
for obj in loaded_objects:
obj.clear_custom_splitnormals()

if use_smooth_shading:
for obj in loaded_objects:
7 changes: 5 additions & 2 deletions blenderproc/python/loader/ShapeNetLoader.py
@@ -14,7 +14,7 @@


def load_shapenet(data_path: str, used_synset_id: str, used_source_id: str = "",
move_object_origin: bool = True) -> MeshObject:
move_object_origin: bool = True, validate_meshes: bool = False) -> MeshObject:
""" This loads an object from ShapeNet based on the given synset_id, which specifies the category of objects to use.

From these objects one is randomly sampled and loaded.
@@ -30,6 +30,9 @@ def load_shapenet(data_path: str, used_synset_id: str, used_source_id: str = "",
:param move_object_origin: Moves the object center to the bottom of the bounding box in Z direction and also in the
middle of the X and Y plane, this does not change the `.location` of the object.
Default: True
:param validate_meshes: If set to True, imported meshes will be validated and corrected.
This can help with some ShapeNet objects, e.g. by removing duplicate faces.
However, it might destroy the texturing.
:return: The loaded mesh object.
"""
data_path = resolve_path(data_path)
@@ -39,7 +42,7 @@ def load_shapenet(data_path: str, used_synset_id: str, used_source_id: str = "",
taxonomy_file_path, data_path)
selected_obj = random.choice(files_with_fitting_synset)
# with the new version the textures are all wrong
loaded_objects = load_obj(selected_obj, use_legacy_obj_import=True)
loaded_objects = load_obj(selected_obj, validate_meshes=validate_meshes)

# In shapenet every .obj file only contains one object, make sure that is the case
if len(loaded_objects) != 1:
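
Note: a hedged usage sketch of the new parameter from the caller's side; the data path and synset id are placeholders:

import blenderproc as bproc

bproc.init()
# validate_meshes=True can fix e.g. duplicate faces on some ShapeNet models,
# but may destroy the texturing, hence it defaults to False
obj = bproc.loader.load_shapenet(
    data_path="/path/to/ShapeNetCore.v2",
    used_synset_id="02691156",
    validate_meshes=False,
)
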
17 changes: 12 additions & 5 deletions blenderproc/python/material/Dust.py
@@ -146,7 +146,7 @@ def add_dust(material: Material, strength: float, texture_nodes: List[bpy.types.
# the used dust color is a grey with a tint in orange
dust_color.inputs["Base Color"].default_value = [0.8, 0.773, 0.7, 1.0]
dust_color.inputs["Roughness"].default_value = 1.0
dust_color.inputs["Specular"].default_value = 0.0
dust_color.inputs["Specular IOR Level"].default_value = 0.0
links.new(dust_color.outputs["BSDF"], mix_shader.inputs[2])

# create the input and output nodes inside of the group
@@ -156,10 +156,17 @@ def add_dust(material: Material, strength: float, texture_nodes: List[bpy.types.
group_input.location = (x_pos + x_diff * 7, y_pos - y_diff * 0.5)

# create sockets for the outside of the group match them to the mix shader
group.outputs.new(mix_shader.outputs[0].bl_idname, mix_shader.outputs[0].name)
group.inputs.new(mix_shader.inputs[1].bl_idname, mix_shader.inputs[1].name)
group.inputs.new(multiply_node.inputs[1].bl_idname, "Dust strength")
group.inputs.new(mapping_node.inputs["Scale"].bl_idname, "Texture scale")
group.interface.new_socket(
mix_shader.outputs[0].name, in_out='OUTPUT', socket_type=mix_shader.outputs[0].bl_idname)
group.interface.new_socket(
mix_shader.inputs[1].name, in_out='INPUT', socket_type=mix_shader.inputs[1].bl_idname)
group.interface.new_socket(
"Dust strength", in_out='INPUT', socket_type=multiply_node.inputs[1].bl_idname)
# We set socket_type='NodeSocketVector' directly instead of using
# 'mapping_node.inputs["Scale"].bl_idname', because 'Scale' has the specific bl_idname
# 'NodeSocketVectorXYZ', while 'new_socket' expects 'NodeSocketVector'.
group.interface.new_socket(
"Texture scale", in_out='INPUT', socket_type='NodeSocketVector')

# link the input and output to the mix shader
links.new(group_input.outputs[0], mix_shader.inputs[1])
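
Note: Blender 4.0 replaced the per-group inputs/outputs collections with a single node_tree.interface, and sockets of both directions are now created via new_socket. A standalone sketch (group and socket names are illustrative):

import bpy

group = bpy.data.node_groups.new("dust_demo", "ShaderNodeTree")

# Blender <= 3.x:
#   group.outputs.new("NodeSocketShader", "Shader")
#   group.inputs.new("NodeSocketFloat", "Dust strength")
# Blender 4.x:
group.interface.new_socket("Shader", in_out="OUTPUT", socket_type="NodeSocketShader")
group.interface.new_socket("Dust strength", in_out="INPUT", socket_type="NodeSocketFloat")
group.interface.new_socket("Texture scale", in_out="INPUT", socket_type="NodeSocketVector")
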
6 changes: 3 additions & 3 deletions blenderproc/python/material/MaterialLoaderUtility.py
@@ -227,7 +227,7 @@ def add_specular(nodes: bpy.types.Nodes, links: bpy.types.NodeLinks, specular_im
if os.path.exists(specular_image_path):
specular_texture = create_image_node(nodes, specular_image_path, True,
_x_texture_node, 0)
links.new(specular_texture.outputs["Color"], principled_bsdf.inputs["Specular"])
links.new(specular_texture.outputs["Color"], principled_bsdf.inputs["Specular IOR Level"])
return specular_texture
return None

@@ -347,7 +347,7 @@ def add_displacement(nodes: bpy.types.Nodes, links: bpy.types.NodeLinks, displac
_y_texture_node * -4)
displacement_node = nodes.new("ShaderNodeDisplacement")
displacement_node.inputs["Midlevel"].default_value = 0.5
displacement_node.inputs["Scale"].default_value = 0.15
displacement_node.inputs["Scale"].default_value = 0.03
displacement_node.location.x = _x_texture_node * 0.5
displacement_node.location.y = _y_texture_node * -4
links.new(displacement_texture.outputs["Color"], displacement_node.inputs["Height"])
@@ -482,7 +482,7 @@ def change_to_texture_less_render(use_alpha_channel)
principled_bsdf = Utility.get_the_one_node_with_type(nodes, "BsdfPrincipled")

# setting the color values for the shader
principled_bsdf.inputs['Specular'].default_value = 0.65 # specular
principled_bsdf.inputs['Specular IOR Level'].default_value = 0.65 # specular
principled_bsdf.inputs['Roughness'].default_value = 0.2 # roughness

for used_object in [obj for obj in bpy.context.scene.objects if hasattr(obj.data, 'materials')]:
21 changes: 14 additions & 7 deletions blenderproc/python/object/PhysicsSimulation.py
@@ -13,7 +13,7 @@ def simulate_physics_and_fix_final_poses(min_simulation_time: float = 4.0, max_s
check_object_interval: float = 2.0,
object_stopped_location_threshold: float = 0.01,
object_stopped_rotation_threshold: float = 0.1, substeps_per_frame: int = 10,
solver_iters: int = 10, verbose: bool = False):
solver_iters: int = 10, verbose: bool = False, use_volume_com: bool = False):
""" Simulates the current scene and in the end fixes the final poses of all active objects.

The simulation is run for at least `min_simulation_time` seconds and at a maximum `max_simulation_time` seconds.
@@ -36,18 +36,21 @@ def simulate_physics_and_fix_final_poses(min_simulation_time: float = 4.0, max_s
:param substeps_per_frame: Number of simulation steps taken per frame.
:param solver_iters: Number of constraint solver iterations made per simulation step.
:param verbose: If True, more details during the physics simulation are printed.
:param use_volume_com: If True, the center of mass will be calculated by using the object volume.
This is more accurate than using the surface area (default), but requires a watertight mesh.
"""
# Undo changes made in the simulation like origin adjustment and persisting the object's scale
with UndoAfterExecution():
# Run simulation and remember poses before and after
obj_poses_before_sim = _PhysicsSimulation.get_pose()
origin_shifts = simulate_physics(min_simulation_time, max_simulation_time, check_object_interval,
object_stopped_location_threshold, object_stopped_rotation_threshold,
substeps_per_frame, solver_iters, verbose)
substeps_per_frame, solver_iters, verbose, use_volume_com)
obj_poses_after_sim = _PhysicsSimulation.get_pose()

# Make sure to remove the simulation cache as we are only interested in the final poses
bpy.ops.ptcache.free_bake({"point_cache": bpy.context.scene.rigidbody_world.point_cache})
with bpy.context.temp_override(point_cache=bpy.context.scene.rigidbody_world.point_cache):
bpy.ops.ptcache.free_bake()

# Fix the pose of all objects to their pose at the end of the simulation (also revert origin shift)
for obj in get_all_mesh_objects():
@@ -76,7 +79,7 @@ def simulate_physics_and_fix_final_poses(min_simulation_time: float = 4.0, max_s
def simulate_physics(min_simulation_time: float = 4.0, max_simulation_time: float = 40.0,
check_object_interval: float = 2.0, object_stopped_location_threshold: float = 0.01,
object_stopped_rotation_threshold: float = 0.1, substeps_per_frame: int = 10,
solver_iters: int = 10, verbose: bool = False) -> dict:
solver_iters: int = 10, verbose: bool = False, use_volume_com: bool = False) -> dict:
""" Simulates the current scene.

The simulation is run for at least `min_simulation_time` seconds and at a maximum `max_simulation_time` seconds.
@@ -100,14 +103,16 @@ def simulate_physics(min_simulation_time: float = 4.0, max_simulation_time: floa
:param substeps_per_frame: Number of simulation steps taken per frame.
:param solver_iters: Number of constraint solver iterations made per simulation step.
:param verbose: If True, more details during the physics simulation are printed.
:param use_volume_com: If True, the center of mass will be calculated by using the object volume.
This is more accurate than using the surface area (default), but requires a watertight mesh.
:return: A dict containing for every active object the shift that was added to their origins.
"""
# Shift the origin of all objects to their center of mass to make the simulation more realistic
origin_shift = {}
for obj in get_all_mesh_objects():
if obj.has_rigidbody_enabled():
prev_origin = obj.get_origin()
new_origin = obj.set_origin(mode="CENTER_OF_VOLUME")
new_origin = obj.set_origin(mode="ORIGIN_CENTER_OF_VOLUME" if use_volume_com else "CENTER_OF_MASS")
origin_shift[obj.get_name()] = new_origin - prev_origin

# Persist mesh scaling as having a scale != 1 can make the simulation unstable
@@ -184,7 +189,8 @@ def do_simulation(min_simulation_time: float, max_simulation_time: float, check_
# Simulate current interval
point_cache.frame_end = current_frame
with stdout_redirected(enabled=not verbose):
bpy.ops.ptcache.bake({"point_cache": point_cache}, bake=True)
with bpy.context.temp_override(point_cache=point_cache):
bpy.ops.ptcache.bake(bake=True)

# Go to second last frame and get poses
bpy.context.scene.frame_set(current_frame - _PhysicsSimulation.seconds_to_frames(1))
@@ -205,7 +211,8 @@ def do_simulation(min_simulation_time: float, max_simulation_time: float, check_
else:
# Free bake (this will not completely remove the simulation cache, so further simulations can
# reuse the already calculated frames)
bpy.ops.ptcache.free_bake({"point_cache": point_cache})
with bpy.context.temp_override(point_cache=point_cache):
bpy.ops.ptcache.free_bake()

@staticmethod
def get_pose() -> dict:
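
Note: passing a context dict as the first positional argument to an operator was removed in Blender 4.0, which is why the point-cache bake/free calls are wrapped in context.temp_override. A standalone sketch (the rigidbody world is created here only to make the snippet self-contained):

import bpy

bpy.ops.rigidbody.world_add()  # assumption: no rigidbody world exists yet
point_cache = bpy.context.scene.rigidbody_world.point_cache

# Blender <= 3.x: bpy.ops.ptcache.bake({"point_cache": point_cache}, bake=True)
with bpy.context.temp_override(point_cache=point_cache):
    bpy.ops.ptcache.bake(bake=True)
    bpy.ops.ptcache.free_bake()

From the user's side, the other change in this file is just the new keyword, e.g. bproc.object.simulate_physics_and_fix_final_poses(use_volume_com=True) for watertight meshes.
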