mirror of
https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
synced 2024-11-22 20:00:53 +00:00
feat(blenvy): further upgrade & cleanup work for serialize_scene
* rewrote & cleaned up material handling, seems to work well!
* attempts at unifying the parsing of geometry & shader nodes seem to lead to geometry node parameter changes not being detected
* wip
This commit is contained in:
parent c557334b75
commit 91f38255dd
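For orientation before the diff: the reworked hashing builds comparison strings ("hashes") per data-block, with the node tree handled separately and only scanned when the corresponding in-depth auto-export setting is enabled. A minimal standalone sketch of that composition (plain Python, not the add-on's code; `material_signature` and the fake `settings` object are illustrative only):

```python
from types import SimpleNamespace

def material_signature(flat_signature, tree_signature, settings):
    # the flat field signature is always compared; the more expensive
    # node-tree signature is appended only for in-depth scans
    if tree_signature is not None and settings.auto_export.materials_in_depth_scan:
        return str(flat_signature) + str(tree_signature)
    return str(flat_signature)

settings = SimpleNamespace(auto_export=SimpleNamespace(materials_in_depth_scan=True))
print(material_signature("['0.5', 'OPAQUE']", "['node hashes', 'link hashes']", settings))
```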
@@ -33,6 +33,10 @@ def _lookup_color(data):
 def _lookup_array(data):
     return peel_value(data)
 
+def _lookup_array2(data):
+    print("mystery vectors")
+    return peel_value(data)
+
 def _lookup_prop_group(data):
     bla = generic_fields_hasher_evolved(data, fields_to_ignore=fields_to_ignore_generic)
     print("PROPGROUP", bla)
@@ -46,20 +50,14 @@ def _lookup_materialLineArt(data):
 
 type_lookups = {
     Color: _lookup_color,#lambda input: print("dsf")',
+    bpy.types.FloatVectorAttribute: _lookup_array2,
     bpy.types.bpy_prop_array: _lookup_array,
     bpy.types.PropertyGroup: _lookup_prop_group,
     bpy.types.bpy_prop_collection: _lookup_collection,
     bpy.types.MaterialLineArt: _lookup_materialLineArt
 }
 
-# TODO: replace the first one with this once if its done
-def generic_fields_hasher_evolved(data, fields_to_ignore):
-    all_field_names = dir(data)
-    field_values = []
-    for field_name in all_field_names:
-        if not field_name.startswith("__") and not field_name in fields_to_ignore and not field_name.startswith("show") and not callable(getattr(data, field_name, None)):
-            raw_value = getattr(data, field_name, None)
-            #print("raw value", raw_value, "type", type(raw_value), isinstance(raw_value, Color), isinstance(raw_value, bpy.types.bpy_prop_array))
+def convert_field(raw_value, field_name=""):
     conversion_lookup = None # type_lookups.get(type(raw_value), None)
     all_types = inspect.getmro(type(raw_value))
     for s_type in all_types:
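The `type_lookups` table above is consulted through `inspect.getmro()` (see the context lines), so a value whose concrete type is a subclass of a registered type still finds its converter. A standalone sketch of that dispatch pattern (illustrative classes, not bpy types):

```python
import inspect

class Base:
    pass

class Derived(Base):
    pass

# converter table keyed by (base) types, like type_lookups in the diff
type_lookups = {Base: lambda value: f"converted({type(value).__name__})"}

def convert(value):
    # walk the MRO (Derived -> Base -> object) so subclasses of a registered
    # type are still handled; fall back to the raw value otherwise
    for klass in inspect.getmro(type(value)):
        if klass in type_lookups:
            return type_lookups[klass](value)
    return value

print(convert(Derived()))  # "converted(Derived)", found via the Base entry
print(convert(42))         # 42, no converter registered
```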
@@ -70,11 +68,22 @@ def generic_fields_hasher_evolved(data, fields_to_ignore):
     field_value = None
     if conversion_lookup is not None:
         field_value = conversion_lookup(raw_value)
-        print("field_name",field_name,"conv value", field_value)
+        #print("field_name",field_name,"conv value", field_value)
     else:
-        print("field_name",field_name,"raw value", raw_value)
+        #print("field_name",field_name,"raw value", raw_value)
         field_value = raw_value
 
+    return field_value
 
+# TODO: replace the first one with this once if its done
+def generic_fields_hasher_evolved(data, fields_to_ignore):
+    all_field_names = dir(data)
+    field_values = []
+    for field_name in all_field_names:
+        if not field_name.startswith("__") and not field_name in fields_to_ignore and not field_name.startswith("show") and not callable(getattr(data, field_name, None)):
+            raw_value = getattr(data, field_name, None)
+            #print("raw value", raw_value, "type", type(raw_value), isinstance(raw_value, Color), isinstance(raw_value, bpy.types.bpy_prop_array))
+            field_value = convert_field(raw_value, field_name)
             field_values.append(str(field_value))
 
     return str(field_values)
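The new `generic_fields_hasher_evolved` walks `dir(data)`, skips dunders, ignored names, `show*` UI flags and callables, converts each remaining value through `convert_field`, and returns the stringified list. A standalone sketch of that filtering (the `FakeNode` class and the `rna_type` ignore entry are illustrative only, not the add-on's actual ignore list):

```python
class FakeNode:
    """Stand-in for a bpy struct: data fields, one callable, one 'show_*' flag."""
    name = "Cube"
    value = 0.5
    show_options = True          # skipped: starts with "show"
    def update(self):            # skipped: callable
        pass

def fields_signature(data, fields_to_ignore=("rna_type",)):
    # same filtering as generic_fields_hasher_evolved: drop dunders, ignored
    # names, "show*" UI flags and callables, then stringify what remains
    field_values = []
    for field_name in dir(data):
        if field_name.startswith("__") or field_name in fields_to_ignore \
                or field_name.startswith("show") or callable(getattr(data, field_name, None)):
            continue
        field_values.append(str(getattr(data, field_name, None)))
    return str(field_values)

print(fields_signature(FakeNode()))  # "['Cube', '0.5']" -- changes when a field changes
```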
@@ -170,29 +179,67 @@ def armature_hash(obj):
         print("bone", bone, bone_hash(bone))"""
     return str(fields)
 
-def node_tree(nodetree_data):
-    print("SCANNING NODE TREE", nodetree_data)
-    # output node:
-    output = nodetree_data.get_output_node("ALL")
-    print("output", output)
-
-    fields_to_ignore = fields_to_ignore_generic+ ['contains_tree','get_output_node', 'interface_update', 'override_template_create']
-    all_field_names = dir(nodetree_data)
-    fields = [getattr(nodetree_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-    # print("node tree", fields)
-    return str(fields)
+# used for various node trees: shaders, modifiers etc
+def node_tree(node_tree):
+    print("SCANNING NODE TREE", node_tree)
+
+    # storage for hashing
+    links_hashes = []
+    nodes_hashes = []
+    root_inputs = dict(node_tree) # probably useless for materials, contains settings for certain modifiers
+
+    for node in node_tree.nodes:
+        #print("node", node, node.type, node.name, node.label)
+        input_hashes = []
+        for input in node.inputs:
+            #print(" input", input, "label", input.label, "name", input.name, dir(input))
+            default_value = getattr(input, 'default_value', None)
+            input_hash = f"{convert_field(default_value)}"
+            input_hashes.append(input_hash)
+
+        output_hashes = []
+        # IF the node itself is a group input, its outputs are the inputs of the geometry node (yes, not easy)
+        node_in_use = True
+        for (index, output) in enumerate(node.outputs):
+            # print(" output", output, "label", output.label, "name", output.name, "generated name", f"Socket_{index+1}")
+            default_value = getattr(output, 'default_value', None)
+            output_hash = f"{convert_field(default_value)}"
+            output_hashes.append(output_hash)
+
+            node_in_use = node_in_use and default_value is not None
+        #print("NODE IN USE", node_in_use)
+
+        node_fields_to_ignore = fields_to_ignore_generic + ['internal_links', 'inputs', 'outputs']
+        node_hash = f"{generic_fields_hasher_evolved(node, node_fields_to_ignore)}_{str(input_hashes)}_{str(output_hashes)}"
+        #print("node hash", node_hash)
+        #print("node hash", str(input_hashes))
+        nodes_hashes.append(node_hash)
+
+    for link in node_tree.links:
+        """print("LINK", link, dir(link))
+        print("FROM", link.from_node, link.from_socket)
+        print("TO", link.to_node, link.to_socket)"""
+
+        from_socket_default = link.from_socket.default_value if hasattr(link.from_socket, "default_value") else None
+        to_socket_default = link.to_socket.default_value if hasattr(link.to_socket, "default_value") else None
+        link_hash = f"{link.from_node.name}_{link.from_socket.name}_{from_socket_default}+{link.to_node.name}_{link.to_socket.name}_{to_socket_default}"
+
+        links_hashes.append(link_hash)
+
+    print("node hashes",nodes_hashes, "links_hashes", links_hashes)
+    return f"{str(root_inputs)}_{str(nodes_hashes)}_{str(links_hashes)}"
 
 def material_hash(material, settings):
-    print("material_hash", material)
-    hashed_material = generic_fields_hasher_evolved(material, fields_to_ignore_generic + ['node_tree']) # we want to handle the node tree seperatly
-    print("HASH", hashed_material)
-    """if node_group is not None and settings.auto_export.materials_in_depth_scan:
-        pass
-    else:
-        generic_fields_hasher(material, fields_to_ignore_generic)"""
-    return str(hashed_material)
+    print("material_hash", material.name, material.node_tree)
+    node_group = getattr(material, "node_tree", None)
+    hashed_material_except_node_tree = generic_fields_hasher_evolved(material, fields_to_ignore_generic + ['node_tree']) # we want to handle the node tree seperatly
+    if node_group is not None and settings.auto_export.materials_in_depth_scan:
+        hashed_node_tree = node_tree(node_group)
+        return str(hashed_material_except_node_tree) + str(hashed_node_tree)
+    else:
+        return str(hashed_material_except_node_tree)
 
 # TODO: this is partially taken from export_materials utilities, perhaps we could avoid having to fetch things multiple times ?
 def materials_hash(obj, cache, settings):
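The new shared `node_tree()` builds its signature from the tree's root inputs plus per-node and per-link strings, where each node contributes its socket default values (plus its generic field signature, left out below for brevity). A standalone sketch of that composition using `SimpleNamespace` stand-ins for nodes:

```python
from types import SimpleNamespace

def node_signature(node):
    # per node: the default values of its input/output sockets (when present),
    # like the input_hashes/output_hashes lists built in node_tree() above
    ins = [str(getattr(s, "default_value", None)) for s in node.inputs]
    outs = [str(getattr(s, "default_value", None)) for s in node.outputs]
    return f"{node.name}_{ins}_{outs}"

def tree_signature(root_inputs, nodes, links_hashes):
    # root inputs + per-node strings + per-link strings, matching the final
    # f"{root_inputs}_{nodes_hashes}_{links_hashes}" in node_tree()
    nodes_hashes = [node_signature(n) for n in nodes]
    return f"{root_inputs}_{nodes_hashes}_{links_hashes}"

value_node = SimpleNamespace(name="Value", inputs=[], outputs=[SimpleNamespace(default_value=0.5)])
mix_node = SimpleNamespace(name="Mix", inputs=[SimpleNamespace(default_value=1.0), SimpleNamespace()], outputs=[])

print(tree_signature({}, [value_node, mix_node], links_hashes=[]))
```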
@@ -216,68 +263,14 @@ def materials_hash(obj, cache, settings):
 
 def modifier_hash(modifier_data, settings):
     node_group = getattr(modifier_data, "node_group", None)
+    hashed_modifier_except_node_tree = generic_fields_hasher_evolved(modifier_data, fields_to_ignore_generic)
+
     if node_group is not None and settings.auto_export.modifiers_in_depth_scan:
-        #print("THIS IS A GEOMETRY NODE")
-        # storage for hashing
-        links_hashes = []
-        nodes_hashes = []
-        modifier_inputs = dict(modifier_data)
-
-        for node in node_group.nodes:
-            #print("node", node, node.type, node.name, node.label)
-            #print("node info", dir(node))
-
-            input_hashes = []
-            for input in node.inputs:
-                #print(" input", input, "label", input.label, "name", input.name)
-                input_hash = f"{getattr(input, 'default_value', None)}"
-                input_hashes.append(input_hash)
-                """if hasattr(input, "default_value"):
-                    print("YOHO", dict(input), input.default_value)"""
-
-            output_hashes = []
-            # IF the node itself is a group input, its outputs are the inputs of the geometry node (yes, not easy)
-            node_in_use = True
-            for (index, output) in enumerate(node.outputs):
-                # print(" output", output, "label", output.label, "name", output.name, "generated name", f"Socket_{index+1}")
-                output_hash = f"{getattr(output, 'default_value', None)}"
-                output_hashes.append(output_hash)
-                """if hasattr(output, "default_value"):
-                    print("YOHO", output.default_value)"""
-                node_in_use = node_in_use and hasattr(output, "default_value")
-            #print("NODE IN USE", node_in_use)
-
-            node_fields_to_ignore = fields_to_ignore_generic + ['internal_links', 'inputs', 'outputs']
-
-            node_hash = f"{generic_fields_hasher(node, node_fields_to_ignore)}_{str(input_hashes)}_{str(output_hashes)}"
-            #print("node hash", node_hash)
-            nodes_hashes.append(node_hash)
-            #print(" ")
-
-        for link in node_group.links:
-            """print("LINK", link) #dir(link)
-            print("FROM", link.from_node, link.from_socket)
-            print("TO", link.to_node, link.to_socket)"""
-
-            from_socket_default = link.from_socket.default_value if hasattr(link.from_socket, "default_value") else None
-            to_socket_default = link.to_socket.default_value if hasattr(link.to_socket, "default_value") else None
-
-            link_hash = f"{link.from_node.name}_{link.from_socket.name}_{from_socket_default}+{link.to_node.name}_{link.to_socket.name}_{to_socket_default}"
-
-            """if hasattr(link.from_socket, "default_value"):
-                print("[FROM SOCKET]", link.from_socket.default_value)
-            if hasattr(link.to_socket, "default_value"):
-                print("[TO SOCKET]", link.to_socket.default_value)"""
-
-            links_hashes.append(link_hash)
-            #print("link_hash", link_hash)
-
-        return f"{str(modifier_inputs)}_{str(nodes_hashes)}_{str(links_hashes)}"
+        print("modifier here")
+        hashed_node_tree = node_tree(node_group)
+        return str(hashed_modifier_except_node_tree) + str(hashed_node_tree)
     else:
-        return generic_fields_hasher(modifier_data, fields_to_ignore_generic)
+        return str(hashed_modifier_except_node_tree)
 
 
 def modifiers_hash(object, settings):
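Both the new `node_tree()` helper and the removed modifier-specific scan guard socket reads the same way: only some socket types carry a `default_value` (geometry and shader sockets do not), so the code falls back to `None`. A standalone sketch of that guard (the socket classes are illustrative, not bpy types):

```python
class ValueSocket:
    default_value = 1.5       # e.g. a float/color input socket

class GeometrySocket:
    pass                      # geometry/shader sockets expose no default_value

def socket_default(socket):
    # same guard as from_socket_default / to_socket_default in the code above
    return socket.default_value if hasattr(socket, "default_value") else None

print(socket_default(ValueSocket()))     # 1.5
print(socket_default(GeometrySocket()))  # None
```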
@@ -0,0 +1,56 @@
+#print("THIS IS A GEOMETRY NODE")
+
+# storage for hashing
+links_hashes = []
+nodes_hashes = []
+modifier_inputs = dict(modifier_data)
+
+for node in node_group.nodes:
+    #print("node", node, node.type, node.name, node.label)
+    #print("node info", dir(node))
+
+    input_hashes = []
+    for input in node.inputs:
+        #print(" input", input, "label", input.label, "name", input.name)
+        input_hash = f"{getattr(input, 'default_value', None)}"
+        input_hashes.append(input_hash)
+        """if hasattr(input, "default_value"):
+            print("YOHO", dict(input), input.default_value)"""
+
+    output_hashes = []
+    # IF the node itself is a group input, its outputs are the inputs of the geometry node (yes, not easy)
+    node_in_use = True
+    for (index, output) in enumerate(node.outputs):
+        # print(" output", output, "label", output.label, "name", output.name, "generated name", f"Socket_{index+1}")
+        output_hash = f"{getattr(output, 'default_value', None)}"
+        output_hashes.append(output_hash)
+        """if hasattr(output, "default_value"):
+            print("YOHO", output.default_value)"""
+        node_in_use = node_in_use and hasattr(output, "default_value")
+    #print("NODE IN USE", node_in_use)
+
+    node_fields_to_ignore = fields_to_ignore_generic + ['internal_links', 'inputs', 'outputs']
+
+    node_hash = f"{generic_fields_hasher(node, node_fields_to_ignore)}_{str(input_hashes)}_{str(output_hashes)}"
+    #print("node hash", node_hash)
+    nodes_hashes.append(node_hash)
+    #print(" ")
+
+for link in node_group.links:
+    """print("LINK", link) #dir(link)
+    print("FROM", link.from_node, link.from_socket)
+    print("TO", link.to_node, link.to_socket)"""
+
+    from_socket_default = link.from_socket.default_value if hasattr(link.from_socket, "default_value") else None
+    to_socket_default = link.to_socket.default_value if hasattr(link.to_socket, "default_value") else None
+    link_hash = f"{link.from_node.name}_{link.from_socket.name}_{from_socket_default}+{link.to_node.name}_{link.to_socket.name}_{to_socket_default}"
+
+    """if hasattr(link.from_socket, "default_value"):
+        print("[FROM SOCKET]", link.from_socket.default_value)
+    if hasattr(link.to_socket, "default_value"):
+        print("[TO SOCKET]", link.to_socket.default_value)"""
+
+    links_hashes.append(link_hash)
+    #print("link_hash", link_hash)
+
+return f"{str(modifier_inputs)}_{str(nodes_hashes)}_{str(links_hashes)}"
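In both the new `node_tree()` helper and this extracted node-group scan, a link contributes a string built from its endpoint node names, socket names and socket defaults, so re-wiring a link or changing a default changes the signature. A standalone sketch of that link string (fake nodes via `SimpleNamespace`; the node/socket names are just example data):

```python
from types import SimpleNamespace

# a fake link between two fake nodes, shaped like a node-tree link
link = SimpleNamespace(
    from_node=SimpleNamespace(name="Value"),
    from_socket=SimpleNamespace(name="Value", default_value=0.5),
    to_node=SimpleNamespace(name="Material Output"),
    to_socket=SimpleNamespace(name="Surface"),   # no default_value
)

from_socket_default = getattr(link.from_socket, "default_value", None)
to_socket_default = getattr(link.to_socket, "default_value", None)
link_hash = f"{link.from_node.name}_{link.from_socket.name}_{from_socket_default}+{link.to_node.name}_{link.to_socket.name}_{to_socket_default}"
print(link_hash)  # Value_Value_0.5+Material Output_Surface_None
```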