Mirror of https://github.com/kaosat-dev/Blender_bevy_components_workflow.git (synced 2024-11-22 20:00:53 +00:00)
feat(Blenvy): modifier deep scan is done & huge cleanups
* finished & cleaned up the modifier deep scan: works well!
* it is now toggleable via a setting
* same for the deep scan for materials
* rewrote the very messy materials scan, with a much improved generic_fields_hasher
* still need to add back an improved material node tree serializer
parent cdab4c50f7
commit c557334b75
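The "much improved generic_fields_hasher" mentioned above (added as generic_fields_hasher_evolved in the diff below) works by walking each value's type MRO to pick a per-type converter, instead of special-casing field names. A minimal standalone sketch of that dispatch idea, using hypothetical converter entries rather than the add-on's real bpy/mathutils types:

import inspect

# hypothetical converters keyed by type; the real table maps bpy / mathutils types
type_lookups = {
    tuple: lambda value: [str(v) for v in value],  # e.g. color / vector-like values
    dict: lambda value: sorted(value.items()),     # e.g. property-group-like values
}

def hash_fields(data, fields_to_ignore):
    field_values = []
    for name in dir(data):
        raw = getattr(data, name, None)
        if name.startswith("__") or name in fields_to_ignore or callable(raw):
            continue
        # walk the MRO so subclasses of a registered type reuse its converter
        converter = next((type_lookups[t] for t in inspect.getmro(type(raw)) if t in type_lookups), None)
        field_values.append(str(converter(raw) if converter else raw))
    return str(field_values)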
@@ -15,7 +15,7 @@ Auto export
 - [x] main/ library scene names
 - [x] paths

-Data storage:
+Data storage for custom properties:
 - for scenes (main scenes)
 - at scene level
 - for blueprints
@@ -23,7 +23,7 @@ Data storage:
 - Note: these should be COPIED to the scene level when exporting, into the temp_scene's properties

 > NOTE: UP until we manage to create a PR for Bevy to directly support the scene level gltf_extras, the auto exporter should automatically create (& remove)
-an additional object with scene_<scene_name>_components to copy that data to
+any additional object with scene_<scene_name>_components to copy that data to

 Assets:
 - blueprint assets should be auto_generated & inserted into the list of assets : these assets are NOT removable by the user
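For the workaround described in the note above, creating and removing such a helper object could look roughly like this (a sketch only, assuming standard bpy object APIs; the exporter's actual implementation is not part of this diff):

import bpy

def add_scene_components_holder(scene):
    # empty object whose custom properties mirror the scene-level ones
    holder = bpy.data.objects.new(f"scene_{scene.name}_components", None)
    scene.collection.objects.link(holder)
    for key in scene.keys():
        if key != '_RNA_UI':
            holder[key] = scene[key]
    return holder

def remove_scene_components_holder(scene, holder):
    scene.collection.objects.unlink(holder)
    bpy.data.objects.remove(holder)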
@@ -103,3 +103,5 @@ General issues:
 - [x] fix auto export workflow
 - [ ] should we write the previous _xxx data only AFTER a sucessfull export only ?
 - [ ] add hashing of modifiers/ geometry nodes in serialize scene
+- [ ] add ability to FORCE export specific blueprints & levels
+- [ ] undo after a save removes any saved "serialized scene" data ? DIG into this
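The change detection referred to in this checklist ultimately comes down to comparing the per-object hashes of the previously serialized scene against the freshly serialized one; an illustrative sketch (not the add-on's actual code):

def diff_serialized_scenes(previous, current):
    # previous / current: dicts mapping object names to serialized hash strings
    changed = [name for name, digest in current.items() if previous.get(name) != digest]
    removed = [name for name in previous if name not in current]
    return changed, removed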
@@ -7,7 +7,7 @@ from .settings_diff import get_setting_changes
 # prepare export by gather the changes to the scenes & settings
 def prepare_and_export():
     print("prepare and export")
-    bpy.context.window_manager.auto_export_tracker.disable_change_detection()
+    #bpy.context.window_manager.auto_export_tracker.disable_change_detection()
     blenvy = bpy.context.window_manager.blenvy
     auto_export_settings = blenvy.auto_export
     if auto_export_settings.auto_export: # only do the actual exporting if auto export is actually enabled
@@ -22,6 +22,6 @@ def prepare_and_export():
     # cleanup
     # TODO: these are likely obsolete
     # reset the list of changes in the tracker
-    bpy.context.window_manager.auto_export_tracker.clear_changes()
+    #bpy.context.window_manager.auto_export_tracker.clear_changes()
     print("AUTO EXPORT DONE")
-    bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=0.1)
+    #bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=0.1)
@@ -1,3 +1,4 @@
+import inspect
 import json
 from mathutils import Color
 import numpy as np
@@ -13,6 +14,71 @@ fields_to_ignore_generic = [
     'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data'
 ]

+
+def generic_fields_hasher(data, fields_to_ignore):
+    all_field_names = dir(data)
+    field_values = [getattr(data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show") and not callable(getattr(data, prop, None)) ]
+    return str(field_values)
+
+def peel_value( value ):
+    try:
+        len( value )
+        return [ peel_value( x ) for x in value ]
+    except TypeError:
+        return value
+
+def _lookup_color(data):
+    return peel_value(data)
+
+def _lookup_array(data):
+    return peel_value(data)
+
+def _lookup_prop_group(data):
+    bla = generic_fields_hasher_evolved(data, fields_to_ignore=fields_to_ignore_generic)
+    print("PROPGROUP", bla)
+    return bla
+
+def _lookup_collection(data):
+    return [generic_fields_hasher_evolved(item, fields_to_ignore=fields_to_ignore_generic) for item in data]
+
+def _lookup_materialLineArt(data):
+    return generic_fields_hasher_evolved(data, fields_to_ignore=fields_to_ignore_generic)
+
+type_lookups = {
+    Color: _lookup_color,#lambda input: print("dsf")',
+    bpy.types.bpy_prop_array: _lookup_array,
+    bpy.types.PropertyGroup: _lookup_prop_group,
+    bpy.types.bpy_prop_collection: _lookup_collection,
+    bpy.types.MaterialLineArt: _lookup_materialLineArt
+}
+
+# TODO: replace the first one with this once if its done
+def generic_fields_hasher_evolved(data, fields_to_ignore):
+    all_field_names = dir(data)
+    field_values = []
+    for field_name in all_field_names:
+        if not field_name.startswith("__") and not field_name in fields_to_ignore and not field_name.startswith("show") and not callable(getattr(data, field_name, None)):
+            raw_value = getattr(data, field_name, None)
+            #print("raw value", raw_value, "type", type(raw_value), isinstance(raw_value, Color), isinstance(raw_value, bpy.types.bpy_prop_array))
+            conversion_lookup = None # type_lookups.get(type(raw_value), None)
+            all_types = inspect.getmro(type(raw_value))
+            for s_type in all_types:
+                if type_lookups.get(s_type, None) is not None:
+                    conversion_lookup = type_lookups[s_type]
+                    break
+
+            field_value = None
+            if conversion_lookup is not None:
+                field_value = conversion_lookup(raw_value)
+                print("field_name",field_name,"conv value", field_value)
+            else:
+                print("field_name",field_name,"raw value", raw_value)
+                field_value = raw_value
+
+            field_values.append(str(field_value))
+
+    return str(field_values)
+
 # possible alternatives https://blender.stackexchange.com/questions/286010/bpy-detect-modified-mesh-data-vertices-edges-loops-or-polygons-for-cachin
 def mesh_hash(obj):
     # this is incomplete, how about edges ?
@@ -62,24 +128,23 @@ def animation_hash(obj):
     return compact_result


-def camera_hash(obj):
-    camera_fields = ["angle", "angle_x", "angle_y", "animation_data", "background_images", "clip_end", "clip_start", "display_size", "dof", "fisheye_fov"]
-    camera_data = obj.data
-    fields_to_ignore= fields_to_ignore_generic
+# TODO : we should also check for custom props on scenes, meshes, materials
+# TODO: also how about our new "assets" custom properties ? those need to be check too
+def custom_properties_hash(obj):
+    custom_properties = {}
+    for property_name in obj.keys():
+        if property_name not in '_RNA_UI' and property_name != 'components_meta':
+            custom_properties[property_name] = obj[property_name]
+    return str(hash(str(custom_properties)))

-    all_field_names = dir(camera_data)
-    fields = [getattr(camera_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+def camera_hash(obj):
+    camera_data = obj.data
     # TODO: the above is not enough, certain fields are left as bpy.data.xx
-    #print("camera", obj, fields)
-    return str(fields)
+    return str(generic_fields_hasher(camera_data, fields_to_ignore_generic))

 def light_hash(obj):
     light_data = obj.data
-    fields_to_ignore = fields_to_ignore_generic
-
-    all_field_names = dir(light_data)
-    fields = [getattr(light_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-    return str(fields)
+    return str(generic_fields_hasher(light_data, fields_to_ignore_generic))

 def bones_hash(bones):
     fields_to_ignore = fields_to_ignore_generic + ['AxisRollFromMatrix', 'MatrixFromAxisRoll', 'evaluate_envelope', 'convert_local_to_pose', 'foreach_get', 'foreach_set', 'get', 'set', 'find', 'items', 'keys', 'values']
@@ -100,25 +165,11 @@ def armature_hash(obj):
     all_field_names = dir(armature_data)

     fields = [getattr(armature_data, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(armature_data, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-    #print("ARMATURE", fields)

     """for bone in armature_data.bones:
        print("bone", bone, bone_hash(bone))"""
     return str(fields)

-def field_value(data):
-    pass
-
-def color(color_data):
-    # print("color", color_data, type(color_data))
-    return str(peel_value(color_data))
-
-def lineart(lineart_data):
-    fields_to_ignore = fields_to_ignore_generic
-
-    all_field_names = dir(lineart_data)
-    fields = [getattr(lineart_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-    return str(fields)
-
 def node_tree(nodetree_data):
     print("SCANNING NODE TREE", nodetree_data)
@@ -126,8 +177,6 @@ def node_tree(nodetree_data):
     output = nodetree_data.get_output_node("ALL")
     print("output", output)
-
-

     fields_to_ignore = fields_to_ignore_generic+ ['contains_tree','get_output_node', 'interface_update', 'override_template_create']
     all_field_names = dir(nodetree_data)
     fields = [getattr(nodetree_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
@@ -135,33 +184,15 @@ def node_tree(nodetree_data):
     # print("node tree", fields)
     return str(fields)

-def peel_value( value ):
-    try:
-        len( value )
-        return [ peel_value( x ) for x in value ]
-    except TypeError:
-        return value
-
-def material_hash(material):
-    fields_to_ignore = fields_to_ignore_generic
-    fields_to_convert = {'diffuse_color': color, 'line_color': color, 'lineart': lineart, 'node_tree': node_tree} # TODO: perhaps use types rather than names
-    all_field_names = dir(material)
-    fields = [getattr(material, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(material, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-
-    type_of = [type(getattr(material, prop, None)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-    names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-    tutu = [t == Color for t in type_of] # bpy.types.MaterialLineArt bpy.types.ShaderNodeTree
-    #print("fields", type_of)
-
-    """for prop in [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]:
-        bla = getattr(material, prop, None)
-        if hasattr(bla, "rna_type"):
-            print("YOLO", prop, bla, peel_value(bla), "type", type(bla), bla.rna_type, bla.rna_type == bpy.types.FloatProperty, type(bla) == bpy.types.bpy_prop_collection)
-            print("types", type(bla) == bpy.types.bpy_prop_collection, type(bla) == bpy.types.FloatColorAttributeValue)"""
-
-    # print("oooooh", material, material.bl_rna.properties.items())
-    return str(fields)#str(hash(str(fields)))
+def material_hash(material, settings):
+    print("material_hash", material)
+    hashed_material = generic_fields_hasher_evolved(material, fields_to_ignore_generic + ['node_tree']) # we want to handle the node tree seperatly
+    print("HASH", hashed_material)
+    """if node_group is not None and settings.auto_export.materials_in_depth_scan:
+        pass
+    else:
+        generic_fields_hasher(material, fields_to_ignore_generic)"""
+    return str(hashed_material)

 # TODO: this is partially taken from export_materials utilities, perhaps we could avoid having to fetch things multiple times ?
 def materials_hash(obj, cache, settings):
@@ -169,60 +200,92 @@ def materials_hash(obj, cache, settings):
     materials = []
     for material_slot in obj.material_slots:
         material = material_slot.material
-        cached_hash = cache['materials'].get(material.name, None)
+        """cached_hash = cache['materials'].get(material.name, None)
         if cached_hash:
-            # print("CACHHHHHED", cached_hash)
             materials.append(cached_hash)
+            print("CAACHED")
         else:
-            mat = material_hash(material)
+            mat = material_hash(material, settings)
+            cache['materials'][material.name] = mat
+            materials.append(mat)"""
+        mat = material_hash(material, settings)
         cache['materials'][material.name] = mat
         materials.append(mat)
-            # print("NOT CACHHH", mat)
     return str(hash(str(materials)))

-# TODO : we should also check for custom props on scenes, meshes, materials
-def custom_properties_hash(obj):
-    custom_properties = {}
-    for property_name in obj.keys():
-        if property_name not in '_RNA_UI' and property_name != 'components_meta':
-            custom_properties[property_name] = obj[property_name]
-    return str(hash(str(custom_properties)))

 def modifier_hash(modifier_data, settings):
-    fields_to_ignore = fields_to_ignore_generic
-    all_field_names = dir(modifier_data)
-    fields = [getattr(modifier_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]

-    filtered_field_names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
-    print("fields", fields, "field names", filtered_field_names)
     node_group = getattr(modifier_data, "node_group", None)
-    if node_group is not None:
-        print("THIS IS A GEOMETRY NODE")
+    if node_group is not None and settings.auto_export.modifiers_in_depth_scan:
+        #print("THIS IS A GEOMETRY NODE")
+
+        # storage for hashing
+        links_hashes = []
+        nodes_hashes = []
+        modifier_inputs = dict(modifier_data)
+
         for node in node_group.nodes:
-            print("node", node)
-            print("node type", node.type)
-            try:
-                print("node value", node.values())
-            except:pass
+            #print("node", node, node.type, node.name, node.label)
+            #print("node info", dir(node))
+            input_hashes = []
             for input in node.inputs:
-                print(" input", input, input.name, input.label)
-                if hasattr(input, "default_value"):
-                    print("YOHO", dict(input), input.default_value)
+                #print(" input", input, "label", input.label, "name", input.name)
+                input_hash = f"{getattr(input, 'default_value', None)}"
+                input_hashes.append(input_hash)
+                """if hasattr(input, "default_value"):
+                    print("YOHO", dict(input), input.default_value)"""
+
+            output_hashes = []
+            # IF the node itself is a group input, its outputs are the inputs of the geometry node (yes, not easy)
+            node_in_use = True
+            for (index, output) in enumerate(node.outputs):
+                # print(" output", output, "label", output.label, "name", output.name, "generated name", f"Socket_{index+1}")
+                output_hash = f"{getattr(output, 'default_value', None)}"
+                output_hashes.append(output_hash)
+                """if hasattr(output, "default_value"):
+                    print("YOHO", output.default_value)"""
+                node_in_use = node_in_use and hasattr(output, "default_value")
+            #print("NODE IN USE", node_in_use)
+
+            node_fields_to_ignore = fields_to_ignore_generic + ['internal_links', 'inputs', 'outputs']
+
+            node_hash = f"{generic_fields_hasher(node, node_fields_to_ignore)}_{str(input_hashes)}_{str(output_hashes)}"
+            #print("node hash", node_hash)
+            nodes_hashes.append(node_hash)
+            #print(" ")
+
+        for link in node_group.links:
+            """print("LINK", link) #dir(link)
+            print("FROM", link.from_node, link.from_socket)
+            print("TO", link.to_node, link.to_socket)"""
+
+            from_socket_default = link.from_socket.default_value if hasattr(link.from_socket, "default_value") else None
+            to_socket_default = link.to_socket.default_value if hasattr(link.to_socket, "default_value") else None
+
+            link_hash = f"{link.from_node.name}_{link.from_socket.name}_{from_socket_default}+{link.to_node.name}_{link.to_socket.name}_{to_socket_default}"
+
+            """if hasattr(link.from_socket, "default_value"):
+                print("[FROM SOCKET]", link.from_socket.default_value)
+            if hasattr(link.to_socket, "default_value"):
+                print("[TO SOCKET]", link.to_socket.default_value)"""
+
+            links_hashes.append(link_hash)
+            #print("link_hash", link_hash)
+
+        return f"{str(modifier_inputs)}_{str(nodes_hashes)}_{str(links_hashes)}"
+    else:
+        return generic_fields_hasher(modifier_data, fields_to_ignore_generic)

-    return str(fields)

 def modifiers_hash(object, settings):
-    print("modifiers", object.modifiers)

     modifiers = []
     for modifier in object.modifiers:
         print("modifier", modifier )# modifier.node_group)
-        try:
-            print("MODIFIER FIEEEEEEELD", modifier.ratio) # apparently this only works for non geometry nodes ??
-        except: pass
         modifiers.append(modifier_hash(modifier, settings))
+        print(" ")
     return str(hash(str(modifiers)))

 def serialize_scene(settings):
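Regarding the "group input" comment in the hunk above: a geometry-nodes modifier stores its exposed input values as ID properties on the modifier itself, while the node tree's Group Input node exposes matching output sockets. A rough sketch of reading both sides (the socket/key naming is version-dependent and assumed here):

import bpy

def geometry_node_inputs(modifier):
    if modifier.type != 'NODES' or modifier.node_group is None:
        return {}
    exposed = dict(modifier)  # per-object values, keyed by socket identifier (assumed, e.g. "Socket_2")
    for node in modifier.node_group.nodes:
        if node.type == 'GROUP_INPUT':
            for output in node.outputs:
                # each output socket of the Group Input node mirrors one exposed modifier input
                print(output.name, exposed.get(output.identifier))
    return exposed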
@@ -34,8 +34,15 @@ class AutoExportSettings(PropertyGroup):

     materials_in_depth_scan : BoolProperty(
         name='In depth scan of materials (could be slow)',
-        description='serializes more details of materials in order to detect changes (slower, but more accurate in detecting changes)',
-        default=False,
+        description='serializes more details of materials in order to detect changes (could be slower, but much more accurate in detecting changes)',
+        default=True,
+        update=save_settings
+    ) # type: ignore
+
+    modifiers_in_depth_scan : BoolProperty(
+        name='In depth scan of modifiers (could be slow)',
+        description='serializes more details of modifiers (particularly geometry nodes) in order to detect changes (could be slower, but much more accurate in detecting changes)',
+        default=True,
         update=save_settings
     ) # type: ignore

@@ -32,8 +32,12 @@ def draw_settings_ui(layout, auto_export_settings):
     section.enabled = controls_enabled
     section.prop(auto_export_settings, "change_detection", text="Use change detection")

-    section.prop(auto_export_settings, "materials_in_depth_scan", text="Detailed materials scan")
+    section = section.box()
+    section.enabled = controls_enabled and auto_export_settings.change_detection
+
+    section.prop(auto_export_settings, "materials_in_depth_scan", text="Detailed materials scan")
+    section.prop(auto_export_settings, "modifiers_in_depth_scan", text="Detailed modifiers scan")

     header, panel = layout.panel("Blueprints", default_closed=False)
     header.label(text="Blueprints")