mirror of https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
synced 2024-11-22 11:50:53 +00:00
feat(Blenvy): improvements to change detection & co
* node trees are now using the same logic as other types
* cleaned up & restructured code accordingly
* added more failure handling for project serialization & diffing
* experimenting with ways to deal with scene renaming (see the sketch below)
* minor tweaks
This commit is contained in:
parent 0b5cdcc4a5
commit 2b6e17a6b7
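The scene-renaming experiment mentioned above boils down to giving every scene a persistent id (the `id_test` StringProperty registered further down) and matching scenes by that id instead of by name when diffing. A minimal sketch of the idea, assuming the `id_test` property from this commit; `match_renamed_scenes` and the `previous_ids` shape are illustrative only, not part of the add-on:

```python
import uuid
import bpy

def ensure_scene_ids():
    # tag every scene with a persistent id that survives renames
    # (relies on the `id_test` StringProperty this commit registers on bpy.types.Scene)
    for scene in bpy.data.scenes:
        if scene.id_test == '':
            scene.id_test = str(uuid.uuid4())

def match_renamed_scenes(previous_ids):
    # previous_ids: {scene_id: scene_name_at_last_save} -- hypothetical snapshot shape
    # returns {old_name: new_name} for scenes renamed between saves, so the differ
    # can keep comparing them instead of treating the rename as a delete + add
    renamed = {}
    for scene in bpy.data.scenes:
        old_name = previous_ids.get(scene.id_test)
        if old_name is not None and old_name != scene.name:
            renamed[old_name] = scene.name
    return renamed
```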
@@ -102,6 +102,8 @@ General issues:
- [ ] add tests for disabled components
- [x] fix auto export workflow
- [ ] should we write the previous _xxx data only AFTER a successful export ?
- [ ] add hashing of modifiers/ geometry nodes in serialize scene
- [x] add hashing of modifiers/ geometry nodes in serialize scene
- [ ] add ability to FORCE export specific blueprints & levels
- [ ] undo after a save removes any saved "serialized scene" data ? DIG into this
- [ ] handle scene renames between saves (breaks diffing)
- [ ] change scene selector to work on actual scenes aka to deal with renamed scenes
@@ -4,21 +4,27 @@ from .serialize_scene import serialize_scene
from blenvy.settings import load_settings, upsert_settings


def bubble_up_changes(object, changes_per_scene):
    if object.parent:
    if object is not None and object.parent:
        changes_per_scene[object.parent.name] = bpy.data.objects[object.parent.name]
        bubble_up_changes(object.parent, changes_per_scene)


import uuid
def serialize_current(settings):
    # sigh... you need to save & reset the frame otherwise it saves the values AT THE CURRENT FRAME WHICH CAN DIFFER ACROSS SCENES
    current_frames = [scene.frame_current for scene in bpy.data.scenes]
    for scene in bpy.data.scenes:
        scene.frame_set(0)
        if scene.id_test == '':
            print("GENERATE ID")
            scene.id_test = str(uuid.uuid4())
        print("SCENE ID", scene.id_test)

    current_scene = bpy.context.window.scene
    bpy.context.window.scene = bpy.data.scenes[0]
    #serialize scene at frame 0
    """with bpy.context.temp_override(scene=bpy.data.scenes[1]):
        bpy.context.scene.frame_set(0)"""

    current = serialize_scene(settings)
    bpy.context.window.scene = current_scene
@@ -32,8 +38,13 @@ def get_changes_per_scene(settings):
    previous = load_settings(".blenvy.project_serialized_previous")
    current = serialize_current(settings)


    # determine changes
    changes_per_scene = project_diff(previous, current)
    changes_per_scene = {}
    try:
        changes_per_scene = project_diff(previous, current, settings)
    except Exception as error:
        print("failed to compare current serialized scenes to previous ones", error)

    # save the current project as previous
    upsert_settings(".blenvy.project_serialized_previous", current, overwrite=True)
@@ -42,46 +53,56 @@ def get_changes_per_scene(settings):
    return changes_per_scene


def project_diff(previous, current):
def project_diff(previous, current, settings):
    """print("previous", previous)
    print("current", current)"""
    if previous is None or current is None:
        return {}
    print("HERE")
    print("Settings", settings, "current", current, "previous", previous)

    changes_per_scene = {}

    # TODO : how do we deal with changed scene names ???
    # possible ? on each save, inject an id into each scene, that cannot be copied over

    print('TEST SCENE', bpy.data.scenes.get("ULTRA LEVEL2"), None)

    for scene in current:
        print("SCENE", scene)
        previous_object_names = list(previous[scene].keys())
        current_object_names = list(current[scene].keys())
        added = list(set(current_object_names) - set(previous_object_names))
        removed = list(set(previous_object_names) - set(current_object_names))

        for obj in added:
            if not scene in changes_per_scene:
                changes_per_scene[scene] = {}
            changes_per_scene[scene][obj] = bpy.data.objects[obj]

        # TODO: how do we deal with this, as we obviously do not have data for removed objects ?
        for obj in removed:
            if not scene in changes_per_scene:
                changes_per_scene[scene] = {}
            changes_per_scene[scene][obj] = None

        for object_name in list(current[scene].keys()): # TODO : exclude directly added/removed objects
            if object_name in previous[scene]:
                current_obj = current[scene][object_name]
                prev_obj = previous[scene][object_name]
                same = str(current_obj) == str(prev_obj)
        if scene in previous: # we can only compare scenes that are in both previous and current data

                if not same:
                    if not scene in changes_per_scene:
                        changes_per_scene[scene] = {}
            previous_object_names = list(previous[scene].keys())
            added = list(set(current_object_names) - set(previous_object_names))
            removed = list(set(previous_object_names) - set(current_object_names))

            for obj in added:
                if not scene in changes_per_scene:
                    changes_per_scene[scene] = {}
                changes_per_scene[scene][obj] = bpy.data.objects[obj] if obj in bpy.data.objects else None

            # TODO: how do we deal with this, as we obviously do not have data for removed objects ?
            for obj in removed:
                if not scene in changes_per_scene:
                    changes_per_scene[scene] = {}
                changes_per_scene[scene][obj] = None

                    changes_per_scene[scene][object_name] = bpy.data.objects[object_name]
                    bubble_up_changes(bpy.data.objects[object_name], changes_per_scene[scene])
                    # now bubble up for instances & parents
            for object_name in list(current[scene].keys()): # TODO : exclude directly added/removed objects
                if object_name in previous[scene]:
                    current_obj = current[scene][object_name]
                    prev_obj = previous[scene][object_name]
                    same = str(current_obj) == str(prev_obj)

                    if not same:
                        if not scene in changes_per_scene:
                            changes_per_scene[scene] = {}

                        target_object = bpy.data.objects[object_name] if object_name in bpy.data.objects else None
                        changes_per_scene[scene][object_name] = target_object
                        bubble_up_changes(target_object, changes_per_scene[scene])
                        # now bubble up for instances & parents
        else:
            print(f"scene {scene} not present in previous data")

    return changes_per_scene
@@ -50,22 +50,75 @@ def _lookup_collection(data):
def _lookup_materialLineArt(data):
    return generic_fields_hasher_evolved(data, fields_to_ignore=fields_to_ignore_generic)

# used for various node trees: shaders, modifiers etc
def node_tree(node_tree):
    print("SCANNING NODE TREE", node_tree)

    # storage for hashing
    links_hashes = []
    nodes_hashes = []
    root_inputs = dict(node_tree) # probably useless for materials, contains settings for certain modifiers

    for node in node_tree.nodes:
        #print("node", node, node.type, node.name, node.label)

        input_hashes = []
        for input in node.inputs:
            #print(" input", input, "label", input.label, "name", input.name, dir(input))
            default_value = getattr(input, 'default_value', None)
            input_hash = f"{convert_field(default_value)}"
            input_hashes.append(input_hash)

        output_hashes = []
        # IF the node itself is a group input, its outputs are the inputs of the geometry node (yes, not easy)
        node_in_use = True
        for (index, output) in enumerate(node.outputs):
            # print(" output", output, "label", output.label, "name", output.name, "generated name", f"Socket_{index+1}")
            default_value = getattr(output, 'default_value', None)
            output_hash = f"{convert_field(default_value)}"
            output_hashes.append(output_hash)

            node_in_use = node_in_use and default_value is not None
        #print("NODE IN USE", node_in_use)

        node_fields_to_ignore = fields_to_ignore_generic + ['internal_links', 'inputs', 'outputs']
        node_hash = f"{generic_fields_hasher_evolved(node, node_fields_to_ignore)}_{str(input_hashes)}_{str(output_hashes)}"
        #print("node hash", node_hash)
        #print("node hash", str(input_hashes))
        nodes_hashes.append(node_hash)

    for link in node_tree.links:
        """print("LINK", link, dir(link))
        print("FROM", link.from_node, link.from_socket)
        print("TO", link.to_node, link.to_socket)"""

        from_socket_default = link.from_socket.default_value if hasattr(link.from_socket, "default_value") else None
        to_socket_default = link.to_socket.default_value if hasattr(link.to_socket, "default_value") else None
        link_hash = f"{link.from_node.name}_{link.from_socket.name}_{from_socket_default}+{link.to_node.name}_{link.to_socket.name}_{to_socket_default}"

        links_hashes.append(link_hash)

    #print("node hashes", nodes_hashes, "links_hashes", links_hashes)
    print("root_inputs", root_inputs)
    return f"{str(root_inputs)}_{str(nodes_hashes)}_{str(links_hashes)}"


type_lookups = {
    Color: _lookup_color,#lambda input: print("dsf")',
    bpy.types.FloatVectorAttribute: _lookup_array2,
    bpy.types.bpy_prop_array: _lookup_array,
    bpy.types.PropertyGroup: _lookup_prop_group,
    bpy.types.bpy_prop_collection: _lookup_collection,
    bpy.types.MaterialLineArt: _lookup_materialLineArt
    bpy.types.MaterialLineArt: _lookup_materialLineArt,
    bpy.types.NodeTree: node_tree,
}
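This is where "node trees are now using the same logic as other types" lands: `bpy.types.NodeTree` simply gets an entry in `type_lookups`, and `convert_field` (below) resolves values through the MRO walk over `inspect.getmro`. A rough sketch of that dispatch, assuming the table above; the `str()` fallback is an assumption, not necessarily what `convert_field` does:

```python
import inspect

def dispatch_field(raw_value, lookups):
    # walk the value's MRO so subclasses (shader node trees, geometry node groups, ...)
    # all resolve to the bpy.types.NodeTree -> node_tree entry, like any other type
    for s_type in inspect.getmro(type(raw_value)):
        handler = lookups.get(s_type)
        if handler is not None:
            return handler(raw_value)
    return str(raw_value)  # assumed fallback: plain string conversion

# e.g. dispatch_field(material.node_tree, type_lookups) or dispatch_field(modifier.node_group, type_lookups)
```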

def convert_field(raw_value, field_name="", scan_node_tree=True):
    # nodes are a special case: # TODO: find out their types & move these to type lookups
    """# nodes are a special case: # TODO: find out their types & move these to type lookups
    if field_name in ["node_tree", "node_group"] and scan_node_tree:
        print("scan node tree")
        print("scan node tree", inspect.getmro(type(raw_value)))
        return node_tree(raw_value)


    """
    conversion_lookup = None # type_lookups.get(type(raw_value), None)
    all_types = inspect.getmro(type(raw_value))
    for s_type in all_types:
@@ -83,6 +136,7 @@ def convert_field(raw_value, field_name="", scan_node_tree=True):

    return field_value

# just a helper, for shorthand
def obj_to_dict(object):
    try:
        return dict(object)
@@ -194,59 +248,6 @@ def armature_hash(obj):
        print("bone", bone, bone_hash(bone))"""
    return str(fields)

# used for various node trees: shaders, modifiers etc
def node_tree(node_tree):
    print("SCANNING NODE TREE", node_tree)

    # storage for hashing
    links_hashes = []
    nodes_hashes = []
    root_inputs = dict(node_tree) # probably useless for materials, contains settings for certain modifiers

    for node in node_tree.nodes:
        #print("node", node, node.type, node.name, node.label)

        input_hashes = []
        for input in node.inputs:
            #print(" input", input, "label", input.label, "name", input.name, dir(input))
            default_value = getattr(input, 'default_value', None)
            input_hash = f"{convert_field(default_value)}"
            input_hashes.append(input_hash)

        output_hashes = []
        # IF the node itself is a group input, its outputs are the inputs of the geometry node (yes, not easy)
        node_in_use = True
        for (index, output) in enumerate(node.outputs):
            # print(" output", output, "label", output.label, "name", output.name, "generated name", f"Socket_{index+1}")
            default_value = getattr(output, 'default_value', None)
            output_hash = f"{convert_field(default_value)}"
            output_hashes.append(output_hash)

            node_in_use = node_in_use and default_value is not None
        #print("NODE IN USE", node_in_use)

        node_fields_to_ignore = fields_to_ignore_generic + ['internal_links', 'inputs', 'outputs']
        node_hash = f"{generic_fields_hasher_evolved(node, node_fields_to_ignore)}_{str(input_hashes)}_{str(output_hashes)}"
        #print("node hash", node_hash)
        #print("node hash", str(input_hashes))
        nodes_hashes.append(node_hash)

    for link in node_tree.links:
        """print("LINK", link, dir(link))
        print("FROM", link.from_node, link.from_socket)
        print("TO", link.to_node, link.to_socket)"""

        from_socket_default = link.from_socket.default_value if hasattr(link.from_socket, "default_value") else None
        to_socket_default = link.to_socket.default_value if hasattr(link.to_socket, "default_value") else None
        link_hash = f"{link.from_node.name}_{link.from_socket.name}_{from_socket_default}+{link.to_node.name}_{link.to_socket.name}_{to_socket_default}"

        links_hashes.append(link_hash)

    #print("node hashes", nodes_hashes, "links_hashes", links_hashes)
    print("root_inputs", root_inputs)
    return f"{str(root_inputs)}_{str(nodes_hashes)}_{str(links_hashes)}"


def material_hash(material, settings):
    scan_node_tree = settings.auto_export.materials_in_depth_scan
    hashed_material_except_node_tree = generic_fields_hasher_evolved(material, fields_to_ignore_generic, scan_node_tree=scan_node_tree)
@@ -290,10 +291,29 @@ def serialize_scene(settings):
    cache = {"materials":{}}
    print("serializing scene")
    data = {}


    # render settings are injected into each scene


    # TODO: only go through scenes actually in our list
    for scene in bpy.data.scenes:
        # ignore temporary scenes
        if scene.name.startswith(TEMPSCENE_PREFIX):
            continue
        data[scene.name] = {}

        custom_properties = custom_properties_hash(scene) if len(scene.keys()) > 0 else None
        eevee_settings = generic_fields_hasher_evolved(scene.eevee, fields_to_ignore=fields_to_ignore_generic) # TODO: ignore most of the fields
        scene_field_hashes = {
            "custom_properties": custom_properties,
            "eevee": eevee_settings
        }
        print("SCENE WORLD", scene.world, dir(scene.eevee))
        #generic_fields_hasher_evolved(scene.eevee, fields_to_ignore=fields_to_ignore_generic)
        data[scene.name]["____scene_settings"] = str(hash(str(scene_field_hashes)))


        for object in scene.objects:
            object = bpy.data.objects[object.name]

@@ -43,6 +43,8 @@ class AutoExportTracker(PropertyGroup):
    def register(cls):
        bpy.types.WindowManager.auto_export_tracker = PointerProperty(type=AutoExportTracker)

        bpy.types.Scene.id_test = StringProperty(default="")

        # setup handlers for updates & saving
        #bpy.app.handlers.save_post.append(cls.save_handler)
        #bpy.app.handlers.depsgraph_update_post.append(cls.deps_update_handler)
@@ -58,6 +60,8 @@ class AutoExportTracker(PropertyGroup):
        except:pass"""
        del bpy.types.WindowManager.auto_export_tracker

        del bpy.types.Scene.id_test

    @classmethod
    def save_handler(cls, scene, depsgraph):
        print("-------------")
@@ -73,6 +77,7 @@ class AutoExportTracker(PropertyGroup):
    @classmethod
    def deps_post_update_handler(cls, scene, depsgraph):
        # print("change detection enabled", cls.change_detection_enabled)
        print("change detected", list(map(lambda x: x.name, list(bpy.data.scenes))))

        """ops = bpy.context.window_manager.operators
        print("last operators", ops)