mirror of
https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
synced 2024-11-22 20:00:53 +00:00
feat(Blenvy:Blender): overhauled & upgraded project serialization & diffing
* now also outputting separate collections hash & materials hash from serialize_project * changed project_diff to do diffing of materials & collections * hooked up output data to export logic * related tweaks & improvements
This commit is contained in:
parent
270202d24f
commit
1059858363
@ -189,7 +189,9 @@ Blender side:
|
|||||||
- [x] disable 'export_hierarchy_full_collections' for all cases: not reliable and redundant
|
- [x] disable 'export_hierarchy_full_collections' for all cases: not reliable and redundant
|
||||||
- [ ] fix systematic material exports despite no changes
|
- [ ] fix systematic material exports despite no changes
|
||||||
- [ ] investigate lack of detection of changes of adding/changing components
|
- [ ] investigate lack of detection of changes of adding/changing components
|
||||||
- [ ] change scene serialization to account for collections ...sigh
|
- [x] change scene serialization to account for collections ...sigh
|
||||||
|
- [x] also add one NOT PER scene for materials, to fix the above issue with materials
|
||||||
|
- [ ] move material caching into hash material
|
||||||
- [ ] also remove ____dummy____.bin when export format is gltf
|
- [ ] also remove ____dummy____.bin when export format is gltf
|
||||||
|
|
||||||
- [ ] fix/cleanup asset information injection (also needed for hot reload)
|
- [ ] fix/cleanup asset information injection (also needed for hot reload)
|
||||||
@ -269,7 +271,7 @@ Bevy Side:
|
|||||||
- [x] account for changes impacting both parent & children (ie "world" and "blueprint3") for example, which leads to a crash as there is a double despawn/respawn, so we need to filter things out
|
- [x] account for changes impacting both parent & children (ie "world" and "blueprint3") for example, which leads to a crash as there is a double despawn/respawn, so we need to filter things out
|
||||||
- [x] if there are many assets/blueprints that have changed at the same time, it causes issues similar to the above, so apply a similar fix
|
- [x] if there are many assets/blueprints that have changed at the same time, it causes issues similar to the above, so apply a similar fix
|
||||||
- [x] also ignore any entities currently spawning (better to lose some information, than cause a crash)
|
- [x] also ignore any entities currently spawning (better to lose some information, than cause a crash)
|
||||||
- [ ] something is off with blueprint level components
|
- [ ] analyse what is off with blueprint level components
|
||||||
- [ ] add the root blueprint itself to the assets either on the blender side or on the bevy side programmatically
|
- [ ] add the root blueprint itself to the assets either on the blender side or on the bevy side programmatically
|
||||||
- [x] for sub blueprint tracking: do not propagate/ deal with parent blueprints if they are not themselves Spawning (ie filter out by "BlueprintSpawning")
|
- [x] for sub blueprint tracking: do not propagate/ deal with parent blueprints if they are not themselves Spawning (ie filter out by "BlueprintSpawning")
|
||||||
- [ ] invalidate despawned entity & parent entities AABB
|
- [ ] invalidate despawned entity & parent entities AABB
|
||||||
|
@ -6,7 +6,7 @@ def is_blueprint_always_export(blueprint):
|
|||||||
return blueprint.collection['always_export'] if 'always_export' in blueprint.collection else False
|
return blueprint.collection['always_export'] if 'always_export' in blueprint.collection else False
|
||||||
|
|
||||||
# this also takes the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
|
# this also takes the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
|
||||||
def get_blueprints_to_export(changes_per_scene, changed_export_parameters, blueprints_data, settings):
|
def get_blueprints_to_export(changes_per_scene, changes_per_collection, changed_export_parameters, blueprints_data, settings):
|
||||||
export_gltf_extension = getattr(settings, "export_gltf_extension", ".glb")
|
export_gltf_extension = getattr(settings, "export_gltf_extension", ".glb")
|
||||||
blueprints_path_full = getattr(settings,"blueprints_path_full", "")
|
blueprints_path_full = getattr(settings,"blueprints_path_full", "")
|
||||||
change_detection = getattr(settings.auto_export, "change_detection")
|
change_detection = getattr(settings.auto_export, "change_detection")
|
||||||
@ -37,8 +37,9 @@ def get_blueprints_to_export(changes_per_scene, changed_export_parameters, bluep
|
|||||||
|
|
||||||
# also deal with blueprints that are always marked as "always_export"
|
# also deal with blueprints that are always marked as "always_export"
|
||||||
blueprints_always_export = [blueprint for blueprint in internal_blueprints if is_blueprint_always_export(blueprint)]
|
blueprints_always_export = [blueprint for blueprint in internal_blueprints if is_blueprint_always_export(blueprint)]
|
||||||
|
changed_blueprints_based_on_changed_collections = [blueprint for blueprint in internal_blueprints if blueprint.collection in changes_per_collection.values()]
|
||||||
|
|
||||||
blueprints_to_export = list(set(changed_blueprints + blueprints_not_on_disk + blueprints_always_export))
|
blueprints_to_export = list(set(changed_blueprints + blueprints_not_on_disk + blueprints_always_export + changed_blueprints_based_on_changed_collections))
|
||||||
|
|
||||||
|
|
||||||
# filter out blueprints that are not marked & deal with the different combine modes
|
# filter out blueprints that are not marked & deal with the different combine modes
|
||||||
|
@ -17,7 +17,7 @@ from ..levels.bevy_scene_components import remove_scene_components, upsert_scene
|
|||||||
|
|
||||||
|
|
||||||
"""this is the main 'central' function for all auto export """
|
"""this is the main 'central' function for all auto export """
|
||||||
def auto_export(changes_per_scene, changed_export_parameters, settings):
|
def auto_export(changes_per_scene, changes_per_collection, changes_per_material, changed_export_parameters, settings):
|
||||||
# have the export parameters (not auto export, just gltf export) have changed: if yes (for example switch from glb to gltf, compression or not, animations or not etc), we need to re-export everything
|
# have the export parameters (not auto export, just gltf export) have changed: if yes (for example switch from glb to gltf, compression or not, animations or not etc), we need to re-export everything
|
||||||
print ("changed_export_parameters", changed_export_parameters)
|
print ("changed_export_parameters", changed_export_parameters)
|
||||||
try:
|
try:
|
||||||
@ -63,15 +63,15 @@ def auto_export(changes_per_scene, changed_export_parameters, settings):
|
|||||||
if do_export_blueprints:
|
if do_export_blueprints:
|
||||||
print("EXPORTING")
|
print("EXPORTING")
|
||||||
# get blueprints/collections infos
|
# get blueprints/collections infos
|
||||||
(blueprints_to_export) = get_blueprints_to_export(changes_per_scene, changed_export_parameters, blueprints_data, settings)
|
(blueprints_to_export) = get_blueprints_to_export(changes_per_scene, changes_per_collection, changed_export_parameters, blueprints_data, settings)
|
||||||
|
|
||||||
# get level/main scenes infos
|
# get level/main scenes infos
|
||||||
(main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, settings)
|
(main_scenes_to_export) = get_levels_to_export(changes_per_scene, changes_per_collection, changed_export_parameters, blueprints_data, settings)
|
||||||
|
|
||||||
# since materials export adds components we need to call this before blueprints are exported
|
# since materials export adds components we need to call this before blueprints are exported
|
||||||
# export materials & inject materials components into relevant objects
|
# export materials & inject materials components into relevant objects
|
||||||
# FIXME: improve change detection, perhaps even add "material changes"
|
# FIXME: improve change detection, perhaps even add "material changes"
|
||||||
if export_materials_library and (changed_export_parameters or len(changes_per_scene.keys()) > 0 ):
|
if export_materials_library and (changed_export_parameters or len(changes_per_material.keys()) > 0 ):
|
||||||
export_materials(blueprints_data.blueprint_names, settings.library_scenes, settings)
|
export_materials(blueprints_data.blueprint_names, settings.library_scenes, settings)
|
||||||
|
|
||||||
# update the list of tracked exports
|
# update the list of tracked exports
|
||||||
|
@ -14,16 +14,19 @@ def prepare_and_export():
|
|||||||
if auto_export_settings.auto_export: # only do the actual exporting if auto export is actually enabled
|
if auto_export_settings.auto_export: # only do the actual exporting if auto export is actually enabled
|
||||||
|
|
||||||
# determine changed objects
|
# determine changed objects
|
||||||
per_scene_changes, project_hash = get_changes_per_scene(settings=blenvy)
|
per_scene_changes, per_collection_changes, per_material_changes, project_hash = get_changes_per_scene(settings=blenvy)
|
||||||
# determine changed parameters
|
# determine changed parameters
|
||||||
setting_changes, current_common_settings, current_export_settings, current_gltf_settings = get_setting_changes()
|
setting_changes, current_common_settings, current_export_settings, current_gltf_settings = get_setting_changes()
|
||||||
print("changes: settings:", setting_changes)
|
print("changes: settings:", setting_changes)
|
||||||
print("changes: scenes:", per_scene_changes)
|
print("changes: scenes:", per_scene_changes)
|
||||||
|
print("changes: collections:", per_collection_changes)
|
||||||
|
print("changes: materials:", per_material_changes)
|
||||||
|
|
||||||
|
|
||||||
print("project_hash", project_hash)
|
print("project_hash", project_hash)
|
||||||
# do the actual export
|
# do the actual export
|
||||||
# blenvy.auto_export.dry_run = 'NO_EXPORT'#'DISABLED'#
|
# blenvy.auto_export.dry_run = 'NO_EXPORT'#'DISABLED'#
|
||||||
auto_export(per_scene_changes, setting_changes, blenvy)
|
auto_export(per_scene_changes, per_collection_changes, per_material_changes, setting_changes, blenvy)
|
||||||
|
|
||||||
# -------------------------------------
|
# -------------------------------------
|
||||||
# now that this point is reached, the export should have run correctly, so we can save all the current state to the "previous one"
|
# now that this point is reached, the export should have run correctly, so we can save all the current state to the "previous one"
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import json
|
import json
|
||||||
import bpy
|
import bpy
|
||||||
from .serialize_scene import serialize_scene
|
from .serialize_project import serialize_project
|
||||||
from blenvy.settings import load_settings, upsert_settings
|
from blenvy.settings import load_settings, upsert_settings
|
||||||
|
|
||||||
def bubble_up_changes(object, changes_per_scene):
|
def bubble_up_changes(object, changes_per_scene):
|
||||||
@ -26,7 +26,7 @@ def serialize_current(settings):
|
|||||||
"""with bpy.context.temp_override(scene=bpy.data.scenes[1]):
|
"""with bpy.context.temp_override(scene=bpy.data.scenes[1]):
|
||||||
bpy.context.scene.frame_set(0)"""
|
bpy.context.scene.frame_set(0)"""
|
||||||
|
|
||||||
current = serialize_scene(settings)
|
current = serialize_project(settings)
|
||||||
bpy.context.window.scene = current_scene
|
bpy.context.window.scene = current_scene
|
||||||
|
|
||||||
# reset previous frames
|
# reset previous frames
|
||||||
@ -41,12 +41,14 @@ def get_changes_per_scene(settings):
|
|||||||
|
|
||||||
# determine changes
|
# determine changes
|
||||||
changes_per_scene = {}
|
changes_per_scene = {}
|
||||||
|
changes_per_collection = {}
|
||||||
|
changes_per_material = {}
|
||||||
try:
|
try:
|
||||||
changes_per_scene = project_diff(previous, current, settings)
|
(changes_per_scene, changes_per_collection, changes_per_material) = project_diff(previous, current, settings)
|
||||||
except Exception as error:
|
except Exception as error:
|
||||||
print("failed to compare current serialized scenes to previous ones", error)
|
print("failed to compare current serialized scenes to previous ones: Error:", error)
|
||||||
|
|
||||||
return changes_per_scene, current
|
return changes_per_scene, changes_per_collection, changes_per_material, current
|
||||||
|
|
||||||
|
|
||||||
def project_diff(previous, current, settings):
|
def project_diff(previous, current, settings):
|
||||||
@ -56,16 +58,19 @@ def project_diff(previous, current, settings):
|
|||||||
return {}
|
return {}
|
||||||
|
|
||||||
changes_per_scene = {}
|
changes_per_scene = {}
|
||||||
|
changes_per_collection = {}
|
||||||
|
changes_per_material = {}
|
||||||
|
|
||||||
# TODO : how do we deal with changed scene names ???
|
# TODO : how do we deal with changed scene names ???
|
||||||
# possible ? on each save, inject an id into each scene, that cannot be copied over
|
# possible ? on each save, inject an id into each scene, that cannot be copied over
|
||||||
|
current_scenes = current["scenes"]
|
||||||
|
previous_scenes = previous["scenes"]
|
||||||
|
for scene in current_scenes:
|
||||||
|
current_object_names =list(current_scenes[scene].keys())
|
||||||
|
|
||||||
for scene in current:
|
if scene in previous_scenes: # we can only compare scenes that are in both previous and current data
|
||||||
current_object_names =list(current[scene].keys())
|
|
||||||
|
|
||||||
if scene in previous: # we can only compare scenes that are in both previous and current data
|
previous_object_names = list(previous_scenes[scene].keys())
|
||||||
|
|
||||||
previous_object_names = list(previous[scene].keys())
|
|
||||||
added = list(set(current_object_names) - set(previous_object_names))
|
added = list(set(current_object_names) - set(previous_object_names))
|
||||||
removed = list(set(previous_object_names) - set(current_object_names))
|
removed = list(set(previous_object_names) - set(current_object_names))
|
||||||
|
|
||||||
@ -80,10 +85,10 @@ def project_diff(previous, current, settings):
|
|||||||
changes_per_scene[scene] = {}
|
changes_per_scene[scene] = {}
|
||||||
changes_per_scene[scene][obj] = None
|
changes_per_scene[scene][obj] = None
|
||||||
|
|
||||||
for object_name in list(current[scene].keys()): # TODO : exclude directly added/removed objects
|
for object_name in list(current_scenes[scene].keys()): # TODO : exclude directly added/removed objects
|
||||||
if object_name in previous[scene]:
|
if object_name in previous_scenes[scene]:
|
||||||
current_obj = current[scene][object_name]
|
current_obj = current_scenes[scene][object_name]
|
||||||
prev_obj = previous[scene][object_name]
|
prev_obj = previous_scenes[scene][object_name]
|
||||||
same = str(current_obj) == str(prev_obj)
|
same = str(current_obj) == str(prev_obj)
|
||||||
|
|
||||||
if not same:
|
if not same:
|
||||||
@ -97,4 +102,35 @@ def project_diff(previous, current, settings):
|
|||||||
else:
|
else:
|
||||||
print(f"scene {scene} not present in previous data")
|
print(f"scene {scene} not present in previous data")
|
||||||
|
|
||||||
return changes_per_scene
|
|
||||||
|
|
||||||
|
current_collections = current["collections"]
|
||||||
|
previous_collections = previous["collections"]
|
||||||
|
|
||||||
|
for collection_name in current_collections:
|
||||||
|
if collection_name in previous_collections:
|
||||||
|
current_collection = current_collections[collection_name]
|
||||||
|
prev_collection = previous_collections[collection_name]
|
||||||
|
same = str(current_collection) == str(prev_collection)
|
||||||
|
|
||||||
|
if not same:
|
||||||
|
#if not collection_name in changes_per_collection:
|
||||||
|
target_collection = bpy.data.collections[collection_name] if collection_name in bpy.data.collections else None
|
||||||
|
changes_per_collection[collection_name] = target_collection
|
||||||
|
|
||||||
|
# process changes to materials
|
||||||
|
current_materials = current["materials"]
|
||||||
|
previous_materials = previous["materials"]
|
||||||
|
|
||||||
|
for material_name in current_materials:
|
||||||
|
if material_name in previous_materials:
|
||||||
|
current_material = current_materials[material_name]
|
||||||
|
prev_material = previous_materials[material_name]
|
||||||
|
same = str(current_material) == str(prev_material)
|
||||||
|
|
||||||
|
if not same:
|
||||||
|
#if not material_name in changes_per_material:
|
||||||
|
target_material = bpy.data.materials[material_name] if material_name in bpy.data.materials else None
|
||||||
|
changes_per_material[material_name] = target_material
|
||||||
|
|
||||||
|
return (changes_per_scene, changes_per_collection, changes_per_material)
|
@ -300,6 +300,7 @@ def materials_hash(obj, cache, settings):
|
|||||||
|
|
||||||
return str(h1_hash(str(materials)))
|
return str(h1_hash(str(materials)))
|
||||||
|
|
||||||
|
|
||||||
def modifier_hash(modifier_data, settings):
|
def modifier_hash(modifier_data, settings):
|
||||||
scan_node_tree = settings.auto_export.modifiers_in_depth_scan
|
scan_node_tree = settings.auto_export.modifiers_in_depth_scan
|
||||||
#print("HASHING MODIFIER", modifier_data.name)
|
#print("HASHING MODIFIER", modifier_data.name)
|
||||||
@ -316,10 +317,12 @@ def modifiers_hash(object, settings):
|
|||||||
#print(" ")
|
#print(" ")
|
||||||
return str(h1_hash(str(modifiers)))
|
return str(h1_hash(str(modifiers)))
|
||||||
|
|
||||||
def serialize_scene(settings):
|
def serialize_project(settings):
|
||||||
cache = {"materials":{}}
|
cache = {"materials":{}}
|
||||||
print("serializing scenes")
|
print("serializing scenes")
|
||||||
data = {}
|
|
||||||
|
|
||||||
|
per_scene = {}
|
||||||
|
|
||||||
|
|
||||||
# render settings are injected into each scene
|
# render settings are injected into each scene
|
||||||
@ -331,7 +334,7 @@ def serialize_scene(settings):
|
|||||||
# ignore temporary scenes
|
# ignore temporary scenes
|
||||||
if scene.name.startswith(TEMPSCENE_PREFIX):
|
if scene.name.startswith(TEMPSCENE_PREFIX):
|
||||||
continue
|
continue
|
||||||
data[scene.name] = {}
|
per_scene[scene.name] = {}
|
||||||
|
|
||||||
custom_properties = custom_properties_hash(scene) if len(scene.keys()) > 0 else None
|
custom_properties = custom_properties_hash(scene) if len(scene.keys()) > 0 else None
|
||||||
eevee_settings = generic_fields_hasher_evolved(scene.eevee, fields_to_ignore=fields_to_ignore_generic) # TODO: ignore most of the fields
|
eevee_settings = generic_fields_hasher_evolved(scene.eevee, fields_to_ignore=fields_to_ignore_generic) # TODO: ignore most of the fields
|
||||||
@ -345,7 +348,7 @@ def serialize_scene(settings):
|
|||||||
#generic_fields_hasher_evolved(scene.eevee, fields_to_ignore=fields_to_ignore_generic)
|
#generic_fields_hasher_evolved(scene.eevee, fields_to_ignore=fields_to_ignore_generic)
|
||||||
# FIXME: how to deal with this cleanly
|
# FIXME: how to deal with this cleanly
|
||||||
print("SCENE CUSTOM PROPS", custom_properties)
|
print("SCENE CUSTOM PROPS", custom_properties)
|
||||||
data[scene.name]["____scene_settings"] = str(h1_hash(str(scene_field_hashes)))
|
per_scene[scene.name]["____scene_settings"] = str(h1_hash(str(scene_field_hashes)))
|
||||||
|
|
||||||
|
|
||||||
for object in scene.objects:
|
for object in scene.objects:
|
||||||
@ -382,13 +385,42 @@ def serialize_scene(settings):
|
|||||||
|
|
||||||
object_field_hashes_filtered = {key: object_field_hashes[key] for key in object_field_hashes.keys() if object_field_hashes[key] is not None}
|
object_field_hashes_filtered = {key: object_field_hashes[key] for key in object_field_hashes.keys() if object_field_hashes[key] is not None}
|
||||||
objectHash = str(h1_hash(str(object_field_hashes_filtered)))
|
objectHash = str(h1_hash(str(object_field_hashes_filtered)))
|
||||||
data[scene.name][object.name] = objectHash
|
per_scene[scene.name][object.name] = objectHash
|
||||||
|
|
||||||
|
per_collection = {}
|
||||||
|
# also hash collections (important to catch component changes per blueprints/collections)
|
||||||
|
collections_in_scene = [collection for collection in bpy.data.collections if scene.user_of_id(collection)]
|
||||||
|
for collection in bpy.data.collections:# collections_in_scene:
|
||||||
|
#loc, rot, scale = bpy.context.object.matrix_world.decompose()
|
||||||
|
#visibility = collection.visible_get()
|
||||||
|
custom_properties = custom_properties_hash(collection) if len(collection.keys()) > 0 else None
|
||||||
|
# parent = collection.parent.name if collection.parent else None
|
||||||
|
#collections = [collection.name for collection in object.users_collection]
|
||||||
|
|
||||||
|
collection_field_hashes = {
|
||||||
|
"name": collection.name,
|
||||||
|
# "visibility": visibility,
|
||||||
|
"custom_properties": custom_properties,
|
||||||
|
#"parent": parent,
|
||||||
|
#"collections": collections,
|
||||||
|
}
|
||||||
|
|
||||||
|
collection_field_hashes_filtered = {key: collection_field_hashes[key] for key in collection_field_hashes.keys() if collection_field_hashes[key] is not None}
|
||||||
|
collectionHash = str(h1_hash(str(collection_field_hashes_filtered)))
|
||||||
|
per_collection[collection.name] = collectionHash
|
||||||
|
|
||||||
|
# and also hash materials to avoid constantly exporting materials libraries, and only
|
||||||
|
# actually this should be similar to change detections for scenes
|
||||||
|
per_material = {}
|
||||||
|
for material in bpy.data.materials:
|
||||||
|
per_material[material.name] = str(h1_hash(material_hash(material, settings)))
|
||||||
|
print("materials_hash", per_material)
|
||||||
|
|
||||||
"""print("data", data)
|
"""print("data", data)
|
||||||
print("")
|
print("")
|
||||||
print("")
|
print("")
|
||||||
print("data json", json.dumps(data))"""
|
print("data json", json.dumps(data))"""
|
||||||
|
|
||||||
return data # json.dumps(data)
|
return {"scenes": per_scene, "collections": per_collection, "materials": per_material}
|
||||||
|
|
||||||
|
|
@ -55,7 +55,7 @@ def should_level_be_exported(scene_name, changed_export_parameters, changes_per_
|
|||||||
)
|
)
|
||||||
|
|
||||||
# this also takes the split/embed mode into account: if a collection instance changes AND embed is active, its container level/world should also be exported
|
# this also takes the split/embed mode into account: if a collection instance changes AND embed is active, its container level/world should also be exported
|
||||||
def get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, settings):
|
def get_levels_to_export(changes_per_scene, changes_per_collection, changed_export_parameters, blueprints_data, settings):
|
||||||
# determine list of main scenes to export
|
# determine list of main scenes to export
|
||||||
# we have more relaxed rules to determine if the main scenes have changed : any change is ok, (allows easier handling of changes, render settings etc)
|
# we have more relaxed rules to determine if the main scenes have changed : any change is ok, (allows easier handling of changes, render settings etc)
|
||||||
main_scenes_to_export = [scene_name for scene_name in settings.main_scenes_names if should_level_be_exported(scene_name, changed_export_parameters, changes_per_scene, blueprints_data, settings)]
|
main_scenes_to_export = [scene_name for scene_name in settings.main_scenes_names if should_level_be_exported(scene_name, changed_export_parameters, changes_per_scene, blueprints_data, settings)]
|
||||||
|
Loading…
Reference in New Issue
Block a user