Compare commits

...

3 Commits

9 changed files with 318 additions and 35 deletions

Binary file not shown.

View File

@ -23,7 +23,6 @@ from ..modules.bevy_scene_components import upsert_scene_components
def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
# have the export parameters (not auto export, just gltf export) have changed: if yes (for example switch from glb to gltf, compression or not, animations or not etc), we need to re-export everything
print ("changed_export_parameters", changed_export_parameters)
try:
# path to the current blend file
file_path = bpy.data.filepath
@ -70,6 +69,9 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
print("main scenes", main_scene_names, "library_scenes", library_scene_names)
print("export_output_folder", export_output_folder)
analysis_experiment(level_scenes, library_scenes)
if export_scene_settings:
# inject/ update scene components
upsert_scene_components(bpy.context.scene, bpy.context.scene.world, main_scene_names)
@ -85,7 +87,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
(collections, collections_to_export, library_collections, collections_per_scene) = get_collections_to_export(changes_per_scene, changed_export_parameters, addon_prefs)
# get level/main scenes infos
(main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, addon_prefs)
(main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, collections, addon_prefs)
# since materials export adds components we need to call this before blueprints are exported
# export materials & inject materials components into relevant objects
@ -153,3 +155,190 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
bpy.context.window_manager.popup_menu(error_message, title="Error", icon='ERROR')
class Blueprint:
    """Lightweight record describing a 'blueprint' collection.

    Populated field-by-field by the analysis code: whether the blueprint is
    local to this file, which scene it came from, its instance objects, the
    names of its contained objects, any nested blueprint names, and the
    backing bpy collection.
    """

    def __init__(self, name):
        self.name = name
        self.local = True
        self.scene = ""  # not sure, could be useful for tracking
        self.instances = []
        self.objects = []
        self.nested_blueprints = []
        self.collection = None  # should we just subclass ?

    def __repr__(self):
        # debug representation; instances/objects render via their list repr
        return 'Name: {} Local: {} Instances: {}, Objects: {}, nested_blueprints: {}'.format(
            self.name, self.local, self.instances, self.objects, self.nested_blueprints
        )

    def __str__(self):
        # same information as __repr__, with the name quoted
        return 'Name: "{}", Local: {}, Instances: {}, Objects: {}, nested_blueprints: {}'.format(
            self.name, self.local, self.instances, self.objects, self.nested_blueprints
        )
# blueprints: any collection with either
# - an instance
# - marked as asset
# - with the "auto_export" flag
# https://blender.stackexchange.com/questions/167878/how-to-get-all-collections-of-the-current-scene
def analysis_experiment(main_scenes, library_scenes):
    """Experimental scan of scenes/collections to classify blueprints.

    Builds (and prints, for debugging):
    - blueprints: collection name -> Blueprint (local & non-local)
    - blueprints_from_objects: reverse lookup, object name -> owning blueprint name
    - blueprint_instances_per_main_scene: main scene name -> instanced blueprint names

    Side effects only (prints); returns nothing. Uses a hard-coded
    `changes_test` fixture at the end to exercise the change-propagation logic.
    """
    export_marked_assets = True

    blueprints = {}
    blueprints_from_objects = {}
    collections = []

    # main scenes
    blueprint_instances_per_main_scene = {}
    internal_collection_instances = {}
    external_collection_instances = {}

    for scene in main_scenes:  # should it only be main scenes ? what about collection instances inside other scenes ?
        print("scene", scene)
        for object in scene.objects:
            print("object", object.name)
            if object.instance_type == 'COLLECTION':
                collection = object.instance_collection
                collection_name = object.instance_collection.name
                print("  from collection:", collection_name)

                collection_from_library = False
                # FIX: the original reused `scene` as this loop variable, clobbering
                # the outer main-scene variable, so blueprint_instances_per_main_scene
                # below was keyed by a library scene name instead of the main scene
                for library_scene in library_scenes:  # should be only in library scenes
                    collection_from_library = library_scene.user_of_id(collection) > 0  # TODO: also check if it is an imported asset
                    if collection_from_library:
                        break

                # library-owned instances are "internal", everything else "external"
                collection_category = internal_collection_instances if collection_from_library else external_collection_instances
                if not collection_name in collection_category.keys():
                    print("ADDING INSTANCE OF", collection_name, "object", object.name, "categ", collection_category)
                    collection_category[collection_name] = []  # .append(collection_name)
                collection_category[collection_name].append(object)

                if not collection_from_library:
                    for property_name in object.keys():
                        print("stuff", property_name)
                    for property_name in collection.keys():
                        print("OTHER", property_name)

                # blueprints[collection_name].instances.append(object)

                # FIXME: this only account for direct instances of blueprints, not for any nested blueprint inside a blueprint
                if scene.name not in blueprint_instances_per_main_scene.keys():
                    blueprint_instances_per_main_scene[scene.name] = []
                blueprint_instances_per_main_scene[scene.name].append(collection_name)

                """# add any indirect ones
                # FIXME: needs to be recursive, either here or above
                for nested_blueprint in blueprints[collection_name].nested_blueprints:
                    if not nested_blueprint in blueprint_instances_per_main_scene[scene.name]:
                        blueprint_instances_per_main_scene[scene.name].append(nested_blueprint)"""

    for collection in bpy.data.collections:
        print("collection", collection, collection.name_full, "users", collection.users)
        collection_from_library = False
        for library_scene in library_scenes:  # should be only in library scenes
            collection_from_library = library_scene.user_of_id(collection) > 0
            if collection_from_library:
                break
        if not collection_from_library:
            continue

        if (
            'AutoExport' in collection and collection['AutoExport'] == True  # get marked collections
            or export_marked_assets and collection.asset_data is not None  # or if you have marked collections as assets you can auto export them too
            or collection.name in list(internal_collection_instances.keys())  # or if the collection has an instance in one of the main scenes
        ):
            blueprint = Blueprint(collection.name)
            blueprint.local = True
            blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION']  # inefficient, double loop
            blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION']  # FIXME: not precise enough, aka "what is a blueprint"
            blueprint.collection = collection
            blueprint.instances = internal_collection_instances[collection.name] if collection.name in internal_collection_instances else []
            blueprints[collection.name] = blueprint

            # now create reverse lookup, so you can find the collection from any of its contained objects
            for object in collection.all_objects:
                blueprints_from_objects[object.name] = collection.name

            # NOTE(review): original indentation was lost; `collections` is assumed
            # to collect only the blueprint collections — confirm against history
            collections.append(collection)

    # add any collection that has an instance in the main scenes, but is not present in any of the scenes (IE NON LOCAL)
    for collection_name in external_collection_instances:
        collection = bpy.data.collections[collection_name]
        blueprint = Blueprint(collection.name)
        blueprint.local = False
        blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION']  # inefficient, double loop
        blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION']  # FIXME: not precise enough, aka "what is a blueprint"
        blueprint.collection = collection
        blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
        blueprints[collection.name] = blueprint

        # now create reverse lookup, so you can find the collection from any of its contained objects
        for object in collection.all_objects:
            blueprints_from_objects[object.name] = collection.name

    # then add any nested collections at root level
    for blueprint_name in list(blueprints.keys()):
        parent_blueprint = blueprints[blueprint_name]
        for nested_blueprint_name in parent_blueprint.nested_blueprints:
            if not nested_blueprint_name in blueprints.keys():
                collection = bpy.data.collections[nested_blueprint_name]
                blueprint = Blueprint(collection.name)
                blueprint.local = parent_blueprint.local
                blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION']  # inefficient, double loop
                blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION']  # FIXME: not precise enough, aka "what is a blueprint"
                blueprint.collection = collection
                blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
                blueprints[collection.name] = blueprint

    blueprints = dict(sorted(blueprints.items()))

    print("BLUEPRINTS")
    for blueprint_name in blueprints:
        print(" ", blueprints[blueprint_name])

    print("BLUEPRINTS LOOKUP")
    print(blueprints_from_objects)

    print("BLUEPRINT INSTANCES PER MAIN SCENE")
    print(blueprint_instances_per_main_scene)

    # hard-coded fixture standing in for real change-tracking data
    changes_test = {'Library': {
        'Blueprint1_mesh': bpy.data.objects['Blueprint1_mesh'],
        'Fox_mesh': bpy.data.objects['Fox_mesh'],
        'External_blueprint2_Cylinder': bpy.data.objects['External_blueprint2_Cylinder']}
    }
    # which main scene has been impacted by this
    # does one of the main scenes contain an INSTANCE of an impacted blueprint
    for scene in main_scenes:
        changed_objects = list(changes_test["Library"].keys())  # just a hack for testing
        changed_blueprints_with_instances_in_scene = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
        print("changed_blueprints_with_instances_in_scene", changed_blueprints_with_instances_in_scene)
        level_needs_export = len(changed_blueprints_with_instances_in_scene) > 0
        if level_needs_export:
            print("level needs export", scene.name)

    for scene in library_scenes:
        changed_objects = list(changes_test[scene.name].keys())
        changed_blueprints = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
        # we only care about local blueprints/collections
        changed_local_blueprints = [blueprint_name for blueprint_name in changed_blueprints if blueprint_name in blueprints.keys() and blueprints[blueprint_name].local]
        print("changed blueprints", changed_local_blueprints)

View File

@ -2,8 +2,32 @@ import bpy
from .export_blueprints import check_if_blueprint_on_disk
from ..helpers.helpers_scenes import (get_scenes, )
def changed_object_in_scene(scene_name, changes_per_scene, collections, collection_instances_combine_mode):
    """Return True if any changed object anywhere is a collection instance
    flagged as changed, False otherwise.

    Used to decide whether a main/level scene needs re-export when collection
    instances are combined (embedded) rather than split out.
    """
    print("BLAAAAAAAAAAAAAAAAAAAAAAAAAAAAH", scene_name, "combo mode", collection_instances_combine_mode, "changes", changes_per_scene, "collections", collections)
    # TODO: IF collection_instances_combine_mode is not 'split' check for each scene if any object in changes_per_scene has an instance in the scene
    # Embed / EmbedExternal
    # NOTE(review): compares against the int 0 ("1 => Embed") while other code
    # uses string modes like 'Split'/'EmbedExternal' — confirm the enum type
    if collection_instances_combine_mode == 0:  # 1 => Embed
        return False
    for scene_name_current in list(bpy.data.scenes.keys()):
        # FIX: scenes with no recorded changes used to raise KeyError on
        # changes_per_scene[scene_name_current]; default to an empty dict
        scene_changes = changes_per_scene.get(scene_name_current, {})
        for object in bpy.data.scenes[scene_name_current].objects:
            print("foo", scene_changes)
            if object.instance_type == 'COLLECTION':
                collection_name = object.instance_collection.name
                if object.name in list(scene_changes.keys()):
                    print("changed object", object.name)
                    return True  # even a single object is enough to flag the scene
    # FIX: explicit fall-through result (was an implicit None, still falsy)
    return False
def get_levels_to_export(changes_per_scene, changed_export_parameters, addon_prefs):
def get_levels_to_export(changes_per_scene, changed_export_parameters, collections, addon_prefs):
print("TOTOOO")
export_change_detection = getattr(addon_prefs, "export_change_detection")
export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
export_models_path = getattr(addon_prefs, "export_models_path")
@ -11,10 +35,8 @@ def get_levels_to_export(changes_per_scene, changed_export_parameters, addon_pre
[main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
# TODO: IF collection_instances_combine_mode is not 'split' check for each scene if any object in changes_per_scene has an instance in the scene
# print("levels export", "export_change_detection", export_change_detection, "changed_export_parameters",changed_export_parameters, "export_models_path", export_models_path, "export_gltf_extension", export_gltf_extension, "changes_per_scene", changes_per_scene)
# determine list of main scenes to export
# we have more relaxed rules to determine if the main scenes have changed : any change is ok, (allows easier handling of changes, render settings etc)
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_models_path, export_gltf_extension)]
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_models_path, export_gltf_extension) or changed_object_in_scene(scene_name, changes_per_scene, collections, collection_instances_combine_mode)]
return (main_scenes_to_export)

View File

@ -209,6 +209,7 @@ def set_active_collection(scene, collection_name):
# find which of the library scenes the given collection stems from
# TODO: does not seem efficient at all ?
# TODO: remove, unused
def get_source_scene(collection_name, library_scenes):
match = None
for scene in library_scenes:

View File

@ -62,24 +62,13 @@ def copy_animation_data(source, target):
markers_per_animation[animation_name][marker.frame] = []
markers_per_animation[animation_name][marker.frame].append(marker.name)
"""if target.animation_data == None:
target.animation_data_create()
target.animation_data.action = source.animation_data.action.copy()"""
# alternative method, using the built-in link animation operator
#
#previous_active_object = bpy.context.view_layer.objects.active
"""bpy.context.view_layer.objects.active = source
bpy.ops.object.select_all(action='DESELECT')
#Transfer data from active object to selected objects
target.select_set(True) """
# best method, using the built-in link animation operator
with bpy.context.temp_override(active_object=source, selected_editable_objects=[target]):
bpy.ops.object.make_links_data(type='ANIMATION')
"""if target.animation_data == None:
target.animation_data_create()
target.animation_data.action = source.animation_data.action.copy()
print("copying animation data for", source.name, target.animation_data)
properties = [p.identifier for p in source.animation_data.bl_rna.properties if not p.is_readonly]
@ -90,6 +79,7 @@ def copy_animation_data(source, target):
# we add an "AnimationInfos" component
target['AnimationInfos'] = f'(animations: {animations_infos})'.replace("'","")
# and animation markers
markers_formated = '{'
for animation in markers_per_animation.keys():
markers_formated += f'"{animation}":'
@ -102,9 +92,6 @@ def copy_animation_data(source, target):
target["AnimationMarkers"] = f'( {markers_formated} )'
def duplicate_object(object, parent, combine_mode, destination_collection, library_collections, legacy_mode, nester=""):
copy = None
if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in library_collections)) ):
@ -127,7 +114,6 @@ def duplicate_object(object, parent, combine_mode, destination_collection, libra
empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_collection))})"
# empty_obj["AnimationMarkers"] = '({"animation_name": {5: "Marker_1"} })'
#'({5: "sdf"})'#.replace('"',"'") #f"({json.dumps(dict(animation_foo))})"
#empty_obj["Assets"] = {"Animations": [], "Materials": [], "Models":[], "Textures":[], "Audio":[], "Other":[]}
@ -151,8 +137,6 @@ def duplicate_object(object, parent, combine_mode, destination_collection, libra
if parent_empty is not None:
copy.parent = parent_empty
"""
# print(nester, "copy", copy)
# do this both for empty replacements & normal copies
if parent is not None:
copy.parent = parent

View File

@ -174,6 +174,15 @@ def materials_hash(obj, cache):
# materials = [material_hash(material_slot.material) if not material_slot.material.name in cache["materials"] else cache["materials"][material_slot.material.name] for material_slot in obj.material_slots]
return str(hash(str(materials)))
def custom_properties_hash(obj):
    """Return a string hash of obj's custom properties.

    Skips Blender's internal '_RNA_UI' entry and the add-on's own
    'components_meta'. Used by change detection to notice custom-property
    edits without storing the full property values.
    """
    custom_properties = {}
    for property_name in obj.keys():
        # FIX: the original used `property_name not in '_RNA_UI'` — a substring
        # test that silently dropped any property whose name is a substring of
        # "_RNA_UI" (e.g. "R", "UI", "_"); compare for equality instead
        if property_name != '_RNA_UI' and property_name != 'components_meta':
            custom_properties[property_name] = obj[property_name]
    return str(hash(str(custom_properties)))
def serialize_scene():
cache = {"materials":{}}
print("serializing scene")
@ -184,16 +193,12 @@ def serialize_scene():
data[scene.name] = {}
for object in scene.objects:
object = bpy.data.objects[object.name]
#print("object", object.name, object.location)
#loc, rot, scale = bpy.context.object.matrix_world.decompose()
transform = str((object.location, object.rotation_euler, object.scale)) #str((object.matrix_world.to_translation(), object.matrix_world.to_euler('XYZ'), object.matrix_world.to_quaternion()))#
visibility = object.visible_get()
#print("object type", object.type)
custom_properties = {}
for K in object.keys():
if K not in '_RNA_UI' and K != 'components_meta':
#print( K , "-" , object[K] )
custom_properties[K] = object[K]
custom_properties = custom_properties_hash(object) if len(object.keys()) > 0 else None
animations = animation_hash(object)
mesh = mesh_hash(object) if object.type == 'MESH' else None
camera = camera_hash(object) if object.type == 'CAMERA' else None
@ -225,5 +230,4 @@ def serialize_scene():
return json.dumps(data)
#loc, rot, scale = bpy.context.object.matrix_world.decompose()

View File

@ -157,6 +157,85 @@ def test_export_change_tracking_custom_properties(setup_data):
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
def test_export_change_tracking_custom_properties_collection_instances_combine_mode_embed(setup_data):
    # Change detection with collection_instances_combine_mode = "Embed":
    # changing a custom property on a *library* object should re-export only the
    # main scene file (World.glb) that embeds it; library files stay untouched.
    root_path = "../../testing/bevy_example"
    assets_root_path = os.path.join(root_path, "assets")
    models_path = os.path.join(assets_root_path, "models")
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # with change detection
    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library'],
        "collection_instances_combine_mode":"Embed"
    }

    # store settings for the auto_export part
    stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_auto_settings.clear()
    stored_auto_settings.write(json.dumps(export_props))

    gltf_settings = {
        "export_animations": False,
        "export_optimize_animation_size": False
    }
    # and store settings for the gltf part
    stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
    stored_gltf_settings.clear()
    stored_gltf_settings.write(json.dumps(gltf_settings))

    # first export: everything gets written out
    auto_export_operator(
        auto_export=True,
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_legacy_mode=False,
        export_materials_library=False
    )

    world_file_path = os.path.join(models_path, "World.glb")
    assert os.path.exists(world_file_path) == True

    models_library_path = os.path.join(models_path, "library")
    model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
    # baseline mtimes for every exported file (library files + World.glb)
    modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))

    # map file stem -> (baseline mtime, index into the mtime lists)
    mapped_files_to_timestamps_and_index = {}
    for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
        file_path = pathlib.Path(file_path).stem
        mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)

    # now add a custom property to a library object (Blueprint1_mesh) & export again
    print("----------------")
    print("library change (custom property)")
    print("----------------")
    bpy.data.objects["Blueprint1_mesh"]["test_property"] = 42

    auto_export_operator(
        auto_export=True,
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_legacy_mode=False,
        export_materials_library=False
    )
    modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
    assert modification_times != modification_times_first
    # only the "world" file should have changed
    world_file_index = mapped_files_to_timestamps_and_index["World"][1]
    other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
    other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
    assert modification_times[world_file_index] != modification_times_first[world_file_index]
    assert other_files_modification_times == other_files_modification_times_first
def test_export_change_tracking_light_properties(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")

View File

@ -12,6 +12,10 @@
- [ ] look into caching for serialize scene
- [ ] replace field name based logic with type base logic
- [ ] to make things easier overall we need a mapping of Blueprints/Collections to
- [ ] their instances
- [ ] their objects/sub collections instances etc
- [ ] remove bulk of tracker related code
- [ ] clean up
- [x] split up change detection in settings to its own panel