feat(auto_export):

* added a "marked" flag to the Blueprint struct
* added correct support for combine modes for blueprints & levels
* added correct support for nested blueprints
* added additional helpers for blueprints
* fleshed out the tests further
* fixed some tests
* updated test data
kaosat.dev 2024-04-23 20:31:35 +02:00
parent 791861c06f
commit 8a3cecf0c9
11 changed files with 187 additions and 65 deletions
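
The "marked" flag added to the Blueprint struct boils down to one rule in blueprints_scan: a local blueprint is marked when its collection carries an AutoExport custom property set to True, or when "export marked assets" is enabled and the collection is marked as an asset (external blueprints are always treated as marked). A minimal sketch of that rule, using a stub in place of bpy.types.Collection rather than the add-on's actual classes:

    class StubCollection(dict):
        """Stand-in for bpy.types.Collection: custom properties via [], plus asset_data."""
        def __init__(self, name, asset_data=None, **custom_props):
            super().__init__(**custom_props)
            self.name = name
            self.asset_data = asset_data  # not None when the collection is marked as an asset

    def is_marked(collection, export_marked_assets):
        # marked if the 'AutoExport' custom property is True,
        # or if marked assets are exported and the collection is an asset
        return ('AutoExport' in collection and collection['AutoExport'] == True) or \
               (export_marked_assets and collection.asset_data is not None)

    print(is_marked(StubCollection("Blueprint1", AutoExport=True), export_marked_assets=False))     # True
    print(is_marked(StubCollection("Blueprint2", asset_data=object()), export_marked_assets=True))  # True
    print(is_marked(StubCollection("Blueprint3"), export_marked_assets=False))                      # False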

View File

@@ -50,7 +50,9 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
    # here we do a bit of workaround by creating an override # TODO: do this at the "UI" level
    export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
    #print('addon_prefs', AutoExportGltfAddonPreferences.__annotations__)#)addon_prefs.__annotations__)
-    if hasattr(addon_prefs, "__annotations__") :
+    if hasattr(addon_prefs, "__annotations__") :
+        print("collection_instances_combine_mode", addon_prefs.collection_instances_combine_mode)
+    """if hasattr(addon_prefs, "__annotations__") :
        tmp = {}
        for k in AutoExportGltfAddonPreferences.__annotations__:
            item = AutoExportGltfAddonPreferences.__annotations__[k]
@@ -62,7 +64,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
            tmp[k] = v
        addon_prefs = SimpleNamespace(**tmp) #copy.deepcopy(addon_prefs)
-        addon_prefs.__annotations__ = tmp
+        addon_prefs.__annotations__ = tmp"""
    addon_prefs.export_blueprints_path = export_blueprints_path
    addon_prefs.export_gltf_extension = gltf_extension
    addon_prefs.export_models_path = export_models_path

View File

@@ -34,21 +34,27 @@ def get_collections_to_export(changes_per_scene, changed_export_parameters, blue
    # FIXME: double check this: why are we combining these two ?
    changed_blueprints += changed_local_blueprints
-    # dealt with the different combine modes
-    if collection_instances_combine_mode == 1: # 0 => split (default) 1 => Embed 2 => Embed external
-        # we check for object specific overrides ...
-        filtered_changed_blueprints = []
-        for blueprint in changed_blueprints:
-            blueprint_instance = blueprints_data.internal_collection_instances.get(blueprint.name, None)
-            if blueprint_instance:
-                combine_mode = blueprint_instance['_combine'] if '_combine' in blueprint_instance else collection_instances_combine_mode # FIXME! yikes, should be "split"
-                print("combine mode", combine_mode)
-                if combine_mode == 0: # we only keep changed blueprints if mode is set to split (aka if a blueprint is merged, do not export ? )
-                    # but wait, what if we have ONE instance of merge and others of split ? then we need to still split !
-                    filtered_changed_blueprints.append(blueprint)
-        changed_blueprints = filtered_changed_blueprints
    blueprints_to_export = list(set(changed_blueprints + blueprints_not_on_disk))
+    # filter out blueprints that are not marked & deal with the different combine modes
+    # we check for blueprint & object specific overrides ...
+    filtered_blueprints = []
+    for blueprint in blueprints_to_export:
+        if blueprint.marked:
+            filtered_blueprints.append(blueprint)
+        else:
+            blueprint_instances = blueprints_data.internal_collection_instances.get(blueprint.name, [])
+            # print("INSTANCES", blueprint_instances, blueprints_data.internal_collection_instances)
+            # marked blueprints that have changed are always exported, regardless of whether they are in use (have instances) or not
+            for blueprint_instance in blueprint_instances:
+                combine_mode = blueprint_instance['_combine'] if '_combine' in blueprint_instance else collection_instances_combine_mode
+                if combine_mode == "Split": # we only keep changed blueprints if mode is set to split for at least one instance (aka if ALL instances of a blueprint are merged, do not export ? )
+                    filtered_blueprints.append(blueprint)
+    blueprints_to_export = list(set(filtered_blueprints))
    # changed/all blueprints to export
    return (blueprints_to_export)
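
In short: a blueprint stays in blueprints_to_export when it is marked, or when at least one of its instances ends up in "Split" mode after applying the per-instance '_combine' override on top of the add-on-level setting. A condensed sketch of that decision, using plain dicts instead of the add-on's Blueprint and Blender objects:

    def should_export_blueprint(blueprint, instances, global_combine_mode):
        # marked blueprints are always exported
        if blueprint.get("marked", False):
            return True
        # otherwise export only if at least one instance resolves to "Split"
        for instance in instances:
            combine_mode = instance.get("_combine", global_combine_mode)
            if combine_mode == "Split":
                return True
        return False

    blueprint = {"name": "Blueprint1", "marked": False}
    instances = [{"_combine": "Embed"}, {}]  # second instance falls back to the global mode
    print(should_export_blueprint(blueprint, instances, "Split"))  # True: one instance splits
    print(should_export_blueprint(blueprint, instances, "Embed"))  # False: everything is embedded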

View File

@@ -5,21 +5,38 @@ from ..helpers.helpers_scenes import (get_scenes, )
# IF collection_instances_combine_mode is not 'split' check for each scene if any object in changes_per_scene has an instance in the scene
def changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode):
    # Embed / EmbedExternal
-    if collection_instances_combine_mode == 0: # 1 => Embed
-        return False
+    """if collection_instances_combine_mode == "Split": # 1 => Embed
+        return False"""
    blueprints_from_objects = blueprints_data.blueprints_from_objects
-    bluprint_instances_in_scene = blueprints_data.blueprint_instances_per_main_scene[scene_name]
+    blueprint_instances_in_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene_name, None)
+    if blueprint_instances_in_scene is not None:
        changed_objects = [object_name for change in changes_per_scene.values() for object_name in change.keys()]
        changed_blueprints = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
-    changed_blueprints_with_instances_in_scene = [bla for bla in changed_blueprints if bla.name in bluprint_instances_in_scene]#[blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects and changed in bluprint_instances_in_scene]
-    level_needs_export = len(changed_blueprints_with_instances_in_scene) > 0
-    print("changed_blueprints", changed_blueprints)
-    print("bluprint_instances_in_scene", bluprint_instances_in_scene, "changed_objects", changed_objects, "changed_blueprints_with_instances_in_scene", changed_blueprints_with_instances_in_scene)
+        changed_blueprints_with_instances_in_scene = [blueprint for blueprint in changed_blueprints if blueprint.name in blueprint_instances_in_scene.keys()]
+        changed_blueprint_instances= [object for blueprint in changed_blueprints_with_instances_in_scene for object in blueprint_instances_in_scene[blueprint.name]]
+        # print("changed_blueprint_instances", changed_blueprint_instances,)
+        level_needs_export = False
+        for blueprint_instance in changed_blueprint_instances:
+            blueprint = blueprints_data.blueprint_name_from_instances[blueprint_instance]
+            combine_mode = blueprint_instance['_combine'] if '_combine' in blueprint_instance else collection_instances_combine_mode
+            #print("COMBINE MODE FOR OBJECT", combine_mode)
+            if combine_mode == 'Embed':
+                level_needs_export = True
+                break
+            elif combine_mode == 'EmbedExternal' and not blueprint.local:
+                level_needs_export = True
+                break
+        # changes => list of changed objects (regardless of wether they have been changed in main scene or in lib scene)
+        # wich of those objects are blueprint instances
+        # we need a list of changed objects that are blueprint instances
        return level_needs_export
+    return False
# this also takes the split/embed mode into account: if a collection instance changes AND embed is active, its container level/world should also be exported
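
The rule above can be summarized independently of Blender data; a rough sketch (simplified tuples instead of instance objects, not the add-on's actual function): a main scene needs re-export when a changed blueprint instance it contains resolves to 'Embed', or to 'EmbedExternal' while the blueprint is not local.

    def level_needs_export(changed_instances, global_combine_mode):
        # changed_instances: list of (per_instance_combine_override_or_None, blueprint_is_local)
        for override, is_local in changed_instances:
            combine_mode = override if override is not None else global_combine_mode
            if combine_mode == 'Embed':
                return True
            if combine_mode == 'EmbedExternal' and not is_local:
                return True
        return False

    print(level_needs_export([(None, True)], 'Embed'))              # True: instance is embedded
    print(level_needs_export([('Split', True)], 'Embed'))           # False: instance overridden to Split
    print(level_needs_export([('EmbedExternal', False)], 'Split'))  # True: external blueprint gets embedded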

View File

@@ -185,13 +185,13 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
        auto_settings_changed = sorted(json.loads(previous_auto_settings.as_string()).items()) != sorted(json.loads(current_auto_settings.as_string()).items()) if current_auto_settings != None else False
        gltf_settings_changed = sorted(json.loads(previous_gltf_settings.as_string()).items()) != sorted(json.loads(current_gltf_settings.as_string()).items()) if current_gltf_settings != None else False
-        """print("auto settings previous", sorted(json.loads(previous_auto_settings.as_string()).items()))
+        print("auto settings previous", sorted(json.loads(previous_auto_settings.as_string()).items()))
        print("auto settings current", sorted(json.loads(current_auto_settings.as_string()).items()))
        print("auto_settings_changed", auto_settings_changed)
        print("gltf settings previous", sorted(json.loads(previous_gltf_settings.as_string()).items()))
        print("gltf settings current", sorted(json.loads(current_gltf_settings.as_string()).items()))
-        print("gltf_settings_changed", gltf_settings_changed)"""
+        print("gltf_settings_changed", gltf_settings_changed)
        changed = auto_settings_changed or gltf_settings_changed
        # now write the current settings to the "previous settings"

View File

@@ -7,6 +7,7 @@ class Blueprint:
    def __init__(self, name):
        self.name = name
        self.local = True
+        self.marked = False # If marked as asset or with auto_export flag, always export if changed
        self.scene = None # Not sure, could be usefull for tracking
        self.instances = []
@@ -48,6 +49,7 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
    blueprints = {}
    blueprints_from_objects = {}
+    blueprint_name_from_instances = {}
    collections = []
    # main scenes
@@ -55,15 +57,21 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
    internal_collection_instances = {}
    external_collection_instances = {}
+    # meh
+    def add_object_to_collection_instances(collection_name, object, internal=True):
+        collection_category = internal_collection_instances if internal else external_collection_instances
+        if not collection_name in collection_category.keys():
+            #print("ADDING INSTANCE OF", collection_name, "object", object.name, "categ", collection_category)
+            collection_category[collection_name] = [] #.append(collection_name)
+        collection_category[collection_name].append(object)
    for scene in main_scenes:# should it only be main scenes ? what about collection instances inside other scenes ?
+        print("scene", scene, scene.name)
        for object in scene.objects:
            #print("object", object.name)
            if object.instance_type == 'COLLECTION':
                collection = object.instance_collection
                collection_name = object.instance_collection.name
                #print(" from collection:", collection_name)
+                print("scene there", scene, scene.name)
                collection_from_library = False
                for library_scene in library_scenes: # should be only in library scenes
@@ -71,11 +79,7 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
                    if collection_from_library:
                        break
-                collection_category = internal_collection_instances if collection_from_library else external_collection_instances
-                if not collection_name in collection_category.keys():
-                    #print("ADDING INSTANCE OF", collection_name, "object", object.name, "categ", collection_category)
-                    collection_category[collection_name] = [] #.append(collection_name)
-                collection_category[collection_name].append(object)
+                add_object_to_collection_instances(collection_name=collection_name, object=object, internal = collection_from_library)
                # experiment with custom properties from assets stored in other blend files
                """if not collection_from_library:
@@ -86,11 +90,14 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
                # blueprints[collection_name].instances.append(object)
+                print("bla", scene.name)
                # FIXME: this only account for direct instances of blueprints, not for any nested blueprint inside a blueprint
                if scene.name not in blueprint_instances_per_main_scene.keys():
-                    blueprint_instances_per_main_scene[scene.name] = []
-                blueprint_instances_per_main_scene[scene.name].append(collection_name)
+                    blueprint_instances_per_main_scene[scene.name] = {}
+                if collection_name not in blueprint_instances_per_main_scene[scene.name].keys():
+                    blueprint_instances_per_main_scene[scene.name][collection_name] = []
+                blueprint_instances_per_main_scene[scene.name][collection_name].append(object)
+                blueprint_name_from_instances[object] = collection_name
                """# add any indirect ones
                # FIXME: needs to be recursive, either here or above
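
The hunk above changes blueprint_instances_per_main_scene from a per-scene list of blueprint names into a per-scene dict mapping blueprint name to its instance objects, and fills the new blueprint_name_from_instances reverse map. A small sketch of the resulting shape (register_instance is a hypothetical helper, and strings stand in for Blender objects):

    blueprint_instances_per_main_scene = {}
    blueprint_name_from_instances = {}

    def register_instance(scene_name, collection_name, instance_object):
        # scene name -> blueprint name -> list of instance objects
        scene_entry = blueprint_instances_per_main_scene.setdefault(scene_name, {})
        scene_entry.setdefault(collection_name, []).append(instance_object)
        # instance object -> blueprint name
        blueprint_name_from_instances[instance_object] = collection_name

    register_instance("World", "Blueprint1", "Blueprint1.instance_a")
    register_instance("World", "Blueprint1", "Blueprint1.instance_b")
    print(blueprint_instances_per_main_scene)                       # {'World': {'Blueprint1': [...]}}
    print(blueprint_name_from_instances["Blueprint1.instance_b"])   # 'Blueprint1'
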
@@ -119,6 +126,7 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
            ):
                blueprint = Blueprint(collection.name)
                blueprint.local = True
+                blueprint.marked = 'AutoExport' in collection and collection['AutoExport'] == True or export_marked_assets and collection.asset_data is not None
                blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
                blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
                blueprint.collection = collection
@@ -126,6 +134,12 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
                blueprint.scene = defined_in_scene
                blueprints[collection.name] = blueprint
+                # add nested collections to internal/external_collection instances
+                # FIXME: inneficient, third loop over all_objects
+                for object in collection.all_objects:
+                    if object.instance_type == 'COLLECTION':
+                        add_object_to_collection_instances(collection_name=object.instance_collection.name, object=object, internal = blueprint.local)
                # now create reverse lookup , so you can find the collection from any of its contained objects
                for object in collection.all_objects:
                    blueprints_from_objects[object.name] = blueprint#collection.name
@@ -133,18 +147,24 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
            #
            collections.append(collection)
-    # add any collection that has an instance in the main scenes, but is not present in any of the scenes (IE NON LOCAL)
+    # add any collection that has an instance in the main scenes, but is not present in any of the scenes (IE NON LOCAL/ EXTERNAL)
    for collection_name in external_collection_instances:
        collection = bpy.data.collections[collection_name]
        blueprint = Blueprint(collection.name)
        blueprint.local = False
+        blueprint.marked = True #external ones are always marked, as they have to have been marked in their original file #'AutoExport' in collection and collection['AutoExport'] == True
        blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
        blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
        blueprint.collection = collection
        blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
        blueprints[collection.name] = blueprint
+        # add nested collections to internal/external_collection instances
+        # FIXME: inneficient, third loop over all_objects
+        """for object in collection.all_objects:
+            if object.instance_type == 'COLLECTION':
+                add_object_to_collection_instances(collection_name=object.instance_collection.name, object=object, internal = blueprint.local)"""
        # now create reverse lookup , so you can find the collection from any of its contained objects
        for object in collection.all_objects:
            blueprints_from_objects[object.name] = blueprint#collection.name
@@ -154,6 +174,7 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
    # TODO: do this recursively
    for blueprint_name in list(blueprints.keys()):
        parent_blueprint = blueprints[blueprint_name]
        for nested_blueprint_name in parent_blueprint.nested_blueprints:
            if not nested_blueprint_name in blueprints.keys():
                collection = bpy.data.collections[nested_blueprint_name]
@@ -166,6 +187,7 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
                blueprint.scene = parent_blueprint.scene if parent_blueprint.local else None
                blueprints[collection.name] = blueprint
                # now create reverse lookup , so you can find the collection from any of its contained objects
                for object in collection.all_objects:
                    blueprints_from_objects[object.name] = blueprint#collection.name
@@ -173,7 +195,7 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
    blueprints = dict(sorted(blueprints.items()))
-    print("BLUEPRINTS")
+    '''print("BLUEPRINTS")
    for blueprint_name in blueprints:
        print("  ", blueprints[blueprint_name])
@@ -181,7 +203,7 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
    print(blueprints_from_objects)"""
    print("BLUEPRINT INSTANCES PER MAIN SCENE")
-    print(blueprint_instances_per_main_scene)
+    print(blueprint_instances_per_main_scene)'''
    """changes_test = {'Library': {
@@ -248,6 +270,8 @@ def blueprints_scan(main_scenes, library_scenes, addon_prefs):
        # not sure about these two
        "internal_collection_instances": internal_collection_instances,
        "external_collection_instances": external_collection_instances,
+        "blueprint_name_from_instances": blueprint_name_from_instances
    }
    return SimpleNamespace(**data)
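
With blueprint_name_from_instances now exported alongside the existing dicts, callers can go from an instance object back to its blueprint and check whether it is internal. An illustrative sketch (plain strings stand in for Blender objects; not taken from the add-on):

    from types import SimpleNamespace

    blueprints_data = SimpleNamespace(
        internal_collection_instances={"Blueprint1": ["instance_a"]},
        external_collection_instances={},
        blueprint_name_from_instances={"instance_a": "Blueprint1"},
    )

    instance = "instance_a"
    blueprint_name = blueprints_data.blueprint_name_from_instances[instance]
    is_internal = blueprint_name in blueprints_data.internal_collection_instances
    print(blueprint_name, is_internal)  # Blueprint1 True
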
@@ -257,7 +281,7 @@ import json
from .object_makers import (make_empty)
def inject_blueprints_list_into_main_scene(scene, blueprints_data):
-    print("injecting assets/blueprints data into scene")
+    # print("injecting assets/blueprints data into scene")
    root_collection = scene.collection
    assets_list = None
    assets_list_name = f"assets_list_{scene.name}_components"
@@ -272,13 +296,11 @@ def inject_blueprints_list_into_main_scene(scene, blueprints_data):
    blueprint_names_for_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene.name, None)
    # find all blueprints used in a scene
    if blueprint_names_for_scene: # what are the blueprints used in this scene, inject those into the assets list component
-        print("blueprint_names_for_scene", blueprint_names_for_scene)
        children_per_blueprint = {}
        for blueprint_name in blueprint_names_for_scene:
            blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
            if blueprint:
                children_per_blueprint[blueprint_name] = blueprint.nested_blueprints
-        print("new logic blueprints list", children_per_blueprint)
        assets_list["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"
def remove_blueprints_list_from_main_scene(scene):

View File

@@ -94,7 +94,7 @@ def copy_animation_data(source, target):
def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, legacy_mode, nester=""):
    copy = None
    internal_blueprint_names = [blueprint.name for blueprint in blueprints_data.internal_blueprints]
-    print("COMBINE MODE", combine_mode)
+    # print("COMBINE MODE", combine_mode)
    if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in internal_blueprint_names)) ):
        #print("creating empty for", object.name, object.instance_collection.name, internal_blueprint_names, combine_mode)
        collection_name = object.instance_collection.name
@@ -114,7 +114,6 @@ def duplicate_object(object, parent, combine_mode, destination_collection, bluep
            blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
            if blueprint:
                children_per_blueprint[blueprint_name] = blueprint.nested_blueprints
-        print("new logic blueprints list", children_per_blueprint)
        empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"
        # we copy custom properties over from our original object to our empty
@@ -123,14 +122,13 @@ def duplicate_object(object, parent, combine_mode, destination_collection, bluep
            empty_obj[component_name] = component_value
        copy = empty_obj
    else:
-        # for objects which are NOT collection instances
+        # for objects which are NOT collection instances or when embeding
        # we create a copy of our object and its children, to leave the original one as it is
        original_name = object.name
        object.name = original_name + "____bak"
        copy = object.copy()
        copy.name = original_name
        destination_collection.objects.link(copy)
        """if object.parent == None:

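The condition in duplicate_object above picks between an empty placeholder (carrying a BlueprintsList custom property) and a full copy that effectively embeds the object. A condensed sketch of that decision, with plain values standing in for Blender objects:

    def export_strategy(is_collection_instance, combine_mode, blueprint_is_internal):
        # collection instances are replaced by a placeholder when they will be exported
        # separately: Split mode, or EmbedExternal for a blueprint we export ourselves
        if is_collection_instance and (
            combine_mode == 'Split'
            or (combine_mode == 'EmbedExternal' and blueprint_is_internal)
        ):
            return "placeholder_empty_with_BlueprintsList"
        # everything else is deep-copied into the destination collection (embedded)
        return "real_copy_embedded"

    print(export_strategy(True, 'Split', True))           # placeholder_empty_with_BlueprintsList
    print(export_strategy(True, 'EmbedExternal', False))  # real_copy_embedded (external stays embedded)
    print(export_strategy(False, 'Split', True))          # real_copy_embedded (not a collection instance)
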
View File

@@ -1 +1 @@
-{"Blueprint6_animated":["Fox"],"Collection 2 1":["Empty_in_sub_collection"],"Blueprint1.001":["Blueprint1_mesh"],"Blueprint3":["Blueprint3_mesh","Blueprint3_mesh"],"b_Spine02_03":["b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09","b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09"],"_rootJoint":["b_Root_00","b_Root_00"],"Cube.001":["Cube.002","Cylinder","Cube.002","Cylinder"],"b_Root_00":["b_Hip_01","b_Hip_01"],"Collection 2":["Blueprint8_animated_no_bones","Collection 2 1","Empty_in_collection","Spot"],"Blueprint6_animated.001":["Fox"],"Collection":["Blueprint1.001","Blueprint4_nested","Blueprint6_animated","Blueprint7_hierarchy","Camera","Cube","Empty","Light","Plane"],"Camera":["Camera Gizmo"],"Blueprint7_hierarchy.001":["Blueprint4_nested.001","Cube.001"],"b_Spine01_02":["b_Spine02_03","b_Spine02_03"],"Light":["Light","DirectionalLight Gizmo"],"Cylinder":["Cylinder.001","Cylinder.001"],"b_Tail01_012":["b_Tail02_013","b_Tail02_013"],"b_RightLeg01_019":["b_RightLeg02_020","b_RightLeg02_020"],"b_LeftFoot01_017":["b_LeftFoot02_018","b_LeftFoot02_018"],"Cube":["Cube"],"b_Hip_01":["b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019","b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019"],"b_LeftForeArm_010":["b_LeftHand_011","b_LeftHand_011"],"Fox":["Fox_mesh","_rootJoint","Fox_mesh","_rootJoint"],"Blueprint8_animated_no_bones":["Cylinder.002"],"Parent_Object":["Cube.003","Blueprint1","Cylinder.001"],"b_RightUpperArm_06":["b_RightForeArm_07","b_RightForeArm_07"],"world":["no_name"],"b_LeftLeg01_015":["b_LeftLeg02_016","b_LeftLeg02_016"],"Spot":["Spot"],"b_RightFoot01_021":["b_RightFoot02_022","b_RightFoot02_022"],"Blueprint7_hierarchy":["Cube.001"],"b_RightLeg02_020":["b_RightFoot01_021","b_RightFoot01_021"],"Blueprint4_nested":["Blueprint3"],"Fox_mesh":["fox1","fox1"],"b_RightForeArm_07":["b_RightHand_08","b_RightHand_08"],"Blueprint3_mesh":["Cylinder","Cylinder"],"Blueprint1_mesh":["Cube.001","Cube.001"],"b_Neck_04":["b_Head_05","b_Head_05"],"b_LeftUpperArm_09":["b_LeftForeArm_010","b_LeftForeArm_010"],"no_name":["Parent_Object","Blueprint6_animated.001","lighting_components_World","assets_list_World_components","Collection","Collection 2"],"Cylinder.002":["Cylinder.003"],"Blueprint4_nested.001":["Blueprint3"],"Blueprint1":["Blueprint1_mesh"],"Cylinder.001":["Cylinder.002","Blueprint7_hierarchy.001","Empty_as_child"],"b_Tail02_013":["b_Tail03_014","b_Tail03_014"],"b_LeftLeg02_016":["b_LeftFoot01_017","b_LeftFoot01_017"],"Plane":["Plane"]}
+{"b_RightForeArm_07":["b_RightHand_08","b_RightHand_08"],"Blueprint1_mesh":["Cube.001","Cube.001"],"b_LeftFoot01_017":["b_LeftFoot02_018","b_LeftFoot02_018"],"Cylinder.001":["Cylinder.002","Blueprint7_hierarchy.001","Empty_as_child"],"Blueprint8_animated_no_bones":["Cylinder.002"],"b_LeftLeg01_015":["b_LeftLeg02_016","b_LeftLeg02_016"],"b_Spine02_03":["b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09","b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09"],"Blueprint4_nested":["Blueprint3"],"Collection 2":["Blueprint8_animated_no_bones","Collection 2 1","Empty_in_collection","Spot"],"b_Spine01_02":["b_Spine02_03","b_Spine02_03"],"b_Neck_04":["b_Head_05","b_Head_05"],"Fox_mesh":["fox1","fox1"],"Blueprint7_hierarchy.001":["Blueprint4_nested.001","Cube.001"],"Collection 2 1":["Empty_in_sub_collection"],"Blueprint3_mesh":["Cylinder","Cylinder"],"Plane":["Plane"],"Blueprint1":["Blueprint1_mesh"],"Cylinder":["Cylinder.001","Cylinder.001"],"b_Tail02_013":["b_Tail03_014","b_Tail03_014"],"b_RightLeg02_020":["b_RightFoot01_021","b_RightFoot01_021"],"Blueprint4_nested.001":["Blueprint3"],"Light":["Light","DirectionalLight Gizmo"],"Blueprint1.001":["Blueprint1_mesh"],"b_Root_00":["b_Hip_01","b_Hip_01"],"Blueprint3":["Blueprint3_mesh","Blueprint3_mesh"],"b_RightLeg01_019":["b_RightLeg02_020","b_RightLeg02_020"],"b_LeftLeg02_016":["b_LeftFoot01_017","b_LeftFoot01_017"],"b_RightUpperArm_06":["b_RightForeArm_07","b_RightForeArm_07"],"Camera":["Camera Gizmo"],"b_LeftUpperArm_09":["b_LeftForeArm_010","b_LeftForeArm_010"],"b_Tail01_012":["b_Tail02_013","b_Tail02_013"],"world":["no_name"],"no_name":["Parent_Object","Blueprint6_animated.001","lighting_components_World","assets_list_World_components","Collection","Collection 2"],"Cube.001":["Cube.002","Cylinder","Cube.002","Cylinder"],"Cylinder.002":["Cylinder.003"],"_rootJoint":["b_Root_00","b_Root_00"],"Spot":["Spot"],"Collection":["Blueprint1.001","Blueprint4_nested","Blueprint6_animated","Blueprint7_hierarchy","Camera","Cube","Empty","External_blueprint","External_blueprint2","Light","Plane"],"Blueprint6_animated":["Fox"],"Cube":["Cube"],"Parent_Object":["Cube.003","Blueprint1","Cylinder.001"],"b_Hip_01":["b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019","b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019"],"b_RightFoot01_021":["b_RightFoot02_022","b_RightFoot02_022"],"Blueprint7_hierarchy":["Cube.001"],"Blueprint6_animated.001":["Fox"],"b_LeftForeArm_010":["b_LeftHand_011","b_LeftHand_011"],"Fox":["Fox_mesh","_rootJoint","Fox_mesh","_rootJoint"]}

Binary file not shown (size changed from 657 KiB to 646 KiB).

View File

@@ -201,15 +201,28 @@ def test_export_change_tracking_custom_properties_collection_instances_combine_m
    assert os.path.exists(world_file_path) == True
    models_library_path = os.path.join(models_path, "library")
-    model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
-    modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
+    blueprint1_file_path = os.path.join(models_library_path, "Blueprint1.glb")
+    assert os.path.exists(blueprint1_file_path) == False
    mapped_files_to_timestamps_and_index = {}
-    for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
+    model_library_file_paths = []
+    all_files_paths = []
+    if os.path.exists(models_library_path):
+        model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
+        all_files_paths = model_library_file_paths + [world_file_path]
+    else:
+        all_files_paths = [world_file_path]
+    modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), all_files_paths))
+    for (index, file_path) in enumerate(all_files_paths):
        file_path = pathlib.Path(file_path).stem
        mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
    # now add a custom property to the cube in the library scene & export again
+    # this should trigger changes in the main scene as well since the mode is embed & this blueprints has an instance in the main scene
    print("----------------")
    print("library change (custom property)")
    print("----------------")
@@ -228,6 +241,10 @@ def test_export_change_tracking_custom_properties_collection_instances_combine_m
    modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
    assert modification_times != modification_times_first
+    # there should not be a "Blueprint1" file
+    assert os.path.exists(blueprint1_file_path) == False
    # only the "world" file should have changed
    world_file_index = mapped_files_to_timestamps_and_index["World"][1]
    other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
@@ -236,6 +253,65 @@ def test_export_change_tracking_custom_properties_collection_instances_combine_m
    assert modification_times[world_file_index] != modification_times_first[world_file_index]
    assert other_files_modification_times == other_files_modification_times_first
+    # reset the comparing
+    modification_times_first = modification_times
+    # now we set the _combine mode of the instance to "split", so auto_export should:
+    # * not take the changes into account in the main scene
+    # * export the blueprint (so file for Blueprint1 will be changed)
+    bpy.data.objects["Blueprint1"]["_combine"] = "Split"
+    # but first do an export so that the changes to _combine are not taken into account
+    auto_export_operator(
+        auto_export=True,
+        direct_mode=True,
+        export_output_folder="./models",
+        export_scene_settings=True,
+        export_blueprints=True,
+        export_legacy_mode=False,
+        export_materials_library=False
+    )
+    modification_times_first = modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
+    print("----------------")
+    print("library change (custom property, forced 'Split' combine mode )")
+    print("----------------")
+    bpy.data.objects["Blueprint1_mesh"]["test_property"] = 151
+    auto_export_operator(
+        auto_export=True,
+        direct_mode=True,
+        export_output_folder="./models",
+        export_scene_settings=True,
+        export_blueprints=True,
+        export_legacy_mode=False,
+        export_materials_library=False
+    )
+    modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
+    assert modification_times != modification_times_first
+    # the "world" file should have changed
+    world_file_index = mapped_files_to_timestamps_and_index["World"][1]
+    # the "Blueprint1" file should now exist
+    assert os.path.exists(blueprint1_file_path) == True
+    # and the "Blueprint1" file too
+    #blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
+    other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
+    other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
+    assert modification_times[world_file_index] != modification_times_first[world_file_index]
+    #assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
+    assert other_files_modification_times == other_files_modification_times_first
def test_export_change_tracking_light_properties(setup_data):
    root_path = "../../testing/bevy_example"
    assets_root_path = os.path.join(root_path, "assets")
@@ -745,7 +821,6 @@ def test_export_various_chained_changes(setup_data):
    print("----------------")
    print("library change (nested blueprint) ")
    print("----------------")
-    bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but ..
    bpy.data.objects["Blueprint3_mesh"].location= [0, 0.1 ,2]

View File

@@ -56,9 +56,12 @@ def test_export_no_parameters(setup_data):
    models_path = os.path.join(assets_root_path, "models")
    auto_export_operator = bpy.ops.export_scenes.auto_gltf
+    # make sure to clear any parameters first
+    stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
+    stored_auto_settings.clear()
+    stored_auto_settings.write(json.dumps({}))
    # first test exporting withouth any parameters set, this should not export anything
    auto_export_operator(
        auto_export=True,
        direct_mode=True,
@@ -160,8 +163,6 @@ def test_export_changed_parameters(setup_data):
    modification_times_no_change = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths))
    assert modification_times_no_change == modification_times_first
    # export again, this time changing the gltf settings
    print("third export, changed gltf parameters")
@@ -188,7 +189,7 @@ def test_export_changed_parameters(setup_data):
    assert modification_times_changed_gltf != modification_times_first
    modification_times_first = modification_times_changed_gltf
-    # now run it again, withouth changes, timestamps should be identical
+    # now run it again, without changes, timestamps should be identical
    auto_export_operator(
        auto_export=True,
@@ -242,11 +243,9 @@ def test_export_changed_parameters(setup_data):
        export_scene_settings=True,
        export_blueprints=True,
        export_legacy_mode=False,
-        export_materials_library=True
+        export_materials_library=False
    )
    modification_times_changed_gltf = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths))
    assert modification_times_changed_gltf == modification_times_first
    modification_times_first = modification_times_changed_gltf

View File

@@ -33,6 +33,9 @@
- [x] internal_collections => replace with "internal_collections" or "local_collections"
- [ ] fix COMBINE MODE passed as int instead of enum value
+    => comes from our custom logic for add_on prefs
+- [ ] double check compares to "None" values
- [x] move all things that alter data "permanently" to pre-save
- [x] lighting/ scene components injection
- [x] blueprintNames ?