refactor(auto_export): ported the internal logic over to the new, more optimised blueprints helpers

* changed functions to use the new blueprint objects & helpers
* removed old, redundant code
* many related cleanups
This commit is contained in:
kaosat.dev 2024-04-22 15:43:31 +02:00
parent 8b3c2e8ff4
commit 9af2cba1cf
12 changed files with 452 additions and 621 deletions

View File

@ -5,6 +5,7 @@ from types import SimpleNamespace
import bpy
import traceback
from .preferences import AutoExportGltfAddonPreferences
from .get_collections_to_export import get_collections_to_export
@ -12,11 +13,13 @@ from .get_levels_to_export import get_levels_to_export
from .get_standard_exporter_settings import get_standard_exporter_settings
from .export_main_scenes import export_main_scene
from .export_blueprints import check_if_blueprint_on_disk, check_if_blueprints_exist, export_blueprints_from_collections
from .export_blueprints import export_blueprints
from ..helpers.helpers_scenes import (get_scenes, )
from ..helpers.helpers_blueprints import blueprints_scan
from ..modules.export_materials import cleanup_materials, export_materials
from ..modules.bevy_scene_components import upsert_scene_components
from ..modules.bevy_scene_components import remove_scene_components, upsert_scene_components
"""this is the main 'central' function for all auto export """
@ -32,7 +35,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
#should we use change detection or not
export_change_detection = getattr(addon_prefs, "export_change_detection")
export_blueprints = getattr(addon_prefs,"export_blueprints")
do_export_blueprints = getattr(addon_prefs,"export_blueprints")
export_output_folder = getattr(addon_prefs,"export_output_folder")
export_models_path = os.path.join(folder_path, export_output_folder)
@ -69,45 +72,46 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
print("main scenes", main_scene_names, "library_scenes", library_scene_names)
print("export_output_folder", export_output_folder)
analysis_experiment(level_scenes, library_scenes)
blueprints_data = blueprints_scan(level_scenes, library_scenes, addon_prefs)
blueprints_per_scene = blueprints_data.blueprints_per_scenes
internal_blueprints = [blueprint.name for blueprint in blueprints_data.internal_blueprints]
external_blueprints = [blueprint.name for blueprint in blueprints_data.external_blueprints]
if export_scene_settings:
# inject/ update scene components
upsert_scene_components(bpy.context.scene, bpy.context.scene.world, main_scene_names)
upsert_scene_components(level_scenes)
#inject/ update light shadow information
for light in bpy.data.lights:
enabled = 'true' if light.use_shadow else 'false'
light['BlenderLightShadows'] = f"(enabled: {enabled}, buffer_bias: {light.shadow_buffer_bias})"
# export
if export_blueprints:
if do_export_blueprints:
print("EXPORTING")
# get blueprints/collections infos
(collections, collections_to_export, library_collections, collections_per_scene) = get_collections_to_export(changes_per_scene, changed_export_parameters, addon_prefs)
(blueprints_to_export) = get_collections_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs)
# get level/main scenes infos
(main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, collections, addon_prefs)
(main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs)
# since materials export adds components we need to call this before blueprints are exported
# export materials & inject materials components into relevant objects
if export_materials_library:
export_materials(collections, library_scenes, folder_path, addon_prefs)
export_materials(blueprints_data.blueprint_names, library_scenes, folder_path, addon_prefs)
# update the list of tracked exports
exports_total = len(collections_to_export) + len(main_scenes_to_export) + (1 if export_materials_library else 0)
exports_total = len(blueprints_to_export) + len(main_scenes_to_export) + (1 if export_materials_library else 0)
bpy.context.window_manager.auto_export_tracker.exports_total = exports_total
bpy.context.window_manager.auto_export_tracker.exports_count = exports_total
print("-------------------------------")
#print("collections: all:", collections)
#print("collections: changed:", changed_collections)
#print("collections: not found on disk:", collections_not_on_disk)
print("collections: in library:", library_collections)
print("collections: to export:", collections_to_export)
print("collections: per_scene:", collections_per_scene)
print("BLUEPRINTS: local/internal:", internal_blueprints)
print("BLUEPRINTS: external:", external_blueprints)
print("BLUEPRINTS: per_scene:", blueprints_per_scene)
print("-------------------------------")
print("BLUEPRINTS: to export:", collections_to_export)
print("BLUEPRINTS: to export:", [blueprint.name for blueprint in blueprints_to_export])
print("-------------------------------")
print("MAIN SCENES: to export:", main_scenes_to_export)
print("-------------------------------")
@ -121,17 +125,17 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
print("export MAIN scenes")
for scene_name in main_scenes_to_export:
print(" exporting scene:", scene_name)
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, library_collections)
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, blueprints_data)
# now deal with blueprints/collections
do_export_library_scene = not export_change_detection or changed_export_parameters or len(collections_to_export) > 0
do_export_library_scene = not export_change_detection or changed_export_parameters or len(blueprints_to_export) > 0
if do_export_library_scene:
print("export LIBRARY")
# we only want to go through the library scenes where our collections to export are present
for (scene_name, collections_to_export) in collections_per_scene.items():
print(" exporting collections from scene:", scene_name)
print(" collections to export", collections_to_export)
export_blueprints_from_collections(collections_to_export, folder_path, addon_prefs, collections)
# we only want to go through the library scenes where our blueprints to export are present
"""for (scene_name, blueprints_to_export) in blueprints_per_scene.items():
print(" exporting blueprints from scene:", scene_name)
print(" blueprints to export", blueprints_to_export)"""
export_blueprints(blueprints_to_export, folder_path, addon_prefs, blueprints_data)
# reset current scene from backup
bpy.context.window.scene = old_current_scene
@ -140,12 +144,14 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
for obj in old_selections:
obj.select_set(True)
if export_materials_library:
cleanup_materials(collections, library_scenes)
cleanup_materials(blueprints_data.blueprint_names, library_scenes)
else:
for scene_name in main_scene_names:
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, [])
except Exception as error:
print(traceback.format_exc())
@ -154,191 +160,11 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
bpy.context.window_manager.popup_menu(error_message, title="Error", icon='ERROR')
finally:
# FIXME: error handling ? also redundant
[main_scene_names, main_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
if export_scene_settings:
# inject/ update scene components
remove_scene_components(main_scenes)
class Blueprint:
    """In-memory description of a blueprint: a collection that can be exported
    on its own, plus the instances of it found in the scenes."""

    def __init__(self, name):
        # identity
        self.name = name
        self.local = True
        self.scene = "" # scene of origin; not sure yet, could be useful for tracking
        # contents & usage (filled in by the scanning pass)
        self.instances = []
        self.objects = []
        self.nested_blueprints = []
        self.collection = None # backing collection; should we just subclass instead?

    def __repr__(self):
        summary = f'Name: {self.name} Local: {self.local} Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'
        return summary

    def __str__(self):
        summary = f'Name: "{self.name}", Local: {self.local}, Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'
        return summary
# blueprints: any collection with either
# - an instance
# - marked as asset
# - with the "auto_export" flag
# https://blender.stackexchange.com/questions/167878/how-to-get-all-collections-of-the-current-scene
def analysis_experiment(main_scenes, library_scenes):
    """Experimental scan: find every blueprint (exportable collection) and where it is used.

    Walks the main scenes for collection instances and classifies them as
    internal (their collection lives in a library scene) or external, builds
    Blueprint entries for marked/asset/instanced collections, then runs a mock
    change-detection pass against hard-coded test objects.
    NOTE(review): debug/experiment code — prints heavily and relies on fixture
    object names being present in the file.
    """
    export_marked_assets = True

    blueprints = {}              # blueprint name -> Blueprint
    blueprints_from_objects = {} # object name -> owning blueprint/collection name (reverse lookup)
    collections = []             # collections identified as local blueprints

    # main scenes
    blueprint_instances_per_main_scene = {}
    internal_collection_instances = {}  # collection name -> instancing objects (collection defined in a library scene)
    external_collection_instances = {}  # collection name -> instancing objects (collection comes from elsewhere)

    for scene in main_scenes:# should it only be main scenes ? what about collection instances inside other scenes ?
        print("scene", scene)
        for object in scene.objects:
            print("object", object.name)
            if object.instance_type == 'COLLECTION':
                collection = object.instance_collection
                collection_name = object.instance_collection.name
                print(" from collection:", collection_name)

                # FIX: use a distinct loop variable here — the previous code reused `scene`,
                # clobbering the main scene being iterated, so the instances below were
                # recorded under a library scene's name instead of the main scene's
                collection_from_library = False
                for library_scene in library_scenes: # should be only in library scenes
                    collection_from_library = library_scene.user_of_id(collection) > 0 # TODO: also check if it is an imported asset
                    if collection_from_library:
                        break

                collection_category = internal_collection_instances if collection_from_library else external_collection_instances
                if not collection_name in collection_category.keys():
                    print("ADDING INSTANCE OF", collection_name, "object", object.name, "categ", collection_category)
                    collection_category[collection_name] = [] #.append(collection_name)
                collection_category[collection_name].append(object)

                if not collection_from_library:
                    for property_name in object.keys():
                        print("stuff", property_name)
                    for property_name in collection.keys():
                        print("OTHER", property_name)

                # blueprints[collection_name].instances.append(object)
                # FIXME: this only account for direct instances of blueprints, not for any nested blueprint inside a blueprint
                if scene.name not in blueprint_instances_per_main_scene.keys():
                    blueprint_instances_per_main_scene[scene.name] = []
                blueprint_instances_per_main_scene[scene.name].append(collection_name)
                """# add any indirect ones
                # FIXME: needs to be recursive, either here or above
                for nested_blueprint in blueprints[collection_name].nested_blueprints:
                    if not nested_blueprint in blueprint_instances_per_main_scene[scene.name]:
                        blueprint_instances_per_main_scene[scene.name].append(nested_blueprint)"""

    # gather local blueprints: collections living in a library scene that are marked,
    # flagged as assets, or instanced in one of the main scenes
    for collection in bpy.data.collections:
        print("collection", collection, collection.name_full, "users", collection.users)
        collection_from_library = False
        for library_scene in library_scenes: # should be only in library scenes
            collection_from_library = library_scene.user_of_id(collection) > 0
            if collection_from_library:
                break
        if not collection_from_library:
            continue

        if (
            'AutoExport' in collection and collection['AutoExport'] == True # get marked collections
            or export_marked_assets and collection.asset_data is not None # or if you have marked collections as assets you can auto export them too
            or collection.name in list(internal_collection_instances.keys()) # or if the collection has an instance in one of the main scenes
        ):
            blueprint = Blueprint(collection.name)
            blueprint.local = True
            blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
            blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
            blueprint.collection = collection
            blueprint.instances = internal_collection_instances[collection.name] if collection.name in internal_collection_instances else []
            blueprints[collection.name] = blueprint

            # now create reverse lookup , so you can find the collection from any of its contained objects
            for object in collection.all_objects:
                blueprints_from_objects[object.name] = collection.name

            collections.append(collection)

    # add any collection that has an instance in the main scenes, but is not present in any of the scenes (IE NON LOCAL)
    for collection_name in external_collection_instances:
        collection = bpy.data.collections[collection_name]
        blueprint = Blueprint(collection.name)
        blueprint.local = False
        blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
        blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
        blueprint.collection = collection
        blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
        blueprints[collection.name] = blueprint

        # now create reverse lookup , so you can find the collection from any of its contained objects
        for object in collection.all_objects:
            blueprints_from_objects[object.name] = collection.name

    # then add any nested collections at root level, so nested blueprints without a direct
    # instance in a main scene are still tracked
    for blueprint_name in list(blueprints.keys()):
        parent_blueprint = blueprints[blueprint_name]
        for nested_blueprint_name in parent_blueprint.nested_blueprints:
            if not nested_blueprint_name in blueprints.keys():
                collection = bpy.data.collections[nested_blueprint_name]
                blueprint = Blueprint(collection.name)
                blueprint.local = parent_blueprint.local
                blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
                blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
                blueprint.collection = collection
                blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
                blueprints[collection.name] = blueprint

    blueprints = dict(sorted(blueprints.items()))
    print("BLUEPRINTS")
    for blueprint_name in blueprints:
        print(" ", blueprints[blueprint_name])
    print("BLUEPRINTS LOOKUP")
    print(blueprints_from_objects)
    print("BLUEPRINT INSTANCES PER MAIN SCENE")
    print(blueprint_instances_per_main_scene)

    # hard-coded fixture standing in for real change-detection data
    # TODO: replace with the actual tracker output once this graduates from an experiment
    changes_test = {'Library': {
        'Blueprint1_mesh': bpy.data.objects['Blueprint1_mesh'],
        'Fox_mesh': bpy.data.objects['Fox_mesh'],
        'External_blueprint2_Cylinder': bpy.data.objects['External_blueprint2_Cylinder']}
    }
    # which main scene has been impacted by this
    # does one of the main scenes contain an INSTANCE of an impacted blueprint
    for scene in main_scenes:
        changed_objects = list(changes_test["Library"].keys()) # just a hack for testing
        #bluprint_instances_in_scene = blueprint_instances_per_main_scene[scene.name]
        #print("instances per scene", bluprint_instances_in_scene, "changed_objects", changed_objects)
        changed_blueprints_with_instances_in_scene = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
        print("changed_blueprints_with_instances_in_scene", changed_blueprints_with_instances_in_scene)
        level_needs_export = len(changed_blueprints_with_instances_in_scene) > 0
        if level_needs_export:
            print("level needs export", scene.name)

    for scene in library_scenes:
        changed_objects = list(changes_test[scene.name].keys())
        changed_blueprints = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
        # we only care about local blueprints/collections
        changed_local_blueprints = [blueprint_name for blueprint_name in changed_blueprints if blueprint_name in blueprints.keys() and blueprints[blueprint_name].local]
        print("changed blueprints", changed_local_blueprints)

View File

@ -6,77 +6,39 @@ from ..helpers.generate_and_export import generate_and_export
from .export_gltf import (generate_gltf_export_preferences)
from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into
# export collections: all the collections that have an instance in the main scene AND any marked collections, even if they do not have instances
def export_collections(collections, folder_path, addon_prefs, gltf_export_preferences, library_collections):
    """Export each named collection to its own glTF file under folder_path.

    collections: iterable of collection names to export
    folder_path: destination directory; each file is named after its collection
    addon_prefs: add-on preferences (reads "export_materials_library")
    gltf_export_preferences: base glTF export settings, extended per collection
    library_collections: forwarded to copy_hollowed_collection_into
    """
    # save current active collection
    active_collection = bpy.context.view_layer.active_layer_collection
    export_materials_library = getattr(addon_prefs,"export_materials_library")

    for collection_name in collections:
        print("exporting collection", collection_name)
        gltf_output_path = os.path.join(folder_path, collection_name)
        # restrict the export to the temp scene's active collection (and nested children)
        export_settings = { **gltf_export_preferences, 'use_active_scene': True, 'use_active_collection': True, 'use_active_collection_with_nested':True}
        # if we are using the material library option, do not export materials, use placeholder instead
        if export_materials_library:
            export_settings['export_materials'] = 'PLACEHOLDER'
        collection = bpy.data.collections[collection_name]
        # the lambdas close over `collection` late, but generate_and_export invokes them
        # within this iteration, so each one sees the current collection
        generate_and_export(
            addon_prefs,
            temp_scene_name=TEMPSCENE_PREFIX+collection.name,
            export_settings=export_settings,
            gltf_output_path=gltf_output_path,
            tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(collection, temp_collection, library_collections=library_collections, addon_prefs=addon_prefs),
            tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=collection, temp_scene=temp_scene, **params)
        )
    # reset active collection to the one we save before
    bpy.context.view_layer.active_layer_collection = active_collection
def export_blueprints(blueprints, folder_path, addon_prefs, blueprints_data):
    """Export every given blueprint's collection to its own glTF file.

    blueprints: iterable of Blueprint objects (their .name is the collection name)
    folder_path: project root; the actual target is derived from the
        "export_output_folder" and "export_blueprints_path" preferences
    addon_prefs: add-on preferences
    blueprints_data: scan result forwarded to copy_hollowed_collection_into

    Raises: re-raises any exception from the export after logging it.
    """
    export_output_folder = getattr(addon_prefs,"export_output_folder")
    gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
    export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path

    # save current active collection so it can be restored even if an export fails
    active_collection = bpy.context.view_layer.active_layer_collection
    export_materials_library = getattr(addon_prefs,"export_materials_library")
    try:
        for blueprint in blueprints:
            print("exporting collection", blueprint.name)
            gltf_output_path = os.path.join(export_blueprints_path, blueprint.name)
            # restrict the export to the temp scene's active collection (and nested children)
            export_settings = { **gltf_export_preferences, 'use_active_scene': True, 'use_active_collection': True, 'use_active_collection_with_nested':True}
            # if we are using the material library option, do not export materials, use placeholder instead
            if export_materials_library:
                export_settings['export_materials'] = 'PLACEHOLDER'
            collection = bpy.data.collections[blueprint.name]
            # the lambdas close over `collection` late, but generate_and_export invokes
            # them within this iteration, so each one sees the current collection
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX+collection.name,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(collection, temp_collection, blueprints_data=blueprints_data, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=collection, temp_scene=temp_scene, **params)
            )
    except Exception as error:
        print("failed to export collections to gltf: ", error)
        # bare raise preserves the original traceback (raise error would rewrite it)
        raise
    finally:
        # reset active collection to the one we saved before, even on failure
        bpy.context.view_layer.active_layer_collection = active_collection
# TODO : add a flag to also search of deeply nested components
def get_nested_components(object):
    """Return (object, custom_properties) pairs for every object inside the
    collection instanced by *object* that carries custom properties.

    Non-collection-instance objects yield an empty list.
    """
    if object.instance_type != 'COLLECTION':
        return []
    instanced = bpy.data.collections[object.instance_collection.name]
    found = []
    for nested_object in instanced.all_objects:
        props = dict(nested_object)
        if props:
            found.append((nested_object, props))
    return found
#for collection in traverse_tree(collection):
# for object in collection.all_objects
def check_if_blueprints_exist(collections, folder_path, extension):
    """Return the subset of collection names whose exported file
    (folder_path/<name><extension>) is missing on disk."""
    def _missing(name):
        target = os.path.join(folder_path, name + extension)
        return not (os.path.exists(target) and os.path.isfile(target))
    return [name for name in collections if _missing(name)]
def check_if_blueprint_on_disk(scene_name, folder_path, extension):
    """True if an exported file named <scene_name><extension> exists in folder_path."""
    target = os.path.join(folder_path, scene_name + extension)
    on_disk = os.path.isfile(target) and os.path.exists(target)
    print("level", scene_name, "found", on_disk, "path", target)
    return on_disk

View File

@ -5,15 +5,15 @@ from ..constants import TEMPSCENE_PREFIX
from ..helpers.generate_and_export import generate_and_export
from .export_gltf import (generate_gltf_export_preferences, export_gltf)
from ..modules.bevy_dynamic import is_object_dynamic, is_object_static
from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into, inject_blueprints_list_into_main_scene, remove_blueprints_list_from_main_scene
from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into
from ..helpers.helpers_blueprints import inject_blueprints_list_into_main_scene, remove_blueprints_list_from_main_scene
# export all main scenes
def export_main_scenes(scenes, folder_path, addon_prefs):
for scene in scenes:
export_main_scene(scene, folder_path, addon_prefs)
def export_main_scene(scene, folder_path, addon_prefs, library_collections):
def export_main_scene(scene, folder_path, addon_prefs, blueprints_data):
gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
export_output_folder = getattr(addon_prefs,"export_output_folder")
export_blueprints = getattr(addon_prefs,"export_blueprints")
@ -32,7 +32,7 @@ def export_main_scene(scene, folder_path, addon_prefs, library_collections):
if export_blueprints :
if not legacy_mode:
inject_blueprints_list_into_main_scene(scene)
inject_blueprints_list_into_main_scene(scene, blueprints_data)
if export_separate_dynamic_and_static_objects:
#print("SPLIT STATIC AND DYNAMIC")
@ -42,7 +42,7 @@ def export_main_scene(scene, folder_path, addon_prefs, library_collections):
temp_scene_name=TEMPSCENE_PREFIX,
export_settings=export_settings,
gltf_output_path=gltf_output_path,
tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, library_collections=library_collections, filter=is_object_static, addon_prefs=addon_prefs),
tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, filter=is_object_static, addon_prefs=addon_prefs),
tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
)
@ -53,7 +53,7 @@ def export_main_scene(scene, folder_path, addon_prefs, library_collections):
temp_scene_name=TEMPSCENE_PREFIX,
export_settings=export_settings,
gltf_output_path=gltf_output_path,
tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, library_collections=library_collections, filter=is_object_dynamic, addon_prefs=addon_prefs),
tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, filter=is_object_dynamic, addon_prefs=addon_prefs),
tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
)
@ -64,7 +64,7 @@ def export_main_scene(scene, folder_path, addon_prefs, library_collections):
temp_scene_name=TEMPSCENE_PREFIX,
export_settings=export_settings,
gltf_output_path=gltf_output_path,
tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, library_collections=library_collections, addon_prefs=addon_prefs),
tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, addon_prefs=addon_prefs),
tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
)

View File

@ -1,67 +1,44 @@
import os
import bpy
from .export_blueprints import check_if_blueprint_on_disk, check_if_blueprints_exist, export_blueprints_from_collections
from ..helpers.helpers_collections import get_exportable_collections
from ..helpers.helpers_collections import (get_collections_in_library, get_exportable_collections, get_collections_per_scene, find_collection_ascendant_target_collection)
from ..helpers.helpers_scenes import (get_scenes, )
from ..helpers.helpers_blueprints import find_blueprints_not_on_disk
# TODO: this should also take the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
def get_collections_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
    """Determine which blueprints need to be (re)exported.

    Returns all internal blueprints when change detection is off or the export
    parameters changed; otherwise only the local blueprints whose objects
    changed plus any internal blueprint whose exported file is missing on disk.
    """
    export_change_detection = getattr(addon_prefs, "export_change_detection")
    export_gltf_extension = getattr(addon_prefs, "export_gltf_extension", ".glb")
    export_blueprints_path = getattr(addon_prefs, "export_blueprints_path", "")
    collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode") # read for parity; currently unused here

    [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)

    internal_blueprints = blueprints_data.internal_blueprints
    blueprints_to_export = internal_blueprints # just for clarity

    # if the export parameters have changed, bail out early
    # we need to re_export everything if the export parameters have been changed
    if export_change_detection and not changed_export_parameters:
        changed_blueprints = []

        # first check if all blueprints have already been exported before (if this is the first time the exporter is run
        # in your current Blender session for example)
        blueprints_not_on_disk = find_blueprints_not_on_disk(internal_blueprints, export_blueprints_path, export_gltf_extension)

        for scene in library_scenes:
            if scene.name in changes_per_scene:
                print("scanning", scene.name)
                changed_objects = list(changes_per_scene[scene.name].keys())
                scene_changed_blueprints = [blueprints_data.blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_data.blueprints_from_objects]
                # we only care about local blueprints/collections: accumulate just those
                # (FIX: the previous code kept the unfiltered list and appended the local
                #  subset on top, duplicating local blueprints and leaking non-local ones)
                changed_blueprints += [blueprint for blueprint in scene_changed_blueprints if blueprint.name in blueprints_data.blueprints_per_name.keys() and blueprint.local]

        print("CHANGED BLUEPRINTS", changed_blueprints)
        # de-duplicate: a blueprint can be both changed and missing from disk
        blueprints_to_export = list(set(changed_blueprints + blueprints_not_on_disk))

    # changed/all blueprints to export
    return (blueprints_to_export)

View File

@ -1,5 +1,5 @@
import bpy
from .export_blueprints import check_if_blueprint_on_disk
from ..helpers.helpers_blueprints import check_if_blueprint_on_disk
from ..helpers.helpers_scenes import (get_scenes, )
def changed_object_in_scene(scene_name, changes_per_scene, collections, collection_instances_combine_mode):
@ -26,7 +26,7 @@ def changed_object_in_scene(scene_name, changes_per_scene, collections, collecti
# TODO: this should also take the split/embed mode into account: if a collection instance changes AND embed is active, its container level/world should also be exported
def get_levels_to_export(changes_per_scene, changed_export_parameters, collections, addon_prefs):
def get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
print("TOTOOO")
export_change_detection = getattr(addon_prefs, "export_change_detection")
export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
@ -34,9 +34,12 @@ def get_levels_to_export(changes_per_scene, changed_export_parameters, collectio
collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
[main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
# or changed_object_in_scene(scene_name, changes_per_scene, collections, collection_instances_combine_mode)
# print("levels export", "export_change_detection", export_change_detection, "changed_export_parameters",changed_export_parameters, "export_models_path", export_models_path, "export_gltf_extension", export_gltf_extension, "changes_per_scene", changes_per_scene)
# determine list of main scenes to export
# we have more relaxed rules to determine if the main scenes have changed : any change is ok, (allows easier handling of changes, render settings etc)
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_models_path, export_gltf_extension) or changed_object_in_scene(scene_name, changes_per_scene, collections, collection_instances_combine_mode)]
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_models_path, export_gltf_extension) ]
print("main_scenes_to_export", main_scenes_to_export, changes_per_scene)
return (main_scenes_to_export)

View File

@ -12,7 +12,6 @@ from .get_collections_to_export import get_collections_to_export
from ..constants import TEMPSCENE_PREFIX
from .internals import CollectionsToExport
from ..helpers.helpers_scenes import (get_scenes)
from ..helpers.helpers_collections import (get_exportable_collections)
from .preferences import AutoExportGltfAddonPreferences
class AutoExportTracker(PropertyGroup):
@ -183,7 +182,7 @@ class AutoExportTracker(PropertyGroup):
addon_prefs = SimpleNamespace(**tmp)
#print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
(collections, collections_to_export, library_collections, collections_per_scene) = get_collections_to_export(cls.changed_objects_per_scene, export_settings_changed, addon_prefs)
(collections, collections_to_export, internal_collections, collections_per_scene) = get_collections_to_export(cls.changed_objects_per_scene, export_settings_changed, addon_prefs)
#print("collections to export", collections_to_export)
try:
# we save this list of collections in the context

View File

@ -0,0 +1,284 @@
import os
from types import SimpleNamespace
import bpy
class Blueprint:
    """Metadata about a single exportable blueprint (a Blender collection).

    Tracks where the blueprint is defined, which objects it contains and
    which collection instances reference it in the main scenes.
    """
    def __init__(self, name):
        self.name = name
        self.local = True               # defined in one of the library scenes (False => external/linked file)
        self.scene = None               # Not sure, could be usefull for tracking
        self.instances = []             # objects that instance this blueprint in the main scenes
        self.objects = []               # names of the non-instance objects contained in the collection
        self.nested_blueprints = []     # names of blueprints instanced inside this one
        self.collection = None          # should we just sublclass ?

    def __repr__(self):
        # fixed: was missing the comma after the name, unlike __str__
        return f'Name: {self.name}, Local: {self.local}, Scene: {self.scene}, Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'

    def __str__(self):
        return f'Name: "{self.name}", Local: {self.local}, Scene: {self.scene}, Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'
def find_blueprints_not_on_disk(blueprints, folder_path, extension):
    """Return the blueprints whose exported gltf file is missing from folder_path."""
    def _exported_file_missing(blueprint):
        candidate = os.path.join(folder_path, blueprint.name + extension)
        return not (os.path.exists(candidate) and os.path.isfile(candidate))

    return [blueprint for blueprint in blueprints if _exported_file_missing(blueprint)]
def check_if_blueprint_on_disk(scene_name, folder_path, extension):
    """Return True when an exported gltf file for `scene_name` already exists in folder_path."""
    gltf_output_path = os.path.join(folder_path, scene_name + extension)
    # os.path.isfile already implies existence, so a separate exists() check is unnecessary
    found = os.path.isfile(gltf_output_path)
    print("level", scene_name, "found", found, "path", gltf_output_path)
    return found
# blueprints: any collection with either
# - an instance
# - marked as asset
# - with the "auto_export" flag
# https://blender.stackexchange.com/questions/167878/how-to-get-all-collections-of-the-current-scene
def blueprints_scan(main_scenes, library_scenes, addon_prefs):
    """Scan all scenes and build the blueprint metadata used by the exporter.

    A "blueprint" is any collection that is instanced in a main scene, marked
    as an asset (when export_marked_assets is enabled), or flagged with the
    'AutoExport' custom property.

    Returns a SimpleNamespace with:
      - blueprints: flat list of Blueprint objects
      - blueprints_per_name: dict of blueprint name -> Blueprint
      - blueprint_names: list of all blueprint names
      - blueprints_from_objects: reverse lookup, object name -> Blueprint
      - internal_blueprints / external_blueprints: split by locality
      - blueprints_per_scenes: library scene name -> names of blueprints defined there
      - blueprint_instances_per_main_scene: main scene name -> instanced blueprint names
    """
    export_marked_assets = getattr(addon_prefs,"export_marked_assets")

    blueprints = {}
    blueprints_from_objects = {}
    collections = []

    # main scenes
    blueprint_instances_per_main_scene = {}
    internal_collection_instances = {}
    external_collection_instances = {}

    for scene in main_scenes:# should it only be main scenes ? what about collection instances inside other scenes ?
        print("scene", scene, scene.name)
        for object in scene.objects:
            #print("object", object.name)
            if object.instance_type == 'COLLECTION':
                collection = object.instance_collection
                collection_name = object.instance_collection.name
                #print("  from collection:", collection_name)
                print("scene there", scene, scene.name)

                # a collection counts as "internal" when it is defined in one of the library scenes
                collection_from_library = False
                for library_scene in library_scenes: # should be only in library scenes
                    collection_from_library = library_scene.user_of_id(collection) > 0 # TODO: also check if it is an imported asset
                    if collection_from_library:
                        break

                collection_category = internal_collection_instances if collection_from_library else external_collection_instances
                if not collection_name in collection_category.keys():
                    #print("ADDING INSTANCE OF", collection_name, "object", object.name, "categ", collection_category)
                    collection_category[collection_name] = [] #.append(collection_name)
                collection_category[collection_name].append(object)

                # experiment with custom properties from assets stored in other blend files
                """if not collection_from_library:
                    for property_name in object.keys():
                        print("stuff", property_name)
                    for property_name in collection.keys():
                        print("OTHER", property_name)"""

                # blueprints[collection_name].instances.append(object)

                print("bla", scene.name)
                # FIXME: this only account for direct instances of blueprints, not for any nested blueprint inside a blueprint
                if scene.name not in blueprint_instances_per_main_scene.keys():
                    blueprint_instances_per_main_scene[scene.name] = []
                blueprint_instances_per_main_scene[scene.name].append(collection_name)

                """# add any indirect ones
                # FIXME: needs to be recursive, either here or above
                for nested_blueprint in blueprints[collection_name].nested_blueprints:
                    if not nested_blueprint in blueprint_instances_per_main_scene[scene.name]:
                        blueprint_instances_per_main_scene[scene.name].append(nested_blueprint)"""

    # build Blueprint entries for every "local" collection defined in the library scenes
    for collection in bpy.data.collections:
        #print("collection", collection, collection.name_full, "users", collection.users)
        collection_from_library = False
        defined_in_scene = None
        for scene in library_scenes: # should be only in library scenes
            collection_from_library = scene.user_of_id(collection) > 0
            if collection_from_library:
                defined_in_scene = scene
                break
        if not collection_from_library:
            continue

        if (
            'AutoExport' in collection and collection['AutoExport'] == True # get marked collections
            or export_marked_assets and collection.asset_data is not None # or if you have marked collections as assets you can auto export them too
            or collection.name in list(internal_collection_instances.keys()) # or if the collection has an instance in one of the main scenes
        ):
            blueprint = Blueprint(collection.name)
            blueprint.local = True
            blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
            blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
            blueprint.collection = collection
            blueprint.instances = internal_collection_instances[collection.name] if collection.name in internal_collection_instances else []
            blueprint.scene = defined_in_scene
            blueprints[collection.name] = blueprint

            # now create reverse lookup , so you can find the collection from any of its contained objects
            for object in collection.all_objects:
                blueprints_from_objects[object.name] = blueprint#collection.name

            #
            collections.append(collection)

    # add any collection that has an instance in the main scenes, but is not present in any of the scenes (IE NON LOCAL)
    for collection_name in external_collection_instances:
        collection = bpy.data.collections[collection_name]
        blueprint = Blueprint(collection.name)
        blueprint.local = False
        blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
        blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
        blueprint.collection = collection
        blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
        blueprints[collection.name] = blueprint

        # now create reverse lookup , so you can find the collection from any of its contained objects
        for object in collection.all_objects:
            blueprints_from_objects[object.name] = blueprint#collection.name

    # then add any nested collections at root level (so we can have a flat list, regardless of nesting)
    # TODO: do this recursively
    for blueprint_name in list(blueprints.keys()):
        parent_blueprint = blueprints[blueprint_name]

        for nested_blueprint_name in parent_blueprint.nested_blueprints:
            if not nested_blueprint_name in blueprints.keys():
                collection = bpy.data.collections[nested_blueprint_name]
                blueprint = Blueprint(collection.name)
                blueprint.local = parent_blueprint.local
                blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop
                blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
                blueprint.collection = collection
                blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
                blueprint.scene = parent_blueprint.scene if parent_blueprint.local else None
                blueprints[collection.name] = blueprint

                # now create reverse lookup , so you can find the collection from any of its contained objects
                for object in collection.all_objects:
                    blueprints_from_objects[object.name] = blueprint#collection.name

    blueprints = dict(sorted(blueprints.items()))

    print("BLUEPRINTS")
    for blueprint_name in blueprints:
        print(" ", blueprints[blueprint_name])

    """print("BLUEPRINTS LOOKUP")
    print(blueprints_from_objects)"""

    print("BLUEPRINT INSTANCES PER MAIN SCENE")
    print(blueprint_instances_per_main_scene)

    """changes_test = {'Library': {
        'Blueprint1_mesh': bpy.data.objects['Blueprint1_mesh'],
        'Fox_mesh': bpy.data.objects['Fox_mesh'],
        'External_blueprint2_Cylinder': bpy.data.objects['External_blueprint2_Cylinder']}
    }
    # which main scene has been impacted by this
    # does one of the main scenes contain an INSTANCE of an impacted blueprint
    for scene in main_scenes:
        changed_objects = list(changes_test["Library"].keys()) # just a hack for testing
        #bluprint_instances_in_scene = blueprint_instances_per_main_scene[scene.name]
        #print("instances per scene", bluprint_instances_in_scene, "changed_objects", changed_objects)

        changed_blueprints_with_instances_in_scene = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
        print("changed_blueprints_with_instances_in_scene", changed_blueprints_with_instances_in_scene)
        level_needs_export = len(changed_blueprints_with_instances_in_scene) > 0
        if level_needs_export:
            print("level needs export", scene.name)

    for scene in library_scenes:
        changed_objects = list(changes_test[scene.name].keys())
        changed_blueprints = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
        # we only care about local blueprints/collections
        changed_local_blueprints = [blueprint_name for blueprint_name in changed_blueprints if blueprint_name in blueprints.keys() and blueprints[blueprint_name].local]
        print("changed blueprints", changed_local_blueprints)"""

    # additional helper data structures for lookups etc
    blueprints_per_name = blueprints
    blueprints = [] # flat list
    internal_blueprints = []
    external_blueprints = []
    blueprints_per_scenes = {}

    for blueprint in blueprints_per_name.values():
        blueprints.append(blueprint)
        if blueprint.local:
            internal_blueprints.append(blueprint)
            if blueprint.scene:
                if not blueprint.scene.name in blueprints_per_scenes:
                    blueprints_per_scenes[blueprint.scene.name] = []
                blueprints_per_scenes[blueprint.scene.name].append(blueprint.name) # meh
        else:
            external_blueprints.append(blueprint)

    data = {
        "blueprints": blueprints,
        "blueprints_per_name": blueprints_per_name,
        "blueprint_names": list(blueprints_per_name.keys()),
        "blueprints_from_objects": blueprints_from_objects,

        "internal_blueprints": internal_blueprints,
        "external_blueprints": external_blueprints,
        "blueprints_per_scenes": blueprints_per_scenes,

        "blueprint_instances_per_main_scene": blueprint_instances_per_main_scene

        # not sure about these two
        #internal_collection_instances,
        #external_collection_instances
    }

    return SimpleNamespace(**data)
import json
from .object_makers import (make_empty)
def inject_blueprints_list_into_main_scene(scene, blueprints_data):
    """Upsert an empty object in `scene` carrying the 'BlueprintsList' custom property.

    The property lists every blueprint instanced in the scene together with
    its nested blueprints, so the bevy side can preload them.
    """
    print("injecting assets/blueprints data into scene")
    assets_list_name = f"assets_list_{scene.name}_components"

    # reuse the carrier object if it already exists, otherwise create it
    assets_list = next((obj for obj in scene.objects if obj.name == assets_list_name), None)
    if assets_list is None:
        assets_list = make_empty(assets_list_name, [0,0,0], [0,0,0], [0,0,0], scene.collection)

    # what are the blueprints used in this scene, inject those into the assets list component
    blueprint_names_for_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene.name, None)
    if blueprint_names_for_scene:
        print("blueprint_names_for_scene", blueprint_names_for_scene)
        children_per_blueprint = {}
        for blueprint_name in blueprint_names_for_scene:
            blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
            if blueprint:
                children_per_blueprint[blueprint_name] = blueprint.nested_blueprints
        print("new logic blueprints list", children_per_blueprint)
        assets_list["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"
def remove_blueprints_list_from_main_scene(scene):
    """Delete the per-scene 'assets_list' helper object from `scene`, if present."""
    assets_list_name = f"assets_list_{scene.name}_components"
    assets_list = None
    for candidate in scene.objects:
        if candidate.name == assets_list_name:
            assets_list = candidate
    if assets_list is not None:
        bpy.data.objects.remove(assets_list, do_unlink=True)

View File

@ -1,185 +1,11 @@
import bpy
# returns the list of the collections in use for a given scene
# returns the list of the collections in use for a given scene
def get_used_collections(scene):
    """Collect the distinct collections instanced by the objects of `scene`.

    Returns (collection_names, used_collections): a set of names and the
    matching collection objects, in first-seen order.
    """
    seen_names = set()
    used_collections = []
    for obj in scene.collection.all_objects:
        if obj.instance_type != 'COLLECTION':
            continue
        instanced = obj.instance_collection
        if instanced.name not in seen_names:
            seen_names.add(instanced.name)
            used_collections.append(instanced)
    return (seen_names, used_collections)
# gets all collections that should ALWAYS be exported to their respective gltf files, even if they are not used in the main scene/level
# gets all collections that should ALWAYS be exported to their respective gltf files, even if they are not used in the main scene/level
def get_marked_collections(scene, addon_prefs):
    """Find the collections of `scene` explicitly flagged for export.

    A collection qualifies when it carries the 'AutoExport' custom property
    set to True, or (when export_marked_assets is enabled) when it is marked
    as an asset. Returns (collection_names, marked_collections); a collection
    matching both conditions appears twice, as in the original behavior.
    """
    export_marked_assets = getattr(addon_prefs,"export_marked_assets")
    marked_collections = []
    collection_names = []
    for collection in traverse_tree(scene.collection):
        if 'AutoExport' in collection and collection['AutoExport'] == True:
            marked_collections.append(collection)
            collection_names.append(collection.name)
        # if you have marked collections as assets you can auto export them too
        if export_marked_assets and collection.asset_data is not None:
            marked_collections.append(collection)
            collection_names.append(collection.name)
    return (collection_names, marked_collections)
# gets all collections within collections that might also be relevant
def get_sub_collections(collections, parent=None, children_per_collection=None):
    """Recursively gather every collection instanced, directly or transitively,
    by the given collections.

    Side effects: builds a CollectionNode tree under `parent`, and fills
    `children_per_collection` (collection name -> list of child collection names).
    Returns (collection_names, used_collections).
    """
    if parent == None:
        parent = CollectionNode()
    if children_per_collection == None:
        children_per_collection = {}
    collection_names = set()
    used_collections = []

    for root_collection in collections:
        #print("collections", collections)
        node = CollectionNode(name=root_collection.name, parent=parent)
        parent.children.append(node)

        #print("root collection", root_collection.name)
        for collection in traverse_tree(root_collection): # TODO: filter out COLLECTIONS that have the flatten flag (unlike the flatten flag on collection instances themselves)
            #print("sub", collection)
            node_name = collection.name
            children_per_collection[node_name] = []
            #print("  scanning", collection.name)
            for object in collection.objects:
                #print("FLATTEN", object.name, 'Flatten' in object)
                if object.instance_type == 'COLLECTION' : # and not 'Flatten' in object:
                    collection_name = object.instance_collection.name
                    #print("sub obj", collection_name)
                    # FIXME: not sure:
                    children_per_collection[node_name].append(collection_name)
                    # recurse into the instanced collection; note the child node is
                    # attached to `node`, and sub_names are merged into this node's entry
                    (sub_names, sub_collections) = get_sub_collections([object.instance_collection], node, children_per_collection)
                    if len(list(sub_names)) > 0:
                        children_per_collection[node_name] += (list(sub_names))
                    #print("    found sub collection in use", object.name, object.instance_collection)
                    if not collection_name in collection_names:
                        collection_names.add(collection_name)
                        used_collections.append(object.instance_collection)
                    collection_names.update(sub_names)
    #for sub in traverse_tree(root_collection):
    return (collection_names, used_collections)
# FIXME: get rid of this, ugh
def flatten_collection_tree(node, children_per_collection):
    """Flatten a CollectionNode tree into a flat dict of name -> direct child names.

    The per-node list is deduplicated through set() at the end, so its order is
    not deterministic.
    """
    children_per_collection[node.name] = []
    for child in node.children:
        # NOTE(review): this tests node.name against its own child list before
        # appending child.name — it looks like it was meant to test child.name;
        # confirm intent before changing, the set() below masks duplicates anyway
        if not node.name in children_per_collection[node.name]:
            children_per_collection[node.name].append(child.name)
        flatten_collection_tree(child, children_per_collection)
    children_per_collection[node.name] = list(set( children_per_collection[node.name]))
class CollectionNode:
    """Lightweight tree node mirroring Blender collection nesting."""

    def __init__(self, name="", parent=None):
        self.name = name          # name of the collection this node represents
        self.parent = parent      # parent CollectionNode, None for the root
        self.children = []        # child CollectionNode entries
        self.changed = False      # dirty flag

    def __str__(self):
        rendered_children = [str(child) for child in self.children]
        return "name: " + self.name + ", children:" + str(rendered_children)
# get exportable collections from lists of mains scenes and lists of library scenes
def get_exportable_collections(main_scenes, library_scenes, addon_prefs):
    """Determine which collections should be exported as gltf blueprints.

    Combines the collections instanced in the main scenes with the ones
    explicitly marked for export in the library scenes, then expands nested
    collections. Returns (all_collection_names, children_per_collection).
    """
    all_collections = []
    all_collection_names = []
    root_node = CollectionNode()
    root_node.name = "root"
    children_per_collection = {}

    # collections instanced by objects in the main scenes
    for main_scene in main_scenes:
        (collection_names, collections) = get_used_collections(main_scene)
        all_collection_names = all_collection_names + list(collection_names)
        all_collections = all_collections + collections
    # collections explicitly marked for export in the library scenes
    for library_scene in library_scenes:
        marked_collections = get_marked_collections(library_scene, addon_prefs)
        all_collection_names = all_collection_names + marked_collections[0]
        all_collections = all_collections + marked_collections[1]
    # expand: also include collections nested inside the ones found so far;
    # this populates the node tree under root_node as a side effect
    (collection_names, collections) = get_sub_collections(all_collections, root_node, children_per_collection)
    all_collection_names = all_collection_names + list(collection_names)
    # the first mapping is discarded and rebuilt from the node tree
    children_per_collection = {}
    flatten_collection_tree(root_node, children_per_collection)
    #print("ROOT NODE", children_per_collection) #

    return (all_collection_names, children_per_collection)
def get_collections_per_scene(collection_names, library_scenes):
    """Map each library scene name to the subset of `collection_names` it contains."""
    collections_per_scene = {}
    for scene in library_scenes:
        for candidate in traverse_tree(scene.collection):
            if candidate.name in collection_names:
                collections_per_scene.setdefault(scene.name, []).append(candidate.name)
    return collections_per_scene
def get_collections_in_library(library_scenes):
    # now that we have the collections that are in use by collection instances, check if those collections are actually present in the library scenes
    """Return the names of every collection reachable from the library scenes."""
    collections = []
    collection_names = []
    for library_scene in library_scenes:
        for collection in traverse_tree(library_scene.collection):
            collections.append(collection)
            collection_names.append(collection.name)
    return collection_names
def get_collection_hierarchy(root_col, levels=1):
    """Read hierarchy of the collections in the scene.

    Returns a dict mapping a parent Collection to the list of its child
    collections, down to `levels` of depth. The initial call passes
    `root_col.children` (a list, not a Collection) as parent, so root_col
    itself never appears as a key — presumably intentional, to exclude the
    scene root; confirm before relying on it.
    """
    level_lookup = {}
    def recurse(root_col, parent, depth):
        if depth > levels:
            return
        # only record entries whose parent is an actual Collection (skips the
        # synthetic top-level call where parent is a list)
        if isinstance(parent, bpy.types.Collection):
            level_lookup.setdefault(parent, []).append(root_col)
        for child in root_col.children:
            recurse(child, root_col, depth + 1)
    recurse(root_col, root_col.children, 0)
    return level_lookup
# traverse all collections
# traverse all collections
def traverse_tree(t):
    """Yield t and all of its descendants, depth-first, parents before children."""
    pending = [t]
    while pending:
        current = pending.pop()
        yield current
        # push children reversed so they are visited in their natural order
        pending.extend(reversed(current.children))
# the active collection is a View Layer concept, so you actually have to find the active LayerCollection
# which must be done recursively
# the active collection is a View Layer concept, so you actually have to find the active LayerCollection
# which must be done recursively
def find_layer_collection_recursive(find, col):
    """Return the LayerCollection under `col` that wraps collection `find`, or None.

    Bug fix: the previous version only inspected the direct children of `col`
    and never actually recursed, despite its name — collections nested more
    than one level deep were never found.
    """
    for child in col.children:
        if child.collection == find:
            return child
        # recurse into this child's own layer collections
        found = find_layer_collection_recursive(find, child)
        if found is not None:
            return found
    return None
# Recursively traverse layer_collection for a particular name
def recurLayerCollection(layerColl, collName):
found = None
@ -189,37 +15,9 @@ def recurLayerCollection(layerColl, collName):
found = recurLayerCollection(layer, collName)
if found:
return found
# traverse the collection hierarchy updward until you find one collection inside target_collections
# traverse the collection hierarchy updward until you find one collection inside target_collections
def find_collection_ascendant_target_collection(collection_parents, target_collections, collection):
    """Walk up the parent chain from `collection` until a member of
    `target_collections` is reached; return it, or None if the chain ends first.
    """
    current = collection
    while current is not None:
        if current in target_collections:
            return current
        current = collection_parents.get(current)
    return None
def set_active_collection(scene, collection_name):
    """Make `collection_name` the active collection of `scene`'s 'ViewLayer'."""
    root_layer_collection = bpy.data.scenes[scene.name].view_layers['ViewLayer'].layer_collection
    target = recurLayerCollection(root_layer_collection, collection_name)
    # set active collection to the collection
    bpy.context.view_layer.active_layer_collection = target
# find which of the library scenes the given collection stems from
# TODO: does not seem efficient at all ?
# TODO: remove, unused
def get_source_scene(collection_name, library_scenes):
    """Return the first library scene containing a collection named
    `collection_name`, or None when no scene contains it.
    """
    for scene in library_scenes:
        for candidate in traverse_tree(scene.collection):
            if candidate.name == collection_name:
                return scene
    return None

View File

@ -1,6 +1,5 @@
import json
import bpy
from .helpers_collections import (CollectionNode, get_sub_collections, get_used_collections, set_active_collection)
from .object_makers import (make_empty)
@ -92,10 +91,12 @@ def copy_animation_data(source, target):
target["AnimationMarkers"] = f'( {markers_formated} )'
def duplicate_object(object, parent, combine_mode, destination_collection, library_collections, legacy_mode, nester=""):
def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, legacy_mode, nester=""):
copy = None
if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in library_collections)) ):
#print("creating empty for", object.name, object.instance_collection.name, library_collections, combine_mode)
internal_blueprint_names = [blueprint.name for blueprint in blueprints_data.internal_blueprints]
if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in internal_blueprint_names)) ):
#print("creating empty for", object.name, object.instance_collection.name, internal_blueprint_names, combine_mode)
collection_name = object.instance_collection.name
original_name = object.name
@ -107,15 +108,13 @@ def duplicate_object(object, parent, combine_mode, destination_collection, libra
# we also inject a list of all sub blueprints, so that the bevy side can preload them
if not legacy_mode:
root_node = CollectionNode()
root_node.name = "root"
children_per_collection = {}
get_sub_collections([object.instance_collection], root_node, children_per_collection)
empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_collection))})"
# empty_obj["AnimationMarkers"] = '({"animation_name": {5: "Marker_1"} })'
#'({5: "sdf"})'#.replace('"',"'") #f"({json.dumps(dict(animation_foo))})"
#empty_obj["Assets"] = {"Animations": [], "Materials": [], "Models":[], "Textures":[], "Audio":[], "Other":[]}
blueprint_name = collection_name
children_per_blueprint = {}
blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
if blueprint:
children_per_blueprint[blueprint_name] = blueprint.nested_blueprints
print("new logic blueprints list", children_per_blueprint)
empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"
# we copy custom properties over from our original object to our empty
for component_name, component_value in object.items():
@ -144,10 +143,10 @@ def duplicate_object(object, parent, combine_mode, destination_collection, libra
copy_animation_data(object, copy)
for child in object.children:
duplicate_object(child, copy, combine_mode, destination_collection, library_collections, legacy_mode, nester+" ")
duplicate_object(child, copy, combine_mode, destination_collection, blueprints_data, legacy_mode, nester+" ")
# copies the contents of a collection into another one while replacing library instances with empties
def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, library_collections=[], addon_prefs={}):
def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, addon_prefs={}):
collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
legacy_mode = getattr(addon_prefs, "export_legacy_mode")
@ -159,7 +158,7 @@ def copy_hollowed_collection_into(source_collection, destination_collection, par
#check if a specific collection instance does not have an ovveride for combine_mode
combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
parent = parent_empty
duplicate_object(object, parent, combine_mode, destination_collection, library_collections, legacy_mode)
duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, legacy_mode)
# for every child-collection of the source, copy its content into a new sub-collection of the destination
for collection in source_collection.children:
@ -174,7 +173,7 @@ def copy_hollowed_collection_into(source_collection, destination_collection, par
destination_collection = destination_collection,
parent_empty = collection_placeholder,
filter = filter,
library_collections = library_collections,
blueprints_data = blueprints_data,
addon_prefs=addon_prefs
)
@ -223,44 +222,3 @@ def get_scenes(addon_prefs):
library_scenes = list(map(lambda name: bpy.data.scenes[name], library_scene_names))
return [level_scene_names, level_scenes, library_scene_names, library_scenes]
def inject_blueprints_list_into_main_scene(scene):
    """Upsert an empty object into `scene` carrying the 'BlueprintsList' custom
    property: a mapping of each used collection to its nested collections.
    """
    print("injecting assets/blueprints data into scene")
    root_collection = scene.collection
    assets_list = None
    assets_list_name = f"assets_list_{scene.name}_components"

    for object in scene.objects:
        if object.name == assets_list_name:
            assets_list = object
            break

    # create the carrier object on first use
    if assets_list is None:
        assets_list = make_empty(assets_list_name, [0,0,0], [0,0,0], [0,0,0], root_collection)

    # find all blueprints used in a scene
    # TODO: export a tree rather than a flat list ? because you could have potential clashing items in flat lists (amongst other issues)
    (collection_names, collections) = get_used_collections(scene)
    root_node = CollectionNode()
    root_node.name = "root"
    children_per_collection = {}
    #print("collection_names", collection_names, "collections", collections)
    get_sub_collections(collections, root_node, children_per_collection)

    # what about marked assets ?
    # what about audio assets ?
    # what about materials ?
    # object['MaterialInfo'] = '(name: "'+material.name+'", source: "'+current_project_name + '")'

    #assets_list["blueprints_direct"] = list(collection_names)
    assets_list["BlueprintsList"] = f"({json.dumps(dict(children_per_collection))})"
    #assets_list["Materials"]= '()'
def remove_blueprints_list_from_main_scene(scene):
    """Remove the per-scene 'assets_list' helper object from `scene`, if any."""
    assets_list_name = f"assets_list_{scene.name}_components"
    assets_list = None
    for candidate in scene.objects:
        if candidate.name == assets_list_name:
            assets_list = candidate
    if assets_list is not None:
        bpy.data.objects.remove(assets_list, do_unlink=True)

View File

@ -1,36 +1,36 @@
import bpy
from ..helpers.object_makers import make_empty
def upsert_scene_components(scene, world, main_scene_names):
#should only be run in one of the main scenes
if scene.name not in main_scene_names:
return
root_collection = scene.collection
lighting_components = None
print("upsert scene components", scene.name, scene.objects)
for object in scene.objects:
if object.name == "lighting_components_"+scene.name:
lighting_components = object
break
# TODO: replace this with placing scene level custom properties once support for that has been added to bevy_gltf
def upsert_scene_components(main_scenes):
for scene in main_scenes:
lighting_components_name = f"lighting_components_{scene.name}"
lighting_components = bpy.data.objects.get(lighting_components_name, None)
if not lighting_components:
root_collection = scene.collection
lighting_components = make_empty('lighting_components_'+scene.name, [0,0,0], [0,0,0], [0,0,0], root_collection)
if lighting_components is None:
lighting_components = make_empty('lighting_components_'+scene.name, [0,0,0], [0,0,0], [0,0,0], root_collection)
if scene.world is not None:
lighting_components['BlenderBackgroundShader'] = ambient_color_to_component(scene.world)
lighting_components['BlenderShadowSettings'] = scene_shadows_to_component(scene)
if world is not None:
lighting_components['BlenderBackgroundShader'] = ambient_color_to_component(world)
if scene.eevee.use_bloom:
lighting_components['BloomSettings'] = scene_bloom_to_component(scene)
elif 'BloomSettings' in lighting_components:
del lighting_components['BloomSettings']
lighting_components['BlenderShadowSettings'] = scene_shadows_to_component(scene)
if scene.eevee.use_gtao:
lighting_components['SSAOSettings'] = scene_ao_to_component(scene)
elif 'SSAOSettings' in lighting_components:
del lighting_components['SSAOSettings']
if scene.eevee.use_bloom:
lighting_components['BloomSettings'] = scene_bloom_to_component(scene)
elif 'BloomSettings' in lighting_components:
del lighting_components['BloomSettings']
if scene.eevee.use_gtao:
lighting_components['SSAOSettings'] = scene_ao_to_component(scene)
elif 'SSAOSettings' in lighting_components:
del lighting_components['SSAOSettings']
def remove_scene_components(main_scenes):
    """Strip the temporary lighting_components helper object from each main scene."""
    for scene in main_scenes:
        helper = bpy.data.objects.get(f"lighting_components_{scene.name}", None)
        if helper:
            bpy.data.objects.remove(helper, do_unlink=True)
def ambient_color_to_component(world):

View File

@ -4,7 +4,7 @@ from pathlib import Path
from ..helpers.generate_and_export import generate_and_export
from ..helpers.helpers_collections import (set_active_collection, traverse_tree)
from ..helpers.helpers_collections import (traverse_tree)
from ..auto_export.export_gltf import (export_gltf, generate_gltf_export_preferences)
from ..helpers.object_makers import make_cube

View File

@ -13,8 +13,32 @@
- [ ] replace field name based logic with type base logic
- [ ] to make things easier overall we need a mapping of Blueprints/Collections to
- [ ] their instances
- [ ] their objects/sub collections instances etc
- [x] their instances
- [x] their objects/sub collections instances etc
- [ ] a mapping of objects to the blueprints they belong to
- [ ] things to alter/remove using the new & improved Blueprints/collections scanning and mapping
- [x] get_sub_collections => remove , but rewrite how BlueprintsList are generated
- [x] get_used_collections => remove , but rewrite how BlueprintsList are generated
- [x] get_exportable_collections => remove , but replace with new function to get exportable blueprints
- [x] get_collections_per_scene
- [x] get_collections_in_library
- [ ] traverse_tree => keep, used
- [x] find_layer_collection_recursive => remove, unused
- [ ] recurLayerCollection => unclear, analyse
- [x] find_collection_ascendant_target_collection => remove, double check
- [x] set_active_collection => keep, used
- [x] get_source_scene => remove, unused
- [x] assets_list["BlueprintsList"]
BLUEPRINTS LIST {'Blueprint1': [], 'Blueprint6_animated': [], 'Blueprint4_nested': ['Blueprint3'], 'Blueprint3': [], 'Blueprint7_hierarchy': [], 'External_blueprint': [], 'External_blueprint2': ['External_blueprint3'], 'External_blueprint3': [], 'Blueprint8_animated_no_bones': []}
- [x] internal_collections => replace with "internal_collections" or "local_collections"
- [x] move all things that alter data "permanently" to pre-save
- [x] lighting/ scene components injection
- [x] blueprintNames ?
- [x] or more simple: just remove them after save as we do for others: lighting_components
- [ ] update cleanup_materials
- [ ] remove bulk of tracker related code
- [ ] clean up