feat(auto_export):

* added an operator to handle the root path & sub-paths in a smarter way (sketched below)
* the various paths can no longer be set "manually" as plain text
* overhauled the UI for paths
* overhauled the internal handling of paths
* further refinements to path handling
* related fixes & tweaks
* restructured the UI & split out the change detection settings
* the basic export/assets folder is now only available in the UI when blueprints are not in use
* overhauled the Bevy integration tests, now done in "two passes" since the external blueprints library needs to be exported first
* cleanups & improvements to the test above
* started overhauling & massively simplifying the rest of the tests, starting with change_tracking
kaosat.dev 2024-04-29 00:02:50 +02:00
parent f0d7ccd454
commit 26ea12cc4c
17 changed files with 552 additions and 855 deletions
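
The first bullet above refers to a dedicated folder-browser operator (OT_OpenFolderbrowser, used together with the draw_folder_browser helper in the export dialog). Its implementation is not part of this diff, so the following is only a minimal sketch of the idea; the bl_idname, the target_property wiring and the use of context.active_operator are assumptions rather than the actual code:

import os
import bpy
from bpy.types import Operator
from bpy_extras.io_utils import ImportHelper
from bpy.props import StringProperty, BoolProperty

class OT_OpenFolderbrowser(Operator, ImportHelper):
    """Open a file browser restricted to folders and store the picked directory"""
    bl_idname = "generic.open_folderbrowser"
    bl_label = "Select folder"

    # restrict the file browser to directories
    filter_folder: BoolProperty(default=True, options={'HIDDEN'}) # type: ignore
    # name of the StringProperty on the calling operator that should receive the chosen folder
    target_property: StringProperty(options={'HIDDEN'}) # type: ignore

    def execute(self, context):
        # ImportHelper fills in self.filepath; only the directory part is of interest here
        folder = os.path.dirname(self.filepath)
        setattr(context.active_operator, self.target_property, folder)
        return {'FINISHED'}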

View File

@ -181,6 +181,7 @@ class OT_OpenFilebrowser(Operator, ImportHelper):
default='*.json',
options={'HIDDEN'}
) # type: ignore
def execute(self, context):
"""Do something with the selected file(s)."""
#filename, extension = os.path.splitext(self.filepath)

View File

@ -41,7 +41,7 @@ from .ui.main import (GLTF_PT_auto_export_change_detection, GLTF_PT_auto_export_
GLTF_PT_auto_export_SidePanel
)
from .ui.operators import (SCENES_LIST_OT_actions)
from .ui.operators import (OT_OpenFolderbrowser, SCENES_LIST_OT_actions)
from .helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_gltf
@ -104,6 +104,8 @@ classes = [
SCENE_UL_GLTF_auto_export,
SCENES_LIST_OT_actions,
OT_OpenFolderbrowser,
AutoExportGLTF,
#AutoExportGltfAddonPreferences,
@ -118,8 +120,6 @@ classes = [
GLTF_PT_auto_export_blueprints,
GLTF_PT_auto_export_SidePanel,
GLTF_PT_auto_export_blueprints_list,
GLTF_PT_auto_export_changes_list,
AutoExportTracker,
]

View File

@ -8,7 +8,7 @@ import traceback
from .preferences import AutoExportGltfAddonPreferences
from .get_collections_to_export import get_collections_to_export
from .get_blueprints_to_export import get_blueprints_to_export
from .get_levels_to_export import get_levels_to_export
from .get_standard_exporter_settings import get_standard_exporter_settings
@ -30,27 +30,33 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
# path to the current blend file
file_path = bpy.data.filepath
# Get the folder
folder_path = os.path.dirname(file_path)
blend_file_path = os.path.dirname(file_path)
# get the preferences for our addon
export_root_folder = getattr(addon_prefs, "export_root_folder")
export_output_folder = getattr(addon_prefs,"export_output_folder")
export_models_path = os.path.join(blend_file_path, export_output_folder)
#should we use change detection or not
export_change_detection = getattr(addon_prefs, "export_change_detection")
export_scene_settings = getattr(addon_prefs,"export_scene_settings")
do_export_blueprints = getattr(addon_prefs,"export_blueprints")
export_output_folder = getattr(addon_prefs,"export_output_folder")
export_models_path = os.path.join(folder_path, export_output_folder)
export_materials_library = getattr(addon_prefs,"export_materials_library")
export_scene_settings = getattr(addon_prefs,"export_scene_settings")
print("export_materials_library", export_materials_library)
# standard gltf export settings are stored differently
standard_gltf_exporter_settings = get_standard_exporter_settings()
gltf_extension = standard_gltf_exporter_settings.get("export_format", 'GLB')
gltf_extension = '.glb' if gltf_extension == 'GLB' else '.gltf'
# here we do a bit of workaround by creating an override # TODO: do this at the "UI" level
export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
#print('addon_prefs', AutoExportGltfAddonPreferences.__annotations__)#)addon_prefs.__annotations__)
# generate the actual complete output path
export_blueprints_path = os.path.join(blend_file_path, export_root_folder, getattr(addon_prefs,"export_blueprints_path"))
export_levels_path = os.path.join(blend_file_path, export_root_folder, getattr(addon_prefs, "export_levels_path"))
print("export_blueprints_path", export_blueprints_path)
# here we do a bit of workaround by creating an override # TODO: do this at the "UI" level
print("collection_instances_combine_mode", addon_prefs.collection_instances_combine_mode)
"""if hasattr(addon_prefs, "__annotations__") :
tmp = {}
@ -66,6 +72,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
addon_prefs = SimpleNamespace(**tmp) #copy.deepcopy(addon_prefs)
addon_prefs.__annotations__ = tmp"""
addon_prefs.export_blueprints_path = export_blueprints_path
addon_prefs.export_levels_path = export_levels_path
addon_prefs.export_gltf_extension = gltf_extension
addon_prefs.export_models_path = export_models_path
@ -91,7 +98,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
if do_export_blueprints:
print("EXPORTING")
# get blueprints/collections infos
(blueprints_to_export) = get_collections_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs)
(blueprints_to_export) = get_blueprints_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs)
# get level/main scenes infos
(main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs)
@ -99,7 +106,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
# since materials export adds components we need to call this before blueprints are exported
# export materials & inject materials components into relevant objects
if export_materials_library:
export_materials(blueprints_data.blueprint_names, library_scenes, folder_path, addon_prefs)
export_materials(blueprints_data.blueprint_names, library_scenes, blend_file_path, addon_prefs)
# update the list of tracked exports
exports_total = len(blueprints_to_export) + len(main_scenes_to_export) + (1 if export_materials_library else 0)
@ -131,7 +138,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
print("export MAIN scenes")
for scene_name in main_scenes_to_export:
print(" exporting scene:", scene_name)
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, blueprints_data)
export_main_scene(bpy.data.scenes[scene_name], blend_file_path, addon_prefs, blueprints_data)
# now deal with blueprints/collections
do_export_library_scene = not export_change_detection or changed_export_parameters or len(blueprints_to_export) > 0
@ -141,7 +148,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
"""for (scene_name, blueprints_to_export) in blueprints_per_scene.items():
print(" exporting blueprints from scene:", scene_name)
print(" blueprints to export", blueprints_to_export)"""
export_blueprints(blueprints_to_export, folder_path, addon_prefs, blueprints_data)
export_blueprints(blueprints_to_export, blend_file_path, addon_prefs, blueprints_data)
# reset current scene from backup
bpy.context.window.scene = old_current_scene
@ -154,7 +161,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
else:
for scene_name in main_scene_names:
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, [])
export_main_scene(bpy.data.scenes[scene_name], blend_file_path, addon_prefs, [])
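# Condensed, the path handling above boils down to: anchor everything on the blend file's
# folder plus the project root, append a per-category sub-path, and write the resolved
# paths back onto addon_prefs so downstream exporters only ever see final paths.
# A minimal sketch restating the code above (default folder names taken from this commit,
# assuming os is imported at module level as elsewhere in this file):
def resolve_export_paths(blend_file_path, addon_prefs):
    export_root_folder = getattr(addon_prefs, "export_root_folder")           # e.g. '../'
    blueprints_sub_path = getattr(addon_prefs, "export_blueprints_path")      # e.g. 'assets/blueprints'
    levels_sub_path = getattr(addon_prefs, "export_levels_path")              # e.g. 'assets/levels'

    export_blueprints_path = os.path.join(blend_file_path, export_root_folder, blueprints_sub_path)
    export_levels_path = os.path.join(blend_file_path, export_root_folder, levels_sub_path)

    # override the preferences with the resolved absolute paths
    addon_prefs.export_blueprints_path = export_blueprints_path
    addon_prefs.export_levels_path = export_levels_path
    return (export_blueprints_path, export_levels_path)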

View File

@ -7,10 +7,10 @@ from .export_gltf import (generate_gltf_export_preferences)
from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into
def export_blueprints(blueprints, folder_path, addon_prefs, blueprints_data):
export_output_folder = getattr(addon_prefs,"export_output_folder")
def export_blueprints(blueprints, blend_file_path, addon_prefs, blueprints_data):
export_blueprints_path = getattr(addon_prefs,"export_blueprints_path")
gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
try:
# save current active collection
active_collection = bpy.context.view_layer.active_layer_collection

View File

@ -1,5 +1,6 @@
import os
import bpy
from pathlib import Path
from ..constants import TEMPSCENE_PREFIX
from ..helpers.generate_and_export import generate_and_export
@ -9,17 +10,19 @@ from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collectio
from ..helpers.helpers_blueprints import inject_blueprints_list_into_main_scene, remove_blueprints_list_from_main_scene
# export all main scenes
def export_main_scenes(scenes, folder_path, addon_prefs):
def export_main_scenes(scenes, blend_file_path, addon_prefs):
for scene in scenes:
export_main_scene(scene, folder_path, addon_prefs)
export_main_scene(scene, blend_file_path, addon_prefs)
def export_main_scene(scene, folder_path, addon_prefs, blueprints_data):
def export_main_scene(scene, blend_file_path, addon_prefs, blueprints_data):
gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
export_root_folder = getattr(addon_prefs, "export_root_folder")
export_output_folder = getattr(addon_prefs,"export_output_folder")
export_levels_path = getattr(addon_prefs,"export_levels_path")
export_blueprints = getattr(addon_prefs,"export_blueprints")
export_separate_dynamic_and_static_objects = getattr(addon_prefs, "export_separate_dynamic_and_static_objects")
gltf_output_path = os.path.join(folder_path, export_output_folder, scene.name)
export_settings = { **gltf_export_preferences,
'use_active_scene': True,
'use_active_collection':True,
@ -30,6 +33,7 @@ def export_main_scene(scene, folder_path, addon_prefs, blueprints_data):
}
if export_blueprints :
gltf_output_path = os.path.join(export_levels_path, scene.name)
inject_blueprints_list_into_main_scene(scene, blueprints_data)
@ -46,7 +50,7 @@ def export_main_scene(scene, folder_path, addon_prefs, blueprints_data):
)
# then export all dynamic objects
gltf_output_path = os.path.join(folder_path, export_output_folder, scene.name+ "_dynamic")
gltf_output_path = os.path.join(export_levels_path, scene.name+ "_dynamic")
generate_and_export(
addon_prefs,
temp_scene_name=TEMPSCENE_PREFIX,
@ -68,6 +72,7 @@ def export_main_scene(scene, folder_path, addon_prefs, blueprints_data):
)
else:
gltf_output_path = os.path.join(export_root_folder, export_output_folder, scene.name)
print(" exporting gltf to", gltf_output_path, ".gltf/glb")
export_gltf(gltf_output_path, export_settings)

View File

@ -1,10 +1,10 @@
import bpy
import os
from ..helpers.helpers_scenes import (get_scenes, )
from ..helpers.helpers_blueprints import find_blueprints_not_on_disk
# TODO: this should also take the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
def get_collections_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
def get_blueprints_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
export_change_detection = getattr(addon_prefs, "export_change_detection")
export_gltf_extension = getattr(addon_prefs, "export_gltf_extension", ".glb")
export_blueprints_path = getattr(addon_prefs,"export_blueprints_path", "")

View File

@ -43,14 +43,13 @@ def changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, coll
def get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
export_change_detection = getattr(addon_prefs, "export_change_detection")
export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
export_models_path = getattr(addon_prefs, "export_models_path")
export_levels_path = getattr(addon_prefs, "export_levels_path")
collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
[main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
# determine list of main scenes to export
# we have more relaxed rules to determine if the main scenes have changed: any change is ok (this allows easier handling of changes, render settings, etc.)
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode) or not check_if_blueprint_on_disk(scene_name, export_models_path, export_gltf_extension) ]
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode) or not check_if_blueprint_on_disk(scene_name, export_levels_path, export_gltf_extension) ]
print("main_scenes_to_export", main_scenes_to_export, changes_per_scene)
return (main_scenes_to_export)
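# The one-line comprehension above packs several conditions together; unrolled, the
# per-level decision (now checked against export_levels_path instead of the old
# export_models_path) reads roughly as follows. This is a restating sketch that reuses
# changed_object_in_scene / check_if_blueprint_on_disk from this module's scope:
def should_export_main_scene(scene_name, changes_per_scene, changed_export_parameters,
                             blueprints_data, collection_instances_combine_mode,
                             export_change_detection, export_levels_path, export_gltf_extension):
    if not export_change_detection:
        # change detection disabled: always export
        return True
    if changed_export_parameters:
        # the export settings themselves changed: re-export everything
        return True
    if scene_name in changes_per_scene.keys():
        # something in this scene changed directly
        return True
    if changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode):
        # an object used by this scene changed (taking the combine mode into account)
        return True
    if not check_if_blueprint_on_disk(scene_name, export_levels_path, export_gltf_extension):
        # the exported level file is missing from disk
        return True
    return False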

View File

@ -4,6 +4,8 @@ from bpy.types import Operator
from bpy_extras.io_utils import ExportHelper
from bpy.props import (IntProperty, StringProperty, BoolProperty)
from ..ui.operators import OT_OpenFolderbrowser, draw_folder_browser
#from ..ui.main import GLTF_PT_auto_export_general, GLTF_PT_auto_export_main, GLTF_PT_auto_export_root
from .preferences import (AutoExportGltfAddonPreferences, AutoExportGltfPreferenceNames)
@ -35,10 +37,6 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
'export_change_detection',
'export_scene_settings',
'main_scenes',
'library_scenes',
'main_scenes_index',
'library_scenes_index',
'main_scene_names',
'library_scene_names',
@ -108,6 +106,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
return export_props
def save_settings(self, context):
print("save settings")
auto_export_settings = self.format_settings()
self.properties['main_scene_names'] = auto_export_settings['main_scene_names']
self.properties['library_scene_names'] = auto_export_settings['library_scene_names']
@ -121,7 +120,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
#print("saving settings", bpy.data.texts[".gltf_auto_export_settings"].as_string(), "raw", json.dumps(export_props))
def load_settings(self, context):
# print("loading settings")
print("loading settings")
settings = None
try:
settings = bpy.data.texts[".gltf_auto_export_settings"].as_string()
@ -160,6 +159,8 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
print("error", error)
self.report({"ERROR"}, "Loading export settings failed. Removed corrupted settings")
bpy.data.texts.remove(bpy.data.texts[".gltf_auto_export_settings"])
else:
self.will_save_settings = True
"""
This should ONLY be run when actually doing exports (i.e. when calling the auto_export function), because we only care about the difference in settings between EXPORTS
@ -312,7 +313,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
self.load_settings(context)
if self.will_save_settings:
self.save_settings(context)
#print("self", self.auto_export)
if self.auto_export: # only do the actual exporting if auto export is actually enabled
#changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
@ -334,14 +335,11 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
def invoke(self, context, event):
#print("invoke")
bpy.context.window_manager.auto_export_tracker.disable_change_detection()
self.load_settings(context)
wm = context.window_manager
#wm.fileselect_add(self)
return context.window_manager.invoke_props_dialog(self, title="Auto export", width=640)
#context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'}
"""def modal(self, context, event):
@ -372,15 +370,25 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
section = layout.box()
section.enabled = controls_enabled
section.prop(self, "export_root_folder")
section.prop(operator, "export_output_folder", text="Export Folder relative to root")
draw_folder_browser(section, "Export root folder", self.export_root_folder, "export_root_folder")
row = section.row()
draw_folder_browser(row, "Assets Folder (non blueprints mode only)", self.export_root_folder, "export_output_folder")
row.enabled = not self.export_blueprints
section.prop(operator, "export_blueprints")
section.prop(operator, "export_scene_settings")
section.prop(operator, "export_change_detection")
"""header, panel = layout.panel("my_panel_id", default_closed=False)
header.label(text="Hello World")
if panel:
panel.label(text="Success")"""
toggle_icon = "TRIA_DOWN" if self.show_change_detection_settings else "TRIA_RIGHT"
layout.prop(operator, "show_change_detection_settings", text="Change Detection", icon=toggle_icon)
if self.show_change_detection_settings:
section = layout.box()
section.enabled = controls_enabled
section.prop(operator, "export_change_detection", text="Use change detection")
# main/level scenes
toggle_icon = "TRIA_DOWN" if self.show_scene_settings else "TRIA_RIGHT"
layout.prop(operator, "show_scene_settings", text="Scenes", icon=toggle_icon)
@ -436,17 +444,19 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
if self.show_blueprint_settings:
section = layout.box()
section.enabled = controls_enabled
section.prop(operator, "export_blueprints")
section = section.box()
section.enabled = controls_enabled and self.export_blueprints
# collections/blueprints
section.prop(operator, "export_blueprints_path")
draw_folder_browser(section, "Blueprints folder", self.export_root_folder, "export_blueprints_path")
#section.prop(operator, "export_blueprints_path")
section.prop(operator, "collection_instances_combine_mode")
section.prop(operator, "export_marked_assets")
section.separator()
section.prop(operator, "export_levels_path")
draw_folder_browser(section, "Levels folder", self.export_root_folder, "export_levels_path")
#section.prop(operator, "export_levels_path")
section.prop(operator, "export_separate_dynamic_and_static_objects")
section.separator()
@ -454,7 +464,8 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
section.prop(operator, "export_materials_library")
section = section.box()
section.enabled = controls_enabled and self.export_materials_library
section.prop(operator, "export_materials_path")
draw_folder_browser(section, 'Materials folder', self.export_root_folder, "export_materials_path")
#section.prop(operator, "export_materials_path")
def cancel(self, context):

View File

@ -14,12 +14,16 @@ AutoExportGltfPreferenceNames = [
'will_save_settings',
'direct_mode',# specific to main auto_export operator
'show_general_settings',
'auto_export',
'export_root_folder',
'export_output_folder',
'export_change_detection',
'export_scene_settings',
'show_change_detection_settings',
'export_change_detection',
'show_scene_settings',
'main_scenes',
'library_scenes',
'main_scenes_index',
@ -27,6 +31,7 @@ AutoExportGltfPreferenceNames = [
'main_scene_names',
'library_scene_names',
'show_blueprint_settings',
'export_blueprints',
'export_blueprints_path',
'export_marked_assets',
@ -80,17 +85,31 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
export_root_folder: StringProperty(
name = "Project Root Path",
description="The root folder of your (Bevy) project (not assets!)",
subtype='DIR_PATH',
default='../',
update=on_export_output_folder_updated) # type: ignore
# subtype='DIR_PATH',
default='../'
#update=on_export_output_folder_updated) # type: ignore
)
export_output_folder: StringProperty(
name='Export folder',
description='The root folder for all exports (relative to the root folder/path). Defaults to "assets"',
default='./assets',
subtype='DIR_PATH',
options={'HIDDEN'},
update=on_export_output_folder_updated
#subtype='DIR_PATH',
options={'HIDDEN'}
# update=on_export_output_folder_updated
) # type: ignore
# for UI only, workaround for lacking panels
show_change_detection_settings: BoolProperty(
name="show change detection settings",
description="show/hide change detection settings (UI only: has no impact on exports)",
default=True
) # type: ignore
export_change_detection: BoolProperty(
name='Change detection',
description='Use change detection to determine what/if should be exported',
default=True
) # type: ignore
# scenes
@ -101,12 +120,6 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
default=True
) # type: ignore
export_change_detection: BoolProperty(
name='Change detection',
description='Use change detection to determine what/if should be exported',
default=True
) # type: ignore
# scene components
export_scene_settings: BoolProperty(
name='Export scene settings',
@ -124,22 +137,22 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
export_blueprints: BoolProperty(
name='Export Blueprints',
description='Replaces collection instances with an Empty with a BlueprintName custom property',
description='Replaces collection instances with an Empty with a BlueprintName custom property, and enables a lot more features!',
default=True
) # type: ignore
export_blueprints_path: StringProperty(
name='Blueprints path',
description='path to export the blueprints to (relative to the export folder)',
default='./blueprints',
subtype='DIR_PATH'
default='assets/blueprints',
#subtype='DIR_PATH'
) # type: ignore
export_levels_path: StringProperty(
name='Levels path',
description='path to export the levels (main scenes) to (relative to the export folder)',
default='./levels',
subtype='DIR_PATH'
default='assets/levels',
#subtype='DIR_PATH'
) # type: ignore
export_separate_dynamic_and_static_objects: BoolProperty(
@ -155,11 +168,12 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
description='remove materials from blueprints and use the material library instead',
default=False
) # type: ignore
export_materials_path: StringProperty(
name='Materials path',
description='path to export the materials libraries to (relative to the export folder)',
default='./materials',
subtype='DIR_PATH'
default='assets/materials',
#subtype='DIR_PATH'
) # type: ignore
""" combine mode can be

View File

@ -7,7 +7,7 @@ from bpy.types import (PropertyGroup)
from bpy.props import (PointerProperty, IntProperty, StringProperty)
from .did_export_settings_change import did_export_settings_change
from .get_collections_to_export import get_collections_to_export
from .get_blueprints_to_export import get_blueprints_to_export
from ..constants import TEMPSCENE_PREFIX
from .internals import BlueprintsToExport
@ -182,7 +182,7 @@ class AutoExportTracker(PropertyGroup):
addon_prefs = SimpleNamespace(**tmp)
#print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
(collections, collections_to_export, internal_collections, collections_per_scene) = get_collections_to_export(cls.changed_objects_per_scene, export_settings_changed, addon_prefs)
(collections, collections_to_export, internal_collections, collections_per_scene) = get_blueprints_to_export(cls.changed_objects_per_scene, export_settings_changed, addon_prefs)
#print("collections to export", collections_to_export)
try:
# we save this list of collections in the context

View File

@ -93,6 +93,8 @@ def clear_materials_scene(temp_scene):
def export_materials(collections, library_scenes, folder_path, addon_prefs):
gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
export_materials_path = getattr(addon_prefs,"export_materials_path")
export_root_folder = getattr(addon_prefs, "export_root_folder")
used_material_names = get_all_materials(collections, library_scenes)
current_project_name = Path(bpy.context.blend_data.filepath).stem
@ -105,7 +107,8 @@ def export_materials(collections, library_scenes, folder_path, addon_prefs):
'use_renderable': False,
'export_apply':True
}
gltf_output_path = os.path.join(folder_path, export_materials_path, current_project_name + "_materials_library")
gltf_output_path = os.path.join(export_root_folder, export_materials_path, current_project_name + "_materials_library")
print(" exporting Materials to", gltf_output_path, ".gltf/glb")

View File

@ -1 +1 @@
{"b_RightForeArm_07":["b_RightHand_08","b_RightHand_08"],"Blueprint1_mesh":["Cube.001","Cube.001"],"b_LeftFoot01_017":["b_LeftFoot02_018","b_LeftFoot02_018"],"Cylinder.001":["Cylinder.002","Blueprint7_hierarchy.001","Empty_as_child"],"Blueprint8_animated_no_bones":["Cylinder.002"],"b_LeftLeg01_015":["b_LeftLeg02_016","b_LeftLeg02_016"],"b_Spine02_03":["b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09","b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09"],"Blueprint4_nested":["Blueprint3"],"Collection 2":["Blueprint8_animated_no_bones","Collection 2 1","Empty_in_collection","Spot"],"b_Spine01_02":["b_Spine02_03","b_Spine02_03"],"b_Neck_04":["b_Head_05","b_Head_05"],"Fox_mesh":["fox1","fox1"],"Blueprint7_hierarchy.001":["Blueprint4_nested.001","Cube.001"],"Collection 2 1":["Empty_in_sub_collection"],"Blueprint3_mesh":["Cylinder","Cylinder"],"Plane":["Plane"],"Blueprint1":["Blueprint1_mesh"],"Cylinder":["Cylinder.001","Cylinder.001"],"b_Tail02_013":["b_Tail03_014","b_Tail03_014"],"b_RightLeg02_020":["b_RightFoot01_021","b_RightFoot01_021"],"Blueprint4_nested.001":["Blueprint3"],"Light":["Light","DirectionalLight Gizmo"],"Blueprint1.001":["Blueprint1_mesh"],"b_Root_00":["b_Hip_01","b_Hip_01"],"Blueprint3":["Blueprint3_mesh","Blueprint3_mesh"],"b_RightLeg01_019":["b_RightLeg02_020","b_RightLeg02_020"],"b_LeftLeg02_016":["b_LeftFoot01_017","b_LeftFoot01_017"],"b_RightUpperArm_06":["b_RightForeArm_07","b_RightForeArm_07"],"Camera":["Camera Gizmo"],"b_LeftUpperArm_09":["b_LeftForeArm_010","b_LeftForeArm_010"],"b_Tail01_012":["b_Tail02_013","b_Tail02_013"],"world":["no_name"],"no_name":["Parent_Object","Blueprint6_animated.001","lighting_components_World","assets_list_World_components","Collection","Collection 2"],"Cube.001":["Cube.002","Cylinder","Cube.002","Cylinder"],"Cylinder.002":["Cylinder.003"],"_rootJoint":["b_Root_00","b_Root_00"],"Spot":["Spot"],"Collection":["Blueprint1.001","Blueprint4_nested","Blueprint6_animated","Blueprint7_hierarchy","Camera","Cube","Empty","External_blueprint","External_blueprint2","Light","Plane"],"Blueprint6_animated":["Fox"],"Cube":["Cube"],"Parent_Object":["Cube.003","Blueprint1","Cylinder.001"],"b_Hip_01":["b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019","b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019"],"b_RightFoot01_021":["b_RightFoot02_022","b_RightFoot02_022"],"Blueprint7_hierarchy":["Cube.001"],"Blueprint6_animated.001":["Fox"],"b_LeftForeArm_010":["b_LeftHand_011","b_LeftHand_011"],"Fox":["Fox_mesh","_rootJoint","Fox_mesh","_rootJoint"]}
{"Blueprint7_hierarchy.001":["Blueprint4_nested.001","Cube.001"],"Cylinder":["Cylinder.001","Cylinder.001"],"Blueprint8_animated_no_bones":["Cylinder.002"],"Blueprint7_hierarchy":["Cube.001"],"Collection 2":["Blueprint8_animated_no_bones","Collection 2 1","Empty_in_collection","Spot"],"Fox_mesh":["fox1","fox1"],"_rootJoint":["b_Root_00","b_Root_00"],"b_Root_00":["b_Hip_01","b_Hip_01"],"Blueprint1":["Blueprint1_mesh"],"Fox":["Fox_mesh","_rootJoint","Fox_mesh","_rootJoint"],"Light":["Light","DirectionalLight Gizmo"],"b_Spine01_02":["b_Spine02_03","b_Spine02_03"],"b_RightLeg01_019":["b_RightLeg02_020","b_RightLeg02_020"],"b_LeftFoot01_017":["b_LeftFoot02_018","b_LeftFoot02_018"],"b_LeftForeArm_010":["b_LeftHand_011","b_LeftHand_011"],"Collection":["Blueprint1.001","Blueprint4_nested","Blueprint6_animated","Blueprint7_hierarchy","Camera","Cube","Empty","External_blueprint","External_blueprint2","Light","Plane"],"Cylinder.001":["Cylinder.002","Blueprint7_hierarchy.001","Empty_as_child"],"b_Hip_01":["b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019","b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019"],"world":["no_name"],"Parent_Object":["Cube.003","Blueprint1","Cylinder.001"],"Blueprint6_animated.001":["Fox"],"Blueprint4_nested":["Blueprint3"],"Blueprint6_animated":["Fox"],"Cube.001":["Cube.002","Cylinder","Cube.002","Cylinder"],"b_Spine02_03":["b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09","b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09"],"b_LeftLeg01_015":["b_LeftLeg02_016","b_LeftLeg02_016"],"Blueprint4_nested.001":["Blueprint3"],"b_Tail02_013":["b_Tail03_014","b_Tail03_014"],"b_RightForeArm_07":["b_RightHand_08","b_RightHand_08"],"External_blueprint2_Cylinder":["Cylinder"],"Blueprint3":["Blueprint3_mesh","Blueprint3_mesh"],"External_blueprint2":["External_blueprint2_Cylinder","External_blueprint3"],"b_LeftUpperArm_09":["b_LeftForeArm_010","b_LeftForeArm_010"],"Cube":["Cube"],"Plane":["Plane"],"no_name":["Parent_Object","Blueprint6_animated.001","lighting_components_World","assets_list_World_components","Collection","Collection 2"],"Collection 2 1":["Empty_in_sub_collection"],"External_blueprint_mesh":["Cube.001"],"b_LeftLeg02_016":["b_LeftFoot01_017","b_LeftFoot01_017"],"Cylinder.002":["Cylinder.003"],"b_RightLeg02_020":["b_RightFoot01_021","b_RightFoot01_021"],"b_Neck_04":["b_Head_05","b_Head_05"],"b_RightUpperArm_06":["b_RightForeArm_07","b_RightForeArm_07"],"Spot":["Spot"],"External_blueprint3_Cone":["Cone"],"External_blueprint":["External_blueprint_mesh"],"Blueprint3_mesh":["Cylinder","Cylinder"],"External_blueprint3":["External_blueprint3_Cone"],"Camera":["Camera Gizmo"],"Blueprint1_mesh":["Cube.001","Cube.001"],"Blueprint1.001":["Blueprint1_mesh"],"b_Tail01_012":["b_Tail02_013","b_Tail02_013"],"b_RightFoot01_021":["b_RightFoot02_022","b_RightFoot02_022"]}

View File

@ -13,14 +13,32 @@ from pixelmatch.contrib.PIL import pixelmatch
def setup_data(request):
print("\nSetting up resources...")
def finalizer():
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
blueprints_path = os.path.join(assets_root_path, "blueprints")
levels_path = os.path.join(assets_root_path, "levels")
models_path = os.path.join(assets_root_path, "models")
materials_path = os.path.join(assets_root_path, "materials")
yield {
"root_path": root_path,
"models_path": models_path,
"blueprints_path": blueprints_path,
"levels_path": levels_path,
"materials_path":materials_path
}
def finalizer():
#other_materials_path = os.path.join("../../testing", "other_materials")
print("\nPerforming teardown...")
if os.path.exists(blueprints_path):
shutil.rmtree(blueprints_path)
if os.path.exists(levels_path):
shutil.rmtree(levels_path)
if os.path.exists(models_path):
shutil.rmtree(models_path)
@ -52,9 +70,7 @@ def setup_data(request):
- removes generated files
"""
def test_export_complex(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
root_path = setup_data["root_path"]
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
@ -87,7 +103,11 @@ def test_export_complex(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_root_folder = os.path.abspath(root_path),
#export_blueprints_path = os.path.join("assets", "models", "library"),
export_output_folder = os.path.join("assets", "models"), #"./models",
#export_levels_path = os.path.join("assets", "models"),
export_scene_settings=True,
export_blueprints=True,
export_materials_library=True
@ -98,14 +118,14 @@ def test_export_complex(setup_data):
# blueprint4 => has an instance, with nested blueprint3, should export
# blueprint5 => has NO instance, not marked as asset, should NOT export
assert os.path.exists(os.path.join(models_path, "World.glb")) == True
assert os.path.exists(os.path.join(models_path, "library", "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(models_path, "library", "Blueprint2.glb")) == True
assert os.path.exists(os.path.join(models_path, "library", "Blueprint3.glb")) == True
assert os.path.exists(os.path.join(models_path, "library", "Blueprint4_nested.glb")) == True
assert os.path.exists(os.path.join(models_path, "library", "Blueprint5.glb")) == False
assert os.path.exists(os.path.join(models_path, "library", "Blueprint6_animated.glb")) == True
assert os.path.exists(os.path.join(models_path, "library", "Blueprint7_hierarchy.glb")) == True
assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint2.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint3.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint4_nested.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint5.glb")) == False
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint6_animated.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint7_hierarchy.glb")) == True
# 'assets_list_'+scene.name+"_components" should have been removed after the export
assets_list_object_name = "assets_list_"+"World"+"_components"

View File

@ -0,0 +1,69 @@
import os
import json
import pytest
import bpy
@pytest.fixture
def setup_data(request):
print("\nSetting up resources...")
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
blueprints_path = os.path.join(assets_root_path, "blueprints")
levels_path = os.path.join(assets_root_path, "levels")
models_path = os.path.join(assets_root_path, "models")
materials_path = os.path.join(assets_root_path, "materials")
yield {
"root_path": root_path,
"models_path": models_path,
"blueprints_path": blueprints_path,
"levels_path": levels_path,
"materials_path":materials_path
}
# this exports the external blueprints library used by the main Bevy integration test
def test_export_external_blueprints(setup_data):
root_path = setup_data["root_path"]
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : [],
"library_scene_names": ['Library'],
}
gltf_settings = {
"export_animations": True,
"export_optimize_animation_size": False
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(root_path),
#export_blueprints_path = os.path.join("assets", "models", "library"),
#export_output_folder = os.path.join("assets", "models"), #"./models",
#export_levels_path = os.path.join("assets", "models"),
export_scene_settings=False,
export_blueprints=True,
export_materials_library=True,
export_marked_assets= True
)
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "External_blueprint.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "External_blueprint2.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "External_blueprint3.glb")) == True

View File

@ -1,23 +1,39 @@
import bpy
import os
import json
import mathutils
import pytest
import shutil
import pathlib
import mathutils
@pytest.fixture
def setup_data(request):
print("\nSetting up resources...")
def finalizer():
#other_materials_path = os.path.join("../../testing", "other_materials")
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
blueprints_path = os.path.join(assets_root_path, "blueprints")
levels_path = os.path.join(assets_root_path, "levels")
models_path = os.path.join(assets_root_path, "models")
materials_path = os.path.join(assets_root_path, "materials")
#other_materials_path = os.path.join("../../testing", "other_materials")
yield {
"root_path": root_path,
"models_path": models_path,
"blueprints_path": blueprints_path,
"levels_path": levels_path,
"materials_path":materials_path
}
def finalizer():
print("\nPerforming teardown...")
if os.path.exists(blueprints_path):
shutil.rmtree(blueprints_path)
if os.path.exists(levels_path):
shutil.rmtree(levels_path)
if os.path.exists(models_path):
shutil.rmtree(models_path)
@ -40,51 +56,8 @@ def setup_data(request):
return None
import bpy
import os
import json
import pytest
import shutil
@pytest.fixture
def setup_data(request):
print("\nSetting up resources...")
def finalizer():
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
materials_path = os.path.join(assets_root_path, "materials")
#other_materials_path = os.path.join("../../testing", "other_materials")
print("\nPerforming teardown...")
if os.path.exists(models_path):
shutil.rmtree(models_path)
if os.path.exists(materials_path):
shutil.rmtree(materials_path)
diagnostics_file_path = os.path.join(root_path, "bevy_diagnostics.json")
if os.path.exists(diagnostics_file_path):
os.remove(diagnostics_file_path)
hierarchy_file_path = os.path.join(root_path, "bevy_hierarchy.json")
if os.path.exists(hierarchy_file_path):
os.remove(hierarchy_file_path)
screenshot_observed_path = os.path.join(root_path, "screenshot.png")
if os.path.exists(screenshot_observed_path):
os.remove(screenshot_observed_path)
request.addfinalizer(finalizer)
return None
def test_export_change_tracking_custom_properties(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
def prepare_auto_export(auto_export_overrides={}):
# with change detection
# first, configure things
@ -92,6 +65,7 @@ def test_export_change_tracking_custom_properties(setup_data):
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
**auto_export_overrides
}
# store settings for the auto_export part
@ -108,551 +82,195 @@ def test_export_change_tracking_custom_properties(setup_data):
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
models_library_path = os.path.join(models_path, "library")
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
# now add a custom property to the cube in the main scene & export again
print("----------------")
print("main scene change (custom property)")
print("----------------")
bpy.data.objects["Cube"]["test_property"] = 42
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# only the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
def test_export_change_tracking_custom_properties_collection_instances_combine_mode_embed(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
def run_auto_export(setup_data):
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
"collection_instances_combine_mode":"Embed"
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = {
"export_animations": False,
"export_optimize_animation_size": False
}
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
levels_path = setup_data["levels_path"]
level_file_paths = list(map(lambda file_name: os.path.join(levels_path, file_name), sorted(os.listdir(levels_path)))) if os.path.exists(levels_path) else []
models_library_path = os.path.join(models_path, "library")
blueprint1_file_path = os.path.join(models_library_path, "Blueprint1.glb")
assert os.path.exists(blueprint1_file_path) == False
blueprints_path = setup_data["blueprints_path"]
blueprints_file_paths = list(map(lambda file_name: os.path.join(blueprints_path, file_name), sorted(os.listdir(blueprints_path)))) if os.path.exists(blueprints_path) else []
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), blueprints_file_paths + level_file_paths))
# assert os.path.exists(world_file_path) == True
mapped_files_to_timestamps_and_index = {}
model_library_file_paths = []
all_files_paths = []
if os.path.exists(models_library_path):
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
all_files_paths = model_library_file_paths + [world_file_path]
else:
all_files_paths = [world_file_path]
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), all_files_paths))
for (index, file_path) in enumerate(all_files_paths):
for (index, file_path) in enumerate(blueprints_file_paths + level_file_paths):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
mapped_files_to_timestamps_and_index[file_path] = (modification_times[index], index)
# now add a custom property to the cube in the library scene & export again
# this should trigger changes in the main scene as well since the mode is embed & this blueprints has an instance in the main scene
print("----------------")
print("library change (custom property)")
print("----------------")
return (modification_times, mapped_files_to_timestamps_and_index)
bpy.data.objects["Blueprint1_mesh"]["test_property"] = 42
def run_auto_export_and_compare(setup_data, changes, expected_changed_files = []):
(modification_times_first, mapped ) = run_auto_export(setup_data)
for index, change in enumerate(changes):
change()
(modification_times, mapped ) = run_auto_export(setup_data)
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
changed_files = expected_changed_files[index]
changed_file_indices = [mapped[changed_file][1] for changed_file in changed_files]
#print("changed files", changed_files, changed_file_indices, "mapped", mapped)
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in changed_file_indices]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in changed_file_indices]
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# there should not be a "Blueprint1" file
assert os.path.exists(blueprint1_file_path) == False
# only the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
print("other_files_modification_times_new ", other_files_modification_times)
print("other_files_modification_times_first", other_files_modification_times_first)
for changed_file_index in changed_file_indices:
#print("modification_times_new [changed_file_index]", modification_times[changed_file_index])
#print("modification_times_first[changed_file_index]", modification_times_first[changed_file_index])
if changed_file_index in modification_times_first and changed_file_index in modification_times:
assert modification_times[changed_file_index] != modification_times_first[changed_file_index], f"failure in change: {index}, at file {changed_file_index}"
# TODO: we should throw an error in the "else" case ?
assert other_files_modification_times == other_files_modification_times_first , f"failure in change: {index}"
# reset the comparing
modification_times_first = modification_times
def test_export_change_tracking_custom_properties(setup_data):
# set things up
prepare_auto_export()
def first_change():
# now add a custom property to the cube in the main scene & export again
print("----------------")
print("main scene change (custom property)")
print("----------------")
bpy.data.objects["Cube"]["test_property"] = 42
run_auto_export_and_compare(
setup_data=setup_data,
changes=[first_change],
expected_changed_files = [["World"]] # only the "world" file should have changed
)
def test_export_change_tracking_custom_properties_collection_instances_combine_mode_embed(setup_data):
# set things up
prepare_auto_export({"collection_instances_combine_mode": "Embed"})
def first_change():
# we have no change, but we also have no blueprints exported, because of the embed mode
blueprint1_file_path = os.path.join(setup_data["blueprints_path"], "Blueprint1.glb")
assert os.path.exists(blueprint1_file_path) == False
def second_change():
# add a custom property to the cube in the library scene & export again
# this should trigger changes in the main scene as well since the mode is embed & this blueprint has an instance in the main scene
print("----------------")
print("library change (custom property)")
print("----------------")
bpy.data.objects["Blueprint1_mesh"]["test_property"] = 42
def third_change():
# now we set the _combine mode of the instance to "split", so auto_export should:
# * not take the changes into account in the main scene
# * export the blueprint (so file for Blueprint1 will be changed)
bpy.data.objects["Blueprint1"]["_combine"] = "Split"
# but first do an export so that the changes to _combine are not taken into account
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times_first = modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
def fourth_change():
print("----------------")
print("library change (custom property, forced 'Split' combine mode )")
print("----------------")
bpy.data.objects["Blueprint1_mesh"]["test_property"] = 151
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
run_auto_export_and_compare(
setup_data=setup_data,
changes=[first_change, second_change, third_change, fourth_change],
expected_changed_files = [[], ["World"], ["World"], ["World"]] # only the "world" file should have changed
)
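# Condensed, the new change-tracking test pattern introduced above boils down to:
# configure auto-export once, describe each modification as a small callback, and let
# run_auto_export_and_compare export before/after each change and assert that only the
# expected files were re-written. A sketch of a hypothetical additional test using it:
def test_example_change_tracking(setup_data):
    prepare_auto_export()

    def change_light_energy():
        bpy.data.lights["Light"].energy = 100

    run_auto_export_and_compare(
        setup_data=setup_data,
        changes=[change_light_energy],
        expected_changed_files=[["World"]],  # only the level/world file should be re-exported
    )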
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
# the "Blueprint1" file should now exist
assert os.path.exists(blueprint1_file_path) == True
# and the "Blueprint1" file too
#blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
#assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
assert other_files_modification_times == other_files_modification_times_first
def test_export_change_tracking_light_properties(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = {
"export_animations": False,
"export_optimize_animation_size": False
}
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
models_library_path = os.path.join(models_path, "library")
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
# set things up
prepare_auto_export()
def first_change():
# now add a custom property to the cube in the main scene & export again
print("----------------")
print("main scene change (light, energy)")
print("----------------")
bpy.data.lights["Light"].energy = 100
#world_file_path = os.path.join(setup_data["levels_path"], "World.glb")
#assert os.path.exists(world_file_path) == True
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# only the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparing
modification_times_first = modification_times
def second_change():
print("----------------")
print("main scene change (light, shadow_cascade_count)")
print("----------------")
bpy.data.lights["Light"].shadow_cascade_count = 2
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# only the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparing
modification_times_first = modification_times
def third_change():
print("----------------")
print("main scene change (light, use_shadow)")
print("----------------")
bpy.data.lights["Light"].use_shadow = False
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
run_auto_export_and_compare(
setup_data=setup_data,
changes=[first_change, second_change, third_change],
expected_changed_files = [["World"], ["World"], ["World"]] # only the "world" file should have changed
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# only the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
def test_export_change_tracking_camera_properties(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = {
"export_animations": False,
"export_optimize_animation_size": False
}
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
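# (the auto_export operator is assumed to read these settings back in direct mode from the stored
#  text datablocks, e.g. json.loads(bpy.data.texts[".gltf_auto_export_settings"].as_string()))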
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
models_library_path = os.path.join(models_path, "library")
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
# set things up
prepare_auto_export()
def first_change():
print("----------------")
print("main scene change (camera)")
print("----------------")
bpy.data.cameras["Camera"].angle = 0.5
run_auto_export_and_compare(
    setup_data=setup_data,
    changes=[first_change],
    expected_changed_files = [["World"]]  # only the "world" file should have changed
)
def test_export_change_tracking_material_properties(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = {
"export_animations": False,
"export_optimize_animation_size": False
}
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
models_library_path = os.path.join(models_path, "library")
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
# set things up
prepare_auto_export()
def first_change():
print("----------------")
print("main scene change (material, clip)")
print("----------------")
bpy.data.materials["Material.001"].blend_method = 'CLIP'
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# the material is assigned to Blueprint1, so in the default (split) export mode the "Blueprint1" file should have changed
blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
# the same material is also assigned to Blueprint7, so the "Blueprint7_hierarchy" file should have changed as well
blueprint7_file_index = mapped_files_to_timestamps_and_index["Blueprint7_hierarchy"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [blueprint1_file_index, blueprint7_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [blueprint1_file_index, blueprint7_file_index]]
assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
assert modification_times[blueprint7_file_index] != modification_times_first[blueprint7_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparison baseline
modification_times_first = modification_times
def second_change():
print("----------------")
print("main scene change (material, alpha_threshold)")
print("----------------")
bpy.data.materials["Material.001"].alpha_threshold = 0.2
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# the material is assigned to Blueprint1, so in the default (split) export mode the "Blueprint1" file should have changed
blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
# the same material is also assigned to Blueprint7, so the "Blueprint7_hierarchy" file should have changed as well
blueprint7_file_index = mapped_files_to_timestamps_and_index["Blueprint7_hierarchy"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [blueprint1_file_index, blueprint7_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [blueprint1_file_index, blueprint7_file_index]]
assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
assert modification_times[blueprint7_file_index] != modification_times_first[blueprint7_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparison baseline
modification_times_first = modification_times
def third_change():
print("----------------")
print("main scene change (material, diffuse_color)")
print("----------------")
bpy.data.materials["Material.001"].diffuse_color[0] = 0.2
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
run_auto_export_and_compare(
    setup_data=setup_data,
    changes=[first_change, second_change, third_change],
    # the material is assigned to both Blueprint1 and Blueprint7, so in the default (split) mode only those two files should have changed for each edit
    expected_changed_files = [["Blueprint1", "Blueprint7_hierarchy"], ["Blueprint1", "Blueprint7_hierarchy"], ["Blueprint1", "Blueprint7_hierarchy"]]
)
"""
@ -666,74 +284,16 @@ def test_export_change_tracking_material_properties(setup_data):
"""
def test_export_various_chained_changes(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = {
"export_animations": False,
"export_optimize_animation_size": False
}
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
models_library_path = os.path.join(models_path, "library")
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
print("files", mapped_files_to_timestamps_and_index)
#print("mod times", modification_times_first)
def first_change():
# export again with no changes
print("----------------")
print("no changes")
print("----------------")
bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but ..
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times_no_change = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times_no_change == modification_times_first
world_file_path = os.path.join(setup_data["levels_path"], "World.glb")
assert os.path.exists(world_file_path) == True
def second_change():
# now move the main cube & export again
print("----------------")
print("main scene change")
@ -742,28 +302,7 @@ def test_export_various_chained_changes(setup_data):
bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but ..
bpy.data.objects["Cube"].location = [1, 0, 0]
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# only the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparison baseline
modification_times_first = modification_times
def third_change():
# now same, but move the cube in the library
print("----------------")
print("library change (blueprint) ")
@ -772,31 +311,7 @@ def test_export_various_chained_changes(setup_data):
bpy.data.objects["Blueprint1_mesh"].location = [1, 2, 1]
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# the "world" file should have changed (TODO: double check: this is since changing an instances collection changes the instance too ?)
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
# and the blueprint1 file too, since that is the collection we changed
blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index, blueprint1_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index, blueprint1_file_index]]
assert modification_times[world_file_index] == modification_times_first[world_file_index]
assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparison baseline
modification_times_first = modification_times
def fourth_change():
# now change something in a nested blueprint
print("----------------")
print("library change (nested blueprint) ")
@ -804,35 +319,7 @@ def test_export_various_chained_changes(setup_data):
bpy.data.objects["Blueprint3_mesh"].location= [0, 0.1 ,2]
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# the "world" file should not have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
# the blueprint3 file should have changed, since that is the collection we changed
blueprint3_file_index = mapped_files_to_timestamps_and_index["Blueprint3"][1]
# the blueprint4 file should NOT have changed: although it contains an instance of the collection we changed, the default export mode is "split"
blueprint4_file_index = mapped_files_to_timestamps_and_index["Blueprint4_nested"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index, blueprint3_file_index, blueprint4_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index, blueprint3_file_index, blueprint4_file_index]]
assert modification_times[world_file_index] == modification_times_first[world_file_index]
assert modification_times[blueprint3_file_index] != modification_times_first[blueprint3_file_index]
assert modification_times[blueprint4_file_index] == modification_times_first[blueprint4_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparison baseline
modification_times_first = modification_times
def fifth_change():
# now same, but using an operator
print("----------------")
print("change using operator")
@ -845,20 +332,16 @@ def test_export_various_chained_changes(setup_data):
bpy.ops.object.transform_apply()
bpy.ops.transform.translate(value=(3.5, 0, 0), constraint_axis=(True, False, False))
run_auto_export_and_compare(
    setup_data=setup_data,
    changes=[first_change, second_change, third_change, fourth_change, fifth_change],
    expected_changed_files = [
        [],  # no changes, so no file should have been re-exported
        ["World"],  # only the "world" file should have changed
        ["Blueprint1"],  # the Blueprint1 file should have changed, since that is the collection we changed; not the world, since we are in "split" mode by default
        ["Blueprint3"],  # the Blueprint3 file should have changed, since that is the collection we changed; the Blueprint4 file should NOT have, since, although it contains an instance of the changed collection, the default export mode is "split"
        ["World"]
    ]
)
#bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but ..

View File

@ -36,6 +36,8 @@
=> comes from our custom logic for add-on prefs
- [ ] double-check comparisons to "None" values
- [ ] add tests for relative/absolute paths
- [x] move all things that alter data "permanently" to pre-save
- [x] lighting / scene components injection
- [x] blueprintNames ?

View File

@ -81,3 +81,86 @@ class SCENES_LIST_OT_actions(Operator):
return {"FINISHED"}
import os
from bpy_extras.io_utils import ImportHelper
class OT_OpenFolderbrowser(Operator, ImportHelper):
"""Browse for registry json file"""
bl_idname = "generic.open_folderbrowser"
bl_label = "Select folder"
# Define this to tell 'fileselect_add' that we want a directory
directory: bpy.props.StringProperty(
name="Outdir Path",
description="selected folder"
# subtype='DIR_PATH' is not needed to specify the selection mode,
# but the value will be a directory path anyway.
) # type: ignore
# Filters folders
filter_folder: bpy.props.BoolProperty(
default=True,
options={"HIDDEN"}
) # type: ignore
target_property: bpy.props.StringProperty(
name="target_property",
options={'HIDDEN'}
) # type: ignore
def execute(self, context):
"""Do something with the selected file(s)."""
operator = context.active_operator
new_path = self.directory
target_path_name = self.target_property
# path to the current blend file
blend_file_path = bpy.data.filepath
# Get the folder
blend_file_folder_path = os.path.dirname(blend_file_path)
print("blend_file_folder_path", blend_file_folder_path)
print("new_path", self.directory, self.target_property, operator)
path_names = ['export_output_folder', 'export_blueprints_path', 'export_levels_path', 'export_materials_path']
export_root_folder = operator.export_root_folder
#export_root_path_absolute = os.path.join(blend_file_folder_path, export_root_folder)
if target_path_name == 'export_root_folder':
print("changing root new_path")
# we need to change all other relative paths before setting the new absolute path
for path_name in path_names:
# get absolute path
relative_path = getattr(operator, path_name, None)
if relative_path is not None:
absolute_path = os.path.join(export_root_folder, relative_path)
print("absolute path for", path_name, absolute_path)
relative_path = os.path.relpath(absolute_path, new_path)
setattr(operator, path_name, relative_path)
# finally, store the newly selected root path
setattr(operator, target_path_name, new_path)
else:
relative_path = os.path.relpath(new_path, export_root_folder)
setattr(operator, target_path_name, relative_path)
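# For illustration (paths made up for the example): if export_root_folder was "/project/assets" and
# export_blueprints_path was "blueprints", then after picking "/project/out" as the new root,
# os.path.join("/project/assets", "blueprints") -> "/project/assets/blueprints" and
# os.path.relpath("/project/assets/blueprints", "/project/out") -> "../assets/blueprints",
# so the stored relative path still points at the same folder on disk.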
#filename, extension = os.path.splitext(self.filepath)
return {'FINISHED'}
def draw_folder_browser(layout, label, value, target_property):
row = layout.row()
row.label(text=label)
'''box = row.box()
box.scale_y = 0.5
box.label(text=value)'''
col = row.column()
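# show the current value read-only; it can only be changed via the folder browser button below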
col.enabled = False
col.prop(bpy.context.active_operator, target_property, text="")
folder_selector = row.operator(OT_OpenFolderbrowser.bl_idname, icon="FILE_FOLDER", text="")
folder_selector.target_property = target_property #"export_root_folder"
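# Typical usage from a panel or operator draw() (labels and property names here are assumptions
# based on the path properties handled above):
# draw_folder_browser(layout, "Export root folder", operator.export_root_folder, "export_root_folder")
# draw_folder_browser(layout, "Blueprints folder", operator.export_blueprints_path, "export_blueprints_path")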