Compare commits

...

2 Commits

Author SHA1 Message Date
kaosat.dev c2dc0324c3 feat(bevy_components):
* added "hack-ish" tweak to make sure component additions/delections etc
are picked up correctly by the change tracker in auto_export
 * cleaned up a few operator names
 * very minor tweaks
2024-04-10 00:21:52 +02:00
kaosat.dev a630494f88 feat(auto_export): continued fixes & tons of restructuring
* cleaned up most of the change detection tracking
 * updated, fleshed out & improved the tests
 * improved sidebar ui (wip)
   * moved listing of blueprints to export to sidebar
   * improved display of changed objects
   * currently restructuring the rest of the code to be able to display, BEFORE saving,
what will get exported & what will not
 * a ton of other cleanups & tweaks
2024-04-10 00:18:34 +02:00
13 changed files with 395 additions and 245 deletions

View File

@ -0,0 +1,6 @@
import rna_prop_ui

# fake way to make our operator's changes be visible to the change/depsgraph update handler in gltf_auto_export
def ping_depsgraph_update(object):
    rna_prop_ui.rna_idprop_ui_create(object, "________temp", default=0)
    rna_prop_ui.rna_idprop_ui_prop_clear(object, "________temp")
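A minimal usage sketch of the helper above (object name and property value are illustrative only). The commit's rationale is that plain custom-property assignments are not picked up by the depsgraph handler in gltf_auto_export, so creating and immediately clearing the temporary property is what makes the change tracker notice the object:

import bpy
from .helpers import ping_depsgraph_update

obj = bpy.data.objects["Cube"]        # illustrative object name
obj["MyComponent"] = "(value: 42)"    # silent custom-property edit the depsgraph would otherwise miss
ping_depsgraph_update(obj)            # creates & clears "________temp" so a depsgraph update fires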

View File

@ -1,6 +1,8 @@
import bpy
from bpy.props import (StringProperty, BoolProperty, PointerProperty)
from bpy_types import (PropertyGroup)
from .helpers import ping_depsgraph_update
from ..propGroups.conversions_from_prop_group import property_group_value_to_custom_property_value
from ..propGroups.conversions_to_prop_group import property_group_value_from_custom_property_value
@ -127,6 +129,8 @@ def add_metadata_to_components_without_metadata(object):
if component_name == "components_meta":
continue
upsert_component_in_object(object, component_name, registry)
# adds a component to an object (including metadata) using the provided component definition & optional value
def add_component_to_object(object, component_definition, value=None):
@ -149,6 +153,8 @@ def add_component_to_object(object, component_definition, value=None):
del object["__disable__update"]
object[short_name] = value
ping_depsgraph_update(object)
def upsert_component_in_object(object, component_name, registry):
# print("upsert_component_in_object", object, "component name", component_name)
@ -223,6 +229,8 @@ def copy_propertyGroup_values_to_another_object(source_object, target_object, co
if field_name in source_propertyGroup:
target_propertyGroup[field_name] = source_propertyGroup[field_name]
apply_propertyGroup_values_to_object_customProperties(target_object)
ping_depsgraph_update(target_object)
# TODO: move to propgroups ?
def apply_propertyGroup_values_to_object_customProperties(object):
@ -295,6 +303,16 @@ def remove_component_from_object(object, component_name):
break
for index in to_remove:
components_metadata.remove(index)
ping_depsgraph_update(object)
return True
def add_component_from_custom_property(object):
add_metadata_to_components_without_metadata(object)
apply_customProperty_values_to_object_propertyGroups(object)
ping_depsgraph_update(object)
def toggle_component(object, component_name):
components_in_object = object.components_meta.components
component_meta = next(filter(lambda component: component["name"] == component_name, components_in_object), None)
if component_meta != None:
component_meta.visible = not component_meta.visible
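For orientation, a hedged sketch of how the metadata helpers above are meant to be chained from an operator (names are illustrative; the component definition normally comes from the registry, e.g. via find_component_definition_from_short_name):

import bpy
from .metadata import add_component_to_object, toggle_component

object = bpy.context.object
component_definition = ...  # placeholder: registry entry describing the component to add
add_component_to_object(object, component_definition)  # upserts metadata, writes the custom property, pings the depsgraph
toggle_component(object, "MyComponent")                 # hypothetical short name: flips the component's visibility flag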

View File

@ -3,12 +3,13 @@ import json
import bpy
from bpy_types import Operator
from bpy.props import (StringProperty)
from .metadata import add_component_to_object, add_metadata_to_components_without_metadata, apply_customProperty_values_to_object_propertyGroups, apply_propertyGroup_values_to_object_customProperties_for_component, copy_propertyGroup_values_to_another_object, find_component_definition_from_short_name, remove_component_from_object
from .metadata import add_component_from_custom_property, add_component_to_object, add_metadata_to_components_without_metadata, apply_customProperty_values_to_object_propertyGroups, apply_propertyGroup_values_to_object_customProperties_for_component, copy_propertyGroup_values_to_another_object, find_component_definition_from_short_name, remove_component_from_object, toggle_component
class AddComponentOperator(Operator):
"""Add component to blueprint"""
"""Add Bevy component to object"""
bl_idname = "object.add_bevy_component"
bl_label = "Add component to blueprint Operator"
bl_label = "Add component to object Operator"
bl_options = {"UNDO"}
component_type: StringProperty(
@ -29,7 +30,7 @@ class AddComponentOperator(Operator):
return {'FINISHED'}
class CopyComponentOperator(Operator):
"""Copy component from blueprint"""
"""Copy Bevy component from object"""
bl_idname = "object.copy_bevy_component"
bl_label = "Copy component Operator"
bl_options = {"UNDO"}
@ -66,9 +67,9 @@ class CopyComponentOperator(Operator):
class PasteComponentOperator(Operator):
"""Paste component to blueprint"""
"""Paste Bevy component to object"""
bl_idname = "object.paste_bevy_component"
bl_label = "Paste component to blueprint Operator"
bl_label = "Paste component to object Operator"
bl_options = {"UNDO"}
def execute(self, context):
@ -91,7 +92,7 @@ class PasteComponentOperator(Operator):
return {'FINISHED'}
class RemoveComponentOperator(Operator):
"""Remove component from object"""
"""Remove Bevy component from object"""
bl_idname = "object.remove_bevy_component"
bl_label = "Remove component from object Operator"
bl_options = {"UNDO"}
@ -117,12 +118,11 @@ class RemoveComponentOperator(Operator):
remove_component_from_object(object, self.component_name)
else:
self.report({"ERROR"}, "The object/ component to remove ("+ self.component_name +") does not exist")
return {'FINISHED'}
class RemoveComponentFromAllObjectsOperator(Operator):
"""Remove component from all object"""
"""Remove Bevy component from all object"""
bl_idname = "object.remove_bevy_component_all"
bl_label = "Remove component from all objects Operator"
bl_options = {"UNDO"}
@ -172,7 +172,7 @@ class RenameHelper(bpy.types.PropertyGroup):
del bpy.types.WindowManager.bevy_component_rename_helper
class OT_rename_component(Operator):
"""Rename component"""
"""Rename Bevy component"""
bl_idname = "object.rename_bevy_component"
bl_label = "rename component"
bl_options = {"UNDO"}
@ -270,7 +270,7 @@ class OT_rename_component(Operator):
class GenerateComponent_From_custom_property_Operator(Operator):
"""generate components from custom property"""
"""Generate Bevy components from custom property"""
bl_idname = "object.generate_bevy_component_from_custom_property"
bl_label = "Generate component from custom_property Operator"
bl_options = {"UNDO"}
@ -285,8 +285,7 @@ class GenerateComponent_From_custom_property_Operator(Operator):
error = False
try:
add_metadata_to_components_without_metadata(object)
apply_customProperty_values_to_object_propertyGroups(object)
add_component_from_custom_property(object)
except Exception as error:
del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure
error = True
@ -297,7 +296,7 @@ class GenerateComponent_From_custom_property_Operator(Operator):
class Fix_Component_Operator(Operator):
"""attempt to fix component"""
"""Attempt to fix Bevy component"""
bl_idname = "object.fix_bevy_component"
bl_label = "Fix component (attempts to)"
bl_options = {"UNDO"}
@ -322,7 +321,7 @@ class Fix_Component_Operator(Operator):
return {'FINISHED'}
class Toggle_ComponentVisibility(Operator):
"""toggles components visibility"""
"""Toggle Bevy component's visibility"""
bl_idname = "object.toggle_bevy_component_visibility"
bl_label = "Toggle component visibility"
bl_options = {"UNDO"}
@ -334,10 +333,6 @@ class Toggle_ComponentVisibility(Operator):
def execute(self, context):
object = context.object
components_in_object = object.components_meta.components
component_meta = next(filter(lambda component: component["name"] == self.component_name, components_in_object), None)
if component_meta != None:
component_meta.visible = not component_meta.visible
toggle_component(object, self.component_name)
return {'FINISHED'}

View File

@ -16,6 +16,7 @@ import json
import bpy
from bpy.types import Context
from bpy.props import (StringProperty, BoolProperty, IntProperty, PointerProperty)
import rna_prop_ui
# from .extension import ExampleExtensionProperties, GLTF_PT_UserExtensionPanel, unregister_panel
@ -29,7 +30,7 @@ from .auto_export.internals import (SceneLink,
CollectionsToExport,
CUSTOM_PG_sceneName
)
from .ui.main import (GLTF_PT_auto_export_main,
from .ui.main import (GLTF_PT_auto_export_changes_list, GLTF_PT_auto_export_main,
GLTF_PT_auto_export_root,
GLTF_PT_auto_export_general,
GLTF_PT_auto_export_scenes,
@ -112,8 +113,9 @@ classes = [
GLTF_PT_auto_export_general,
GLTF_PT_auto_export_scenes,
GLTF_PT_auto_export_blueprints,
GLTF_PT_auto_export_collections_list,
GLTF_PT_auto_export_SidePanel,
GLTF_PT_auto_export_collections_list,
GLTF_PT_auto_export_changes_list,
AutoExportTracker,
]
@ -192,6 +194,7 @@ def register():
"""bpy.utils.register_class(AutoExportExtensionProperties)
bpy.types.Scene.AutoExportExtensionProperties = bpy.props.PointerProperty(type=AutoExportExtensionProperties)"""
def unregister():
for cls in classes:

View File

@ -3,12 +3,12 @@ import os
import bpy
import traceback
from .get_collections_to_export import get_collections_to_export
from .export_main_scenes import export_main_scene
from .export_blueprints import check_if_blueprint_on_disk, check_if_blueprints_exist, export_blueprints_from_collections
from .get_standard_exporter_settings import get_standard_exporter_settings
from ..helpers.helpers_scenes import (get_scenes, )
from ..helpers.helpers_collections import (get_collections_in_library, get_exportable_collections, get_collections_per_scene, find_collection_ascendant_target_collection)
from ..modules.export_materials import cleanup_materials, export_materials
from ..modules.bevy_scene_components import upsert_scene_components
@ -19,6 +19,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
print ("changed_export_parameters", changed_export_parameters)
try:
# path to the current blend file
file_path = bpy.data.filepath
# Get the folder
folder_path = os.path.dirname(file_path)
@ -34,10 +35,6 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
[main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
# standard gltf export settings are stored differently
standard_gltf_exporter_settings = get_standard_exporter_settings()
print("main scenes", main_scene_names, "library_scenes", library_scene_names)
print("export_output_folder", export_output_folder)
@ -53,81 +50,13 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
if export_blueprints:
print("EXPORTING")
# create parent relations for all collections
collection_parents = dict()
for collection in bpy.data.collections:
collection_parents[collection.name] = None
for collection in bpy.data.collections:
for ch in collection.children:
collection_parents[ch.name] = collection.name
# get a list of all collections actually in use
(collections, blueprint_hierarchy) = get_exportable_collections(level_scenes, library_scenes, addon_prefs)
# first check if all collections have already been exported before (if this is the first time the exporter is run
# in your current Blender session for example)
export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
export_levels_path = os.path.join(folder_path, export_output_folder)
gltf_extension = standard_gltf_exporter_settings.get("export_format", 'GLB')
gltf_extension = '.glb' if gltf_extension == 'GLB' else '.gltf'
collections_not_on_disk = check_if_blueprints_exist(collections, export_blueprints_path, gltf_extension)
changed_collections = []
for scene, objects in changes_per_scene.items():
print(" changed scene", scene)
for obj_name, obj in objects.items():
object_collections = list(obj.users_collection) if hasattr(obj, 'users_collection') else []
object_collection_names = list(map(lambda collection: collection.name, object_collections))
if len(object_collection_names) > 1:
print("ERRROR for",obj_name,"objects in multiple collections not supported")
else:
object_collection_name = object_collection_names[0] if len(object_collection_names) > 0 else None
#recurse upwards until we find one of our collections (or not)
matching_collection = find_collection_ascendant_target_collection(collection_parents, collections, object_collection_name)
if matching_collection is not None:
changed_collections.append(matching_collection)
collections_to_export = list(set(changed_collections + collections_not_on_disk)) if export_change_detection else collections
# we need to re-export everything if the export parameters have been changed
collections_to_export = collections if changed_export_parameters else collections_to_export
collections_per_scene = get_collections_per_scene(collections_to_export, library_scenes)
# collections that do not come from a library should not be exported as separate blueprints
# FIXME: logic is erroneous, needs to be changed
library_collections = get_collections_in_library(library_scenes)
collections_to_export = list(set(collections_to_export).intersection(set(library_collections)))
(collections, collections_to_export, main_scenes_to_export, library_collections, collections_per_scene, blueprint_hierarchy, export_levels_path, gltf_extension) = get_collections_to_export(folder_path, export_output_folder, changes_per_scene, changed_export_parameters, addon_prefs)
# since materials export adds components we need to call this before blueprints are exported
# export materials & inject materials components into relevant objects
if export_materials_library:
export_materials(collections, library_scenes, folder_path, addon_prefs)
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_levels_path, gltf_extension)]
bpy.context.window_manager.auto_export_tracker.exports_count = len(collections_to_export)
bpy.context.window_manager.auto_export_tracker.exports_count += len(main_scenes_to_export)
if export_materials_library:
bpy.context.window_manager.auto_export_tracker.exports_count += 1
print("-------------------------------")
print("collections: all:", collections)
print("collections: changed:", changed_collections)
print("collections: not found on disk:", collections_not_on_disk)
print("collections: in library:", library_collections)
print("collections: to export:", collections_to_export)
print("collections: per_scene:", collections_per_scene)
print("-------------------------------")
print("BLUEPRINTS: to export:", collections_to_export)
print("-------------------------------")
print("MAIN SCENES: to export:", main_scenes_to_export)
print("-------------------------------")
# backup current active scene
old_current_scene = bpy.context.scene
# backup current selections
@ -168,8 +97,6 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
for scene_name in main_scene_names:
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, [])
print("we are done with all export work",bpy.context.window_manager.auto_export_tracker.change_detection_enabled)
except Exception as error:
print(traceback.format_exc())

View File

@ -0,0 +1,87 @@
import os
import bpy
from .get_standard_exporter_settings import get_standard_exporter_settings
from .export_blueprints import check_if_blueprint_on_disk, check_if_blueprints_exist, export_blueprints_from_collections
from ..helpers.helpers_collections import get_exportable_collections
from ..helpers.helpers_collections import (get_collections_in_library, get_exportable_collections, get_collections_per_scene, find_collection_ascendant_target_collection)
from ..helpers.helpers_scenes import (get_scenes, )
def get_collections_to_export(folder_path, export_output_folder, changes_per_scene, changed_export_parameters, addon_prefs):
export_change_detection = getattr(addon_prefs, "export_change_detection")
export_materials_library = getattr(addon_prefs,"export_materials_library")
# standard gltf export settings are stored differently
standard_gltf_exporter_settings = get_standard_exporter_settings()
[main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
collection_parents = dict()
for collection in bpy.data.collections:
collection_parents[collection.name] = None
for collection in bpy.data.collections:
for ch in collection.children:
collection_parents[ch.name] = collection.name
# get a list of all collections actually in use
(collections, blueprint_hierarchy) = get_exportable_collections(level_scenes, library_scenes, addon_prefs)
# first check if all collections have already been exported before (if this is the first time the exporter is run
# in your current Blender session for example)
export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
export_levels_path = os.path.join(folder_path, export_output_folder)
gltf_extension = standard_gltf_exporter_settings.get("export_format", 'GLB')
gltf_extension = '.glb' if gltf_extension == 'GLB' else '.gltf'
collections_not_on_disk = check_if_blueprints_exist(collections, export_blueprints_path, gltf_extension)
changed_collections = []
for scene, objects in changes_per_scene.items():
print(" changed scene", scene)
for obj_name, obj in objects.items():
object_collections = list(obj.users_collection) if hasattr(obj, 'users_collection') else []
object_collection_names = list(map(lambda collection: collection.name, object_collections))
if len(object_collection_names) > 1:
print("ERRROR for",obj_name,"objects in multiple collections not supported")
else:
object_collection_name = object_collection_names[0] if len(object_collection_names) > 0 else None
#recurse upwards until we find one of our collections (or not)
matching_collection = find_collection_ascendant_target_collection(collection_parents, collections, object_collection_name)
if matching_collection is not None:
changed_collections.append(matching_collection)
collections_to_export = list(set(changed_collections + collections_not_on_disk)) if export_change_detection else collections
# we need to re-export everything if the export parameters have been changed # TODO: perhaps do this BEFORE the rest above for better performance
collections_to_export = collections if changed_export_parameters else collections_to_export
collections_per_scene = get_collections_per_scene(collections_to_export, library_scenes)
# collections that do not come from a library should not be exported as separate blueprints
# FIXME: logic is erroneous, needs to be changed
library_collections = get_collections_in_library(library_scenes)
collections_to_export = list(set(collections_to_export).intersection(set(library_collections)))
main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_levels_path, gltf_extension)]
# update the list of tracked exports
exports_total = len(collections_to_export) + len(main_scenes_to_export) + (1 if export_materials_library else 0)
bpy.context.window_manager.auto_export_tracker.exports_total = exports_total
bpy.context.window_manager.auto_export_tracker.exports_count = exports_total
print("-------------------------------")
print("collections: all:", collections)
print("collections: changed:", changed_collections)
print("collections: not found on disk:", collections_not_on_disk)
print("collections: in library:", library_collections)
print("collections: to export:", collections_to_export)
print("collections: per_scene:", collections_per_scene)
print("-------------------------------")
print("BLUEPRINTS: to export:", collections_to_export)
print("-------------------------------")
print("MAIN SCENES: to export:", main_scenes_to_export)
print("-------------------------------")
return (collections, collections_to_export, main_scenes_to_export, library_collections, collections_per_scene, blueprint_hierarchy, export_levels_path, gltf_extension)
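A quick worked example of the export-counter bookkeeping above (numbers invented for illustration): with 3 blueprints in collections_to_export, 1 scene in main_scenes_to_export and export_materials_library enabled, exports_total comes out as 3 + 1 + 1 = 5. The tracker's exports_count starts at the same value; each finished export (presumably via the glTF post-export callback) calls export_finished, which decrements it, and once it reaches 0 change detection is re-enabled through a short timer.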

View File

@ -2,14 +2,10 @@ import json
import bpy
from bpy.types import Operator
from bpy_extras.io_utils import ExportHelper
from bpy.props import (IntProperty, StringProperty)
from bpy.props import (IntProperty)
from .preferences import (AutoExportGltfAddonPreferences, AutoExportGltfPreferenceNames)
from ..helpers.helpers_scenes import (get_scenes)
from ..helpers.helpers_collections import (get_exportable_collections)
from .auto_export import auto_export
from io_scene_gltf2 import (ExportGLTF2, GLTF_PT_export_main,ExportGLTF2_Base, GLTF_PT_export_include)
class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
"""auto export gltf"""
#bl_idname = "object.xxx"
@ -158,7 +154,6 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
# if there were no setting before, it is new, we need export
changed = False
print("previous_auto_settings", previous_auto_settings, "previous_gltf_settings", previous_gltf_settings)
if previous_auto_settings == None:
print("previous settings missing, exporting")
changed = True
@ -167,7 +162,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
previous_gltf_settings = bpy.data.texts.new(".gltf_auto_export_gltf_settings_previous")
previous_gltf_settings.write(json.dumps({}))
if current_gltf_settings == None:
current_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"]
current_gltf_settings = bpy.data.texts.new(".gltf_auto_export_gltf_settings")
current_gltf_settings.write(json.dumps({}))
changed = True
@ -199,65 +194,35 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
return changed
def execute(self, context):
print("execute")
#print("execute")
bpy.context.window_manager.auto_export_tracker.disable_change_detection()
if self.direct_mode:
self.load_settings(context)
if self.will_save_settings:
self.save_settings(context)
changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
if self.auto_export: # only do the actual exporting if auto export is actually enabled
changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
#& do the export
if self.direct_mode: #Do not auto export when applying settings in the menu, do it on save only
# disable change detection while the operator runs
if self.direct_mode: #Do not auto export when applying settings in the menu, do it on save only
#determine changed parameters
params_changed = self.did_export_settings_change()
auto_export(changes_per_scene, params_changed, self)
# cleanup
print("AUTO EXPORT DONE")
if bpy.context.window_manager.auto_export_tracker.exports_count == 0: # we need this in case there was nothing to export, to make sure change detection is enabled again
pass #print("YOLOOO")
#py.context.window_manager.auto_export_tracker.enable_change_detection()
#bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=1)
#bpy.context.window_manager.auto_export_tracker.enable_change_detection()
# FIXME: wrong logic, this should be called only in a glTF2_post_export_callback
#bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=1)
# cleanup
# reset the list of changes in the tracker
bpy.context.window_manager.auto_export_tracker.clear_changes()
print("AUTO EXPORT DONE")
bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=0.1)
else:
print("auto export disabled, skipping")
"""if not self.direct_mode:
print("enabling")
bpy.context.window_manager.auto_export_tracker.enable_change_detection()"""
bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=1)
return {'FINISHED'}
def invoke(self, context, event):
print("invoke")
#print("invoke")
bpy.context.window_manager.auto_export_tracker.disable_change_detection()
self.load_settings(context)
addon_prefs = self
"""[main_scene_names, level_scenes, library_scene_names, library_scenes]=get_scenes(addon_prefs)
(collections, _) = get_exportable_collections(level_scenes, library_scenes, addon_prefs)
try:
# we save this list of collections in the context
bpy.context.window_manager.exportedCollections.clear()
#TODO: add error handling for this
for collection_name in collections:
ui_info = bpy.context.window_manager.exportedCollections.add()
ui_info.name = collection_name
except Exception as error:
self.report({"ERROR"}, "Failed to populate list of exported collections/blueprints")"""
wm = context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
def draw(self, context):

View File

@ -1,10 +1,13 @@
import json
from types import SimpleNamespace
import bpy
from bpy.types import (PropertyGroup)
from bpy.props import (PointerProperty, IntProperty, StringProperty)
from ..constants import TEMPSCENE_PREFIX
from .internals import CollectionsToExport
from ..helpers.helpers_scenes import (get_scenes)
from ..helpers.helpers_collections import (get_exportable_collections)
class AutoExportTracker(PropertyGroup):
@ -16,14 +19,18 @@ class AutoExportTracker(PropertyGroup):
last_operator = None
dummy_file_path = ""
exports_total : IntProperty(
name='exports_total',
description='Number of total exports',
default=0
) # type: ignore
exports_count : IntProperty(
name='exports_count',
description='Number of exports in progress',
default=0
) # type: ignore
@classmethod
def register(cls):
bpy.types.WindowManager.auto_export_tracker = PointerProperty(type=AutoExportTracker)
@ -62,15 +69,15 @@ class AutoExportTracker(PropertyGroup):
@classmethod
def deps_update_handler(cls, scene, depsgraph):
print("change detection enabled", cls.change_detection_enabled)
# print("change detection enabled", cls.change_detection_enabled)
ops = bpy.context.window_manager.operators
"""ops = bpy.context.window_manager.operators
print("last operators", ops)
for op in ops:
print("operator", op)
print("operator", op)"""
active_operator = bpy.context.active_operator
if active_operator:
print("Operator", active_operator.bl_label, active_operator.bl_idname)
#print("Operator", active_operator.bl_label, active_operator.bl_idname)
if active_operator.bl_idname == "EXPORT_SCENE_OT_gltf" and active_operator.gltf_export_id == "gltf_auto_export":
# we backup any existing gltf export settings, if there were any
scene = bpy.context.scene
@ -87,30 +94,28 @@ class AutoExportTracker(PropertyGroup):
# we force saving params
active_operator.will_save_settings = True
active_operator.auto_export = True
print("setting stuff for auto_export")
# only deal with changes if we are NOT in the midst of saving/exporting
if cls.change_detection_enabled:
# ignore anything going on with temporary scenes
if not scene.name.startswith(TEMPSCENE_PREFIX):
print("depsgraph_update_post", scene.name)
# print("depsgraph_update_post", scene.name)
changed_scene = scene.name or ""
#print("-------------")
if not changed_scene in cls.changed_objects_per_scene:
cls.changed_objects_per_scene[changed_scene] = {}
print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
depsgraph = bpy.context.evaluated_depsgraph_get()
# print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
# depsgraph = bpy.context.evaluated_depsgraph_get()
for obj in depsgraph.updates:
print("depsgraph update", obj)
#print("depsgraph update", obj)
if isinstance(obj.id, bpy.types.Object):
# get the actual object
object = bpy.data.objects[obj.id.name]
print("changed object", obj.id.name)
print("FOO","transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
# print(" changed object", obj.id.name,"transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
cls.changed_objects_per_scene[scene.name][obj.id.name] = object
elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
print("changed material", obj.id, "scene", scene.name,)
# print(" changed material", obj.id, "scene", scene.name,)
material = bpy.data.materials[obj.id.name]
#now find which objects are using the material
for obj in bpy.data.objects:
@ -123,37 +128,68 @@ class AutoExportTracker(PropertyGroup):
items += len(cls.changed_objects_per_scene[scene_name].keys())
if items == 0:
cls.changed_objects_per_scene.clear()
print("changed_objects_per_scene", cls.changed_objects_per_scene)
# print("changed_objects_per_scene", cls.changed_objects_per_scene)
else:
cls.changed_objects_per_scene.clear()
# get a list of exportable collections for display
# keep it simple, just use SimpleNamespace for compatibility with the rest of our code
addon_prefs = SimpleNamespace(**get_auto_exporter_settings())
print("addon prefs", addon_prefs)
addon_prefs.export_marked_assets = True
[_, level_scenes, _, library_scenes] = get_scenes(addon_prefs)
(collections, _) = get_exportable_collections(level_scenes, library_scenes, addon_prefs)
try:
# we save this list of collections in the context
bpy.context.window_manager.exportedCollections.clear()
#TODO: add error handling for this
for collection_name in collections:
ui_info = bpy.context.window_manager.exportedCollections.add()
ui_info.name = collection_name
except Exception as error:
pass
#self.report({"ERROR"}, "Failed to populate list of exported collections/blueprints")
"""depsgraph = bpy.context.evaluated_depsgraph_get()
for update in depsgraph.updates:
print("update", update)"""
def disable_change_detection(self):
print("disable change detection")
#print("disable change detection")
self.change_detection_enabled = False
self.__class__.change_detection_enabled = False
return None
def enable_change_detection(self):
print("enable change detection")
#print("enable change detection")
self.change_detection_enabled = True
self.__class__.change_detection_enabled = True
#FIXME: not sure about these
#print("bpy.context.window_manager.auto_export_tracker.change_detection_enabled", bpy.context.window_manager.auto_export_tracker.change_detection_enabled)
return None
def clear_changes(self):
self.changed_objects_per_scene.clear()
self.__class__.changed_objects_per_scene.clear()
# bpy.context.window_manager.auto_export_tracker.change_detection_enabled = True
print("bpy.context.window_manager.auto_export_tracker.change_detection_enabled", bpy.context.window_manager.auto_export_tracker.change_detection_enabled)
return None
def export_finished(self):
print("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAHHHHHHHH export_finished")
bpy.context.window_manager.auto_export_tracker.exports_count -= 1
if bpy.context.window_manager.auto_export_tracker.exports_count == 0:
#print("preparing to reset change detection")
# bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=1)
self.enable_change_detection()
#print("export_finished")
self.exports_count -= 1
if self.exports_count == 0:
print("preparing to reset change detection")
bpy.app.timers.register(self.enable_change_detection, first_interval=0.1)
#self.enable_change_detection()
return None
def get_auto_exporter_settings():
auto_exporter_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None
if auto_exporter_settings != None:
try:
auto_exporter_settings = json.loads(auto_exporter_settings.as_string())
except:
auto_exporter_settings = {}
else:
auto_exporter_settings = {}
return auto_exporter_settings

View File

@ -212,8 +212,8 @@ def clear_hollow_scene(temp_scene, original_root_collection):
# convenience utility to get lists of scenes
def get_scenes(addon_prefs):
level_scene_names= list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes"))) # getattr(addon_prefs, "main_scene_names_compact").split(',')#
library_scene_names = list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes"))) #getattr(addon_prefs, "main_scene_names_compact").split(',')#
level_scene_names= getattr(addon_prefs,"main_scene_names") #list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes")))
library_scene_names = getattr(addon_prefs,"library_scene_names") #list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes")))
level_scene_names = list(filter(lambda name: name in bpy.data.scenes, level_scene_names))
library_scene_names = list(filter(lambda name: name in bpy.data.scenes, library_scene_names))

View File

@ -5,6 +5,7 @@ import mathutils
import pytest
import shutil
import pathlib
import rna_prop_ui
@pytest.fixture
def setup_data(request):
@ -80,6 +81,89 @@ def setup_data(request):
return None
def test_export_change_tracking_custom_properties(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = {
"export_animations": False,
"export_optimize_animation_size": False
}
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_legacy_mode=False,
export_materials_library=False
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
models_library_path = os.path.join(models_path, "library")
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(model_library_file_paths+ [world_file_path]):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
# now add a custom property to the cube in the main scene & export again
print("----------------")
print("main scene change (custom property)")
print("----------------")
bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but ..
bpy.data.objects["Cube"]["test_property"] = 42
#force an update
rna_prop_ui.rna_idprop_ui_create(bpy.data.objects["Cube"], "________temp", default=0)
rna_prop_ui.rna_idprop_ui_prop_clear(bpy.data.objects["Cube"], "________temp")
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_legacy_mode=False,
export_materials_library=False
)
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
assert modification_times != modification_times_first
# only the "world" file should have changed
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparing
modification_times_first = modification_times
"""
- setup gltf parameters & auto_export parameters
@ -233,7 +317,7 @@ def test_export_changed_parameters(setup_data):
print("----------------")
bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but ..
bpy.data.objects["Blueprint3_mesh"]["test_component"] = 42
bpy.data.objects["Blueprint3_mesh"].location= [0, 0.1 ,2]
auto_export_operator(
auto_export=True,
@ -249,13 +333,17 @@ def test_export_changed_parameters(setup_data):
assert modification_times != modification_times_first
# the "world" file should have changed (TODO: double check: this is since changing an instances collection changes the instance too ?)
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
# and the blueprint1 file too, since that is the collection we changed
blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index, blueprint1_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index, blueprint1_file_index]]
# and the blueprint3 file too, since that is the collection we changed
blueprint3_file_index = mapped_files_to_timestamps_and_index["Blueprint3"][1]
# and the blueprint4 file too, since it contains the collection we changed
blueprint4_file_index = mapped_files_to_timestamps_and_index["Blueprint4_nested"][1]
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index, blueprint3_file_index, blueprint4_file_index]]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index, blueprint3_file_index, blueprint4_file_index]]
assert modification_times[world_file_index] != modification_times_first[world_file_index]
assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
assert modification_times[blueprint3_file_index] != modification_times_first[blueprint3_file_index]
assert modification_times[blueprint4_file_index] != modification_times_first[blueprint4_file_index]
assert other_files_modification_times == other_files_modification_times_first
# reset the comparing
modification_times_first = modification_times
@ -271,11 +359,15 @@ def test_export_changed_parameters(setup_data):
with bpy.context.temp_override(active_object=bpy.data.objects["Cube"]):
print("translate using operator")
bpy.ops.transform.translate(value=mathutils.Vector((2.0, 1.0, -5.0)))
bpy.ops.transform.rotate(value=0.378874, constraint_axis=(False, False, True), mirror=False, proportional_edit_falloff='SMOOTH', proportional_size=1)
bpy.ops.object.transform_apply()
bpy.ops.transform.translate(value=mathutils.Vector((2.0, 1.0, -5.0)))
bpy.ops.transform.rotate(value=0.378874, constraint_axis=(False, False, True), mirror=False, proportional_edit_falloff='SMOOTH', proportional_size=1)
bpy.ops.object.transform_apply()
bpy.ops.transform.translate(value=(0.5, 0, 0), constraint_axis=(True, False, False))
#force an update, as apparently all the operators above do not trigger changes ???
rna_prop_ui.rna_idprop_ui_create(bpy.data.objects["Cube"], "________temp", default=0)
rna_prop_ui.rna_idprop_ui_prop_clear(bpy.data.objects["Cube"], "________temp")
auto_export_operator(
auto_export=True,

View File

@ -57,7 +57,7 @@ def test_export_no_parameters(setup_data):
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# first test exporting without any parameters set, this should export with default parameters gracefully
# first test exporting without any parameters set, this should not export anything
auto_export_operator(
auto_export=True,
@ -67,7 +67,35 @@ def test_export_no_parameters(setup_data):
export_materials_library=True
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) != True
def test_export_auto_export_parameters_only(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_legacy_mode=False,
export_materials_library=True
)
world_file_path = os.path.join(models_path, "World.glb")
assert os.path.exists(world_file_path) == True
def test_export_changed_parameters(setup_data):
root_path = "../../testing/bevy_example"

View File

@ -51,11 +51,12 @@ def test_export_do_not_export_blueprints(setup_data):
"main_scene_names" : ['World'],
"library_scene_names": ['Library']
}
stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_settings.clear()
stored_settings.write(json.dumps(export_props))
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
@ -75,11 +76,13 @@ def test_export_custom_blueprints_path(setup_data):
"main_scene_names" : ['World'],
"library_scene_names": ['Library']
}
stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_settings.clear()
stored_settings.write(json.dumps(export_props))
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
@ -104,6 +107,7 @@ def test_export_materials_library(setup_data):
stored_settings.write(json.dumps(export_props))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
@ -129,6 +133,7 @@ def test_export_materials_library_custom_path(setup_data):
stored_settings.write(json.dumps(export_props))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
@ -159,6 +164,7 @@ def test_export_collection_instances_combine_mode(setup_data): # TODO: change &
bpy.data.objects["Cube"]["dynamic"] = True
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_blueprints=True,
@ -184,6 +190,7 @@ def test_export_do_not_export_marked_assets(setup_data):
stored_settings.write(json.dumps(export_props))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
@ -216,6 +223,7 @@ def test_export_separate_dynamic_and_static_objects(setup_data):
bpy.data.objects["Cube"]["dynamic"] = True
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
@ -242,6 +250,7 @@ def test_export_should_not_generate_orphan_data(setup_data):
stored_settings.write(json.dumps(export_props))
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,

View File

@ -1,19 +1,5 @@
from typing import Set
import bpy
from bpy.types import Context, Event, Operator
from bpy_extras.io_utils import ExportHelper
from bpy.props import (BoolProperty,
IntProperty,
StringProperty,
EnumProperty,
CollectionProperty
)
from ..auto_export import auto_export
from ..auto_export.preferences import (AutoExportGltfAddonPreferences, AutoExportGltfPreferenceNames)
from ..helpers.helpers_scenes import (get_scenes)
from ..helpers.helpers_collections import (get_exportable_collections)
######################################################
## ui logic & co
@ -49,15 +35,41 @@ class GLTF_PT_auto_export_SidePanel(bpy.types.Panel):
op = layout.operator("EXPORT_SCENES_OT_auto_gltf", text="Auto Export Settings")
op.auto_export = True
layout.label(text="changes since last save:")
changed_objects_per_scene = {}
for scene in context.window_manager.auto_export_tracker.changed_objects_per_scene:
if not scene in changed_objects_per_scene.keys():
changed_objects_per_scene[scene] = []
changed_objects_per_scene[scene]+= context.window_manager.auto_export_tracker.changed_objects_per_scene[scene].keys()
layout.label(text=str(changed_objects_per_scene))
#print("GLTF_PT_export_main", GLTF_PT_export_main.bl_parent_id)
class GLTF_PT_auto_export_changes_list(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_label = "Changes per scene since last save "
bl_parent_id = "GLTF_PT_auto_export_SidePanel"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No animation.
#if "auto_export_tracker" in context.window_manager:
changed_objects_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
for scene_name in changed_objects_per_scene:
layout.label(text=f'{scene_name}')
for object_name in list(changed_objects_per_scene[scene_name].keys()):
row = layout.row()
row.label(text=f' {object_name}')
class GLTF_PT_auto_export_collections_list(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_label = "Blueprints to export"
bl_parent_id = "GLTF_PT_auto_export_SidePanel"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No animation.
for collection in bpy.context.window_manager.exportedCollections:
row = layout.row()
row.label(text=collection.name)
# main ui in the file => export
class GLTF_PT_auto_export_main(bpy.types.Panel):
@ -260,34 +272,6 @@ class GLTF_PT_auto_export_blueprints(bpy.types.Panel):
# materials
layout.prop(operator, "export_materials_library")
layout.prop(operator, "export_materials_path")
class GLTF_PT_auto_export_collections_list(bpy.types.Panel):
bl_space_type = 'FILE_BROWSER'
bl_region_type = 'TOOL_PROPS'
bl_label = "Blueprints: Exported Collections"
bl_parent_id = "GLTF_PT_auto_export_blueprints"
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
sfile = context.space_data
operator = sfile.active_operator
return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No animation.
sfile = context.space_data
operator = sfile.active_operator
layout.active = operator.auto_export and operator.export_blueprints
for collection in bpy.context.window_manager.exportedCollections:
row = layout.row()
row.label(text=collection.name)
class SCENE_UL_GLTF_auto_export(bpy.types.UIList):
# The draw_item function is called for each item of the collection that is visible in the list.