mirror of
https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
synced 2024-11-23 04:10:53 +00:00
Compare commits
4 Commits
6a1594188e ... db1a15ec63

| SHA1 |
| --- |
| db1a15ec63 |
| 2f54bea7c9 |
| 9ec60a783b |
| 98930af5f5 |
@@ -248,18 +248,15 @@ def apply_propertyGroup_values_to_object_customProperties(object):

 # apply component value(s) to custom property of a single component
 def apply_propertyGroup_values_to_object_customProperties_for_component(object, component_name):
     registry = bpy.context.window_manager.components_registry
-    print("yallah", component_name)
     (_, propertyGroup) = upsert_component_in_object(object, component_name, registry)
     component_definition = find_component_definition_from_short_name(component_name)
     if component_definition != None:
-        print("merde")
         value = property_group_value_to_custom_property_value(propertyGroup, component_definition, registry, None)
         object[component_name] = value

         components_metadata = object.components_meta.components
         componentMeta = next(filter(lambda component: component["name"] == component_name, components_metadata), None)
         if componentMeta:
-            print("here")
             componentMeta.invalid = False
             componentMeta.invalid_details = ""

@@ -18,12 +18,11 @@ class MissingBevyType(bpy.types.PropertyGroup):

 # helper function to deal with timer
 def toggle_watcher(self, context):
-    print("toggling watcher", self.watcher_enabled, watch_schema, self, bpy.app.timers)
+    #print("toggling watcher", self.watcher_enabled, watch_schema, self, bpy.app.timers)
     if not self.watcher_enabled:
         try:
             bpy.app.timers.unregister(watch_schema)
         except Exception as error:
-            print("failed to unregister", error)
             pass
     else:
         self.watcher_active = True
@@ -236,7 +235,6 @@ class ComponentsRegistry(PropertyGroup):
     try:
         bpy.app.timers.unregister(watch_schema)
     except Exception as error:
-        print("failed to unregister", error)
         pass

     del bpy.types.WindowManager.components_registry
@@ -18,6 +18,7 @@ from bpy.types import Context
 from bpy.props import (StringProperty, BoolProperty, IntProperty, PointerProperty)
 import rna_prop_ui
+

 # from .extension import ExampleExtensionProperties, GLTF_PT_UserExtensionPanel, unregister_panel

 from .auto_export.operators import AutoExportGLTF
@@ -41,6 +42,8 @@ from .ui.main import (GLTF_PT_auto_export_changes_list, GLTF_PT_auto_export_main
     GLTF_PT_auto_export_SidePanel
 )
 from .ui.operators import (SCENES_LIST_OT_actions)
+from .helpers.ping_depsgraph_update import ping_depsgraph_update
+from .helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_gltf


 ######################################################
@@ -149,12 +152,18 @@ def glTF2_post_export_callback(data):

     # get the parameters
     scene = bpy.context.scene
+    print(dict(scene))
     if "glTF2ExportSettings" in scene:
+        print("write gltf settings")
         settings = scene["glTF2ExportSettings"]
         export_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
         # now write new settings
         export_settings.clear()
-        export_settings.write(json.dumps(dict(settings)))
+        current_gltf_settings = generate_complete_preferences_dict_gltf(dict(settings))
+        print("current_gltf_settings", current_gltf_settings)
+        export_settings.write(json.dumps(current_gltf_settings))
+        print("done writing")

     # now reset the original gltf_settings
     if gltf_settings_backup != "":
         scene["glTF2ExportSettings"] = json.loads(gltf_settings_backup)
@@ -168,6 +177,10 @@ def glTF2_post_export_callback(data):
     last_operator.filepath = ""
     last_operator.gltf_export_id = ""

+    # AGAIN, something that does not work withouth a timer
+    bpy.app.timers.register(ping_depsgraph_update, first_interval=0.1)
+
+

 def menu_func_import(self, context):
     self.layout.operator(AutoExportGLTF.bl_idname, text="glTF auto Export (.glb/gltf)")
@@ -175,7 +188,11 @@ from bpy.app.handlers import persistent

 @persistent
 def post_update(scene, depsgraph):
-    bpy.context.window_manager.auto_export_tracker.deps_update_handler( scene, depsgraph)
+    bpy.context.window_manager.auto_export_tracker.deps_post_update_handler( scene, depsgraph)
+
+@persistent
+def pre_update(scene, depsgraph):
+    bpy.context.window_manager.auto_export_tracker.deps_pre_update_handler( scene, depsgraph)

 @persistent
 def post_save(scene, depsgraph):
@@ -185,6 +202,7 @@ def register():
     for cls in classes:
         bpy.utils.register_class(cls)
     # for some reason, adding these directly to the tracker class in register() do not work reliably
+    bpy.app.handlers.depsgraph_update_pre.append(pre_update)
     bpy.app.handlers.depsgraph_update_post.append(post_update)
     bpy.app.handlers.save_post.append(post_save)

@@ -195,12 +213,12 @@ def register():
     """bpy.utils.register_class(AutoExportExtensionProperties)
    bpy.types.Scene.AutoExportExtensionProperties = bpy.props.PointerProperty(type=AutoExportExtensionProperties)"""


 def unregister():
     for cls in classes:
         bpy.utils.unregister_class(cls)
     bpy.types.TOPBAR_MT_file_export.remove(menu_func_import)
-
+    bpy.app.handlers.depsgraph_update_pre.remove(pre_update)
     bpy.app.handlers.depsgraph_update_post.remove(post_update)
     bpy.app.handlers.save_post.remove(post_save)

@@ -48,12 +48,11 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
     # here we do a bit of workaround by creating an override # TODO: do this at the "UI" level
     export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
     #print('addon_prefs', AutoExportGltfAddonPreferences.__annotations__)#)addon_prefs.__annotations__)
-
     if hasattr(addon_prefs, "__annotations__") :
         tmp = {}
         for k in AutoExportGltfAddonPreferences.__annotations__:
             item = AutoExportGltfAddonPreferences.__annotations__[k]
-            print("tutu",k, item.keywords.get('default', None) )
+            #print("tutu",k, item.keywords.get('default', None) )
             default = item.keywords.get('default', None)
             tmp[k] = default

@@ -0,0 +1,39 @@
+import json
+import bpy
+
+"""
+This should ONLY be run when actually doing exports/aka calling auto_export function, because we only care about the difference in settings between EXPORTS
+"""
+def did_export_settings_change():
+    # compare both the auto export settings & the gltf settings
+    previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else None
+    previous_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings_previous"] if ".gltf_auto_export_gltf_settings_previous" in bpy.data.texts else None
+
+    current_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None
+    current_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else None
+
+    #check if params have changed
+
+    # if there were no setting before, it is new, we need export
+    changed = False
+    if previous_auto_settings == None:
+        print("previous settings missing, exporting")
+        changed = True
+    elif previous_gltf_settings == None:
+        print("previous gltf settings missing, exporting")
+        changed = True
+    else:
+        auto_settings_changed = sorted(json.loads(previous_auto_settings.as_string()).items()) != sorted(json.loads(current_auto_settings.as_string()).items()) if current_auto_settings != None else False
+        gltf_settings_changed = sorted(json.loads(previous_gltf_settings.as_string()).items()) != sorted(json.loads(current_gltf_settings.as_string()).items()) if current_gltf_settings != None else False
+
+        """print("auto settings previous", sorted(json.loads(previous_auto_settings.as_string()).items()))
+        print("auto settings current", sorted(json.loads(current_auto_settings.as_string()).items()))
+        print("auto_settings_changed", auto_settings_changed)"""
+
+        """print("gltf settings previous", sorted(json.loads(previous_gltf_settings.as_string()).items()))
+        print("gltf settings current", sorted(json.loads(current_gltf_settings.as_string()).items()))
+        print("gltf_settings_changed", gltf_settings_changed)"""
+
+        changed = auto_settings_changed or gltf_settings_changed

+    return changed
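For orientation, a hedged usage sketch (not part of the diff): the helper only compares the text datablocks named above, so the caller is expected to have written the current settings first; the operator's `save_settings` shown further down does exactly that.

```python
# Hypothetical driver code, assuming the same text datablock names as the helper above.
import json
import bpy

def store_current_auto_settings(settings: dict):
    # write the "current" auto-export settings where did_export_settings_change() looks for them
    text = bpy.data.texts.get(".gltf_auto_export_settings") or bpy.data.texts.new(".gltf_auto_export_settings")
    text.clear()
    text.write(json.dumps(settings))

store_current_auto_settings({"auto_export": True, "export_main_scene_name": "World"})
if did_export_settings_change():
    print("settings differ from the previous export, re-exporting everything")
```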
@@ -50,7 +50,6 @@ def generate_gltf_export_preferences(addon_prefs):


     standard_gltf_exporter_settings = get_standard_exporter_settings()
-    #print("standard settings", standard_gltf_exporter_settings)

     constant_keys = [
         'use_selection',
@@ -6,6 +6,7 @@ from ..helpers.helpers_collections import get_exportable_collections
 from ..helpers.helpers_collections import (get_collections_in_library, get_exportable_collections, get_collections_per_scene, find_collection_ascendant_target_collection)
 from ..helpers.helpers_scenes import (get_scenes, )

+# TODO: this should also take the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
 def get_collections_to_export(changes_per_scene, changed_export_parameters, addon_prefs):
     export_change_detection = getattr(addon_prefs, "export_change_detection")
     export_gltf_extension = getattr(addon_prefs, "export_gltf_extension", ".glb")
@@ -16,7 +17,7 @@ def get_collections_to_export(changes_per_scene, changed_export_parameters, addo
     (collections, blueprint_hierarchy) = get_exportable_collections(level_scenes, library_scenes, addon_prefs)
     collections_to_export = collections # just for clarity

-    print("export_change_detection", export_change_detection, export_gltf_extension, export_blueprints_path)
+    # print("export_change_detection", export_change_detection, "changed_export_parameters", changed_export_parameters, "changes_per_scene", changes_per_scene)

     # if the export parameters have changed, bail out early
     # we need to re_export everything if the export parameters have been changed
@@ -38,7 +39,7 @@ def get_collections_to_export(changes_per_scene, changed_export_parameters, addo
     # determine which collections have changed
     for scene, objects in changes_per_scene.items():
         print(" changed scene", scene)
-        for obj_name, obj in objects.items():
+        for obj_name, obj in list(objects.items()):
             object_collections = list(obj.users_collection) if hasattr(obj, 'users_collection') else []
             object_collection_names = list(map(lambda collection: collection.name, object_collections))

@@ -2,6 +2,7 @@ import bpy
 from .export_blueprints import check_if_blueprint_on_disk
 from ..helpers.helpers_scenes import (get_scenes, )

+# TODO: this should also take the split/embed mode into account: if a collection instance changes AND embed is active, its container level/world should also be exported
 def get_levels_to_export(changes_per_scene, changed_export_parameters, addon_prefs):
     export_change_detection = getattr(addon_prefs, "export_change_detection")
     export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
@@ -9,6 +10,7 @@ def get_levels_to_export(changes_per_scene, changed_export_parameters, addon_pre

     [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)

+    # print("levels export", "export_change_detection", export_change_detection, "changed_export_parameters",changed_export_parameters, "export_models_path", export_models_path, "export_gltf_extension", export_gltf_extension, "changes_per_scene", changes_per_scene)
     # determine list of main scenes to export
     # we have more relaxed rules to determine if the main scenes have changed : any change is ok, (allows easier handling of changes, render settings etc)
     main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_models_path, export_gltf_extension)]
@@ -3,18 +3,29 @@ import bpy
 from bpy.types import Operator
 from bpy_extras.io_utils import ExportHelper
 from bpy.props import (IntProperty)


 from .preferences import (AutoExportGltfAddonPreferences, AutoExportGltfPreferenceNames)
 from .auto_export import auto_export
+from ..helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_auto
+from ..helpers.serialize_scene import serialize_scene
+
+def bubble_up_changes(object, changes_per_scene):
+    if object.parent:
+        changes_per_scene[object.parent.name] = bpy.data.objects[object.parent.name]
+        bubble_up_changes(object.parent, changes_per_scene)
+
+
 class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
     """auto export gltf"""
     #bl_idname = "object.xxx"
     bl_idname = "export_scenes.auto_gltf"
     bl_label = "Apply settings"
-    bl_options = {'PRESET', 'UNDO'}
+    bl_options = {'PRESET'} # we do not add UNDO otherwise it leads to an invisible operation that resets the state of the saved serialized scene, breaking compares for normal undo/redo operations
     # ExportHelper mixin class uses this
     filename_ext = ''


     #list of settings (other than purely gltf settings) whose change should trigger a re-generation of gltf files
     white_list = ['auto_export',
         'export_main_scene_name',
|
|||||||
return export_props
|
return export_props
|
||||||
|
|
||||||
def save_settings(self, context):
|
def save_settings(self, context):
|
||||||
export_props = self.format_settings()
|
auto_export_settings = self.format_settings()
|
||||||
self.properties['main_scene_names'] = export_props['main_scene_names']
|
self.properties['main_scene_names'] = auto_export_settings['main_scene_names']
|
||||||
self.properties['library_scene_names'] = export_props['library_scene_names']
|
self.properties['library_scene_names'] = auto_export_settings['library_scene_names']
|
||||||
|
|
||||||
stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
|
stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
|
||||||
stored_settings.clear()
|
stored_settings.clear()
|
||||||
stored_settings.write(json.dumps(export_props))
|
|
||||||
|
auto_export_settings = generate_complete_preferences_dict_auto(auto_export_settings)
|
||||||
|
stored_settings.write(json.dumps(auto_export_settings))
|
||||||
|
print("saved settings", auto_export_settings)
|
||||||
#print("saving settings", bpy.data.texts[".gltf_auto_export_settings"].as_string(), "raw", json.dumps(export_props))
|
#print("saving settings", bpy.data.texts[".gltf_auto_export_settings"].as_string(), "raw", json.dumps(export_props))
|
||||||
|
|
||||||
def load_settings(self, context):
|
def load_settings(self, context):
|
||||||
@ -182,6 +196,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
|
|||||||
changed = auto_settings_changed or gltf_settings_changed
|
changed = auto_settings_changed or gltf_settings_changed
|
||||||
# now write the current settings to the "previous settings"
|
# now write the current settings to the "previous settings"
|
||||||
if current_auto_settings != None:
|
if current_auto_settings != None:
|
||||||
|
print("writing settings")
|
||||||
previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings_previous")
|
previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings_previous")
|
||||||
previous_auto_settings.clear()
|
previous_auto_settings.clear()
|
||||||
previous_auto_settings.write(current_auto_settings.as_string()) # TODO : check if this is always valid
|
previous_auto_settings.write(current_auto_settings.as_string()) # TODO : check if this is always valid
|
||||||
@ -193,6 +208,96 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
|
|||||||
|
|
||||||
return changed
|
return changed
|
||||||
|
|
||||||
|
def did_objects_change(self):
|
||||||
|
# sigh... you need to save & reset the frame otherwise it saves the values AT THE CURRENT FRAME WHICH CAN DIFFER ACROSS SCENES
|
||||||
|
current_frames = [scene.frame_current for scene in bpy.data.scenes]
|
||||||
|
for scene in bpy.data.scenes:
|
||||||
|
scene.frame_set(0)
|
||||||
|
|
||||||
|
current_scene = bpy.context.window.scene
|
||||||
|
bpy.context.window.scene = bpy.data.scenes[0]
|
||||||
|
#serialize scene at frame 0
|
||||||
|
"""with bpy.context.temp_override(scene=bpy.data.scenes[1]):
|
||||||
|
bpy.context.scene.frame_set(0)"""
|
||||||
|
current = serialize_scene()
|
||||||
|
bpy.context.window.scene = current_scene
|
||||||
|
|
||||||
|
# reset previous frames
|
||||||
|
for (index, scene) in enumerate(bpy.data.scenes):
|
||||||
|
scene.frame_set(int(current_frames[index]))
|
||||||
|
|
||||||
|
previous_stored = bpy.data.texts[".TESTING"] if ".TESTING" in bpy.data.texts else None # bpy.data.texts.new(".TESTING")
|
||||||
|
if previous_stored == None:
|
||||||
|
previous_stored = bpy.data.texts.new(".TESTING")
|
||||||
|
previous_stored.write(current)
|
||||||
|
return {}
|
||||||
|
previous = json.loads(previous_stored.as_string())
|
||||||
|
current = json.loads(current)
|
||||||
|
|
||||||
|
changes_per_scene = {}
|
||||||
|
# TODO : how do we deal with changed scene names ???
|
||||||
|
for scene in current:
|
||||||
|
print('scene', scene)
|
||||||
|
previous_object_names = list(previous[scene].keys())
|
||||||
|
current_object_names =list(current[scene].keys())
|
||||||
|
#print("previous_object_names", len(previous_object_names), previous_object_names)
|
||||||
|
#print("current_object_names", len(current_object_names), current_object_names)
|
||||||
|
|
||||||
|
"""if len(previous_object_names) > len(current_object_names):
|
||||||
|
print("removed")
|
||||||
|
if len(current_object_names) > len(previous_object_names):
|
||||||
|
print("added")"""
|
||||||
|
added = list(set(current_object_names) - set(previous_object_names))
|
||||||
|
removed = list(set(previous_object_names) - set(current_object_names))
|
||||||
|
"""print("removed", removed)
|
||||||
|
print("added",added)"""
|
||||||
|
for obj in added:
|
||||||
|
if not scene in changes_per_scene:
|
||||||
|
changes_per_scene[scene] = {}
|
||||||
|
changes_per_scene[scene][obj] = bpy.data.objects[obj]
|
||||||
|
# TODO: how do we deal with this, as we obviously do not have data for removed objects ?
|
||||||
|
for obj in removed:
|
||||||
|
if not scene in changes_per_scene:
|
||||||
|
changes_per_scene[scene] = {}
|
||||||
|
changes_per_scene[scene][obj] = None # bpy.data.objects[obj]
|
||||||
|
|
||||||
|
for object_name in list(current[scene].keys()): # todo : exclude directly added/removed objects
|
||||||
|
#print("ob", object_name)
|
||||||
|
if object_name in previous[scene]:
|
||||||
|
# print("object", object_name,"in previous scene, comparing")
|
||||||
|
current_obj = current[scene][object_name]
|
||||||
|
prev_obj = previous[scene][object_name]
|
||||||
|
same = str(current_obj) == str(prev_obj)
|
||||||
|
|
||||||
|
if "Camera" in object_name:
|
||||||
|
pass#print(" current", current_obj, prev_obj)
|
||||||
|
"""if "Fox" in object_name:
|
||||||
|
print(" current", current_obj)
|
||||||
|
print(" previou", prev_obj)
|
||||||
|
print(" same?", same)"""
|
||||||
|
#print("foo", same)
|
||||||
|
if not same:
|
||||||
|
""" print(" current", current_obj)
|
||||||
|
print(" previou", prev_obj)"""
|
||||||
|
if not scene in changes_per_scene:
|
||||||
|
changes_per_scene[scene] = {}
|
||||||
|
|
||||||
|
changes_per_scene[scene][object_name] = bpy.data.objects[object_name]
|
||||||
|
bubble_up_changes(bpy.data.objects[object_name], changes_per_scene[scene])
|
||||||
|
# now bubble up for instances & parents
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
previous_stored.clear()
|
||||||
|
previous_stored.write(json.dumps(current))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
print("changes per scene alternative", changes_per_scene)
|
||||||
|
return changes_per_scene
|
||||||
|
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
#print("execute")
|
#print("execute")
|
||||||
bpy.context.window_manager.auto_export_tracker.disable_change_detection()
|
bpy.context.window_manager.auto_export_tracker.disable_change_detection()
|
||||||
@@ -202,10 +307,13 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
         self.save_settings(context)

         if self.auto_export: # only do the actual exporting if auto export is actually enabled
-            changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
+            #changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene

             #& do the export
             if self.direct_mode: #Do not auto export when applying settings in the menu, do it on save only
-                #determine changed parameters
+                # determine changed objects
+                changes_per_scene = self.did_objects_change()
+                # determine changed parameters
                 params_changed = self.did_export_settings_change()
                 auto_export(changes_per_scene, params_changed, self)
                 # cleanup
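The core of the new `did_objects_change` path is a plain diff of two serialized snapshots: added and removed objects come from set differences of the object names, and everything else is compared by stringified value. A minimal sketch with dicts standing in for the snapshots:

```python
# Dicts standing in for previous/current serialize_scene() snapshots of one scene.
previous = {"Cube": "transform A", "Camera": "lens 50"}
current = {"Cube": "transform B", "Light": "sun"}

added = list(set(current.keys()) - set(previous.keys()))    # ['Light']
removed = list(set(previous.keys()) - set(current.keys()))  # ['Camera']
changed = [name for name in current
           if name in previous and str(current[name]) != str(previous[name])]  # ['Cube']
print(added, removed, changed)
```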
@@ -6,6 +6,7 @@ import bpy
 from bpy.types import (PropertyGroup)
 from bpy.props import (PointerProperty, IntProperty, StringProperty)

+from .did_export_settings_change import did_export_settings_change
 from .get_collections_to_export import get_collections_to_export

 from ..constants import TEMPSCENE_PREFIX
@@ -73,7 +74,32 @@ class AutoExportTracker(PropertyGroup):
         # all our logic is done, mark this as done

     @classmethod
-    def deps_update_handler(cls, scene, depsgraph):
+    def deps_pre_update_handler(cls, scene, depsgraph):
+        pass
+        #print("before depsgraph update", scene, depsgraph)
+
+        # only deal with changes if we are NOT in the mids of saving/exporting
+        """if cls.change_detection_enabled:
+            # ignore anything going on with temporary scenes
+            if not scene.name.startswith(TEMPSCENE_PREFIX):
+                print("depsgraph_update_post", scene.name)
+                changed_scene = scene.name or ""
+                #print("-------------")
+
+                # print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
+                # depsgraph = bpy.context.evaluated_depsgraph_get()
+                for obj in depsgraph.updates:
+                    #print("depsgraph update", obj)
+                    if isinstance(obj.id, bpy.types.Object):
+                        # get the actual object
+                        object = bpy.data.objects[obj.id.name]
+                        print(" changed object", obj.id.name, "changes", obj, "evalutated", obj.id.is_evaluated, "transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
+                    elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
+                        # print(" changed material", obj.id, "scene", scene.name,)
+                        pass"""
+
+    @classmethod
+    def deps_post_update_handler(cls, scene, depsgraph):
         # print("change detection enabled", cls.change_detection_enabled)

         """ops = bpy.context.window_manager.operators
@@ -95,17 +121,21 @@ class AutoExportTracker(PropertyGroup):
             # we set the last operator here so we can clear the specific settings (yeah for overly complex logic)
             cls.last_operator = active_operator
             #print("active_operator", active_operator.has_active_exporter_extensions, active_operator.__annotations__.keys(), active_operator.filepath, active_operator.gltf_export_id)
+            return

         if active_operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf":
             # we force saving params
             active_operator.will_save_settings = True
             active_operator.auto_export = True
+            # if we are using the operator, bail out for the rest
             print("setting stuff for auto_export")
+            return
+
         # only deal with changes if we are NOT in the mids of saving/exporting
         if cls.change_detection_enabled:
             # ignore anything going on with temporary scenes
             if not scene.name.startswith(TEMPSCENE_PREFIX):
-                # print("depsgraph_update_post", scene.name)
+                #print("depsgraph_update_post", scene.name)
                 changed_scene = scene.name or ""
                 #print("-------------")
                 if not changed_scene in cls.changed_objects_per_scene:
|
|||||||
if isinstance(obj.id, bpy.types.Object):
|
if isinstance(obj.id, bpy.types.Object):
|
||||||
# get the actual object
|
# get the actual object
|
||||||
object = bpy.data.objects[obj.id.name]
|
object = bpy.data.objects[obj.id.name]
|
||||||
# print(" changed object", obj.id.name,"transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
|
#print(" changed object", obj.id.name, "changes", obj, "evalutated", obj.id.is_evaluated, "transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
|
||||||
|
if obj.is_updated_transform or obj.is_updated_geometry:
|
||||||
cls.changed_objects_per_scene[scene.name][obj.id.name] = object
|
cls.changed_objects_per_scene[scene.name][obj.id.name] = object
|
||||||
|
|
||||||
elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
|
elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
|
||||||
# print(" changed material", obj.id, "scene", scene.name,)
|
# print(" changed material", obj.id, "scene", scene.name,)
|
||||||
material = bpy.data.materials[obj.id.name]
|
material = bpy.data.materials[obj.id.name]
|
||||||
@ -127,29 +159,39 @@ class AutoExportTracker(PropertyGroup):
|
|||||||
for slot in obj.material_slots:
|
for slot in obj.material_slots:
|
||||||
if slot.material == material:
|
if slot.material == material:
|
||||||
cls.changed_objects_per_scene[scene.name][obj.name] = obj
|
cls.changed_objects_per_scene[scene.name][obj.name] = obj
|
||||||
|
#print("changed_objects_per_scene", cls.changed_objects_per_scene)
|
||||||
|
"""for obj_name_original in cls.changed_objects_per_scene[scene_name]:
|
||||||
|
if obj_name_original != ls.changed_objects_per_scene[scene_name][obj_name_original]"""
|
||||||
items = 0
|
items = 0
|
||||||
for scene_name in cls.changed_objects_per_scene:
|
for scene_name in cls.changed_objects_per_scene:
|
||||||
items += len(cls.changed_objects_per_scene[scene_name].keys())
|
items += len(cls.changed_objects_per_scene[scene_name].keys())
|
||||||
if items == 0:
|
if items == 0:
|
||||||
cls.changed_objects_per_scene.clear()
|
cls.changed_objects_per_scene.clear()
|
||||||
# print("changed_objects_per_scene", cls.changed_objects_per_scene)
|
#print("changed_objects_per_scene", cls.changed_objects_per_scene)
|
||||||
else:
|
|
||||||
cls.changed_objects_per_scene.clear()
|
|
||||||
|
|
||||||
|
# filter out invalid objects
|
||||||
|
"""for scene_name in cls.changed_objects_per_scene.keys():
|
||||||
|
bla = {}
|
||||||
|
for object_name in cls.changed_objects_per_scene[scene.name]:
|
||||||
|
object = cls.changed_objects_per_scene[scene.name][object_name]"""
|
||||||
|
#print("sdfsd", object, object.valid)
|
||||||
|
#if not cls.changed_objects_per_scene[scene.name][object_name].invalid:
|
||||||
|
# bla[object_name] = cls.changed_objects_per_scene[scene.name][object_name]
|
||||||
|
#cls.changed_objects_per_scene[scene.name]= bla
|
||||||
|
#cls.changed_objects_per_scene[scene_name] = [o for o in cls.changed_objects_per_scene[scene_name] if not o.invalid]
|
||||||
|
|
||||||
# get a list of exportable collections for display
|
# get a list of exportable collections for display
|
||||||
# keep it simple, just use Simplenamespace for compatibility with the rest of our code
|
# keep it simple, just use Simplenamespace for compatibility with the rest of our code
|
||||||
|
# TODO: debounce
|
||||||
|
|
||||||
|
"""export_settings_changed = did_export_settings_change()
|
||||||
tmp = {}
|
tmp = {}
|
||||||
for k in AutoExportGltfAddonPreferences.__annotations__:
|
for k in AutoExportGltfAddonPreferences.__annotations__:
|
||||||
item = AutoExportGltfAddonPreferences.__annotations__[k]
|
item = AutoExportGltfAddonPreferences.__annotations__[k]
|
||||||
print("tutu",k, item.keywords.get('default', None) )
|
|
||||||
default = item.keywords.get('default', None)
|
default = item.keywords.get('default', None)
|
||||||
tmp[k] = default
|
tmp[k] = default
|
||||||
auto_settings = get_auto_exporter_settings()
|
auto_settings = get_auto_exporter_settings()
|
||||||
for k in auto_settings:
|
for k in auto_settings:
|
||||||
print("k", k, auto_settings[k])
|
|
||||||
tmp[k] = auto_settings[k]
|
tmp[k] = auto_settings[k]
|
||||||
tmp['__annotations__'] = tmp
|
tmp['__annotations__'] = tmp
|
||||||
|
|
||||||
@ -162,17 +204,12 @@ class AutoExportTracker(PropertyGroup):
|
|||||||
export_blueprints_path = os.path.join(folder_path, export_output_folder, tmp["export_blueprints_path"]) if tmp["export_blueprints_path"] != '' else folder_path
|
export_blueprints_path = os.path.join(folder_path, export_output_folder, tmp["export_blueprints_path"]) if tmp["export_blueprints_path"] != '' else folder_path
|
||||||
tmp["export_blueprints_path"] = export_blueprints_path
|
tmp["export_blueprints_path"] = export_blueprints_path
|
||||||
tmp["export_models_path"] = export_models_path
|
tmp["export_models_path"] = export_models_path
|
||||||
|
|
||||||
addon_prefs = SimpleNamespace(**tmp)
|
addon_prefs = SimpleNamespace(**tmp)
|
||||||
|
|
||||||
#
|
#print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
|
||||||
|
(collections, collections_to_export, library_collections, collections_per_scene) = get_collections_to_export(cls.changed_objects_per_scene, export_settings_changed, addon_prefs)
|
||||||
#addon_prefs.export_blueprints_path = export_blueprints_path
|
#print("collections to export", collections_to_export)
|
||||||
#addon_prefs.export_gltf_extension = gltf_extension
|
|
||||||
#addon_prefs.export_models_path = export_models_path
|
|
||||||
|
|
||||||
|
|
||||||
(collections, collections_to_export, library_collections, collections_per_scene) = get_collections_to_export(cls.changed_objects_per_scene, False, addon_prefs)
|
|
||||||
print("collections to export", collections_to_export)
|
|
||||||
try:
|
try:
|
||||||
# we save this list of collections in the context
|
# we save this list of collections in the context
|
||||||
bpy.context.window_manager.exportedCollections.clear()
|
bpy.context.window_manager.exportedCollections.clear()
|
||||||
@ -183,7 +220,7 @@ class AutoExportTracker(PropertyGroup):
|
|||||||
except Exception as error:
|
except Exception as error:
|
||||||
pass
|
pass
|
||||||
#self.report({"ERROR"}, "Failed to populate list of exported collections/blueprints")
|
#self.report({"ERROR"}, "Failed to populate list of exported collections/blueprints")
|
||||||
|
"""
|
||||||
"""depsgraph = bpy.context.evaluated_depsgraph_get()
|
"""depsgraph = bpy.context.evaluated_depsgraph_get()
|
||||||
for update in depsgraph.updates:
|
for update in depsgraph.updates:
|
||||||
print("update", update)"""
|
print("update", update)"""
|
||||||
|
@@ -0,0 +1,47 @@
+
+from ..auto_export.preferences import AutoExportGltfAddonPreferences
+from io_scene_gltf2 import (ExportGLTF2_Base)
+
+# given the input (actual) gltf settings, filters out any invalid/useless params & params that are equal to defaults
+def generate_complete_preferences_dict_gltf(settings):
+    complete_preferences = {}
+    defaults = {}
+    gltf_parameters_to_ignore = ["use_active_collection", "use_active_collection_with_nested", "use_active_scene", "use_selection", "will_save_settings", "gltf_export_id"]
+    def filter_out(pair):
+        key, value = pair
+        if key in gltf_parameters_to_ignore:
+            return False
+        return True
+
+    for k in ExportGLTF2_Base.__annotations__: # we use parameters from the base class of the standard gltf exporter, that contains all relevant parameters
+        item = ExportGLTF2_Base.__annotations__[k]
+        #print("item", item)
+        default = item.keywords.get('default', None)
+        #complete_preferences[k] = default
+        defaults[k] = default
+
+    for key in list(settings.keys()):
+        if key in defaults and settings[key] != defaults[key]: # only write out values different from defaults
+            complete_preferences[key] = settings[key]
+
+    complete_preferences = dict(filter(filter_out, dict(complete_preferences).items()))
+    return complete_preferences
+
+# given the input (actual) auto settings, filters out any invalid/useless params & params that are equal to defaults
+def generate_complete_preferences_dict_auto(settings):
+    complete_preferences = {}
+    defaults = {}
+
+    for k in AutoExportGltfAddonPreferences.__annotations__:
+        item = AutoExportGltfAddonPreferences.__annotations__[k]
+        default = item.keywords.get('default', None)
+        #complete_preferences[k] = default
+        defaults[k] = default
+
+    for key in list(settings.keys()):
+        if key in defaults:
+            if settings[key] != defaults[key]: # only write out values different from defaults
+                complete_preferences[key] = settings[key]
+        else:
+            complete_preferences[key] = settings[key]
+    return complete_preferences
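To make the filtering concrete, a small hypothetical example of the gltf variant's behaviour (keys and defaults here are illustrative only): values equal to the exporter defaults are dropped, and so are the explicitly ignored parameters; the auto variant differs only in that it also keeps keys it has no default for.

```python
# Illustration of the "only keep non-default values" filtering, with made-up defaults.
defaults = {"export_format": "GLB", "export_yup": True}
gltf_parameters_to_ignore = ["will_save_settings"]
settings = {"export_format": "GLB", "export_yup": False, "will_save_settings": True}

kept = {k: v for k, v in settings.items() if k in defaults and v != defaults[k]}
kept = {k: v for k, v in kept.items() if k not in gltf_parameters_to_ignore}
print(kept)  # {'export_yup': False}
```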
@@ -8,7 +8,7 @@ from .object_makers import (make_empty)
 custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']

 def is_component_valid(object, component_name):
-    if "components_meta" in object:
+    if "components_meta" in object or hasattr(object, "components_meta"):
         target_components_metadata = object.components_meta.components
         component_meta = next(filter(lambda component: component["name"] == component_name, target_components_metadata), None)
         if component_meta != None:
|
|||||||
|
|
||||||
def remove_unwanted_custom_properties(object):
|
def remove_unwanted_custom_properties(object):
|
||||||
to_remove = []
|
to_remove = []
|
||||||
for component_name in object.keys():
|
component_names = list(object.keys()) # to avoid 'IDPropertyGroup changed size during iteration' issues
|
||||||
|
for component_name in component_names:
|
||||||
if not is_component_valid(object, component_name):
|
if not is_component_valid(object, component_name):
|
||||||
to_remove.append(component_name)
|
to_remove.append(component_name)
|
||||||
for cp in custom_properties_to_filter_out + to_remove:
|
for cp in custom_properties_to_filter_out + to_remove:
|
||||||
@ -27,7 +28,7 @@ def remove_unwanted_custom_properties(object):
|
|||||||
# TODO: rename actions ?
|
# TODO: rename actions ?
|
||||||
# reference https://github.com/KhronosGroup/glTF-Blender-IO/blob/main/addons/io_scene_gltf2/blender/exp/animation/gltf2_blender_gather_action.py#L481
|
# reference https://github.com/KhronosGroup/glTF-Blender-IO/blob/main/addons/io_scene_gltf2/blender/exp/animation/gltf2_blender_gather_action.py#L481
|
||||||
def copy_animation_data(source, target):
|
def copy_animation_data(source, target):
|
||||||
if source.animation_data and source.animation_data:
|
if source.animation_data:
|
||||||
ad = source.animation_data
|
ad = source.animation_data
|
||||||
|
|
||||||
blender_actions = []
|
blender_actions = []
|
||||||
@ -65,8 +66,27 @@ def copy_animation_data(source, target):
|
|||||||
target.animation_data_create()
|
target.animation_data_create()
|
||||||
target.animation_data.action = source.animation_data.action.copy()"""
|
target.animation_data.action = source.animation_data.action.copy()"""
|
||||||
# alternative method, using the built-in link animation operator
|
# alternative method, using the built-in link animation operator
|
||||||
|
|
||||||
|
#
|
||||||
|
#previous_active_object = bpy.context.view_layer.objects.active
|
||||||
|
"""bpy.context.view_layer.objects.active = source
|
||||||
|
|
||||||
|
bpy.ops.object.select_all(action='DESELECT')
|
||||||
|
#Transfer data from active object to selected objects
|
||||||
|
target.select_set(True) """
|
||||||
|
|
||||||
with bpy.context.temp_override(active_object=source, selected_editable_objects=[target]):
|
with bpy.context.temp_override(active_object=source, selected_editable_objects=[target]):
|
||||||
bpy.ops.object.make_links_data(type='ANIMATION')
|
bpy.ops.object.make_links_data(type='ANIMATION')
|
||||||
|
|
||||||
|
"""if target.animation_data == None:
|
||||||
|
target.animation_data_create()
|
||||||
|
|
||||||
|
print("copying animation data for", source.name, target.animation_data)
|
||||||
|
properties = [p.identifier for p in source.animation_data.bl_rna.properties if not p.is_readonly]
|
||||||
|
for prop in properties:
|
||||||
|
print("copying stuff", prop)
|
||||||
|
setattr(target.animation_data, prop, getattr(source.animation_data, prop))"""
|
||||||
|
|
||||||
# we add an "AnimationInfos" component
|
# we add an "AnimationInfos" component
|
||||||
target['AnimationInfos'] = f'(animations: {animations_infos})'.replace("'","")
|
target['AnimationInfos'] = f'(animations: {animations_infos})'.replace("'","")
|
||||||
|
|
||||||
@ -81,11 +101,7 @@ def copy_animation_data(source, target):
|
|||||||
markers_formated += '}'
|
markers_formated += '}'
|
||||||
target["AnimationMarkers"] = f'( {markers_formated} )'
|
target["AnimationMarkers"] = f'( {markers_formated} )'
|
||||||
|
|
||||||
"""print("copying animation data for", source.name, target.animation_data)
|
|
||||||
properties = [p.identifier for p in source.animation_data.bl_rna.properties if not p.is_readonly]
|
|
||||||
for prop in properties:
|
|
||||||
print("copying stuff", prop)
|
|
||||||
setattr(target.animation_data, prop, getattr(source.animation_data, prop))"""
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ -198,8 +214,6 @@ def clear_hollow_scene(temp_scene, original_root_collection):
|
|||||||
for child_collection in collection.children:
|
for child_collection in collection.children:
|
||||||
restore_original_names(child_collection)
|
restore_original_names(child_collection)
|
||||||
|
|
||||||
# reset original names
|
|
||||||
restore_original_names(original_root_collection)
|
|
||||||
|
|
||||||
# remove any data we created
|
# remove any data we created
|
||||||
temp_root_collection = temp_scene.collection
|
temp_root_collection = temp_scene.collection
|
||||||
@ -207,13 +221,17 @@ def clear_hollow_scene(temp_scene, original_root_collection):
|
|||||||
for object in temp_scene_objects:
|
for object in temp_scene_objects:
|
||||||
#print("removing", object.name)
|
#print("removing", object.name)
|
||||||
bpy.data.objects.remove(object, do_unlink=True)
|
bpy.data.objects.remove(object, do_unlink=True)
|
||||||
|
|
||||||
# remove the temporary scene
|
# remove the temporary scene
|
||||||
bpy.data.scenes.remove(temp_scene, do_unlink=True)
|
bpy.data.scenes.remove(temp_scene, do_unlink=True)
|
||||||
|
|
||||||
|
# reset original names
|
||||||
|
restore_original_names(original_root_collection)
|
||||||
|
|
||||||
# convenience utility to get lists of scenes
|
# convenience utility to get lists of scenes
|
||||||
def get_scenes(addon_prefs):
|
def get_scenes(addon_prefs):
|
||||||
level_scene_names= getattr(addon_prefs,"main_scene_names") #list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes")))
|
level_scene_names= getattr(addon_prefs,"main_scene_names", []) #list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes")))
|
||||||
library_scene_names = getattr(addon_prefs,"library_scene_names") #list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes")))
|
library_scene_names = getattr(addon_prefs,"library_scene_names", []) #list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes")))
|
||||||
|
|
||||||
level_scene_names = list(filter(lambda name: name in bpy.data.scenes, level_scene_names))
|
level_scene_names = list(filter(lambda name: name in bpy.data.scenes, level_scene_names))
|
||||||
library_scene_names = list(filter(lambda name: name in bpy.data.scenes, library_scene_names))
|
library_scene_names = list(filter(lambda name: name in bpy.data.scenes, library_scene_names))
|
||||||
|
tools/gltf_auto_export/helpers/ping_depsgraph_update.py (new file, 10 lines)
@@ -0,0 +1,10 @@
+import bpy
+import rna_prop_ui
+
+# fake way to make our operator's changes be visible to the change/depsgraph update handler in gltf_auto_export
+def ping_depsgraph_update(object=None):
+    if object == None:
+        object = bpy.data.scenes[0]
+    rna_prop_ui.rna_idprop_ui_create(object, "________temp", default=0)
+    rna_prop_ui.rna_idprop_ui_prop_clear(object, "________temp")
+    return None
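The helper works by side effect: creating and immediately clearing a throwaway custom property counts as a data edit, so Blender emits a depsgraph update and the handlers registered above get a chance to run. A hedged sketch of how it is meant to be scheduled (the add-on itself registers it with a 0.1 s timer in `glTF2_post_export_callback`, as shown earlier in this diff):

```python
# Sketch only: schedule a one-shot depsgraph "ping" via bpy.app.timers.
# A timer callback that returns None runs once and is then unregistered.
import bpy

def schedule_depsgraph_ping():
    if not bpy.app.timers.is_registered(ping_depsgraph_update):
        bpy.app.timers.register(ping_depsgraph_update, first_interval=0.1)
```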
154
tools/gltf_auto_export/helpers/serialize_scene.py
Normal file
154
tools/gltf_auto_export/helpers/serialize_scene.py
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
import json
|
||||||
|
import numpy as np
|
||||||
|
import bpy
|
||||||
|
from ..constants import TEMPSCENE_PREFIX
|
||||||
|
|
||||||
|
fields_to_ignore_generic = ["tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users",
|
||||||
|
'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get',
|
||||||
|
'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original',
|
||||||
|
'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type',
|
||||||
|
'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data']
|
||||||
|
|
||||||
|
# possible alternatives https://blender.stackexchange.com/questions/286010/bpy-detect-modified-mesh-data-vertices-edges-loops-or-polygons-for-cachin
|
||||||
|
def mesh_hash(obj):
|
||||||
|
# this is incomplete, how about edges ?
|
||||||
|
vertex_count = len(obj.data.vertices)
|
||||||
|
vertices_np = np.empty(vertex_count * 3, dtype=np.float32)
|
||||||
|
obj.data.vertices.foreach_get("co", vertices_np)
|
||||||
|
h = str(hash(vertices_np.tobytes()))
|
||||||
|
return h
|
||||||
|
|
||||||
|
# TODO: redo this one, this is essentially modifiec copy & pasted data, not fitting
|
||||||
|
def animation_hash(obj):
|
||||||
|
animation_data = obj.animation_data
|
||||||
|
if not animation_data:
|
||||||
|
return None
|
||||||
|
blender_actions = []
|
||||||
|
blender_tracks = {}
|
||||||
|
|
||||||
|
# TODO: this might need to be modified/ adapted to match the standard gltf exporter settings
|
||||||
|
for track in animation_data.nla_tracks:
|
||||||
|
strips = [strip for strip in track.strips if strip.action is not None]
|
||||||
|
for strip in strips:
|
||||||
|
# print(" ", source.name,'uses',strip.action.name, "active", strip.active, "action", strip.action)
|
||||||
|
blender_actions.append(strip.action)
|
||||||
|
blender_tracks[strip.action.name] = track.name
|
||||||
|
|
||||||
|
# Remove duplicate actions.
|
||||||
|
blender_actions = list(set(blender_actions))
|
||||||
|
# sort animations alphabetically (case insensitive) so they have a defined order and match Blender's Action list
|
||||||
|
blender_actions.sort(key = lambda a: a.name.lower())
|
||||||
|
|
||||||
|
markers_per_animation = {}
|
||||||
|
animations_infos = []
|
||||||
|
|
||||||
|
for action in blender_actions:
|
||||||
|
animation_name = blender_tracks[action.name]
|
||||||
|
animations_infos.append(
|
||||||
|
f'(name: "{animation_name}", frame_start: {action.frame_range[0]}, frame_end: {action.frame_range[1]}, frames_length: {action.frame_range[1] - action.frame_range[0]}, frame_start_override: {action.frame_start}, frame_end_override: {action.frame_end})'
|
||||||
|
)
|
||||||
|
markers_per_animation[animation_name] = {}
|
||||||
|
|
||||||
|
for marker in action.pose_markers:
|
||||||
|
if marker.frame not in markers_per_animation[animation_name]:
|
||||||
|
markers_per_animation[animation_name][marker.frame] = []
|
||||||
|
markers_per_animation[animation_name][marker.frame].append(marker.name)
|
||||||
|
|
||||||
|
compact_result = hash(str((blender_actions, blender_tracks, markers_per_animation, animations_infos)))
|
||||||
|
return compact_result
|
||||||
|
|
||||||
|
|
||||||
|
def camera_hash(obj):
|
||||||
|
camera_fields = ["angle", "angle_x", "angle_y", "animation_data", "background_images", "clip_end", "clip_start", "display_size", "dof", "fisheye_fov"]
|
||||||
|
camera_data = obj.data
|
||||||
|
fields_to_ignore= fields_to_ignore_generic
|
||||||
|
|
||||||
|
all_field_names = dir(camera_data)
|
||||||
|
fields = [getattr(camera_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||||
|
# TODO: the above is not enough, certain fields are left as bpy.data.xx
|
||||||
|
#print("camera", obj, fields)
|
||||||
|
return str(fields)
|
||||||
|
|
||||||
|
def light_hash(obj):
    light_data = obj.data
    fields_to_ignore = fields_to_ignore_generic

    all_field_names = dir(light_data)
    fields = [getattr(light_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
    return str(fields)

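# One way to tackle the "certain fields are left as bpy.data.xx" TODO in camera_hash
# (and the same pattern in light_hash): expand nested bpy structs through their RNA
# properties instead of relying on their default repr, which only shows the datablock
# path rather than the field values. A sketch with a hypothetical helper name:
def stable_field_repr(value):
    if hasattr(value, "bl_rna"):
        # it is a bpy struct: collect its own property values
        return str({prop.identifier: repr(getattr(value, prop.identifier, None))
                    for prop in value.bl_rna.properties if prop.identifier != "rna_type"})
    return repr(value)
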
def bones_hash(bones):
    fields_to_ignore = fields_to_ignore_generic + ['AxisRollFromMatrix', 'MatrixFromAxisRoll', 'evaluate_envelope', 'convert_local_to_pose', 'foreach_get', 'foreach_set', 'get', 'set', 'find', 'items', 'keys', 'values']

    bones_result = []
    for bone in bones:
        all_field_names = dir(bone)
        fields = [getattr(bone, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
        bones_result.append(fields)
    #print("fields of bone", bones_result)
    return str(hash(str(bones_result)))

# fixme: not good enough ?
def armature_hash(obj):
    fields_to_ignore = fields_to_ignore_generic + ['display_type', 'is_editmode', 'pose_position', 'foreach_get', 'get']
    fields_to_convert = {'bones': bones_hash}#, 'collections_all': bones_hash}
    armature_data = obj.data
    all_field_names = dir(armature_data)

    fields = [getattr(armature_data, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(armature_data, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
    #print("ARMATURE", fields)

    """for bone in armature_data.bones:
        print("bone", bone, bone_hash(bone))"""
    return str(fields)

def serialize_scene():
    print("serializing scene")
    data = {}
    for scene in bpy.data.scenes:
        if scene.name.startswith(TEMPSCENE_PREFIX):
            continue
        data[scene.name] = {}
        for object in scene.objects:
            object = bpy.data.objects[object.name]
            #print("object", object.name, object.location)
            transform = str((object.location, object.rotation_euler, object.scale)) #str((object.matrix_world.to_translation(), object.matrix_world.to_euler('XYZ'), object.matrix_world.to_quaternion()))#
            visibility = object.visible_get()
            #print("object type", object.type)
            custom_properties = {}
            for K in object.keys():
                if K not in '_RNA_UI' and K != 'components_meta':
                    #print( K , "-" , object[K] )
                    custom_properties[K] = object[K]

            animations = animation_hash(object)
            mesh = mesh_hash(object) if object.type == 'MESH' else None
            camera = camera_hash(object) if object.type == 'CAMERA' else None
            light = light_hash(object) if object.type == 'LIGHT' else None
            armature = armature_hash(object) if object.type == 'ARMATURE' else None
            parent = object.parent.name if object.parent else None
            collections = [collection.name for collection in object.users_collection]

            data[scene.name][object.name] = {
                "name": object.name,
                "transforms": transform,
                "visibility": visibility,
                "custom_properties": custom_properties,
                "animations": animations,
                "mesh": mesh,
                "camera": camera,
                "light": light,
                "armature": armature,
                "parent": parent,
                "collections": collections
            }

    """print("data", data)
    print("")
    print("")
    print("data json", json.dumps(data))"""

    return json.dumps(data)


#loc, rot, scale = bpy.context.object.matrix_world.decompose()

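# A sketch of how two serialize_scene() snapshots might be compared between saves to
# build the changed_objects_per_scene mentioned in the TODOs (hypothetical helper, not
# the add-on's actual implementation):
def compare_snapshots(previous_json, current_json):
    previous = json.loads(previous_json) if previous_json else {}
    current = json.loads(current_json)
    changed_objects_per_scene = {}
    for scene_name, objects in current.items():
        previous_objects = previous.get(scene_name, {})
        for object_name, snapshot in objects.items():
            if previous_objects.get(object_name) != snapshot:
                changed_objects_per_scene.setdefault(scene_name, []).append(object_name)
    return changed_objects_per_scene
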
@ -60,8 +60,6 @@ def make_material_object(name, location=[0,0,0], rotation=[0,0,0], scale=[1,1,1]
     else:
         # no slots
         object.data.materials.append(material)

-    #bpy.context.view_layer.objects.active = original_active_object
-
     return object

@ -304,7 +304,7 @@ def test_export_changed_parameters(setup_data):
     other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index, blueprint1_file_index]]
     other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index, blueprint1_file_index]]

-    assert modification_times[world_file_index] != modification_times_first[world_file_index]
+    assert modification_times[world_file_index] == modification_times_first[world_file_index]
     assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
     assert other_files_modification_times == other_files_modification_times_first
     # reset the comparing

@ -331,26 +331,23 @@ def test_export_changed_parameters(setup_data):

     modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
     assert modification_times != modification_times_first
-    # the "world" file should have changed (TODO: double check: this is since changing an instances collection changes the instance too ?)
+    # the "world" file should not have changed
     world_file_index = mapped_files_to_timestamps_and_index["World"][1]
-    # and the blueprint3 file too, since that is the collection we changed
+    # the blueprint3 file should have changed, since that is the collection we changed
     blueprint3_file_index = mapped_files_to_timestamps_and_index["Blueprint3"][1]
-    # and the blueprint4 file too, since it contains the collection we changed
+    # the blueprint4 file NOT, since, while it contains an instance of the collection we changed, the default export mode is "split"
     blueprint4_file_index = mapped_files_to_timestamps_and_index["Blueprint4_nested"][1]

     other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index, blueprint3_file_index, blueprint4_file_index]]
     other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index, blueprint3_file_index, blueprint4_file_index]]

-    assert modification_times[world_file_index] != modification_times_first[world_file_index]
+    assert modification_times[world_file_index] == modification_times_first[world_file_index]
     assert modification_times[blueprint3_file_index] != modification_times_first[blueprint3_file_index]
-    assert modification_times[blueprint4_file_index] != modification_times_first[blueprint4_file_index]
+    assert modification_times[blueprint4_file_index] == modification_times_first[blueprint4_file_index]
     assert other_files_modification_times == other_files_modification_times_first
     # reset the comparing
     modification_times_first = modification_times

     # now same, but using an operator
     print("----------------")
     print("change using operator")

@ -362,11 +359,7 @@ def test_export_changed_parameters(setup_data):
     bpy.ops.transform.translate(value=mathutils.Vector((2.0, 1.0, -5.0)))
     bpy.ops.transform.rotate(value=0.378874, constraint_axis=(False, False, True), mirror=False, proportional_edit_falloff='SMOOTH', proportional_size=1)
     bpy.ops.object.transform_apply()
-    bpy.ops.transform.translate(value=(0.5, 0, 0), constraint_axis=(True, False, False))
+    bpy.ops.transform.translate(value=(3.5, 0, 0), constraint_axis=(True, False, False))

-    #force an update, as apparently all the operators above do not trigger changes ???
-    rna_prop_ui.rna_idprop_ui_create(bpy.data.objects["Cube"], "________temp", default=0)
-    rna_prop_ui.rna_idprop_ui_prop_clear(bpy.data.objects["Cube"], "________temp")
-
     auto_export_operator(

@ -2,3 +2,8 @@
 - investigate clearing of changed_objects_per_scene
 - it seems bevy_components does not trigger updates
 - undo redo is ignored: ie save, do something, undo it, you still get changes
+
+- for collection instances:
+  * [ ] blueprints export should also take the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
+  * [ ] level exports should do the same