mirror of https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
synced 2024-11-22 11:50:53 +00:00

feat(auto_export): lots of cleanups, tweaks etc

* additions to tests
* ui changes
* etc

This commit is contained in:
parent 1041656dc7
commit 4c6ed3bc2a
@@ -31,7 +31,7 @@ from .auto_export.internals import (SceneLink,
     CollectionsToExport,
     CUSTOM_PG_sceneName
     )
-from .ui.main import (GLTF_PT_auto_export_changes_list, GLTF_PT_auto_export_main,
+from .ui.main import (GLTF_PT_auto_export_change_detection, GLTF_PT_auto_export_changes_list, GLTF_PT_auto_export_main,
     GLTF_PT_auto_export_root,
     GLTF_PT_auto_export_general,
     GLTF_PT_auto_export_scenes,
@@ -42,7 +42,6 @@ from .ui.main import (GLTF_PT_auto_export_changes_list, GLTF_PT_auto_export_main
     GLTF_PT_auto_export_SidePanel
     )
 from .ui.operators import (SCENES_LIST_OT_actions)
-from .helpers.ping_depsgraph_update import ping_depsgraph_update
 from .helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_gltf
 
 
@@ -114,6 +113,7 @@ classes = [
     GLTF_PT_auto_export_main,
     GLTF_PT_auto_export_root,
     GLTF_PT_auto_export_general,
+    GLTF_PT_auto_export_change_detection,
     GLTF_PT_auto_export_scenes,
     GLTF_PT_auto_export_blueprints,
     GLTF_PT_auto_export_SidePanel,
@@ -152,18 +152,14 @@ def glTF2_post_export_callback(data):
 
     # get the parameters
     scene = bpy.context.scene
-    print(dict(scene))
     if "glTF2ExportSettings" in scene:
-        print("write gltf settings")
         settings = scene["glTF2ExportSettings"]
         export_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
         # now write new settings
         export_settings.clear()
 
         current_gltf_settings = generate_complete_preferences_dict_gltf(dict(settings))
-        print("current_gltf_settings", current_gltf_settings)
         export_settings.write(json.dumps(current_gltf_settings))
-        print("done writing")
     # now reset the original gltf_settings
     if gltf_settings_backup != "":
         scene["glTF2ExportSettings"] = json.loads(gltf_settings_backup)
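The callback above caches whatever settings the stock glTF exporter was last invoked with into a hidden text datablock, so the auto-exporter can replay them later; this commit only strips the debug prints around that logic. A minimal standalone sketch of the same round-trip, reusing the datablock name from the diff (snapshot_gltf_settings is an illustrative helper, not part of the add-on):

import json
import bpy

def snapshot_gltf_settings(text_name=".gltf_auto_export_gltf_settings"):
    """Copy the scene's last-used glTF exporter settings into a hidden text datablock."""
    scene = bpy.context.scene
    if "glTF2ExportSettings" not in scene:
        return None
    settings = dict(scene["glTF2ExportSettings"])
    # the add-on first normalizes this dict via generate_complete_preferences_dict_gltf;
    # this sketch writes it as-is
    text = bpy.data.texts.get(text_name) or bpy.data.texts.new(text_name)
    text.clear()
    text.write(json.dumps(settings))
    return settings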
@@ -176,12 +172,7 @@ def glTF2_post_export_callback(data):
     last_operator = bpy.context.window_manager.auto_export_tracker.last_operator
     last_operator.filepath = ""
     last_operator.gltf_export_id = ""
 
-    # AGAIN, something that does not work withouth a timer
-    bpy.app.timers.register(ping_depsgraph_update, first_interval=0.1)
-
-
-
 def menu_func_import(self, context):
     self.layout.operator(AutoExportGLTF.bl_idname, text="glTF auto Export (.glb/gltf)")
 from bpy.app.handlers import persistent
@@ -190,10 +181,6 @@ from bpy.app.handlers import persistent
 def post_update(scene, depsgraph):
     bpy.context.window_manager.auto_export_tracker.deps_post_update_handler( scene, depsgraph)
 
-@persistent
-def pre_update(scene, depsgraph):
-    bpy.context.window_manager.auto_export_tracker.deps_pre_update_handler( scene, depsgraph)
-
 @persistent
 def post_save(scene, depsgraph):
     bpy.context.window_manager.auto_export_tracker.save_handler( scene, depsgraph)
@@ -202,7 +189,6 @@ def register():
     for cls in classes:
         bpy.utils.register_class(cls)
     # for some reason, adding these directly to the tracker class in register() do not work reliably
-    bpy.app.handlers.depsgraph_update_pre.append(pre_update)
     bpy.app.handlers.depsgraph_update_post.append(post_update)
     bpy.app.handlers.save_post.append(post_save)
 
@@ -210,19 +196,13 @@ def register():
     bpy.types.TOPBAR_MT_file_export.append(menu_func_import)
     bpy.types.WindowManager.gltf_settings_backup = StringProperty(default="")
-
-    """bpy.utils.register_class(AutoExportExtensionProperties)
-    bpy.types.Scene.AutoExportExtensionProperties = bpy.props.PointerProperty(type=AutoExportExtensionProperties)"""
 
 def unregister():
     for cls in classes:
         bpy.utils.unregister_class(cls)
     bpy.types.TOPBAR_MT_file_export.remove(menu_func_import)
 
-    bpy.app.handlers.depsgraph_update_pre.remove(pre_update)
     bpy.app.handlers.depsgraph_update_post.remove(post_update)
     bpy.app.handlers.save_post.remove(post_save)
-
-    """bpy.utils.unregister_class(AutoExportExtensionProperties)"""
 
 if "gltf_auto_export" == "__main__":
     register()
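With the pre-update handler gone, only the post-update and post-save hooks remain wired in register()/unregister(). A hedged sketch of that wiring with guards against double-appending when the add-on is reloaded (the guards are an assumption for illustration; the handler signatures mirror the diff):

import bpy
from bpy.app.handlers import persistent

@persistent
def post_update(scene, depsgraph):
    # forward to whatever tracker object owns change detection
    print("depsgraph updated for", scene.name)

@persistent
def post_save(scene, depsgraph):
    print("blend file saved, scene", scene.name)

def register_handlers():
    # guard against duplicate registration on add-on reload
    if post_update not in bpy.app.handlers.depsgraph_update_post:
        bpy.app.handlers.depsgraph_update_post.append(post_update)
    if post_save not in bpy.app.handlers.save_post:
        bpy.app.handlers.save_post.append(post_save)

def unregister_handlers():
    if post_update in bpy.app.handlers.depsgraph_update_post:
        bpy.app.handlers.depsgraph_update_post.remove(post_update)
    if post_save in bpy.app.handlers.save_post:
        bpy.app.handlers.save_post.remove(post_save)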
@@ -19,7 +19,7 @@ from ..modules.export_materials import cleanup_materials, export_materials
 from ..modules.bevy_scene_components import upsert_scene_components
 
 
-"""Main function"""
+"""this is the main 'central' function for all auto export """
 def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
     # have the export parameters (not auto export, just gltf export) have changed: if yes (for example switch from glb to gltf, compression or not, animations or not etc), we need to re-export everything
     print ("changed_export_parameters", changed_export_parameters)
@@ -66,10 +66,6 @@ def generate_gltf_export_preferences(addon_prefs):
     for key in standard_gltf_exporter_settings.keys():
         if str(key) not in constant_keys:
             gltf_export_preferences[key] = standard_gltf_exporter_settings.get(key)
-    print("")
-    print("export preferences for gltf exporter", gltf_export_preferences)
-
 
     return gltf_export_preferences
-
 
@@ -11,6 +11,7 @@ def get_collections_to_export(changes_per_scene, changed_export_parameters, addo
     export_change_detection = getattr(addon_prefs, "export_change_detection")
     export_gltf_extension = getattr(addon_prefs, "export_gltf_extension", ".glb")
     export_blueprints_path = getattr(addon_prefs,"export_blueprints_path", "")
+    collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
 
     [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
     # get a list of all collections actually in use
@@ -7,9 +7,12 @@ def get_levels_to_export(changes_per_scene, changed_export_parameters, addon_pre
     export_change_detection = getattr(addon_prefs, "export_change_detection")
     export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
     export_models_path = getattr(addon_prefs, "export_models_path")
+    collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
 
     [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
 
+    # TODO: IF collection_instances_combine_mode is not 'split' check for each scene if any object in changes_per_scene has an instance in the scene
+
     # print("levels export", "export_change_detection", export_change_detection, "changed_export_parameters",changed_export_parameters, "export_models_path", export_models_path, "export_gltf_extension", export_gltf_extension, "changes_per_scene", changes_per_scene)
     # determine list of main scenes to export
     # we have more relaxed rules to determine if the main scenes have changed : any change is ok, (allows easier handling of changes, render settings etc)
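The TODO above spells out the missing rule: when collection instances are embedded rather than split, a change inside an instanced collection should also mark the scenes that instance it as changed. A hypothetical helper illustrating that check (not part of this commit; the name is made up):

import bpy

def scene_instances_changed_object(scene, changed_object_names):
    """Return True if any changed object lives in a collection that is instanced
    (via an empty with instance_type == 'COLLECTION') in this scene."""
    changed = set(changed_object_names)
    for obj in scene.objects:
        if obj.instance_type == 'COLLECTION' and obj.instance_collection is not None:
            if {o.name for o in obj.instance_collection.all_objects} & changed:
                return True
    return False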
@@ -237,7 +237,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
         changes_per_scene = {}
         # TODO : how do we deal with changed scene names ???
         for scene in current:
-            print('scene', scene)
+            # print('scene', scene)
             previous_object_names = list(previous[scene].keys())
             current_object_names =list(current[scene].keys())
             #print("previous_object_names", len(previous_object_names), previous_object_names)
@@ -73,31 +73,6 @@ class AutoExportTracker(PropertyGroup):
         cls.changed_objects_per_scene.clear()
         # all our logic is done, mark this as done
 
-    @classmethod
-    def deps_pre_update_handler(cls, scene, depsgraph):
-        pass
-        #print("before depsgraph update", scene, depsgraph)
-
-        # only deal with changes if we are NOT in the mids of saving/exporting
-        """if cls.change_detection_enabled:
-            # ignore anything going on with temporary scenes
-            if not scene.name.startswith(TEMPSCENE_PREFIX):
-                print("depsgraph_update_post", scene.name)
-                changed_scene = scene.name or ""
-                #print("-------------")
-
-                # print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
-                # depsgraph = bpy.context.evaluated_depsgraph_get()
-                for obj in depsgraph.updates:
-                    #print("depsgraph update", obj)
-                    if isinstance(obj.id, bpy.types.Object):
-                        # get the actual object
-                        object = bpy.data.objects[obj.id.name]
-                        print(" changed object", obj.id.name, "changes", obj, "evalutated", obj.id.is_evaluated, "transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
-                    elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
-                        # print(" changed material", obj.id, "scene", scene.name,)
-                        pass"""
-
     @classmethod
     def deps_post_update_handler(cls, scene, depsgraph):
         # print("change detection enabled", cls.change_detection_enabled)
@@ -166,7 +166,6 @@ def duplicate_object(object, parent, combine_mode, destination_collection, libra
 def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, library_collections=[], addon_prefs={}):
     collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
     legacy_mode = getattr(addon_prefs, "export_legacy_mode")
-    collection_instances_combine_mode= collection_instances_combine_mode
 
     for object in source_collection.objects:
         if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
@@ -1,4 +1,5 @@
 import json
+from mathutils import Color
 import numpy as np
 import bpy
 from ..constants import TEMPSCENE_PREFIX
@@ -102,7 +103,79 @@ def armature_hash(obj):
         print("bone", bone, bone_hash(bone))"""
     return str(fields)
 
+def field_value(data):
+    pass
+
+def color(color_data):
+    # print("color", color_data, type(color_data))
+    return str(peel_value(color_data))
+
+def lineart(lineart_data):
+    fields_to_ignore = fields_to_ignore_generic
+
+    all_field_names = dir(lineart_data)
+    fields = [getattr(lineart_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+    return str(fields)
+
+def node_tree(nodetree_data):
+    fields_to_ignore = fields_to_ignore_generic+ ['contains_tree','get_output_node', 'interface_update', 'override_template_create']
+    all_field_names = dir(nodetree_data)
+    fields = [getattr(nodetree_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+
+    # print("node tree", fields)
+    return str(fields)
+
+
+def peel_value( value ):
+    try:
+        len( value )
+        return [ peel_value( x ) for x in value ]
+    except TypeError:
+        return value
+
+def material_hash(material):
+    fields_to_ignore = fields_to_ignore_generic
+    fields_to_convert = {'diffuse_color': color, 'line_color': color, 'lineart': lineart, 'node_tree': node_tree} # TODO: perhaps use types rather than names
+    all_field_names = dir(material)
+    fields = [getattr(material, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(material, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+
+    type_of = [type(getattr(material, prop, None)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+    names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+
+    tutu = [t == Color for t in type_of] # bpy.types.MaterialLineArt bpy.types.ShaderNodeTree
+    #print("fields", type_of)
+
+    """for prop in [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]:
+        bla = getattr(material, prop, None)
+        if hasattr(bla, "rna_type"):
+            print("YOLO", prop, bla, peel_value(bla), "type", type(bla), bla.rna_type, bla.rna_type == bpy.types.FloatProperty, type(bla) == bpy.types.bpy_prop_collection)
+            print("types", type(bla) == bpy.types.bpy_prop_collection, type(bla) == bpy.types.FloatColorAttributeValue)"""
+
+    # print("oooooh", material, material.bl_rna.properties.items())
+
+    return str(fields)#str(hash(str(fields)))
+
+# TODO: this is partially taken from export_materials utilities, perhaps we could avoid having to fetch things multiple times ?
+def materials_hash(obj, cache):
+    # print("materials")
+    materials = []
+    for material_slot in obj.material_slots:
+        material = material_slot.material
+        cached_hash = cache['materials'].get(material.name, None)
+        if cached_hash:
+            # print("CACHHHHHED", cached_hash)
+            materials.append(cached_hash)
+        else:
+            mat = material_hash(material)
+            cache['materials'][material.name] = mat
+            materials.append(mat)
+            # print("NOT CACHHH", mat)
+
+    # materials = [material_hash(material_slot.material) if not material_slot.material.name in cache["materials"] else cache["materials"][material_slot.material.name] for material_slot in obj.material_slots]
+    return str(hash(str(materials)))
+
 def serialize_scene():
+    cache = {"materials":{}}
     print("serializing scene")
     data = {}
     for scene in bpy.data.scenes:
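The functions added above turn a material into a comparable string: bpy values are peeled into plain Python lists, concatenated per property, and cached by material name so each material is hashed once per serialization pass. An illustrative usage sketch, assuming materials_hash() from this hunk is in scope:

import bpy

def material_fingerprints():
    cache = {"materials": {}}  # one shared cache per pass, as in serialize_scene()
    fingerprints = {}
    for obj in bpy.data.objects:
        if len(obj.material_slots) > 0:
            # repeated materials hit the cache, so each material is hashed only once
            fingerprints[obj.name] = materials_hash(obj, cache)
    return fingerprints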
@@ -128,6 +201,7 @@ def serialize_scene():
             armature = armature_hash(object) if object.type == 'ARMATURE' else None
             parent = object.parent.name if object.parent else None
             collections = [collection.name for collection in object.users_collection]
+            materials = materials_hash(object, cache) if len(object.material_slots) > 0 else None
 
             data[scene.name][object.name] = {
                 "name": object.name,
@ -140,7 +214,8 @@ def serialize_scene():
|
|||||||
"light": light,
|
"light": light,
|
||||||
"armature": armature,
|
"armature": armature,
|
||||||
"parent": parent,
|
"parent": parent,
|
||||||
"collections": collections
|
"collections": collections,
|
||||||
|
"materials": materials
|
||||||
}
|
}
|
||||||
|
|
||||||
"""print("data", data)
|
"""print("data", data)
|
||||||
@@ -420,11 +420,11 @@ def test_export_change_tracking_material_properties(setup_data):
         mapped_files_to_timestamps_and_index[file_path] = (modification_times_first[index], index)
 
     print("----------------")
-    print("main scene change (material)")
+    print("main scene change (material, clip)")
    print("----------------")
 
     bpy.data.materials["Material.001"].blend_method = 'CLIP'
 
     auto_export_operator(
         auto_export=True,
         direct_mode=True,
@ -437,12 +437,82 @@ def test_export_change_tracking_material_properties(setup_data):
|
|||||||
|
|
||||||
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
|
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
|
||||||
assert modification_times != modification_times_first
|
assert modification_times != modification_times_first
|
||||||
# only the "world" file should have changed
|
# the material is assigned to Blueprint 1 so in normal (split mode) only the "Blueprint1" file should have changed
|
||||||
world_file_index = mapped_files_to_timestamps_and_index["World"][1]
|
blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
|
||||||
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [world_file_index]]
|
# the same material is assigned to Blueprint 7 so in normal (split mode) only the "Blueprint1" file should have changed
|
||||||
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [world_file_index]]
|
blueprint7_file_index = mapped_files_to_timestamps_and_index["Blueprint7_hierarchy"][1]
|
||||||
|
|
||||||
assert modification_times[world_file_index] != modification_times_first[world_file_index]
|
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [blueprint1_file_index, blueprint7_file_index]]
|
||||||
|
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [blueprint1_file_index, blueprint7_file_index]]
|
||||||
|
|
||||||
|
assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
|
||||||
|
assert modification_times[blueprint7_file_index] != modification_times_first[blueprint7_file_index]
|
||||||
|
|
||||||
|
assert other_files_modification_times == other_files_modification_times_first
|
||||||
|
|
||||||
|
# reset the comparing
|
||||||
|
modification_times_first = modification_times
|
||||||
|
|
||||||
|
print("----------------")
|
||||||
|
print("main scene change (material, alpha_threshold)")
|
||||||
|
print("----------------")
|
||||||
|
bpy.data.materials["Material.001"].alpha_threshold = 0.2
|
||||||
|
|
||||||
|
auto_export_operator(
|
||||||
|
auto_export=True,
|
||||||
|
direct_mode=True,
|
||||||
|
export_output_folder="./models",
|
||||||
|
export_scene_settings=True,
|
||||||
|
export_blueprints=True,
|
||||||
|
export_legacy_mode=False,
|
||||||
|
export_materials_library=False
|
||||||
|
)
|
||||||
|
|
||||||
|
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
|
||||||
|
assert modification_times != modification_times_first
|
||||||
|
# the material is assigned to Blueprint 1 so in normal (split mode) only the "Blueprint1" file should have changed
|
||||||
|
blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
|
||||||
|
# the same material is assigned to Blueprint 7 so in normal (split mode) only the "Blueprint1" file should have changed
|
||||||
|
blueprint7_file_index = mapped_files_to_timestamps_and_index["Blueprint7_hierarchy"][1]
|
||||||
|
|
||||||
|
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [blueprint1_file_index, blueprint7_file_index]]
|
||||||
|
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [blueprint1_file_index, blueprint7_file_index]]
|
||||||
|
|
||||||
|
assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
|
||||||
|
assert modification_times[blueprint7_file_index] != modification_times_first[blueprint7_file_index]
|
||||||
|
assert other_files_modification_times == other_files_modification_times_first
|
||||||
|
|
||||||
|
|
||||||
|
# reset the comparing
|
||||||
|
modification_times_first = modification_times
|
||||||
|
|
||||||
|
print("----------------")
|
||||||
|
print("main scene change (material, diffuse_color)")
|
||||||
|
print("----------------")
|
||||||
|
bpy.data.materials["Material.001"].diffuse_color[0] = 0.2
|
||||||
|
|
||||||
|
auto_export_operator(
|
||||||
|
auto_export=True,
|
||||||
|
direct_mode=True,
|
||||||
|
export_output_folder="./models",
|
||||||
|
export_scene_settings=True,
|
||||||
|
export_blueprints=True,
|
||||||
|
export_legacy_mode=False,
|
||||||
|
export_materials_library=False
|
||||||
|
)
|
||||||
|
|
||||||
|
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths + [world_file_path]))
|
||||||
|
assert modification_times != modification_times_first
|
||||||
|
# the material is assigned to Blueprint 1 so in normal (split mode) only the "Blueprint1" file should have changed
|
||||||
|
blueprint1_file_index = mapped_files_to_timestamps_and_index["Blueprint1"][1]
|
||||||
|
# the same material is assigned to Blueprint 7 so in normal (split mode) only the "Blueprint1" file should have changed
|
||||||
|
blueprint7_file_index = mapped_files_to_timestamps_and_index["Blueprint7_hierarchy"][1]
|
||||||
|
|
||||||
|
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in [blueprint1_file_index, blueprint7_file_index]]
|
||||||
|
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in [blueprint1_file_index, blueprint7_file_index]]
|
||||||
|
|
||||||
|
assert modification_times[blueprint1_file_index] != modification_times_first[blueprint1_file_index]
|
||||||
|
assert modification_times[blueprint7_file_index] != modification_times_first[blueprint7_file_index]
|
||||||
assert other_files_modification_times == other_files_modification_times_first
|
assert other_files_modification_times == other_files_modification_times_first
|
||||||
|
|
||||||
|
|
||||||
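Each of the three material tweaks above (blend_method, alpha_threshold, diffuse_color) repeats the same block of assertions. A hypothetical helper that factors out the "only these blueprint files were re-exported" check (not part of the test suite; names are illustrative):

def assert_only_changed(mapped_files_to_timestamps_and_index, previous_times, current_times, changed_keys):
    # indices of the files that are allowed (and required) to have a new mtime
    changed_indices = {mapped_files_to_timestamps_and_index[key][1] for key in changed_keys}
    for index, (previous, current) in enumerate(zip(previous_times, current_times)):
        if index in changed_indices:
            assert current != previous, f"file {index} should have been re-exported"
        else:
            assert current == previous, f"file {index} should not have been re-exported"

Usage would mirror the test: assert_only_changed(mapped_files_to_timestamps_and_index, modification_times_first, modification_times, ["Blueprint1", "Blueprint7_hierarchy"]).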
@@ -456,7 +526,7 @@ def test_export_change_tracking_material_properties(setup_data):
     - removes generated files
 
     """
-def test_export_various_changes(setup_data):
+def test_export_various_chained_changes(setup_data):
     root_path = "../../testing/bevy_example"
     assets_root_path = os.path.join(root_path, "assets")
     models_path = os.path.join(assets_root_path, "models")
@ -626,6 +696,7 @@ def test_export_various_changes(setup_data):
|
|||||||
assert modification_times[blueprint3_file_index] != modification_times_first[blueprint3_file_index]
|
assert modification_times[blueprint3_file_index] != modification_times_first[blueprint3_file_index]
|
||||||
assert modification_times[blueprint4_file_index] == modification_times_first[blueprint4_file_index]
|
assert modification_times[blueprint4_file_index] == modification_times_first[blueprint4_file_index]
|
||||||
assert other_files_modification_times == other_files_modification_times_first
|
assert other_files_modification_times == other_files_modification_times_first
|
||||||
|
|
||||||
# reset the comparing
|
# reset the comparing
|
||||||
modification_times_first = modification_times
|
modification_times_first = modification_times
|
||||||
|
|
||||||
@ -633,16 +704,14 @@ def test_export_various_changes(setup_data):
|
|||||||
print("----------------")
|
print("----------------")
|
||||||
print("change using operator")
|
print("change using operator")
|
||||||
print("----------------")
|
print("----------------")
|
||||||
bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but ..
|
|
||||||
|
|
||||||
with bpy.context.temp_override(active_object=bpy.data.objects["Cube"], selected_objects=[bpy.data.objects["Cube"]]):
|
with bpy.context.temp_override(active_object=bpy.data.objects["Cube"], selected_objects=[bpy.data.objects["Cube"]], scene=bpy.data.scenes["World"]):
|
||||||
print("translate using operator")
|
print("translate using operator")
|
||||||
bpy.ops.transform.translate(value=mathutils.Vector((2.0, 1.0, -5.0)))
|
bpy.ops.transform.translate(value=mathutils.Vector((2.0, 1.0, -5.0)))
|
||||||
bpy.ops.transform.rotate(value=0.378874, constraint_axis=(False, False, True), mirror=False, proportional_edit_falloff='SMOOTH', proportional_size=1)
|
bpy.ops.transform.rotate(value=0.378874, constraint_axis=(False, False, True), mirror=False, proportional_edit_falloff='SMOOTH', proportional_size=1)
|
||||||
bpy.ops.object.transform_apply()
|
bpy.ops.object.transform_apply()
|
||||||
bpy.ops.transform.translate(value=(3.5, 0, 0), constraint_axis=(True, False, False))
|
bpy.ops.transform.translate(value=(3.5, 0, 0), constraint_axis=(True, False, False))
|
||||||
|
|
||||||
|
|
||||||
auto_export_operator(
|
auto_export_operator(
|
||||||
auto_export=True,
|
auto_export=True,
|
||||||
direct_mode=True,
|
direct_mode=True,
|
||||||
@@ -4,6 +4,14 @@
 - undo redo is ignored: ie save, do something, undo it, you still get changes
 
 
-- for collection instances:
-    * [ ] blueprints export should also take the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
-    * [ ] level exports should do the same
+- [ ] serialize scene
+- [ ] for collection instances:
+    * [ ] blueprints export should also take the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
+    * [ ] level exports should do the same
+- [ ] add tests for the above
+- [ ] look into caching for serialize scene
+- [ ] replace field name based logic with type base logic
+
+- [ ] remove bulk of tracker related code
+- [ ] clean up
+- [x] split up change detection in settings to its own panel
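The "replace field name based logic with type base logic" item refers to material_hash(), which currently picks converters by property name ('diffuse_color', 'node_tree', ...). A hypothetical sketch of dispatching on the value's type instead (illustration only; the converters are placeholders):

import bpy
from mathutils import Color

def convert_field(value):
    converters = {
        Color: lambda v: str(list(v)),                    # colors flatten to a float list
        bpy.types.ShaderNodeTree: lambda v: str(dir(v)),  # placeholder, mirrors the dir()-based node_tree() hash
    }
    for value_type, converter in converters.items():
        if isinstance(value, value_type):
            return converter(value)
    return value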
@@ -143,10 +143,41 @@ class GLTF_PT_auto_export_general(bpy.types.Panel):
 
         layout.active = operator.auto_export
         layout.prop(operator, "export_output_folder")
-        layout.prop(operator, "export_change_detection")
         layout.prop(operator, "export_scene_settings")
         layout.prop(operator, "export_legacy_mode")
+
+
+class GLTF_PT_auto_export_change_detection(bpy.types.Panel):
+    bl_space_type = 'FILE_BROWSER'
+    bl_region_type = 'TOOL_PROPS'
+    bl_label = "Change detection"
+    bl_parent_id = "GLTF_PT_auto_export_root"
+
+    @classmethod
+    def poll(cls, context):
+        sfile = context.space_data
+        operator = sfile.active_operator
+
+        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"
+    def draw_header(self, context):
+        layout = self.layout
+        sfile = context.space_data
+        operator = sfile.active_operator
+        layout.prop(operator, "export_change_detection", text="")
+
+    def draw(self, context):
+        layout = self.layout
+        layout.use_property_split = True
+        layout.use_property_decorate = False # No animation.
+
+        sfile = context.space_data
+        operator = sfile.active_operator
+
+        layout.active = operator.auto_export
+        layout.prop(operator, "export_change_detection")
+
+
 
 class GLTF_PT_auto_export_scenes(bpy.types.Panel):
     bl_space_type = 'FILE_BROWSER'
     bl_region_type = 'TOOL_PROPS'
@@ -251,8 +282,6 @@ class GLTF_PT_auto_export_blueprints(bpy.types.Panel):
         operator = sfile.active_operator
         layout.prop(operator, "export_blueprints", text="")
-
-        #self.layout.prop(operator, "auto_export", text="")
 
     def draw(self, context):
         layout = self.layout
         layout.use_property_split = True