diff --git a/tools/assets/materials/testing_materials_library.glb b/tools/assets/materials/testing_materials_library.glb deleted file mode 100644 index f355790..0000000 Binary files a/tools/assets/materials/testing_materials_library.glb and /dev/null differ diff --git a/tools/blenvy/__init__.py b/tools/blenvy/__init__.py new file mode 100644 index 0000000..fbdf335 --- /dev/null +++ b/tools/blenvy/__init__.py @@ -0,0 +1,185 @@ +bl_info = { + "name": "blenvy", + "author": "kaosigh", + "version": (0, 1, 0), + "blender": (3, 4, 0), + "location": "File > Import-Export", + "description": "tooling for the Bevy engine", + "warning": "", + "wiki_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow", + "tracker_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow/issues/new", + "category": "Import-Export" +} + +import bpy +from bpy.app.handlers import persistent +from bpy.props import (StringProperty) + +# components management +from .bevy_components.components.operators import CopyComponentOperator, Fix_Component_Operator, OT_rename_component, RemoveComponentFromAllObjectsOperator, RemoveComponentOperator, GenerateComponent_From_custom_property_Operator, PasteComponentOperator, AddComponentOperator, RenameHelper, Toggle_ComponentVisibility + +from .bevy_components.registry.registry import ComponentsRegistry,MissingBevyType +from .bevy_components.registry.operators import (COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL, COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT, COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL, COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT, OT_select_component_name_to_replace, OT_select_object, ReloadRegistryOperator, OT_OpenFilebrowser) +from .bevy_components.registry.ui import (BEVY_COMPONENTS_PT_Configuration, BEVY_COMPONENTS_PT_AdvancedToolsPanel, BEVY_COMPONENTS_PT_MissingTypesPanel, MISSING_TYPES_UL_List) + +from .bevy_components.components.metadata import (ComponentMetadata, 
ComponentsMeta) +from .bevy_components.components.lists import GENERIC_LIST_OT_actions, Generic_LIST_OT_AddItem, Generic_LIST_OT_RemoveItem, Generic_LIST_OT_SelectItem +from .bevy_components.components.maps import GENERIC_MAP_OT_actions +from .bevy_components.components.definitions_list import (ComponentDefinitionsList, ClearComponentDefinitionsList) +from .bevy_components.components.ui import (BEVY_COMPONENTS_PT_ComponentsPanel) + +# auto export +from .gltf_auto_export.auto_export.operators import AutoExportGLTF +from .gltf_auto_export.auto_export.tracker import AutoExportTracker +from .gltf_auto_export.auto_export.preferences import (AutoExportGltfAddonPreferences) + +from .gltf_auto_export.auto_export.internals import (SceneLink, + SceneLinks, + CollectionToExport, + BlueprintsToExport, + CUSTOM_PG_sceneName + ) +from .gltf_auto_export.ui.main import (GLTF_PT_auto_export_change_detection, GLTF_PT_auto_export_changes_list, GLTF_PT_auto_export_main, + GLTF_PT_auto_export_root, + GLTF_PT_auto_export_general, + GLTF_PT_auto_export_scenes, + GLTF_PT_auto_export_blueprints, + SCENE_UL_GLTF_auto_export, + + GLTF_PT_auto_export_SidePanel + ) +from .gltf_auto_export.ui.operators import (OT_OpenFolderbrowser, SCENES_LIST_OT_actions) + +# asset management +from .assets.ui import GLTF_PT_auto_export_assets +from .assets.assets_registry import AssetsRegistry +from .assets.operators import OT_add_bevy_asset, OT_remove_bevy_asset + +# blueprints management +from .blueprints.ui import GLTF_PT_auto_export_blueprints_list +from .blueprints.blueprints_registry import BlueprintsRegistry +from .blueprints.operators import OT_select_blueprint + +# blenvy core +from .core.ui import BLENVY_PT_SidePanel +from .core.blenvy_manager import BlenvyManager +from .core.operators import OT_switch_bevy_tooling + +classes = [ + # blenvy + BLENVY_PT_SidePanel, + + + # bevy components + AddComponentOperator, + CopyComponentOperator, + PasteComponentOperator, + RemoveComponentOperator, + 
RemoveComponentFromAllObjectsOperator, + Fix_Component_Operator, + OT_rename_component, + RenameHelper, + GenerateComponent_From_custom_property_Operator, + Toggle_ComponentVisibility, + + ComponentDefinitionsList, + ClearComponentDefinitionsList, + + ComponentMetadata, + ComponentsMeta, + MissingBevyType, + ComponentsRegistry, + + OT_OpenFilebrowser, + ReloadRegistryOperator, + COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL, + COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT, + + COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL, + COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT, + + OT_select_object, + OT_select_component_name_to_replace, + + BEVY_COMPONENTS_PT_ComponentsPanel, + BEVY_COMPONENTS_PT_AdvancedToolsPanel, + BEVY_COMPONENTS_PT_Configuration, + MISSING_TYPES_UL_List, + BEVY_COMPONENTS_PT_MissingTypesPanel, + + Generic_LIST_OT_SelectItem, + Generic_LIST_OT_AddItem, + Generic_LIST_OT_RemoveItem, + GENERIC_LIST_OT_actions, + + GENERIC_MAP_OT_actions, + + # gltf auto export + SceneLink, + SceneLinks, + CUSTOM_PG_sceneName, + SCENE_UL_GLTF_auto_export, + SCENES_LIST_OT_actions, + + OT_OpenFolderbrowser, + AutoExportGLTF, + + CollectionToExport, + BlueprintsToExport, + + GLTF_PT_auto_export_main, + GLTF_PT_auto_export_root, + GLTF_PT_auto_export_general, + GLTF_PT_auto_export_change_detection, + GLTF_PT_auto_export_scenes, + GLTF_PT_auto_export_blueprints, + GLTF_PT_auto_export_SidePanel, + AutoExportTracker, + + # blenvy + BlenvyManager, + OT_switch_bevy_tooling, + + AssetsRegistry, + OT_add_bevy_asset, + OT_remove_bevy_asset, + GLTF_PT_auto_export_assets, + + BlueprintsRegistry, + OT_select_blueprint, + GLTF_PT_auto_export_blueprints_list, +] + + +@persistent +def post_update(scene, depsgraph): + bpy.context.window_manager.auto_export_tracker.deps_post_update_handler( scene, depsgraph) + +@persistent +def post_save(scene, depsgraph): + bpy.context.window_manager.auto_export_tracker.save_handler( scene, depsgraph) + +@persistent +def 
post_load(file_name): + registry = bpy.context.window_manager.components_registry + if registry != None: + registry.load_settings() + +def register(): + for cls in classes: + bpy.utils.register_class(cls) + + bpy.app.handlers.load_post.append(post_load) + # for some reason, adding these directly to the tracker class in register() do not work reliably + bpy.app.handlers.depsgraph_update_post.append(post_update) + bpy.app.handlers.save_post.append(post_save) + +def unregister(): + for cls in classes: + bpy.utils.unregister_class(cls) + bpy.app.handlers.load_post.remove(post_load) + bpy.app.handlers.depsgraph_update_post.remove(post_update) + bpy.app.handlers.save_post.remove(post_save) + + +# print("TOTO")  # FIXME(review): leftover debug print removed from import-time execution \ No newline at end of file diff --git a/tools/gltf_auto_export/assets/__init__.py b/tools/blenvy/assets/__init__.py similarity index 100% rename from tools/gltf_auto_export/assets/__init__.py rename to tools/blenvy/assets/__init__.py diff --git a/tools/gltf_auto_export/assets/assets_registry.py b/tools/blenvy/assets/assets_registry.py similarity index 100% rename from tools/gltf_auto_export/assets/assets_registry.py rename to tools/blenvy/assets/assets_registry.py diff --git a/tools/gltf_auto_export/assets/operators.py b/tools/blenvy/assets/operators.py index 1dc2f66..cc7bd33 100644 --- a/tools/gltf_auto_export/assets/operators.py +++ b/tools/blenvy/assets/operators.py @@ -15,7 +15,7 @@ class OT_add_bevy_asset(Operator): description="name of asset to add", ) # type: ignore - asset_type: bpy.types.WindowManager.asset_type_selector = EnumProperty( + asset_type: EnumProperty( items=( ('MODEL', "Model", ""), ('AUDIO', "Audio", ""), diff --git a/tools/gltf_auto_export/assets/ui.py b/tools/blenvy/assets/ui.py similarity index 85% rename from tools/gltf_auto_export/assets/ui.py rename to tools/blenvy/assets/ui.py index 382e1cd..cef22f4 100644 ---
a/tools/gltf_auto_export/assets/ui.py +++ b/tools/blenvy/assets/ui.py @@ -4,7 +4,7 @@ import json class GLTF_PT_auto_export_assets(bpy.types.Panel): bl_space_type = 'VIEW_3D' bl_region_type = 'UI' - bl_label = "Assets" + bl_label = "" bl_parent_id = "BLENVY_PT_SidePanel" bl_options = {'DEFAULT_CLOSED'} @@ -12,6 +12,15 @@ class GLTF_PT_auto_export_assets(bpy.types.Panel): def poll(cls, context): return context.window_manager.blenvy.mode == 'ASSETS' + def draw_header(self, context): + layout = self.layout + name = "" + if context.collection is not None and context.collection.name == 'Scene Collection': + name = f"WORLD/LEVEL: {context.scene.name}" + else: + name = f"BLUEPRINT: {context.collection.name}" + layout.label(text=f"Assets For {name}") + def draw(self, context): layout = self.layout layout.use_property_split = True diff --git a/tools/gltf_auto_export/blenvy/__init__.py b/tools/blenvy/bevy_components/__init__.py similarity index 100% rename from tools/gltf_auto_export/blenvy/__init__.py rename to tools/blenvy/bevy_components/__init__.py diff --git a/tools/gltf_auto_export/blueprints/__init__.py b/tools/blenvy/bevy_components/components/__init__.py similarity index 100% rename from tools/gltf_auto_export/blueprints/__init__.py rename to tools/blenvy/bevy_components/components/__init__.py diff --git a/tools/blenvy/bevy_components/components/definitions_list.py b/tools/blenvy/bevy_components/components/definitions_list.py new file mode 100644 index 0000000..56cfc83 --- /dev/null +++ b/tools/blenvy/bevy_components/components/definitions_list.py @@ -0,0 +1,57 @@ +import bpy +from bpy.props import (StringProperty) + +# this one is for UI only, and its inner list contains a useable list of shortnames of components +class ComponentDefinitionsList(bpy.types.PropertyGroup): + + # FIXME: not sure, hard coded exclude list, feels wrong + exclude = ['Parent', 'Children'] + + def add_component_to_ui_list(self, context): + #print("add components to ui_list") + items = [] + 
type_infos = context.window_manager.components_registry.type_infos + for long_name in type_infos.keys(): + definition = type_infos[long_name] + short_name = definition["short_name"] + is_component = definition['isComponent'] if "isComponent" in definition else False + + if self.filter in short_name and is_component: + if not 'Handle' in short_name and not "Cow" in short_name and not "AssetId" in short_name and short_name not in self.exclude: # FIXME: hard coded, seems wrong + items.append((long_name, short_name, long_name)) + + items.sort(key=lambda a: a[1]) + return items + + @classmethod + def register(cls): + bpy.types.WindowManager.components_list = bpy.props.PointerProperty(type=ComponentDefinitionsList) + + @classmethod + def unregister(cls): + del bpy.types.WindowManager.components_list + + list : bpy.props.EnumProperty( + name="list", + description="list", + # items argument required to initialize, just filled with empty values + items = add_component_to_ui_list, + ) # type: ignore + filter: StringProperty( + name="component filter", + description="filter for the components list", + options={'TEXTEDIT_UPDATE'} + ) # type: ignore + + +class ClearComponentDefinitionsList(bpy.types.Operator): + ''' clear list of bpy.context.collection.component_definitions ''' + bl_label = "clear component definitions" + bl_idname = "components.clear_component_definitions" + + def execute(self, context): + # create a new item, assign its properties + bpy.context.collection.component_definitions.clear() + + return {'FINISHED'} + diff --git a/tools/blenvy/bevy_components/components/helpers.py b/tools/blenvy/bevy_components/components/helpers.py new file mode 100644 index 0000000..4a45b62 --- /dev/null +++ b/tools/blenvy/bevy_components/components/helpers.py @@ -0,0 +1,6 @@ +import rna_prop_ui + +# fake way to make our operator's changes be visible to the change/depsgraph update handler in gltf_auto_export +def ping_depsgraph_update(object): + 
rna_prop_ui.rna_idprop_ui_create(object, "________temp", default=0) + rna_prop_ui.rna_idprop_ui_prop_clear(object, "________temp") \ No newline at end of file diff --git a/tools/blenvy/bevy_components/components/lists.py b/tools/blenvy/bevy_components/components/lists.py new file mode 100644 index 0000000..a613a83 --- /dev/null +++ b/tools/blenvy/bevy_components/components/lists.py @@ -0,0 +1,170 @@ +import json +from bpy_types import Operator, UIList +from bpy.props import (StringProperty, EnumProperty, PointerProperty, FloatVectorProperty, IntProperty) + +class Generic_LIST_OT_AddItem(Operator): + """Add a new item to the list.""" + bl_idname = "generic_list.add_item" + bl_label = "Add a new item" + + property_group_path: StringProperty( + name="property group path", + description="", + ) # type: ignore + + component_name: StringProperty( + name="component name", + description="", + ) # type: ignore + + def execute(self, context): + print("") + object = context.object + # information is stored in component meta + components_in_object = object.components_meta.components + component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None) + + propertyGroup = component_meta + for path_item in json.loads(self.property_group_path): + propertyGroup = getattr(propertyGroup, path_item) + + print("list container", propertyGroup, dict(propertyGroup)) + target_list = getattr(propertyGroup, "list") + index = getattr(propertyGroup, "list_index") + item = target_list.add() + propertyGroup.list_index = index + 1 # we use this to force the change detection + + print("added item", item, item.field_names, getattr(item, "field_names")) + print("") + return{'FINISHED'} + + +class Generic_LIST_OT_RemoveItem(Operator): + """Remove an item to the list.""" + bl_idname = "generic_list.remove_item" + bl_label = "Remove selected item" + + property_group_path: StringProperty( + name="property group path", + description="", + ) # type: 
ignore + + component_name: StringProperty( + name="component name", + description="", + ) # type: ignore + def execute(self, context): + print("remove from list", context.object) + + object = context.object + # information is stored in component meta + components_in_object = object.components_meta.components + component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None) + + propertyGroup = component_meta + for path_item in json.loads(self.property_group_path): + propertyGroup = getattr(propertyGroup, path_item) + + target_list = getattr(propertyGroup, "list") + index = getattr(propertyGroup, "list_index") + target_list.remove(index) + propertyGroup.list_index = min(max(0, index - 1), len(target_list) - 1) + return{'FINISHED'} + + +class Generic_LIST_OT_SelectItem(Operator): + """Select an item in the list.""" + bl_idname = "generic_list.select_item" + bl_label = "select an item" + + + property_group_path: StringProperty( + name="property group path", + description="", + ) # type: ignore + + component_name: StringProperty( + name="component name", + description="", + ) # type: ignore + + selection_index: IntProperty() # type: ignore + + def execute(self, context): + print("select in list", context.object) + + object = context.object + # information is stored in component meta + components_in_object = object.components_meta.components + component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None) + + propertyGroup = component_meta + for path_item in json.loads(self.property_group_path): + propertyGroup = getattr(propertyGroup, path_item) + + target_list = getattr(propertyGroup, "list") + index = getattr(propertyGroup, "list_index") + + propertyGroup.list_index = self.selection_index + return{'FINISHED'} + + +class GENERIC_LIST_OT_actions(Operator): + """Move items up and down, add and remove""" + bl_idname = "generic_list.list_action" + bl_label 
= "List Actions" + bl_description = "Move items up and down, add and remove" + bl_options = {'REGISTER', 'UNDO'} + + action: EnumProperty( + items=( + ('UP', "Up", ""), + ('DOWN', "Down", ""), + ('REMOVE', "Remove", ""), + ('ADD', "Add", ""))) # type: ignore + + property_group_path: StringProperty( + name="property group path", + description="", + ) # type: ignore + + component_name: StringProperty( + name="component name", + description="", + ) # type: ignore + + def invoke(self, context, event): + object = context.object + # information is stored in component meta + components_in_object = object.components_meta.components + component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None) + + propertyGroup = component_meta + for path_item in json.loads(self.property_group_path): + propertyGroup = getattr(propertyGroup, path_item) + + target_list = getattr(propertyGroup, "list") + index = getattr(propertyGroup, "list_index") + + + if self.action == 'DOWN' and index < len(target_list) - 1: + #item_next = scn.rule_list[index + 1].name + target_list.move(index, index + 1) + propertyGroup.list_index += 1 + + elif self.action == 'UP' and index >= 1: + #item_prev = scn.rule_list[index - 1].name + target_list.move(index, index - 1) + propertyGroup.list_index -= 1 + + elif self.action == 'REMOVE': + target_list.remove(index) + propertyGroup.list_index = min(max(0, index - 1), len(target_list) - 1) + + if self.action == 'ADD': + item = target_list.add() + propertyGroup.list_index = index + 1 # we use this to force the change detection + #info = '"%s" added to list' % (item.name) + #self.report({'INFO'}, info) + + return {"FINISHED"} \ No newline at end of file diff --git a/tools/blenvy/bevy_components/components/maps.py b/tools/blenvy/bevy_components/components/maps.py new file mode 100644 index 0000000..e357aab --- /dev/null +++ b/tools/blenvy/bevy_components/components/maps.py @@ -0,0 +1,121 @@ +import json +from 
bpy_types import Operator, UIList +from bpy.props import (StringProperty, EnumProperty, PointerProperty, FloatVectorProperty, IntProperty) + +from ..propGroups.conversions_from_prop_group import property_group_value_to_custom_property_value + +class GENERIC_MAP_OT_actions(Operator): + """Move items up and down, add and remove""" + bl_idname = "generic_map.map_action" + bl_label = "Map Actions" + bl_description = "Move items up and down, add and remove" + bl_options = {'REGISTER', 'UNDO'} + + action: EnumProperty( + items=( + ('UP', "Up", ""), + ('DOWN', "Down", ""), + ('REMOVE', "Remove", ""), + ('ADD', "Add", ""))) # type: ignore + + property_group_path: StringProperty( + name="property group path", + description="", + ) # type: ignore + + component_name: StringProperty( + name="component name", + description="", + ) # type: ignore + + target_index: IntProperty(name="target index", description="index of item to manipulate")# type: ignore + + def invoke(self, context, event): + object = context.object + # information is stored in component meta + components_in_object = object.components_meta.components + component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None) + + propertyGroup = component_meta + for path_item in json.loads(self.property_group_path): + propertyGroup = getattr(propertyGroup, path_item) + + keys_list = getattr(propertyGroup, "list") + index = getattr(propertyGroup, "list_index") + + values_list = getattr(propertyGroup, "values_list") + values_index = getattr(propertyGroup, "values_list_index") + + key_setter = getattr(propertyGroup, "keys_setter") + value_setter = getattr(propertyGroup, "values_setter") + + if self.action == 'DOWN' and index < len(keys_list) - 1: + #item_next = scn.rule_list[index + 1].name + keys_list.move(index, index + 1) + propertyGroup.list_index += 1 + + elif self.action == 'UP' and index >= 1: + #item_prev = scn.rule_list[index - 1].name + keys_list.move(index, 
index - 1) + propertyGroup.list_index -= 1 + + elif self.action == 'REMOVE': + index = self.target_index + keys_list.remove(index) + values_list.remove(index) + propertyGroup.list_index = min(max(0, index - 1), len(keys_list) - 1) + propertyGroup.values_index = min(max(0, index - 1), len(keys_list) - 1) + + if self.action == 'ADD': + print("keys_list", keys_list) + + # first we gather all key/value pairs + hashmap = {} + for index, key in enumerate(keys_list): + key_entry = {} + for field_name in key.field_names: + key_entry[field_name] = getattr(key, field_name, None) + value_entry = {} + for field_name in values_list[index].field_names: + value_entry[field_name] = values_list[index][field_name] + hashmap[json.dumps(key_entry)] = index + print("hashmap", hashmap ) + + # then we need to find the index of a specific value if it exists + key_entry = {} + for field_name in key_setter.field_names: + key_entry[field_name] = getattr(key_setter, field_name, None) + key_to_add = json.dumps(key_entry) + existing_index = hashmap.get(key_to_add, None) + print("existing_index", existing_index) + + if existing_index is None: + print("adding new value") + key = keys_list.add() + # copy the values over + for field_name in key_setter.field_names: + val = getattr(key_setter, field_name, None) + if val is not None: + key[field_name] = val + # TODO: add error handling + + value = values_list.add() + # copy the values over + for field_name in value_setter.field_names: + val = getattr(value_setter, field_name, None) + if val is not None: + value[field_name] = val + # TODO: add error handling + + propertyGroup.list_index = index + 1 # we use this to force the change detection + propertyGroup.values_index = index + 1 # we use this to force the change detection + else: + print("overriding value") + for field_name in value_setter.field_names: + values_list[existing_index][field_name] = value_setter[field_name] + + + #info = '"%s" added to list' % (item.name) + #self.report({'INFO'}, info) 
+ + return {"FINISHED"} \ No newline at end of file diff --git a/tools/blenvy/bevy_components/components/metadata.py b/tools/blenvy/bevy_components/components/metadata.py new file mode 100644 index 0000000..47ee2f4 --- /dev/null +++ b/tools/blenvy/bevy_components/components/metadata.py @@ -0,0 +1,344 @@ +import bpy +from bpy.props import (StringProperty, BoolProperty, PointerProperty) +from bpy_types import (PropertyGroup) + +from ..propGroups.conversions_from_prop_group import property_group_value_to_custom_property_value +from ..propGroups.conversions_to_prop_group import property_group_value_from_custom_property_value + +class ComponentMetadata(bpy.types.PropertyGroup): + short_name : bpy.props.StringProperty( + name = "name", + default = "" + ) # type: ignore + + long_name : bpy.props.StringProperty( + name = "long name", + default = "" + ) # type: ignore + + values: bpy.props.StringProperty( + name = "Value", + default = "" + ) # type: ignore + + enabled: BoolProperty( + name="enabled", + description="component enabled", + default=True + ) # type: ignore + + invalid: BoolProperty( + name="invalid", + description="component is invalid, because of missing registration/ other issues", + default=False + ) # type: ignore + + invalid_details: StringProperty( + name="invalid details", + description="detailed information about why the component is invalid", + default="" + ) # type: ignore + + visible: BoolProperty( # REALLY dislike doing this for UI control, but ok hack for now + default=True + ) # type: ignore + +class ComponentsMeta(PropertyGroup): + infos_per_component: StringProperty( + name="infos per component", + description="component" + ) # type: ignore + components: bpy.props.CollectionProperty(type = ComponentMetadata) # type: ignore + + @classmethod + def register(cls): + bpy.types.Object.components_meta = PointerProperty(type=ComponentsMeta) + + @classmethod + def unregister(cls): + del bpy.types.Object.components_meta + +# remove no longer valid metadata 
from object +def cleanup_invalid_metadata(object): + bevy_components = get_bevy_components(object) + if len(bevy_components.keys()) == 0: # no components, bail out + return + components_metadata = object.components_meta.components + to_remove = [] + for index, component_meta in enumerate(components_metadata): + long_name = component_meta.long_name + if long_name not in bevy_components.keys(): + print("component:", long_name, "present in metadata, but not in object") + to_remove.append(index) + for index in to_remove: + components_metadata.remove(index) + + +# returns a component definition ( an entry in registry's type_infos) with matching long name or None if nothing has been found +def find_component_definition_from_long_name(long_name): + registry = bpy.context.window_manager.components_registry + return registry.type_infos.get(long_name, None) + +# FIXME: feels a bit heavy duty, should only be done +# if the components panel is active ? +def ensure_metadata_for_all_objects(): + for object in bpy.data.objects: + add_metadata_to_components_without_metadata(object) + +# returns whether an object has custom properties without matching metadata +def do_object_custom_properties_have_missing_metadata(object): + components_metadata = getattr(object, "components_meta", None) + if components_metadata == None: + return True + + components_metadata = components_metadata.components + + missing_metadata = False + for component_name in get_bevy_components(object) : + if component_name == "components_meta": + continue + component_meta = next(filter(lambda component: component["long_name"] == component_name, components_metadata), None) + if component_meta == None: + # current component has no metadata but is there even a compatible type in the registry ? 
+ # if not ignore it + component_definition = find_component_definition_from_long_name(component_name) + if component_definition != None: + missing_metadata = True + break + + return missing_metadata + + +import json + +def upsert_bevy_component(object, long_name, value): + if not 'bevy_components' in object: + object['bevy_components'] = '{}' + bevy_components = json.loads(object['bevy_components']) + bevy_components[long_name] = value + object['bevy_components'] = json.dumps(bevy_components) + #object['bevy_components'][long_name] = value # Sigh, this does not work, hits Blender's 63 char length limit + +def remove_bevy_component(object, long_name): + if 'bevy_components' in object: + bevy_components = json.loads(object['bevy_components']) + if long_name in bevy_components: + del bevy_components[long_name] + object['bevy_components'] = json.dumps(bevy_components) + if long_name in object: + del object[long_name] + +def get_bevy_components(object): + if 'bevy_components' in object: + bevy_components = json.loads(object['bevy_components']) + return bevy_components + return {} + +def get_bevy_component_value_by_long_name(object, long_name): + bevy_components = get_bevy_components(object) + if len(bevy_components.keys()) == 0 : + return None + return bevy_components.get(long_name, None) + +def is_bevy_component_in_object(object, long_name): + return get_bevy_component_value_by_long_name(object, long_name) is not None + +# adds metadata to object only if it is missing +def add_metadata_to_components_without_metadata(object): + registry = bpy.context.window_manager.components_registry + + for component_name in get_bevy_components(object) : + if component_name == "components_meta": + continue + upsert_component_in_object(object, component_name, registry) + +# adds a component to an object (including metadata) using the provided component definition & optional value +def add_component_to_object(object, component_definition, value=None): + cleanup_invalid_metadata(object) 
+ if object is not None: + # print("add_component_to_object", component_definition) + long_name = component_definition["long_name"] + registry = bpy.context.window_manager.components_registry + if not registry.has_type_infos(): + raise Exception('registry type infos have not been loaded yet or are missing !') + definition = registry.type_infos[long_name] + # now we use our pre_generated property groups to set the initial value of our custom property + (_, propertyGroup) = upsert_component_in_object(object, long_name=long_name, registry=registry) + if value == None: + value = property_group_value_to_custom_property_value(propertyGroup, definition, registry, None) + else: # we have provided a value, that is a raw , custom property value, to set the value of the propertyGroup + object["__disable__update"] = True # disable update callback while we set the values of the propertyGroup "tree" (as a propertyGroup can contain other propertyGroups) + property_group_value_from_custom_property_value(propertyGroup, definition, registry, value) + del object["__disable__update"] + + upsert_bevy_component(object, long_name, value) + +def upsert_component_in_object(object, long_name, registry): + # print("upsert_component_in_object", object, "component name", component_name) + # TODO: upsert this part too ? 
+ target_components_metadata = object.components_meta.components + component_definition = registry.type_infos.get(long_name, None) + if component_definition != None: + short_name = component_definition["short_name"] + long_name = component_definition["long_name"] + property_group_name = registry.get_propertyGroupName_from_longName(long_name) + propertyGroup = None + + component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None) + if not component_meta: + component_meta = target_components_metadata.add() + component_meta.short_name = short_name + component_meta.long_name = long_name + propertyGroup = getattr(component_meta, property_group_name, None) + else: # this one has metadata but we check that the relevant property group is present + propertyGroup = getattr(component_meta, property_group_name, None) + + # try to inject propertyGroup if not present + if propertyGroup == None: + #print("propertygroup not found in metadata attempting to inject") + if property_group_name in registry.component_propertyGroups: + # we have found a matching property_group, so try to inject it + # now inject property group + setattr(ComponentMetadata, property_group_name, registry.component_propertyGroups[property_group_name]) # FIXME: not ideal as all ComponentMetadata get the propGroup, but have not found a way to assign it per instance + propertyGroup = getattr(component_meta, property_group_name, None) + + # now deal with property groups details + if propertyGroup != None: + if long_name in registry.invalid_components: + component_meta.enabled = False + component_meta.invalid = True + component_meta.invalid_details = "component contains fields that are not in the schema, disabling" + else: + # if we still have not found the property group, mark it as invalid + component_meta.enabled = False + component_meta.invalid = True + component_meta.invalid_details = "component not present in the schema, possibly renamed? 
Disabling for now" + # property_group_value_from_custom_property_value(propertyGroup, component_definition, registry, object[component_name]) + + return (component_meta, propertyGroup) + else: + return(None, None) + + +def copy_propertyGroup_values_to_another_object(source_object, target_object, component_name, registry): + if source_object == None or target_object == None or component_name == None: + raise Exception('missing input data, cannot copy component propertryGroup') + + component_definition = find_component_definition_from_long_name(component_name) + long_name = component_name + property_group_name = registry.get_propertyGroupName_from_longName(long_name) + + registry = bpy.context.window_manager.components_registry + + source_components_metadata = source_object.components_meta.components + source_componentMeta = next(filter(lambda component: component["long_name"] == long_name, source_components_metadata), None) + # matching component means we already have this type of component + source_propertyGroup = getattr(source_componentMeta, property_group_name) + + # now deal with the target object + (_, target_propertyGroup) = upsert_component_in_object(target_object, component_name, registry) + # add to object + value = property_group_value_to_custom_property_value(target_propertyGroup, component_definition, registry, None) + upsert_bevy_component(target_object, long_name, value) + + # copy the values over + for field_name in source_propertyGroup.field_names: + if field_name in source_propertyGroup: + target_propertyGroup[field_name] = source_propertyGroup[field_name] + apply_propertyGroup_values_to_object_customProperties(target_object) + + +# TODO: move to propgroups ? 
def apply_propertyGroup_values_to_object_customProperties(object):
    """Regenerate ALL of `object`'s bevy custom-property values from its propertyGroups."""
    cleanup_invalid_metadata(object)
    registry = bpy.context.window_manager.components_registry
    for component_name in get_bevy_components(object) :
        """if component_name == "components_meta":
            continue"""
        (_, propertyGroup) = upsert_component_in_object(object, component_name, registry)
        component_definition = find_component_definition_from_long_name(component_name)
        if component_definition != None:
            value = property_group_value_to_custom_property_value(propertyGroup, component_definition, registry, None)
            upsert_bevy_component(object=object, long_name=component_name, value=value)

# apply component value(s) to custom property of a single component
def apply_propertyGroup_values_to_object_customProperties_for_component(object, component_name):
    """Regenerate the custom-property value of one component and clear its invalid flags."""
    registry = bpy.context.window_manager.components_registry
    (_, propertyGroup) = upsert_component_in_object(object, component_name, registry)
    component_definition = find_component_definition_from_long_name(component_name)
    if component_definition != None:
        value = property_group_value_to_custom_property_value(propertyGroup, component_definition, registry, None)
        # NOTE(review): writes a direct custom property, unlike the sibling above
        # which calls upsert_bevy_component — confirm whether this is intentional
        object[component_name] = value

    # component has just been regenerated: clear any previous "invalid" marking
    components_metadata = object.components_meta.components
    componentMeta = next(filter(lambda component: component["long_name"] == component_name, components_metadata), None)
    if componentMeta:
        componentMeta.invalid = False
        componentMeta.invalid_details = ""


def apply_customProperty_values_to_object_propertyGroups(object):
    """Inverse direction: fill the propertyGroups from the stored custom-property values."""
    print("apply custom properties to ", object.name)
    registry = bpy.context.window_manager.components_registry
    for component_name in get_bevy_components(object) :
        if component_name == "components_meta":
            continue
        component_definition = find_component_definition_from_long_name(component_name)
        if component_definition != None:
            property_group_name = registry.get_propertyGroupName_from_longName(component_name)
            components_metadata = object.components_meta.components
            source_componentMeta = next(filter(lambda component: component["long_name"] == component_name, components_metadata), None)
            # matching component means we already have this type of component
            propertyGroup = getattr(source_componentMeta, property_group_name, None)
            customProperty_value = get_bevy_component_value_by_long_name(object, component_name)
            #value = property_group_value_to_custom_property_value(propertyGroup, component_definition, registry, None)

            object["__disable__update"] = True # disable update callback while we set the values of the propertyGroup "tree" (as a propertyGroup can contain other propertyGroups)
            property_group_value_from_custom_property_value(propertyGroup, component_definition, registry, customProperty_value)
            del object["__disable__update"]
            source_componentMeta.invalid = False
            source_componentMeta.invalid_details = ""

# removes the given component from the object: removes both the custom property and the matching metadata from the object
def remove_component_from_object(object, component_name):
    """Return False when the object has no components_meta, True otherwise."""
    # remove the component value
    remove_bevy_component(object, component_name)

    # now remove the component's metadata
    components_metadata = getattr(object, "components_meta", None)
    if components_metadata == None:
        return False

    components_metadata = components_metadata.components
    # only the FIRST matching metadata entry is removed (break after the match)
    to_remove = []
    for index, component_meta in enumerate(components_metadata):
        long_name = component_meta.long_name
        if long_name == component_name:
            to_remove.append(index)
            break
    for index in to_remove:
        components_metadata.remove(index)
    return True

def add_component_from_custom_property(object):
    """Create metadata + propertyGroups for components that only exist as custom properties."""
    add_metadata_to_components_without_metadata(object)
    apply_customProperty_values_to_object_propertyGroups(object)

def rename_component(object, original_long_name, new_long_name):
    """Replace component `original_long_name` by `new_long_name`, carrying the value over."""
    registry = bpy.context.window_manager.components_registry
    type_infos = registry.type_infos
    component_definition = type_infos[new_long_name]

    component_ron_value = get_bevy_component_value_by_long_name(object=object, long_name=original_long_name)
    # fallback: the value may still live as a direct custom property on the object
    if component_ron_value is None and original_long_name in object:
        component_ron_value = object[original_long_name]

    remove_component_from_object(object, original_long_name)
    add_component_to_object(object, component_definition, component_ron_value)


def toggle_component(object, component_name):
    """Flip the `visible` flag of the component's metadata (UI fold/unfold)."""
    components_in_object = object.components_meta.components
    component_meta = next(filter(lambda component: component["long_name"] == component_name, components_in_object), None)
    if component_meta != None:
        component_meta.visible = not component_meta.visible


# ==== new file: tools/blenvy/bevy_components/components/operators.py ====
import ast
import json
import bpy
from bpy_types import Operator
from bpy.props import (StringProperty)

from .metadata import add_component_from_custom_property, add_component_to_object, apply_propertyGroup_values_to_object_customProperties_for_component, copy_propertyGroup_values_to_another_object, get_bevy_component_value_by_long_name, get_bevy_components, is_bevy_component_in_object, remove_component_from_object, rename_component, toggle_component

class AddComponentOperator(Operator):
    """Add Bevy component to object"""
    bl_idname = "object.add_bevy_component"
    bl_label = "Add component to object Operator"
    bl_options = {"UNDO"}

    # long name of the component type to add, looked up in the registry's type_infos
    component_type: StringProperty(
        name="component_type",
        description="component type to add",
    ) # type: ignore

    def execute(self, context):
        object = context.object
        print("adding component ", self.component_type, "to object '"+object.name+"'")

        has_component_type = self.component_type != ""
        if has_component_type and object != None:
            type_infos = context.window_manager.components_registry.type_infos
            component_definition = type_infos[self.component_type]
            add_component_to_object(object, component_definition)

        return {'FINISHED'}

class CopyComponentOperator(Operator):
    """Copy Bevy component from object"""
    bl_idname = "object.copy_bevy_component"
    bl_label = "Copy component Operator"
    bl_options = {"UNDO"}

    source_component_name: StringProperty(
        name="source component_name (long)",
        description="name of the component to copy",
    ) # type: ignore

    source_object_name: StringProperty(
        name="source object name",
        description="name of the object to copy the component from",
    ) # type: ignore

    @classmethod
    def register(cls):
        # clipboard state is stored on the WindowManager so Paste can read it later
        bpy.types.WindowManager.copied_source_component_name = StringProperty()
        bpy.types.WindowManager.copied_source_object = StringProperty()

    @classmethod
    def unregister(cls):
        del bpy.types.WindowManager.copied_source_component_name
        del bpy.types.WindowManager.copied_source_object


    def execute(self, context):
        if self.source_component_name != '' and self.source_object_name != "":
            context.window_manager.copied_source_component_name = self.source_component_name
            context.window_manager.copied_source_object = self.source_object_name
        else:
            self.report({"ERROR"}, "The source object name / component name to copy a component from have not been specified")

        return {'FINISHED'}


class PasteComponentOperator(Operator):
    """Paste Bevy component to object"""
    bl_idname = "object.paste_bevy_component"
    bl_label = "Paste component to object Operator"
    bl_options = {"UNDO"}

    def execute(self, context):
        # read the "clipboard" filled in by CopyComponentOperator
        source_object_name = context.window_manager.copied_source_object
        source_object = bpy.data.objects.get(source_object_name, None)
        print("source object", source_object)
        if source_object == None:
            self.report({"ERROR"}, "The source object to copy a component from does not exist")
        else:
            component_name = context.window_manager.copied_source_component_name
            component_value = get_bevy_component_value_by_long_name(source_object, component_name)
            if component_value is None:
                self.report({"ERROR"}, "The source component to copy from does not exist")
            else:
                print("pasting component to object: component name:", str(component_name), "component value:" + str(component_value))
                print (context.object)
                registry = context.window_manager.components_registry
                copy_propertyGroup_values_to_another_object(source_object, context.object, component_name, registry)

        return {'FINISHED'}

class RemoveComponentOperator(Operator):
    """Remove Bevy component from object"""
    bl_idname = "object.remove_bevy_component"
    bl_label = "Remove component from object Operator"
    bl_options = {"UNDO"}

    component_name: StringProperty(
        name="component name",
        description="component to delete",
    ) # type: ignore

    # optional: when empty, the active object is used instead
    object_name: StringProperty(
        name="object name",
        description="object whose component to delete",
        default=""
    ) # type: ignore

    def execute(self, context):
        if self.object_name == "":
            object = context.object
        else:
            object = bpy.data.objects[self.object_name]
        print("removing component ", self.component_name, "from object '"+object.name+"'")

        if object is not None and 'bevy_components' in object :
            component_value = get_bevy_component_value_by_long_name(object, self.component_name)
            if component_value is not None:
                remove_component_from_object(object, self.component_name)
            else :
                self.report({"ERROR"}, "The component to remove ("+ self.component_name +") does not exist")
        else:
            self.report({"ERROR"}, "The object to remove ("+ self.component_name +") from does not exist")
        return {'FINISHED'}


class RemoveComponentFromAllObjectsOperator(Operator):
    """Remove Bevy component from all object"""
    bl_idname = "object.remove_bevy_component_all"
    bl_label = "Remove component from all objects Operator"
    bl_options = {"UNDO"}
    component_name: StringProperty(
        name="component name (long name)",
        description="component to delete",
    ) # type: ignore

    @classmethod
    def register(cls):
        # -1.0 means "no removal in progress"; used by the UI as a progress bar value
        bpy.types.WindowManager.components_remove_progress = bpy.props.FloatProperty(default=-1.0)

    @classmethod
    def unregister(cls):
        del bpy.types.WindowManager.components_remove_progress

    def execute(self, context):
        print("removing component ", self.component_name, "from all objects")
        total = len(bpy.data.objects)
        for index, object in enumerate(bpy.data.objects):
            if len(object.keys()) > 0:
                if object is not None and is_bevy_component_in_object(object, self.component_name):
                    remove_component_from_object(object, self.component_name)

            progress = index / total
            context.window_manager.components_remove_progress = progress
            # now force refresh the ui
            bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
        context.window_manager.components_remove_progress = -1.0

        return {'FINISHED'}


class RenameHelper(bpy.types.PropertyGroup):
    # holds the original/new long names shared between the rename UI and operator
    original_name: bpy.props.StringProperty(name="") # type: ignore
    new_name: bpy.props.StringProperty(name="") # type: ignore

    #object: bpy.props.PointerProperty(type=bpy.types.Object)
    @classmethod
    def register(cls):
        bpy.types.WindowManager.bevy_component_rename_helper = bpy.props.PointerProperty(type=RenameHelper)

    @classmethod
    def unregister(cls):
        # remove handlers & co
        del bpy.types.WindowManager.bevy_component_rename_helper

class OT_rename_component(Operator):
    """Rename Bevy component"""
    bl_idname = "object.rename_bevy_component"
    bl_label = "rename component"
    bl_options = {"UNDO"}

    original_name: bpy.props.StringProperty(default="") # type: ignore
    new_name: StringProperty(
        name="new_name",
        description="new name of component",
    ) # type: ignore

    # JSON-encoded list of object names to rename the component on
    target_objects: bpy.props.StringProperty() # type: ignore

    @classmethod
    def register(cls):
        bpy.types.WindowManager.components_rename_progress = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper)
bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper) + + @classmethod + def unregister(cls): + del bpy.types.WindowManager.components_rename_progress + + def execute(self, context): + registry = context.window_manager.components_registry + type_infos = registry.type_infos + settings = context.window_manager.bevy_component_rename_helper + original_name = settings.original_name if self.original_name == "" else self.original_name + new_name = self.new_name + + + print("renaming components: original name", original_name, "new_name", self.new_name, "targets", self.target_objects) + target_objects = json.loads(self.target_objects) + errors = [] + total = len(target_objects) + + if original_name != '' and new_name != '' and original_name != new_name and len(target_objects) > 0: + for index, object_name in enumerate(target_objects): + object = bpy.data.objects[object_name] + if object and original_name in get_bevy_components(object) or original_name in object: + try: + # attempt conversion + rename_component(object=object, original_long_name=original_name, new_long_name=new_name) + except Exception as error: + if '__disable__update' in object: + del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure + components_metadata = getattr(object, "components_meta", None) + if components_metadata: + components_metadata = components_metadata.components + component_meta = next(filter(lambda component: component["long_name"] == new_name, components_metadata), None) + if component_meta: + component_meta.invalid = True + component_meta.invalid_details = "wrong custom property value, overwrite them by changing the values in the ui or change them & regenerate" + + errors.append( "wrong custom property values to generate target component: object: '" + object.name + "', error: " + str(error)) + + progress = index / total + context.window_manager.components_rename_progress = progress + + try: + # now 
force refresh the ui + bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1) + except: pass # this is to allow this to run in cli/headless mode + + if len(errors) > 0: + self.report({'ERROR'}, "Failed to rename component: Errors:" + str(errors)) + else: + self.report({'INFO'}, "Sucessfully renamed component") + + #clear data after we are done + self.original_name = "" + context.window_manager.bevy_component_rename_helper.original_name = "" + context.window_manager.components_rename_progress = -1.0 + + return {'FINISHED'} + + +class GenerateComponent_From_custom_property_Operator(Operator): + """Generate Bevy components from custom property""" + bl_idname = "object.generate_bevy_component_from_custom_property" + bl_label = "Generate component from custom_property Operator" + bl_options = {"UNDO"} + + component_name: StringProperty( + name="component name", + description="component to generate custom properties for", + ) # type: ignore + + def execute(self, context): + object = context.object + + error = False + try: + add_component_from_custom_property(object) + except Exception as error: + del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure + error = True + self.report({'ERROR'}, "Failed to update propertyGroup values from custom property: Error:" + str(error)) + if not error: + self.report({'INFO'}, "Sucessfully generated UI values for custom properties for selected object") + return {'FINISHED'} + + +class Fix_Component_Operator(Operator): + """Attempt to fix Bevy component""" + bl_idname = "object.fix_bevy_component" + bl_label = "Fix component (attempts to)" + bl_options = {"UNDO"} + + component_name: StringProperty( + name="component name", + description="component to fix", + ) # type: ignore + + def execute(self, context): + object = context.object + error = False + try: + apply_propertyGroup_values_to_object_customProperties_for_component(object, self.component_name) + except Exception as 
error: + if "__disable__update" in object: + del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure + error = True + self.report({'ERROR'}, "Failed to fix component: Error:" + str(error)) + if not error: + self.report({'INFO'}, "Sucessfully fixed component (please double check component & its custom property value)") + return {'FINISHED'} + +class Toggle_ComponentVisibility(Operator): + """Toggle Bevy component's visibility""" + bl_idname = "object.toggle_bevy_component_visibility" + bl_label = "Toggle component visibility" + bl_options = {"UNDO"} + + component_name: StringProperty( + name="component name", + description="component to toggle", + ) # type: ignore + + def execute(self, context): + object = context.object + toggle_component(object, self.component_name) + return {'FINISHED'} + diff --git a/tools/blenvy/bevy_components/components/ui.py b/tools/blenvy/bevy_components/components/ui.py new file mode 100644 index 0000000..754130a --- /dev/null +++ b/tools/blenvy/bevy_components/components/ui.py @@ -0,0 +1,279 @@ +import json +import bpy + +from ..registry.operators import COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT +from .metadata import do_object_custom_properties_have_missing_metadata, get_bevy_components +from .operators import AddComponentOperator, CopyComponentOperator, Fix_Component_Operator, RemoveComponentOperator, GenerateComponent_From_custom_property_Operator, PasteComponentOperator, Toggle_ComponentVisibility + +def draw_propertyGroup( propertyGroup, layout, nesting =[], rootName=None): + is_enum = getattr(propertyGroup, "with_enum") + is_list = getattr(propertyGroup, "with_list") + is_map = getattr(propertyGroup, "with_map") + # item in our components hierarchy can get the correct propertyGroup by STRINGS because of course, we cannot pass objects to operators...sigh + + # if it is an enum, the first field name is always the list of enum variants, the others are the variants + 
    field_names = propertyGroup.field_names
    #print("")
    #print("drawing", propertyGroup, nesting, "component_name", rootName)
    if is_enum:
        # enum: first field is the variant selector, the rest are per-variant groups
        subrow = layout.row()
        display_name = field_names[0] if propertyGroup.tupple_or_struct == "struct" else ""
        subrow.prop(propertyGroup, field_names[0], text=display_name)
        subrow.separator()
        selection = getattr(propertyGroup, "selection")

        for fname in field_names[1:]:
            # only draw the sub-group that matches the currently selected variant
            if fname == "variant_" + selection:
                subrow = layout.row()
                display_name = fname if propertyGroup.tupple_or_struct == "struct" else ""

                nestedPropertyGroup = getattr(propertyGroup, fname)
                nested = getattr(nestedPropertyGroup, "nested", False)
                #print("nestedPropertyGroup", nestedPropertyGroup, fname, nested)
                if nested:
                    draw_propertyGroup(nestedPropertyGroup, subrow.column(), nesting + [fname], rootName )
                # if an enum variant is not a propertyGroup
                break
    elif is_list:
        item_list = getattr(propertyGroup, "list")
        list_index = getattr(propertyGroup, "list_index")
        box = layout.box()
        split = box.split(factor=0.9)
        list_column, buttons_column = (split.column(),split.column())

        list_column = list_column.box()
        for index, item in enumerate(item_list):
            row = list_column.row()
            draw_propertyGroup(item, row, nesting, rootName)
            icon = 'CHECKBOX_HLT' if list_index == index else 'CHECKBOX_DEHLT'
            op = row.operator('generic_list.select_item', icon=icon, text="")
            # operators only take strings, so the path to the propertyGroup is JSON-encoded
            op.component_name = rootName
            op.property_group_path = json.dumps(nesting)
            op.selection_index = index

        #various control buttons
        buttons_column.separator()
        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='ADD', text="")
        op.action = 'ADD'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)

        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='REMOVE', text="")
        op.action = 'REMOVE'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)
        buttons_column.separator()
        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='TRIA_UP', text="")
        op.action = 'UP'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)

        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='TRIA_DOWN', text="")
        op.action = 'DOWN'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)

    elif is_map:
        root = layout.row().column()
        if hasattr(propertyGroup, "list"): # TODO: improve handling of non drawable UI
            # maps keep parallel key/value lists, plus "setter" groups for adding entries
            keys_list = getattr(propertyGroup, "list")
            values_list = getattr(propertyGroup, "values_list")
            box = root.box()
            row = box.row()
            row.label(text="Add entry:")
            keys_setter = getattr(propertyGroup, "keys_setter")
            draw_propertyGroup(keys_setter, row, nesting, rootName)

            values_setter = getattr(propertyGroup, "values_setter")
            draw_propertyGroup(values_setter, row, nesting, rootName)

            op = row.operator('generic_map.map_action', icon='ADD', text="")
            op.action = 'ADD'
            op.component_name = rootName
            op.property_group_path = json.dumps(nesting)

            box = root.box()
            split = box.split(factor=0.9)
            list_column, buttons_column = (split.column(),split.column())
            list_column = list_column.box()

            for index, item in enumerate(keys_list):
                row = list_column.row()
                draw_propertyGroup(item, row, nesting, rootName)

                # value drawn next to its key (parallel lists share the index)
                value = values_list[index]
                draw_propertyGroup(value, row, nesting, rootName)

                op = row.operator('generic_map.map_action', icon='REMOVE', text="")
                op.action = 'REMOVE'
                op.component_name = rootName
                op.property_group_path = json.dumps(nesting)
                op.target_index = index


            #various control buttons
            buttons_column.separator()
            row = buttons_column.row()


    else:
        # plain struct/tuple: draw every field, recursing into nested propertyGroups
        for fname in field_names:
            #subrow = layout.row()
            nestedPropertyGroup = getattr(propertyGroup, fname)
            nested = getattr(nestedPropertyGroup, "nested", False)
            display_name = fname if propertyGroup.tupple_or_struct == "struct" else ""
propertyGroup.tupple_or_struct == "struct" else "" + + if nested: + layout.separator() + layout.separator() + + layout.label(text=display_name) # this is the name of the field/sub field + layout.separator() + subrow = layout.row() + draw_propertyGroup(nestedPropertyGroup, subrow, nesting + [fname], rootName ) + else: + subrow = layout.row() + subrow.prop(propertyGroup, fname, text=display_name) + subrow.separator() + + +class BEVY_COMPONENTS_PT_ComponentsPanel(bpy.types.Panel): + bl_idname = "BEVY_COMPONENTS_PT_ComponentsPanel" + bl_label = "" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "Bevy Components" + bl_context = "objectmode" + bl_parent_id = "BLENVY_PT_SidePanel" + + @classmethod + def poll(cls, context): + return context.window_manager.blenvy.mode == 'COMPONENTS' + return context.object is not None + + def draw_header(self, context): + layout = self.layout + name = context.object.name if context.object != None else '' + layout.label(text="Components For "+ name) + + def draw(self, context): + object = context.object + layout = self.layout + + # we get & load our component registry + registry = bpy.context.window_manager.components_registry + available_components = bpy.context.window_manager.components_list + registry_has_type_infos = registry.has_type_infos() + + if object is not None: + row = layout.row(align=True) + row.prop(available_components, "list", text="Component") + row.prop(available_components, "filter",text="Filter") + + # add components + row = layout.row(align=True) + op = row.operator(AddComponentOperator.bl_idname, text="Add", icon="ADD") + op.component_type = available_components.list + row.enabled = available_components.list != '' + + layout.separator() + + # paste components + row = layout.row(align=True) + row.operator(PasteComponentOperator.bl_idname, text="Paste component ("+bpy.context.window_manager.copied_source_component_name+")", icon="PASTEDOWN") + row.enabled = registry_has_type_infos and 
            layout.separator()

            # upgrade custom props to components
            upgradeable_customProperties = registry.has_type_infos() and do_object_custom_properties_have_missing_metadata(context.object)
            if upgradeable_customProperties:
                row = layout.row(align=True)
                op = row.operator(GenerateComponent_From_custom_property_Operator.bl_idname, text="generate components from custom properties" , icon="LOOP_FORWARDS")
                layout.separator()


            components_in_object = object.components_meta.components
            #print("components_names", dict(components_bla).keys())

            for component_name in sorted(get_bevy_components(object)) : # sorted by component name, practical
                #print("component_name", component_name)
                if component_name == "components_meta":
                    continue
                # anything withouth metadata gets skipped, we only want to see real components, not all custom props
                component_meta = next(filter(lambda component: component["long_name"] == component_name, components_in_object), None)
                if component_meta == None:
                    continue

                component_invalid = getattr(component_meta, "invalid")
                invalid_details = getattr(component_meta, "invalid_details")
                component_visible = getattr(component_meta, "visible")
                single_field = False

                # our whole row
                box = layout.box()
                row = box.row(align=True)
                # "header"
                row.alert = component_invalid
                row.prop(component_meta, "enabled", text="")
                row.label(text=component_name)

                # we fetch the matching ui property group
                root_propertyGroup_name = registry.get_propertyGroupName_from_longName(component_name)
                """print("root_propertyGroup_name", root_propertyGroup_name)"""
                print("component_meta", component_meta, component_invalid)

                if root_propertyGroup_name:
                    propertyGroup = getattr(component_meta, root_propertyGroup_name, None)
                    """print("propertyGroup", propertyGroup)"""
                    if propertyGroup:
                        # if the component has only 0 or 1 field names, display inline, otherwise change layout
                        single_field = len(propertyGroup.field_names) < 2
                        prop_group_location = box.row(align=True).column()
                        """if single_field:
                            prop_group_location = row.column(align=True)#.split(factor=0.9)#layout.row(align=False)"""

                        if component_visible:
                            if component_invalid:
                                error_message = invalid_details if component_invalid else "Missing component UI data, please reload registry !"
                                prop_group_location.label(text=error_message)
                            draw_propertyGroup(propertyGroup, prop_group_location, [root_propertyGroup_name], component_name)
                        else :
                            row.label(text="details hidden, click on toggle to display")
                    else:
                        error_message = invalid_details if component_invalid else "Missing component UI data, please reload registry !"
                        row.label(text=error_message)

                # "footer" with additional controls
                if component_invalid:
                    if root_propertyGroup_name:
                        propertyGroup = getattr(component_meta, root_propertyGroup_name, None)
                        if propertyGroup:
                            # unit structs have no fields; a "fix" button can regenerate them
                            unit_struct = len(propertyGroup.field_names) == 0
                            if unit_struct:
                                op = row.operator(Fix_Component_Operator.bl_idname, text="", icon="SHADERFX")
                                op.component_name = component_name
                                row.separator()

                op = row.operator(RemoveComponentOperator.bl_idname, text="", icon="X")
                op.component_name = component_name
                row.separator()

                op = row.operator(CopyComponentOperator.bl_idname, text="", icon="COPYDOWN")
                op.source_component_name = component_name
                op.source_object_name = object.name
                row.separator()

                #if not single_field:
                toggle_icon = "TRIA_DOWN" if component_visible else "TRIA_RIGHT"
                op = row.operator(Toggle_ComponentVisibility.bl_idname, text="", icon=toggle_icon)
                op.component_name = component_name
                #row.separator()

        else:
            layout.label(text ="Select an object to edit its components")


# ==== new file: tools/blenvy/bevy_components/helpers.py ====
import bpy
import json
# Makes an empty, at the specified location, rotation, scale stores it in existing collection, from https://blender.stackexchange.com/questions/51290/how-to-add-empty-object-not-using-bpy-ops
def make_empty(name, location, rotation, scale, collection):
    object_data = None
    empty_obj = bpy.data.objects.new( name, object_data )

    empty_obj.empty_display_size = 2
    empty_obj.empty_display_type = 'PLAIN_AXES'

    empty_obj.name = name
    empty_obj.location = location
    empty_obj.scale = scale
    empty_obj.rotation_euler = rotation

    collection.objects.link( empty_obj )
    #bpy.context.view_layer.update()
    return empty_obj

def upsert_settings(name, data):
    """Persist `data` as JSON inside a Blender text datablock called `name` (created if absent)."""
    stored_settings = bpy.data.texts[name] if name in bpy.data.texts else bpy.data.texts.new(name)
    stored_settings.clear()
    stored_settings.write(json.dumps(data))

def load_settings(name):
    """Load and JSON-decode settings from text datablock `name`; None when absent."""
    stored_settings = bpy.data.texts[name] if name in bpy.data.texts else None
    if stored_settings != None:
        return json.loads(stored_settings.as_string())
    return None


# ==== new file: tools/blenvy/bevy_components/propGroups/__init__.py (empty) ====
# ==== new file: tools/blenvy/bevy_components/propGroups/conversions_from_prop_group.py ====
from bpy_types import PropertyGroup

# per-type converters from Blender property values to their RON string representation
conversion_tables = {
    "bool": lambda value: value,

    "char": lambda value: '"'+value+'"',
    "str": lambda value: '"'+value+'"',
    "alloc::string::String": lambda value: '"'+str(value)+'"',
    "alloc::borrow::Cow": lambda value: '"'+str(value)+'"',

    "glam::Vec2": lambda value: "Vec2(x:"+str(value[0])+ ", y:"+str(value[1])+")",
    "glam::DVec2": lambda value: "DVec2(x:"+str(value[0])+ ", y:"+str(value[1])+")",
    "glam::UVec2": lambda value: "UVec2(x:"+str(value[0])+ ", y:"+str(value[1])+")",
    "glam::Vec3": lambda value: "Vec3(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+")",
    "glam::Vec3A": lambda value: "Vec3A(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+")",
    "glam::UVec3": lambda value: "UVec3(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+")",

    "glam::Vec4": lambda value: "Vec4(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",
    "glam::DVec4": lambda value: "DVec4(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",
    "glam::UVec4": lambda value: "UVec4(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",

    "glam::Quat": lambda value: "Quat(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",

    "bevy_render::color::Color": lambda value: "Rgba(red:"+str(value[0])+ ", green:"+str(value[1])+ ", blue:"+str(value[2])+ ", alpha:"+str(value[3])+ ")",
}

#converts the value of a property group(no matter its complexity) into a single custom property value
# this is more or less a glorified "to_ron()" method (not quite but close to)
def property_group_value_to_custom_property_value(property_group, definition, registry, parent=None, value=None):
    long_name = definition["long_name"]
    type_info = definition["typeInfo"] if "typeInfo" in definition else None
    type_def = definition["type"] if "type" in definition else None
    is_value_type = long_name in conversion_tables
    # print("computing custom property: component name:", long_name, "type_info", type_info, "type_def", type_def, "value", value)

    if is_value_type:
        # leaf value: convert directly via the table above
        value = conversion_tables[long_name](value)
    elif type_info == "Struct":
        values = {}
        if len(property_group.field_names) ==0:
            # unit struct
            value = '()'
        else:
            # recurse per field, resolving each field's type from the schema $ref
            for index, field_name in enumerate(property_group.field_names):
                item_long_name = definition["properties"][field_name]["type"]["$ref"].replace("#/$defs/", "")
definition["properties"][field_name]["type"]["$ref"].replace("#/$defs/", "") + item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None + + value = getattr(property_group, field_name) + is_property_group = isinstance(value, PropertyGroup) + child_property_group = value if is_property_group else None + if item_definition != None: + value = property_group_value_to_custom_property_value(child_property_group, item_definition, registry, parent=long_name, value=value) + else: + value = '""' + values[field_name] = value + value = values + elif type_info == "Tuple": + values = {} + for index, field_name in enumerate(property_group.field_names): + item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "") + item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None + + value = getattr(property_group, field_name) + is_property_group = isinstance(value, PropertyGroup) + child_property_group = value if is_property_group else None + if item_definition != None: + value = property_group_value_to_custom_property_value(child_property_group, item_definition, registry, parent=long_name, value=value) + else: + value = '""' + values[field_name] = value + value = tuple(e for e in list(values.values())) + + elif type_info == "TupleStruct": + values = {} + for index, field_name in enumerate(property_group.field_names): + #print("toto", index, definition["prefixItems"][index]["type"]["$ref"]) + item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "") + item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None + + value = getattr(property_group, field_name) + is_property_group = isinstance(value, PropertyGroup) + child_property_group = value if is_property_group else None + if item_definition != None: + value = property_group_value_to_custom_property_value(child_property_group, 
item_definition, registry, parent=long_name, value=value) + else: + value = '""' + values[field_name] = value + + value = tuple(e for e in list(values.values())) + elif type_info == "Enum": + selected = getattr(property_group, "selection") + if type_def == "object": + selection_index = property_group.field_names.index("variant_"+selected) + variant_name = property_group.field_names[selection_index] + variant_definition = definition["oneOf"][selection_index-1] + if "prefixItems" in variant_definition: + value = getattr(property_group, variant_name) + is_property_group = isinstance(value, PropertyGroup) + child_property_group = value if is_property_group else None + + value = property_group_value_to_custom_property_value(child_property_group, variant_definition, registry, parent=long_name, value=value) + value = selected + str(value,) #"{}{},".format(selected ,value) + elif "properties" in variant_definition: + value = getattr(property_group, variant_name) + is_property_group = isinstance(value, PropertyGroup) + child_property_group = value if is_property_group else None + + value = property_group_value_to_custom_property_value(child_property_group, variant_definition, registry, parent=long_name, value=value) + value = selected + str(value,) + else: + value = getattr(property_group, variant_name) + is_property_group = isinstance(value, PropertyGroup) + child_property_group = value if is_property_group else None + if child_property_group: + value = property_group_value_to_custom_property_value(child_property_group, variant_definition, registry, parent=long_name, value=value) + value = selected + str(value,) + else: + value = selected # here the value of the enum is just the name of the variant + else: + value = selected + + elif type_info == "List": + item_list = getattr(property_group, "list") + value = [] + for item in item_list: + item_long_name = getattr(item, "long_name") + definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos 
else None + if definition != None: + item_value = property_group_value_to_custom_property_value(item, definition, registry, long_name, None) + if item_long_name.startswith("wrapper_"): #if we have a "fake" tupple for aka for value types, we need to remove one nested level + item_value = item_value[0] + else: + item_value = '""' + value.append(item_value) + + elif type_info == "Map": + keys_list = getattr(property_group, "list", {}) + values_list = getattr(property_group, "values_list") + value = {} + for index, key in enumerate(keys_list): + # first get the keys + key_long_name = getattr(key, "long_name") + definition = registry.type_infos[key_long_name] if key_long_name in registry.type_infos else None + if definition != None: + key_value = property_group_value_to_custom_property_value(key, definition, registry, long_name, None) + if key_long_name.startswith("wrapper_"): #if we have a "fake" tupple for aka for value types, we need to remove one nested level + key_value = key_value[0] + else: + key_value = '""' + # and then the values + val = values_list[index] + value_long_name = getattr(val, "long_name") + definition = registry.type_infos[value_long_name] if value_long_name in registry.type_infos else None + if definition != None: + val_value = property_group_value_to_custom_property_value(val, definition, registry, long_name, None) + if value_long_name.startswith("wrapper_"): #if we have a "fake" tupple for aka for value types, we need to remove one nested level + val_value = val_value[0] + else: + val_value = '""' + + value[key_value] = val_value + value = str(value).replace('{','@').replace('}','²') # FIXME: eeek !! 
+ else: + value = conversion_tables[long_name](value) if is_value_type else value + value = '""' if isinstance(value, PropertyGroup) else value + + #print("generating custom property value", value, type(value)) + if isinstance(value, str): + value = value.replace("'", "") + + if parent == None: + value = str(value).replace("'", "") + value = value.replace(",)",")") + value = value.replace("{", "(").replace("}", ")") # FIXME: deal with hashmaps + value = value.replace("True", "true").replace("False", "false") + value = value.replace('@', '{').replace('²', '}') + return value + diff --git a/tools/blenvy/bevy_components/propGroups/conversions_to_prop_group.py b/tools/blenvy/bevy_components/propGroups/conversions_to_prop_group.py new file mode 100644 index 0000000..3c2baa6 --- /dev/null +++ b/tools/blenvy/bevy_components/propGroups/conversions_to_prop_group.py @@ -0,0 +1,312 @@ +from bpy_types import PropertyGroup +import re + +def parse_struct_string(string, start_nesting=0): + #print("processing struct string", string, "start_nesting", start_nesting) + fields = {} + buff = [] + current_fieldName = None + nesting_level = 0 + + start_offset = 0 + end_offset = 0 + + for index, char in enumerate(string): + buff.append(char) + if char == "," and nesting_level == start_nesting: + #print("first case", end_offset) + end_offset = index + end_offset = len(string) if end_offset == 0 else end_offset + + val = "".join(string[start_offset:end_offset]) + fields[current_fieldName] = val.strip() + start_offset = index + 1 + #print("done with field name", current_fieldName, "value", fields[current_fieldName]) + + if char == "[" or char == "(": + nesting_level += 1 + if nesting_level == start_nesting: + start_offset = index + 1 + #print("nesting & setting start offset", start_offset) + #print("nesting down", nesting_level) + + if char == "]" or char == ")" : + #print("nesting up", nesting_level) + if nesting_level == start_nesting: + end_offset = index + #print("unesting & setting end 
offset", end_offset) + nesting_level -= 1 + + + if char == ":" and nesting_level == start_nesting: + end_offset = index + fieldName = "".join(string[start_offset:end_offset]) + current_fieldName = fieldName.strip() + start_offset = index + 1 + end_offset = 0 #hack + #print("starting field name", fieldName, "index", index) + buff = [] + + end_offset = len(string) if end_offset == 0 else end_offset + #print("final start and end offset", start_offset, end_offset, "total length", len(string)) + + val = "".join(string[start_offset:end_offset]) + + fields[current_fieldName] = val.strip() + #print("done with all fields", fields) + return fields + +def parse_tuplestruct_string(string, start_nesting=0): + #print("processing tuppleStruct", string, "start_nesting", start_nesting) + fields = [] + buff = [] + nesting_level = 0 + field_index = 0 + + start_offset = 0 + end_offset = 0 + # todo: strip all stuff before start_nesting + + for index, char in enumerate(string): + buff.append(char) + if char == "," and nesting_level == start_nesting: + end_offset = index + end_offset = len(string) if end_offset == 0 else end_offset + + val = "".join(string[start_offset:end_offset]) + fields.append(val.strip()) + field_index += 1 + #print("start and end offset", start_offset, end_offset, "total length", len(string)) + #print("done with field name", field_index, "value", fields) + start_offset = index + 1 + end_offset = 0 # hack + + if char == "[" or char == "(": + nesting_level += 1 + if nesting_level == start_nesting: + start_offset = index + 1 + #print("nesting & setting start offset", start_offset) + #print("nesting down", nesting_level) + + if char == "]" or char == ")" : + if nesting_level == start_nesting: + end_offset = index + #print("unesting & setting end offset", end_offset) + #print("nesting up", nesting_level) + nesting_level -= 1 + + + end_offset = len(string) if end_offset == 0 else end_offset + #print("final start and end offset", start_offset, end_offset, "total length", 
len(string)) + + val = "".join(string[start_offset:end_offset]) #if end_offset != 0 else buff) + fields.append(val.strip()) + fields = list(filter(lambda entry: entry != '', fields)) + #print("done with all fields", fields) + return fields + + +def parse_vec2(value, caster, typeName): + parsed = parse_struct_string(value.replace(typeName,"").replace("(", "").replace(")","") ) + return [caster(parsed['x']), caster(parsed['y'])] + +def parse_vec3(value, caster, typeName): + parsed = parse_struct_string(value.replace(typeName,"").replace("(", "").replace(")","") ) + return [caster(parsed['x']), caster(parsed['y']), caster(parsed['z'])] + +def parse_vec4(value, caster, typeName): + parsed = parse_struct_string(value.replace(typeName,"").replace("(", "").replace(")","") ) + return [caster(parsed['x']), caster(parsed['y']), caster(parsed['z']), caster(parsed['w'])] + +def parse_color(value, caster, typeName): + parsed = parse_struct_string(value.replace(typeName,"").replace("(", "").replace(")","") ) + return [caster(parsed['red']), caster(parsed['green']), caster(parsed['blue']), caster(parsed['alpha'])] + +def to_int(input): + return int(float(input)) + +type_mappings = { + "bool": lambda value: True if value == "true" else False, + + "u8": lambda value: int(value), + "u16": lambda value: int(value), + "u32": lambda value: int(value), + "u64": lambda value: int(value), + "u128": lambda value: int(value), + "u64": lambda value: int(value), + "usize": lambda value: int(value), + + "i8": lambda value: int(value), + "i16": lambda value: int(value), + "i32": lambda value: int(value), + "i64": lambda value: int(value), + "i128": lambda value: int(value), + "isize": lambda value: int(value), + + 'f32': lambda value: float(value), + 'f64': lambda value: float(value), + + "glam::Vec2": lambda value: parse_vec2(value, float, "Vec2"), + "glam::DVec2": lambda value: parse_vec2(value, float, "DVec2"), + "glam::UVec2": lambda value: parse_vec2(value, to_int, "UVec2"), + + 
def is_def_value_type(definition, registry):
    """Return True if ``definition`` is a leaf value type (or missing entirely)."""
    if definition == None:
        return True
    value_types_defaults = registry.value_types_defaults
    long_name = definition["long_name"]
    is_value_type = long_name in value_types_defaults
    return is_value_type

#converts the value of a single custom property into a value (values) of a property group
def property_group_value_from_custom_property_value(property_group, definition, registry, value, nesting = []):
    """Deserialize a custom-property string back into a (possibly nested) PropertyGroup.

    Mirror of the to-custom-property conversion: parses ``value`` according to the
    registry schema ``definition`` and writes leaf values onto ``property_group``
    via setattr; nested groups are mutated in place by the recursive calls.
    Only value-type leaves return a parsed value directly.

    NOTE(review): ``nesting`` uses a mutable default argument — safe here because
    it is only rebound (``nesting + [...]``), never mutated.
    """
    value_types_defaults = registry.value_types_defaults
    type_info = definition["typeInfo"] if "typeInfo" in definition else None
    type_def = definition["type"] if "type" in definition else None
    properties = definition["properties"] if "properties" in definition else {}
    prefixItems = definition["prefixItems"] if "prefixItems" in definition else []
    long_name = definition["long_name"]

    #is_value_type = type_def in value_types_defaults or long_name in value_types_defaults
    is_value_type = long_name in value_types_defaults
    nesting = nesting + [definition["short_name"]]


    if is_value_type:
        value = value.replace("(", "").replace(")", "")# FIXME: temporary, incoherent use of nesting levels between parse_tuplestruct_string & parse_struct_string
        value = type_mappings[long_name](value) if long_name in type_mappings else value
        return value
    elif type_info == "Struct":
        if len(property_group.field_names) != 0 :
            custom_property_values = parse_struct_string(value, start_nesting=1 if value.startswith("(") else 0)
            for index, field_name in enumerate(property_group.field_names):
                item_long_name = definition["properties"][field_name]["type"]["$ref"].replace("#/$defs/", "")
                item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

                custom_prop_value = custom_property_values[field_name]
                #print("field name", field_name, "value", custom_prop_value)
                propGroup_value = getattr(property_group, field_name)
                is_property_group = isinstance(propGroup_value, PropertyGroup)
                child_property_group = propGroup_value if is_property_group else None
                if item_definition != None:
                    custom_prop_value = property_group_value_from_custom_property_value(child_property_group, item_definition, registry, value=custom_prop_value, nesting=nesting)
                else:
                    custom_prop_value = custom_prop_value

                # only leaves are written back directly; nested groups were
                # already filled in by the recursive call above
                if is_def_value_type(item_definition, registry):
                    setattr(property_group , field_name, custom_prop_value)


        else:
            if len(value) > 2: #a unit struct should be two chars long :()
                #print("struct with zero fields")
                raise Exception("input string too big for a unit struct")

    elif type_info == "Tuple":
        # NOTE(review): both arms of this conditional are 1 — looks like a leftover; TODO confirm intent
        custom_property_values = parse_tuplestruct_string(value, start_nesting=1 if len(nesting) == 1 else 1)

        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            custom_property_value = custom_property_values[index]

            propGroup_value = getattr(property_group, field_name)
            is_property_group = isinstance(propGroup_value, PropertyGroup)
            child_property_group = propGroup_value if is_property_group else None
            if item_definition != None:
                custom_property_value = property_group_value_from_custom_property_value(child_property_group, item_definition, registry, value=custom_property_value, nesting=nesting)
            if is_def_value_type(item_definition, registry):
                setattr(property_group , field_name, custom_property_value)

    elif type_info == "TupleStruct":
        # outermost tuple-struct strings carry one extra level of parens
        custom_property_values = parse_tuplestruct_string(value, start_nesting=1 if len(nesting) == 1 else 0)
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            custom_prop_value = custom_property_values[index]

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                custom_prop_value = property_group_value_from_custom_property_value(child_property_group, item_definition, registry, value=custom_prop_value, nesting=nesting)

            if is_def_value_type(item_definition, registry):
                setattr(property_group , field_name, custom_prop_value)

    elif type_info == "Enum":
        field_names = property_group.field_names
        if type_def == "object":
            # split "Variant(payload)" into variant name (group 1) and payload (group 3)
            regexp = re.search('(^[^\(]+)(\((.*)\))', value)
            try:
                chosen_variant_raw = regexp.group(1)
                chosen_variant_value = regexp.group(3)
                chosen_variant_name = "variant_" + chosen_variant_raw
            except:
                # no parens: the whole string is the (payload-less) variant name
                chosen_variant_raw = value
                chosen_variant_value = ""
                chosen_variant_name = "variant_" + chosen_variant_raw
            selection_index = property_group.field_names.index(chosen_variant_name)
            variant_definition = definition["oneOf"][selection_index-1]
            # first we set WHAT variant is selected
            setattr(property_group, "selection", chosen_variant_raw)

            # and then we set the value of the variant
            if "prefixItems" in variant_definition:
                value = getattr(property_group, chosen_variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                chosen_variant_value = "(" +chosen_variant_value +")" # needed to handle nesting correctly
                value = property_group_value_from_custom_property_value(child_property_group, variant_definition, registry, value=chosen_variant_value, nesting=nesting)

            elif "properties" in variant_definition:
                value = getattr(property_group, chosen_variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = property_group_value_from_custom_property_value(child_property_group, variant_definition, registry, value=chosen_variant_value, nesting=nesting)

        else:
            chosen_variant_raw = value
            setattr(property_group, field_names[0], chosen_variant_raw)

    elif type_info == "List":
        item_list = getattr(property_group, "list")
        item_long_name = getattr(property_group, "long_name")
        custom_property_values = parse_tuplestruct_string(value, start_nesting=2 if item_long_name.startswith("wrapper_") and value.startswith('(') else 1) # TODO : the additional check here is wrong, there is an issue somewhere in higher level stuff
        # clear list first
        item_list.clear()
        for raw_value in custom_property_values:
            new_entry = item_list.add()
            item_long_name = getattr(new_entry, "long_name") # we get the REAL type name
            definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            if definition != None:
                property_group_value_from_custom_property_value(new_entry, definition, registry, value=raw_value, nesting=nesting)
    else:
        # fallback: best-effort leaf parse; errors are deliberately swallowed
        try:
            value = value.replace("(", "").replace(")", "")# FIXME: temporary, incoherent use of nesting levels between parse_tuplestruct_string & parse_struct_string
            value = type_mappings[long_name](value) if long_name in type_mappings else value
            return value
        except:
            pass
if is_enum: + __annotations__ = __annotations__ | process_enum.process_enum(registry, definition, update, nesting, nesting_long_names) + with_enum = True + + if is_list: + __annotations__ = __annotations__ | process_list.process_list(registry, definition, update, nesting, nesting_long_names) + with_list= True + + if is_map: + __annotations__ = __annotations__ | process_map.process_map(registry, definition, update, nesting, nesting_long_names) + with_map = True + + field_names = [] + for a in __annotations__: + field_names.append(a) + + + extras = extras if extras is not None else { + "long_name": long_name + } + root_component = nesting_long_names[0] if len(nesting_long_names) > 0 else long_name + # print("") + property_group_params = { + **extras, + '__annotations__': __annotations__, + 'tupple_or_struct': tupple_or_struct, + 'field_names': field_names, + **dict(with_properties = with_properties, with_items= with_items, with_enum= with_enum, with_list= with_list, with_map = with_map, short_name= short_name, long_name=long_name), + 'root_component': root_component + } + #FIXME: YIKES, but have not found another way: + """ Withouth this ; the following does not work + -BasicTest + - NestingTestLevel2 + -BasicTest => the registration & update callback of this one overwrites the first "basicTest" + have not found a cleaner workaround so far + """ + property_group_name = registry.generate_propGroup_name(nesting, long_name) + (property_group_pointer, property_group_class) = property_group_from_infos(property_group_name, property_group_params) + # add our component propertyGroup to the registry + registry.register_component_propertyGroup(property_group_name, property_group_pointer) + + return (property_group_pointer, property_group_class) + +def property_group_from_infos(property_group_name, property_group_parameters): + # print("creating property group", property_group_name) + property_group_class = type(property_group_name, (PropertyGroup,), property_group_parameters) 
def process_enum(registry, definition, update, nesting, nesting_long_names):
    """Build the Blender property annotations for a Bevy enum type.

    Always emits a ``"selection"`` EnumProperty listing the variant names.
    For complex enums (``type_def == "object"``) each tuple/struct variant also
    gets its own nested ``variant_<name>`` property group so its payload can be
    edited; payload-less variants get a placeholder StringProperty.

    :param registry: components registry (blender_property_mapping, custom types)
    :param definition: registry schema entry for the enum (``oneOf`` holds variants)
    :param update: update callback wired into every generated property
    :param nesting: ancestor short names
    :param nesting_long_names: ancestor long names (root is used for error flagging)
    :return: dict of annotations to splice into the generated PropertyGroup class
    """
    blender_property_mapping = registry.blender_property_mapping
    short_name = definition["short_name"]
    long_name = definition["long_name"]

    type_def = definition["type"] if "type" in definition else None
    variants = definition["oneOf"]

    nesting = nesting + [short_name]
    # FIX: was `nesting_long_names = nesting_long_names = [long_name]`, which
    # discarded the ancestor chain; append, as every other process_* helper does
    nesting_long_names = nesting_long_names + [long_name]

    __annotations__ = {}
    original_type_name = "enum"

    # print("processing enum", short_name, long_name, definition)

    if type_def == "object":
        labels = []
        additional_annotations = {}
        for variant in variants:
            variant_name = variant["long_name"]
            variant_prefixed_name = "variant_" + variant_name
            labels.append(variant_name)

            if "prefixItems" in variant:
                # tuple variant: generate a nested property group for its payload
                registry.add_custom_type(variant_name, variant)
                (sub_component_group, _) = process_component.process_component(registry, variant, update, {"nested": True}, nesting, nesting_long_names)
                additional_annotations[variant_prefixed_name] = sub_component_group
            elif "properties" in variant:
                # struct variant: same treatment as a tuple variant
                registry.add_custom_type(variant_name, variant)
                (sub_component_group, _) = process_component.process_component(registry, variant, update, {"nested": True}, nesting, nesting_long_names)
                additional_annotations[variant_prefixed_name] = sub_component_group
            else: # for the cases where it's neither a tupple nor a structs: FIXME: not 100% sure of this
                annotations = {"variant_"+variant_name: StringProperty(default="--------")}
                additional_annotations = additional_annotations | annotations

        items = tuple((e, e, e) for e in labels)

        blender_property_def = blender_property_mapping[original_type_name]
        blender_property = blender_property_def["type"](
            **blender_property_def["presets"],# we inject presets first
            items=items, # this is needed by Blender's EnumProperty , which we are using here
            update= update
        )
        __annotations__["selection"] = blender_property

        for a in additional_annotations:
            __annotations__[a] = additional_annotations[a]
        # enum_value => what field to display
        # a second field + property for the "content" of the enum
    else:
        # simple enum: variants are plain strings, a single EnumProperty suffices
        items = tuple((e, e, "") for e in variants)
        blender_property_def = blender_property_mapping[original_type_name]
        blender_property = blender_property_def["type"](
            **blender_property_def["presets"],# we inject presets first
            items=items,
            update= update
        )
        __annotations__["selection"] = blender_property

    return __annotations__
def process_list(registry, definition, update, nesting=None, nesting_long_names=None):
    """Build the Blender property annotations for a Bevy ``Vec``-like list type.

    Emits a ``"list"`` CollectionProperty of the item type, a ``"list_index"``
    for UI selection, and a ``"long_name"`` recording the item type name
    (prefixed with ``wrapper_`` when a fake wrapper group was generated).

    :param registry: components registry (type_infos, value_types_defaults)
    :param definition: registry schema entry; ``items.type.$ref`` names the item type
    :param update: update callback wired into the generated properties
    :param nesting: ancestor short names (default: root level)
    :param nesting_long_names: ancestor long names (default: root level)
    :return: dict of annotations to splice into the generated PropertyGroup class
    """
    # FIX: avoid mutable default arguments
    nesting = [] if nesting is None else nesting
    nesting_long_names = [] if nesting_long_names is None else nesting_long_names

    value_types_defaults = registry.value_types_defaults
    type_infos = registry.type_infos

    short_name = definition["short_name"]
    long_name = definition["long_name"]
    ref_name = definition["items"]["type"]["$ref"].replace("#/$defs/", "")

    nesting = nesting+[short_name]
    nesting_long_names = nesting_long_names + [long_name]

    item_definition = type_infos[ref_name]
    item_long_name = item_definition["long_name"]
    is_item_value_type = item_long_name in value_types_defaults

    property_group_class = None
    #if the content of the list is a unit type, we need to generate a fake wrapper, otherwise we cannot use layout.prop(group, "propertyName") as there is no propertyName !
    if is_item_value_type:
        property_group_class = generate_wrapper_propertyGroup(long_name, item_long_name, definition["items"]["type"]["$ref"], registry, update)
    else:
        # FIX: also forward nesting_long_names (as process_map/process_structs do)
        # so invalid-component tracking points at the real root component
        (_, list_content_group_class) = process_component.process_component(registry, item_definition, update, {"nested": True, "long_name": item_long_name}, nesting, nesting_long_names)
        property_group_class = list_content_group_class

    item_collection = CollectionProperty(type=property_group_class)

    item_long_name = item_long_name if not is_item_value_type else "wrapper_" + item_long_name
    __annotations__ = {
        "list": item_collection,
        "list_index": IntProperty(name = "Index for list", default = 0, update=update),
        "long_name": StringProperty(default=item_long_name)
    }

    return __annotations__
def process_map(registry, definition, update, nesting=[], nesting_long_names=[]):
    """Build the Blender property annotations for a Bevy ``HashMap``-like type.

    Emits parallel collections: ``"list"`` for keys and ``"values_list"`` for
    values (plus ``*_setter`` pointer groups used by the UI to stage a new
    entry). Key/value types that are plain value types get a generated wrapper
    group so they can be displayed with ``layout.prop``.

    If either the key or the value type is missing from the registry, the
    annotations fall back to a placeholder and the root component is flagged
    invalid.

    NOTE(review): ``nesting``/``nesting_long_names`` use mutable default
    arguments — safe here because they are only rebound, never mutated.
    """
    value_types_defaults = registry.value_types_defaults
    type_infos = registry.type_infos

    short_name = definition["short_name"]
    long_name = definition["long_name"]

    nesting = nesting + [short_name]
    nesting_long_names = nesting_long_names + [long_name]

    value_ref_name = definition["valueType"]["type"]["$ref"].replace("#/$defs/", "")
    key_ref_name = definition["keyType"]["type"]["$ref"].replace("#/$defs/", "")

    #print("definition", definition)
    __annotations__ = {}
    if key_ref_name in type_infos:
        key_definition = type_infos[key_ref_name]
        original_long_name = key_definition["long_name"]
        is_key_value_type = original_long_name in value_types_defaults
        definition_link = definition["keyType"]["type"]["$ref"]

        #if the content of the list is a unit type, we need to generate a fake wrapper, otherwise we cannot use layout.prop(group, "propertyName") as there is no propertyName !
        if is_key_value_type:
            keys_property_group_class = generate_wrapper_propertyGroup(f"{long_name}_keys", original_long_name, definition_link, registry, update)
        else:
            (_, list_content_group_class) = process_component.process_component(registry, key_definition, update, {"nested": True, "long_name": original_long_name}, nesting, nesting_long_names)
            keys_property_group_class = list_content_group_class

        keys_collection = CollectionProperty(type=keys_property_group_class)
        keys_property_group_pointer = PointerProperty(type=keys_property_group_class)
    else:
        # unknown key type: placeholder annotation + flag the root component invalid
        __annotations__["list"] = StringProperty(default="N/A")
        registry.add_missing_typeInfo(key_ref_name)
        # the root component also becomes invalid (in practice it is not always a component, but good enough)
        registry.add_invalid_component(nesting_long_names[0])

    if value_ref_name in type_infos:
        value_definition = type_infos[value_ref_name]
        original_long_name = value_definition["long_name"]
        is_value_value_type = original_long_name in value_types_defaults
        definition_link = definition["valueType"]["type"]["$ref"]

        #if the content of the list is a unit type, we need to generate a fake wrapper, otherwise we cannot use layout.prop(group, "propertyName") as there is no propertyName !
        if is_value_value_type:
            values_property_group_class = generate_wrapper_propertyGroup(f"{long_name}_values", original_long_name, definition_link, registry, update)
        else:
            (_, list_content_group_class) = process_component.process_component(registry, value_definition, update, {"nested": True, "long_name": original_long_name}, nesting, nesting_long_names)
            values_property_group_class = list_content_group_class

        values_collection = CollectionProperty(type=values_property_group_class)
        values_property_group_pointer = PointerProperty(type=values_property_group_class)

    else:
        # NOTE(review): no placeholder is set for a missing value type (the line
        # below is commented out) — only the missing-type/invalid flags are raised
        #__annotations__["list"] = StringProperty(default="N/A")
        registry.add_missing_typeInfo(value_ref_name)
        # the root component also becomes invalid (in practice it is not always a component, but good enough)
        registry.add_invalid_component(nesting_long_names[0])


    # only assemble the full annotation set when BOTH key and value types resolved
    if key_ref_name in type_infos and value_ref_name in type_infos:
        __annotations__ = {
            "list": keys_collection,
            "list_index": IntProperty(name = "Index for keys", default = 0, update=update),
            "keys_setter":keys_property_group_pointer,

            "values_list": values_collection,
            "values_list_index": IntProperty(name = "Index for values", default = 0, update=update),
            "values_setter":values_property_group_pointer,
        }

    """__annotations__["list"] = StringProperty(default="N/A")
    __annotations__["values_list"] = StringProperty(default="N/A")
    __annotations__["keys_setter"] = StringProperty(default="N/A")"""

    """registry.add_missing_typeInfo(key_ref_name)
    registry.add_missing_typeInfo(value_ref_name)
    # the root component also becomes invalid (in practice it is not always a component, but good enough)
    registry.add_invalid_component(nesting_long_names[0])
    print("setting invalid flag for", nesting_long_names[0])"""

    return __annotations__
def process_structs(registry, definition, properties, update, nesting, nesting_long_names):
    """Build the Blender property annotations for a Bevy struct type.

    Each schema field becomes either a concrete Blender property (for known
    value types with a mapping entry), a nested sub property group (for
    compound types), or an "N/A" StringProperty placeholder when the field's
    type is unknown — in which case the missing type is recorded and the root
    component is flagged invalid.

    :param registry: components registry (type_infos, defaults, property mapping)
    :param definition: registry schema entry for the struct
    :param properties: the struct's field schemas, keyed by field name
    :param update: update callback wired into every generated property
    :return: dict of annotations to splice into the generated PropertyGroup class
    """
    defaults_by_type = registry.value_types_defaults
    prop_mapping = registry.blender_property_mapping
    known_types = registry.type_infos

    nesting = nesting + [definition["short_name"]]
    nesting_long_names = nesting_long_names + [definition["long_name"]]

    __annotations__ = {}
    default_values = {}

    for field_name, field_schema in properties.items():
        ref_name = field_schema["type"]["$ref"].replace("#/$defs/", "")

        if ref_name not in known_types:
            # unknown field type: placeholder + bookkeeping so the UI can report it
            __annotations__[field_name] = StringProperty(default="N/A")
            registry.add_missing_typeInfo(ref_name)
            # the root component also becomes invalid (in practice it is not always a component, but good enough)
            registry.add_invalid_component(nesting_long_names[0])
            continue

        field_type = known_types[ref_name]
        field_long_name = field_type["long_name"]

        if field_long_name in defaults_by_type:
            # leaf value type: map straight to a Blender property (presets first)
            default = defaults_by_type[field_long_name]
            default_values[field_name] = default
            if field_long_name in prop_mapping:
                prop_def = prop_mapping[field_long_name]
                __annotations__[field_name] = prop_def["type"](
                    **prop_def["presets"],
                    name=field_name,
                    default=default,
                    update=update,
                )
        else:
            # compound type: recurse and attach the generated sub property group
            default_values[field_name] = None
            (sub_group, _) = process_component.process_component(
                registry,
                field_type,
                update,
                {"nested": True, "long_name": field_long_name},
                nesting,
                nesting_long_names,
            )
            __annotations__[field_name] = sub_group

    return __annotations__
blender_property = blender_property_def["type"]( + **blender_property_def["presets"],# we inject presets first + name = property_name, + default=value, + update= update + ) + + __annotations__[property_name] = blender_property + else: + original_long_name = original["long_name"] + (sub_component_group, _) = process_component.process_component(registry, original, update, {"nested": True, "long_name": original_long_name}, nesting) + __annotations__[property_name] = sub_component_group + else: + # component not found in type_infos, generating placeholder + __annotations__[property_name] = StringProperty(default="N/A") + registry.add_missing_typeInfo(ref_name) + # the root component also becomes invalid (in practice it is not always a component, but good enough) + registry.add_invalid_component(nesting_long_names[0]) + + + return __annotations__ + diff --git a/tools/blenvy/bevy_components/propGroups/prop_groups.py b/tools/blenvy/bevy_components/propGroups/prop_groups.py new file mode 100644 index 0000000..349407c --- /dev/null +++ b/tools/blenvy/bevy_components/propGroups/prop_groups.py @@ -0,0 +1,44 @@ +import bpy +from .conversions_from_prop_group import property_group_value_to_custom_property_value +from .process_component import process_component +from .utils import update_calback_helper + +import json +## main callback function, fired whenever any property changes, no matter the nesting level +def update_component(self, context, definition, component_name): + registry = bpy.context.window_manager.components_registry + current_object = bpy.context.object + update_disabled = current_object["__disable__update"] if "__disable__update" in current_object else False + update_disabled = registry.disable_all_object_updates or update_disabled # global settings + if update_disabled: + return + print("") + print("update in component", component_name, self, "current_object", current_object.name) + components_in_object = current_object.components_meta.components + 
component_meta = next(filter(lambda component: component["long_name"] == component_name, components_in_object), None) + + if component_meta != None: + property_group_name = registry.get_propertyGroupName_from_longName(component_name) + property_group = getattr(component_meta, property_group_name) + # we use our helper to set the values + object = context.object + previous = json.loads(object['bevy_components']) + previous[component_name] = property_group_value_to_custom_property_value(property_group, definition, registry, None) + object['bevy_components'] = json.dumps(previous) + + +def generate_propertyGroups_for_components(): + registry = bpy.context.window_manager.components_registry + if not registry.has_type_infos(): + registry.load_type_infos() + + type_infos = registry.type_infos + + for component_name in type_infos: + definition = type_infos[component_name] + is_component = definition['isComponent'] if "isComponent" in definition else False + root_property_name = component_name if is_component else None + process_component(registry, definition, update_calback_helper(definition, update_component, root_property_name), None, []) + + # if we had to add any wrapper types on the fly, process them now + registry.process_custom_types() \ No newline at end of file diff --git a/tools/blenvy/bevy_components/propGroups/utils.py b/tools/blenvy/bevy_components/propGroups/utils.py new file mode 100644 index 0000000..0b50dd4 --- /dev/null +++ b/tools/blenvy/bevy_components/propGroups/utils.py @@ -0,0 +1,63 @@ +# helper function that returns a lambda, used for the PropertyGroups update function +def update_calback_helper(definition, update, component_name_override): + return lambda self, context: update(self, context, definition, component_name_override) + +import bpy +from bpy.props import (StringProperty) +from bpy_types import PropertyGroup + +# this helper creates a "fake"/wrapper property group that is NOT a real type in the registry +# usefull for things like value 
types in list items etc +def generate_wrapper_propertyGroup(wrapped_type_long_name_name, item_long_name, definition_link, registry, update): + value_types_defaults = registry.value_types_defaults + blender_property_mapping = registry.blender_property_mapping + is_item_value_type = item_long_name in value_types_defaults + + wrapper_name = "wrapper_" + wrapped_type_long_name_name + + wrapper_definition = { + "isComponent": False, + "isResource": False, + "items": False, + "prefixItems": [ + { + "type": { + "$ref": definition_link + } + } + ], + "short_name": wrapper_name, # FIXME !!! + "long_name": wrapper_name, + "type": "array", + "typeInfo": "TupleStruct" + } + + # we generate a very small 'hash' for the component name + property_group_name = registry.generate_propGroup_name(nesting=[], longName=wrapper_name) + registry.add_custom_type(wrapper_name, wrapper_definition) + + + blender_property = StringProperty(default="", update=update) + if item_long_name in blender_property_mapping: + value = value_types_defaults[item_long_name] if is_item_value_type else None + blender_property_def = blender_property_mapping[item_long_name] + blender_property = blender_property_def["type"]( + **blender_property_def["presets"],# we inject presets first + name = "property_name", + default = value, + update = update + ) + + wrapper_annotations = { + '0' : blender_property + } + property_group_params = { + '__annotations__': wrapper_annotations, + 'tupple_or_struct': "tupple", + 'field_names': ['0'], + **dict(with_properties = False, with_items= True, with_enum= False, with_list= False, with_map =False, short_name=wrapper_name, long_name=wrapper_name), + } + property_group_class = type(property_group_name, (PropertyGroup,), property_group_params) + bpy.utils.register_class(property_group_class) + + return property_group_class \ No newline at end of file diff --git a/tools/blenvy/bevy_components/registry/__init__.py b/tools/blenvy/bevy_components/registry/__init__.py new file mode 
100644 index 0000000..e69de29 diff --git a/tools/blenvy/bevy_components/registry/operators.py b/tools/blenvy/bevy_components/registry/operators.py new file mode 100644 index 0000000..9c5700d --- /dev/null +++ b/tools/blenvy/bevy_components/registry/operators.py @@ -0,0 +1,237 @@ +import os +import bpy +from bpy_types import (Operator) +from bpy.props import (StringProperty) +from bpy_extras.io_utils import ImportHelper + +from ..helpers import upsert_settings +from ..components.metadata import apply_customProperty_values_to_object_propertyGroups, apply_propertyGroup_values_to_object_customProperties, ensure_metadata_for_all_objects +from ..propGroups.prop_groups import generate_propertyGroups_for_components + +class ReloadRegistryOperator(Operator): + """Reloads registry (schema file) from disk, generates propertyGroups for components & ensures all objects have metadata """ + bl_idname = "object.reload_registry" + bl_label = "Reload Registry" + bl_options = {"UNDO"} + + component_type: StringProperty( + name="component_type", + description="component type to add", + ) # type: ignore + + def execute(self, context): + print("reload registry") + context.window_manager.components_registry.load_schema() + generate_propertyGroups_for_components() + print("") + print("") + print("") + ensure_metadata_for_all_objects() + + # now force refresh the ui + for area in context.screen.areas: + for region in area.regions: + if region.type == "UI": + region.tag_redraw() + + return {'FINISHED'} + +class COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL(Operator): + """Apply registry to ALL objects: update the custom property values of all objects based on their definition, if any""" + bl_idname = "object.refresh_custom_properties_all" + bl_label = "Apply Registry to all objects" + bl_options = {"UNDO"} + + @classmethod + def register(cls): + bpy.types.WindowManager.custom_properties_from_components_progress_all = bpy.props.FloatProperty(default=-1.0) 
#bpy.props.PointerProperty(type=RenameHelper) + + @classmethod + def unregister(cls): + del bpy.types.WindowManager.custom_properties_from_components_progress_all + + def execute(self, context): + print("apply registry to all") + #context.window_manager.components_registry.load_schema() + total = len(bpy.data.objects) + + for index, object in enumerate(bpy.data.objects): + apply_propertyGroup_values_to_object_customProperties(object) + progress = index / total + context.window_manager.custom_properties_from_components_progress_all = progress + # now force refresh the ui + bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1) + context.window_manager.custom_properties_from_components_progress_all = -1.0 + + return {'FINISHED'} + +class COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT(Operator): + """Apply registry to CURRENT object: update the custom property values of current object based on their definition, if any""" + bl_idname = "object.refresh_custom_properties_current" + bl_label = "Apply Registry to current object" + bl_options = {"UNDO"} + + @classmethod + def register(cls): + bpy.types.WindowManager.custom_properties_from_components_progress = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper) + + @classmethod + def unregister(cls): + del bpy.types.WindowManager.custom_properties_from_components_progress + + def execute(self, context): + print("apply registry to current object") + object = context.object + context.window_manager.custom_properties_from_components_progress = 0.5 + # now force refresh the ui + bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1) + apply_propertyGroup_values_to_object_customProperties(object) + + context.window_manager.custom_properties_from_components_progress = -1.0 + return {'FINISHED'} + + +class COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT(Operator): + """Update UI values from custom properties to CURRENT object""" + bl_idname = 
"object.refresh_ui_from_custom_properties_current" + bl_label = "Apply custom_properties to current object" + bl_options = {"UNDO"} + + @classmethod + def register(cls): + bpy.types.WindowManager.components_from_custom_properties_progress = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper) + + @classmethod + def unregister(cls): + del bpy.types.WindowManager.components_from_custom_properties_progress + + def execute(self, context): + print("apply custom properties to current object") + object = context.object + error = False + try: + apply_customProperty_values_to_object_propertyGroups(object) + progress = 0.5 + context.window_manager.components_from_custom_properties_progress = progress + try: + # now force refresh the ui + bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1) + except:pass # ony run in ui + + except Exception as error_message: + del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure + error = True + self.report({'ERROR'}, "Failed to update propertyGroup values from custom property: Error:" + str(error_message)) + if not error: + self.report({'INFO'}, "Sucessfully generated UI values for custom properties for selected object") + context.window_manager.components_from_custom_properties_progress = -1.0 + + return {'FINISHED'} + + +class COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL(Operator): + """Update UI values from custom properties to ALL object""" + bl_idname = "object.refresh_ui_from_custom_properties_all" + bl_label = "Apply custom_properties to all objects" + bl_options = {"UNDO"} + + @classmethod + def register(cls): + bpy.types.WindowManager.components_from_custom_properties_progress_all = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper) + + @classmethod + def unregister(cls): + del bpy.types.WindowManager.components_from_custom_properties_progress_all + + def execute(self, context): + 
print("apply custom properties to all object") + bpy.context.window_manager.components_registry.disable_all_object_updates = True + errors = [] + total = len(bpy.data.objects) + + for index, object in enumerate(bpy.data.objects): + + try: + apply_customProperty_values_to_object_propertyGroups(object) + except Exception as error: + del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure + errors.append( "object: '" + object.name + "', error: " + str(error)) + + progress = index / total + context.window_manager.components_from_custom_properties_progress_all = progress + # now force refresh the ui + bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1) + + + + if len(errors) > 0: + self.report({'ERROR'}, "Failed to update propertyGroup values from custom property: Errors:" + str(errors)) + else: + self.report({'INFO'}, "Sucessfully generated UI values for custom properties for all objects") + bpy.context.window_manager.components_registry.disable_all_object_updates = False + context.window_manager.components_from_custom_properties_progress_all = -1.0 + return {'FINISHED'} + +class OT_OpenFilebrowser(Operator, ImportHelper): + """Browse for registry json file""" + bl_idname = "generic.open_filebrowser" + bl_label = "Open the file browser" + + filter_glob: StringProperty( + default='*.json', + options={'HIDDEN'} + ) # type: ignore + + def execute(self, context): + """Do something with the selected file(s).""" + #filename, extension = os.path.splitext(self.filepath) + file_path = bpy.data.filepath + # Get the folder + folder_path = os.path.dirname(file_path) + relative_path = os.path.relpath(self.filepath, folder_path) + + registry = context.window_manager.components_registry + registry.schemaPath = relative_path + + upsert_settings(registry.settings_save_path, {"schemaPath": relative_path}) + + return {'FINISHED'} + + +class OT_select_object(Operator): + """Select object by name""" + bl_idname = 
"object.select" + bl_label = "Select object" + bl_options = {"UNDO"} + + object_name: StringProperty( + name="object_name", + description="object to select's name ", + ) # type: ignore + + def execute(self, context): + if self.object_name: + object = bpy.data.objects[self.object_name] + scenes_of_object = list(object.users_scene) + if len(scenes_of_object) > 0: + bpy.ops.object.select_all(action='DESELECT') + bpy.context.window.scene = scenes_of_object[0] + object.select_set(True) + bpy.context.view_layer.objects.active = object + return {'FINISHED'} + +class OT_select_component_name_to_replace(Operator): + """Select component name to replace""" + bl_idname = "object.select_component_name_to_replace" + bl_label = "Select component name for bulk replace" + bl_options = {"UNDO"} + + component_name: StringProperty( + name="component_name", + description="component name to replace", + ) # type: ignore + + def execute(self, context): + context.window_manager.bevy_component_rename_helper.original_name = self.component_name + return {'FINISHED'} + \ No newline at end of file diff --git a/tools/blenvy/bevy_components/registry/registry.py b/tools/blenvy/bevy_components/registry/registry.py new file mode 100644 index 0000000..d9e6681 --- /dev/null +++ b/tools/blenvy/bevy_components/registry/registry.py @@ -0,0 +1,363 @@ +import bpy +import json +import os +import uuid +from pathlib import Path +from bpy_types import (PropertyGroup) +from bpy.props import (StringProperty, BoolProperty, FloatProperty, FloatVectorProperty, IntProperty, IntVectorProperty, EnumProperty, PointerProperty, CollectionProperty) + +from ..helpers import load_settings +from ..propGroups.prop_groups import generate_propertyGroups_for_components +from ..components.metadata import ComponentMetadata, ensure_metadata_for_all_objects + +# helper class to store missing bevy types information +class MissingBevyType(bpy.types.PropertyGroup): + long_name: bpy.props.StringProperty( + name="type", + ) # type: 
ignore + +# helper function to deal with timer +def toggle_watcher(self, context): + #print("toggling watcher", self.watcher_enabled, watch_schema, self, bpy.app.timers) + if not self.watcher_enabled: + try: + bpy.app.timers.unregister(watch_schema) + except Exception as error: + pass + else: + self.watcher_active = True + bpy.app.timers.register(watch_schema) + +def watch_schema(): + self = bpy.context.window_manager.components_registry + # print("watching schema file for changes") + try: + stamp = os.stat(self.schemaFullPath).st_mtime + stamp = str(stamp) + if stamp != self.schemaTimeStamp and self.schemaTimeStamp != "": + print("FILE CHANGED !!", stamp, self.schemaTimeStamp) + # see here for better ways : https://stackoverflow.com/questions/11114492/check-if-a-file-is-not-open-nor-being-used-by-another-process + """try: + os.rename(path, path) + #return False + except OSError: # file is in use + print("in use") + #return True""" + #bpy.ops.object.reload_registry() + # we need to add an additional delay as the file might not have loaded yet + bpy.app.timers.register(lambda: bpy.ops.object.reload_registry(), first_interval=1) + + self.schemaTimeStamp = stamp + except Exception as error: + pass + return self.watcher_poll_frequency if self.watcher_enabled else None + + +# this is where we store the information for all available components +class ComponentsRegistry(PropertyGroup): + + settings_save_path = ".bevy_components_settings" # where to store data in bpy.texts + + schemaPath: bpy.props.StringProperty( + name="schema path", + description="path to the registry schema file", + default="registry.json" + )# type: ignore + schemaFullPath : bpy.props.StringProperty( + name="schema full path", + description="path to the registry schema file", + )# type: ignore + + registry: bpy.props. 
StringProperty( + name="registry", + description="component registry" + )# type: ignore + + missing_type_infos: StringProperty( + name="missing type infos", + description="unregistered/missing type infos" + )# type: ignore + + disable_all_object_updates: BoolProperty(name="disable_object_updates", default=False) # type: ignore + + ## file watcher + watcher_enabled: BoolProperty(name="Watcher_enabled", default=True, update=toggle_watcher)# type: ignore + watcher_active: BoolProperty(name = "Flag for watcher status", default = False)# type: ignore + + watcher_poll_frequency: IntProperty( + name="watcher poll frequency", + description="frequency (s) at wich to poll for changes to the registry file", + min=1, + max=10, + default=1 + )# type: ignore + + schemaTimeStamp: StringProperty( + name="last timestamp of schema file", + description="", + default="" + )# type: ignore + + + missing_types_list: CollectionProperty(name="missing types list", type=MissingBevyType)# type: ignore + missing_types_list_index: IntProperty(name = "Index for missing types list", default = 0)# type: ignore + + blender_property_mapping = { + "bool": dict(type=BoolProperty, presets=dict()), + + "u8": dict(type=IntProperty, presets=dict(min=0, max=255)), + "u16": dict(type=IntProperty, presets=dict(min=0, max=65535)), + "u32": dict(type=IntProperty, presets=dict(min=0)), + "u64": dict(type=IntProperty, presets=dict(min=0)), + "u128": dict(type=IntProperty, presets=dict(min=0)), + "u64": dict(type=IntProperty, presets=dict(min=0)), + "usize": dict(type=IntProperty, presets=dict(min=0)), + + "i8": dict(type=IntProperty, presets=dict()), + "i16":dict(type=IntProperty, presets=dict()), + "i32":dict(type=IntProperty, presets=dict()), + "i64":dict(type=IntProperty, presets=dict()), + "i128":dict(type=IntProperty, presets=dict()), + "isize": dict(type=IntProperty, presets=dict()), + + "f32": dict(type=FloatProperty, presets=dict()), + "f64": dict(type=FloatProperty, presets=dict()), + + "glam::Vec2": 
{"type": FloatVectorProperty, "presets": dict(size = 2) }, + "glam::DVec2": {"type": FloatVectorProperty, "presets": dict(size = 2) }, + "glam::UVec2": {"type": FloatVectorProperty, "presets": dict(size = 2) }, + + "glam::Vec3": {"type": FloatVectorProperty, "presets": {"size":3} }, + "glam::Vec3A":{"type": FloatVectorProperty, "presets": {"size":3} }, + "glam::DVec3":{"type": FloatVectorProperty, "presets": {"size":3} }, + "glam::UVec3":{"type": FloatVectorProperty, "presets": {"size":3} }, + + "glam::Vec4": {"type": FloatVectorProperty, "presets": {"size":4} }, + "glam::Vec4A": {"type": FloatVectorProperty, "presets": {"size":4} }, + "glam::DVec4": {"type": FloatVectorProperty, "presets": {"size":4} }, + "glam::UVec4":{"type": FloatVectorProperty, "presets": {"size":4, "min":0.0} }, + + "glam::Quat": {"type": FloatVectorProperty, "presets": {"size":4} }, + + "bevy_render::color::Color": dict(type = FloatVectorProperty, presets=dict(subtype='COLOR', size=4)), + + "char": dict(type=StringProperty, presets=dict()), + "str": dict(type=StringProperty, presets=dict()), + "alloc::string::String": dict(type=StringProperty, presets=dict()), + "alloc::borrow::Cow": dict(type=StringProperty, presets=dict()), + + + "enum": dict(type=EnumProperty, presets=dict()), + + 'bevy_ecs::entity::Entity': {"type": IntProperty, "presets": {"min":0} }, + 'bevy_utils::Uuid': dict(type=StringProperty, presets=dict()), + + } + + + value_types_defaults = { + "string":" ", + "boolean": True, + "float": 0.0, + "uint": 0, + "int":0, + + # todo : we are re-doing the work of the bevy /rust side here, but it seems more pratical to alway look for the same field name on the blender side for matches + "bool": True, + + "u8": 0, + "u16":0, + "u32":0, + "u64":0, + "u128":0, + "usize":0, + + "i8": 0, + "i16":0, + "i32":0, + "i64":0, + "i128":0, + "isize":0, + + "f32": 0.0, + "f64":0.0, + + "char": " ", + "str": " ", + "alloc::string::String": " ", + "alloc::borrow::Cow": " ", + + "glam::Vec2": [0.0, 
0.0], + "glam::DVec2": [0.0, 0.0], + "glam::UVec2": [0, 0], + + "glam::Vec3": [0.0, 0.0, 0.0], + "glam::Vec3A":[0.0, 0.0, 0.0], + "glam::UVec3": [0, 0, 0], + + "glam::Vec4": [0.0, 0.0, 0.0, 0.0], + "glam::DVec4": [0.0, 0.0, 0.0, 0.0], + "glam::UVec4": [0, 0, 0, 0], + + "glam::Quat": [0.0, 0.0, 0.0, 0.0], + + "bevy_render::color::Color": [1.0, 1.0, 0.0, 1.0], + + 'bevy_ecs::entity::Entity': 0,#4294967295, # this is the same as Bevy's Entity::Placeholder, too big for Blender..sigh + 'bevy_utils::Uuid': '"'+str(uuid.uuid4())+'"' + + } + + type_infos = {} + type_infos_missing = [] + component_propertyGroups = {} + custom_types_to_add = {} + invalid_components = [] + + @classmethod + def register(cls): + bpy.types.WindowManager.components_registry = PointerProperty(type=ComponentsRegistry) + bpy.context.window_manager.components_registry.watcher_active = False + + @classmethod + def unregister(cls): + bpy.context.window_manager.components_registry.watcher_active = False + + for propgroup_name in cls.component_propertyGroups.keys(): + try: + delattr(ComponentMetadata, propgroup_name) + #print("unregistered propertyGroup", propgroup_name) + except Exception as error: + pass + #print("failed to remove", error, "ComponentMetadata") + + try: + bpy.app.timers.unregister(watch_schema) + except Exception as error: + pass + + del bpy.types.WindowManager.components_registry + + def load_schema(self): + print("load schema", self) + # cleanup previous data if any + self.propGroupIdCounter = 0 + self.long_names_to_propgroup_names.clear() + self.missing_types_list.clear() + self.type_infos.clear() + self.type_infos_missing.clear() + self.component_propertyGroups.clear() + self.custom_types_to_add.clear() + self.invalid_components.clear() + + # now prepare paths to load data + file_path = bpy.data.filepath + # Get the folder + folder_path = os.path.dirname(file_path) + path = os.path.join(folder_path, self.schemaPath) + self.schemaFullPath = path + + f = Path(bpy.path.abspath(path)) # 
make a path object of abs path + with open(path) as f: + data = json.load(f) + defs = data["$defs"] + self.registry = json.dumps(defs) # FIXME:meh ? + + # start timer + if not self.watcher_active and self.watcher_enabled: + self.watcher_active = True + print("registering function", watch_schema) + bpy.app.timers.register(watch_schema) + + + # we load the json once, so we do not need to do it over & over again + def load_type_infos(self): + print("load type infos") + ComponentsRegistry.type_infos = json.loads(self.registry) + + def has_type_infos(self): + return len(self.type_infos.keys()) != 0 + + def load_settings(self): + print("loading settings") + settings = load_settings(self.settings_save_path) + + if settings!= None: + print("settings", settings) + self.schemaPath = settings["schemaPath"] + self.load_schema() + generate_propertyGroups_for_components() + ensure_metadata_for_all_objects() + + + # we keep a list of component propertyGroup around + def register_component_propertyGroup(self, name, propertyGroup): + self.component_propertyGroups[name] = propertyGroup + + # to be able to give the user more feedback on any missin/unregistered types in their schema file + def add_missing_typeInfo(self, long_name): + if not long_name in self.type_infos_missing: + self.type_infos_missing.append(long_name) + setattr(self, "missing_type_infos", str(self.type_infos_missing)) + item = self.missing_types_list.add() + item.long_name = long_name + + def add_custom_type(self, long_name, type_definition): + self.custom_types_to_add[long_name] = type_definition + + def process_custom_types(self): + for long_name in self.custom_types_to_add: + self.type_infos[long_name] = self.custom_types_to_add[long_name] + self.custom_types_to_add.clear() + + # add an invalid component to the list (long name) + def add_invalid_component(self, component_name): + self.invalid_components.append(component_name) + + + ########### + + propGroupIdCounter: IntProperty( + name="propGroupIdCounter", + 
description="", + min=0, + max=1000000000, + default=0 + ) # type: ignore + + long_names_to_propgroup_names = {} + + # generate propGroup name from nesting level & shortName: each shortName + nesting is unique + def generate_propGroup_name(self, nesting, longName): + #print("gen propGroup name for", shortName, nesting) + self.propGroupIdCounter += 1 + + propGroupIndex = str(self.propGroupIdCounter) + propGroupName = propGroupIndex + "_ui" + + key = str(nesting) + longName if len(nesting) > 0 else longName + self.long_names_to_propgroup_names[key] = propGroupName + return propGroupName + + def get_propertyGroupName_from_longName(self, longName): + return self.long_names_to_propgroup_names.get(longName, None) + + def long_name_to_key(): + pass + + ########### + +""" + object[component_definition.name] = 0.5 + property_manager = object.id_properties_ui(component_definition.name) + property_manager.update(min=-10, max=10, soft_min=-5, soft_max=5) + + print("property_manager", property_manager) + + object[component_definition.name] = [0.8,0.2,1.0] + property_manager = object.id_properties_ui(component_definition.name) + property_manager.update(subtype='COLOR') + + #IDPropertyUIManager + #rna_ui = object[component_definition.name].get('_RNA_UI') +""" \ No newline at end of file diff --git a/tools/blenvy/bevy_components/registry/ui.py b/tools/blenvy/bevy_components/registry/ui.py new file mode 100644 index 0000000..63db2f6 --- /dev/null +++ b/tools/blenvy/bevy_components/registry/ui.py @@ -0,0 +1,339 @@ +import json +import bpy +from bpy_types import (UIList) +from bpy.props import (StringProperty) + +from ..components.operators import OT_rename_component, RemoveComponentFromAllObjectsOperator, RemoveComponentOperator +from .operators import( + COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL, + COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT, + OT_OpenFilebrowser, + OT_select_component_name_to_replace, + OT_select_object, 
ReloadRegistryOperator, + COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL, + COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT) + +class BEVY_COMPONENTS_PT_Configuration(bpy.types.Panel): + bl_idname = "BEVY_COMPONENTS_PT_Configuration" + bl_label = "Components" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "Bevy Components" + bl_context = "objectmode" + bl_parent_id = "BLENVY_PT_SidePanel" + bl_options = {'DEFAULT_CLOSED'} + bl_description = "list of missing/unregistered type from the bevy side" + + @classmethod + def poll(cls, context): + return context.window_manager.blenvy.mode == 'SETTINGS' + return context.object is not None + + def draw(self, context): + layout = self.layout + registry = context.window_manager.components_registry + + + row = layout.row() + col = row.column() + col.enabled = False + col.prop(registry, "schemaPath", text="Registry Schema path") + col = row.column() + col.operator(OT_OpenFilebrowser.bl_idname, text="Browse for registry schema file (json)") + + layout.separator() + layout.operator(ReloadRegistryOperator.bl_idname, text="reload registry" , icon="FILE_REFRESH") + + layout.separator() + row = layout.row() + + row.prop(registry, "watcher_enabled", text="enable registry file polling") + row.prop(registry, "watcher_poll_frequency", text="registry file poll frequency (s)") + + layout.separator() + layout.separator() + + +class BEVY_COMPONENTS_PT_AdvancedToolsPanel(bpy.types.Panel): + """panel listing all the missing bevy types in the schema""" + bl_idname = "BEVY_COMPONENTS_PT_AdvancedToolsPanel" + bl_label = "Advanced tools" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "Bevy Components" + bl_context = "objectmode" + bl_parent_id = "BLENVY_PT_SidePanel" + bl_options = {'DEFAULT_CLOSED'} + bl_description = "advanced tooling" + + @classmethod + def poll(cls, context): + return context.window_manager.blenvy.mode == 'TOOLS' + + def draw_invalid_or_unregistered_header(self, layout, items): + row = 
layout.row() + + for item in items: + col = row.column() + col.label(text=item) + + + def draw_invalid_or_unregistered(self, layout, status, component_name, object): + available_components = bpy.context.window_manager.components_list + registry = bpy.context.window_manager.components_registry + registry_has_type_infos = registry.has_type_infos() + + row = layout.row() + + col = row.column() + col.label(text=component_name) + + col = row.column() + operator = col.operator(OT_select_object.bl_idname, text=object.name) + operator.object_name = object.name + + col = row.column() + col.label(text=status) + + col = row.column() + col.prop(available_components, "list", text="") + + col = row.column() + operator = col.operator(OT_rename_component.bl_idname, text="", icon="SHADERFX") #rename + new_name = registry.type_infos[available_components.list]['long_name'] if available_components.list in registry.type_infos else "" + operator.original_name = component_name + operator.target_objects = json.dumps([object.name]) + operator.new_name = new_name + col.enabled = registry_has_type_infos and component_name != "" and component_name != new_name + + + col = row.column() + operator = col.operator(RemoveComponentOperator.bl_idname, text="", icon="X") + operator.object_name = object.name + operator.component_name = component_name + + col = row.column() + col = row.column() + operator = col.operator(OT_select_component_name_to_replace.bl_idname, text="", icon="EYEDROPPER") #text="select for rename", + operator.component_name = component_name + + def draw(self, context): + layout = self.layout + registry = bpy.context.window_manager.components_registry + registry_has_type_infos = registry.has_type_infos() + selected_object = context.selected_objects[0] if len(context.selected_objects) > 0 else None + available_components = bpy.context.window_manager.components_list + + row = layout.row() + box= row.box() + box.label(text="Invalid/ unregistered components") + + 
objects_with_invalid_components = [] + invalid_component_names = [] + + self.draw_invalid_or_unregistered_header(layout, ["Component", "Object", "Status", "Target"]) + + for object in bpy.data.objects: # TODO: very inneficent + if len(object.keys()) > 0: + if "components_meta" in object: + components_metadata = object.components_meta.components + comp_names = [] + for index, component_meta in enumerate(components_metadata): + long_name = component_meta.long_name + if component_meta.invalid: + self.draw_invalid_or_unregistered(layout, "Invalid", long_name, object) + + if not object.name in objects_with_invalid_components: + objects_with_invalid_components.append(object.name) + + if not long_name in invalid_component_names: + invalid_component_names.append(long_name) + + + comp_names.append(long_name) + + for custom_property in object.keys(): + if custom_property != 'components_meta' and custom_property != 'bevy_components' and custom_property not in comp_names: + self.draw_invalid_or_unregistered(layout, "Unregistered", custom_property, object) + + if not object.name in objects_with_invalid_components: + objects_with_invalid_components.append(object.name) + """if not long_name in invalid_component_names: + invalid_component_names.append(custom_property)""" # FIXME + layout.separator() + layout.separator() + original_name = bpy.context.window_manager.bevy_component_rename_helper.original_name + + row = layout.row() + col = row.column() + col.label(text="Original") + col = row.column() + col.label(text="New") + col = row.column() + col.label(text="------") + + row = layout.row() + col = row.column() + box = col.box() + box.label(text=original_name) + + col = row.column() + col.prop(available_components, "list", text="") + #row.prop(available_components, "filter",text="Filter") + + col = row.column() + components_rename_progress = context.window_manager.components_rename_progress + + if components_rename_progress == -1.0: + operator = 
col.operator(OT_rename_component.bl_idname, text="apply", icon="SHADERFX") + operator.target_objects = json.dumps(objects_with_invalid_components) + new_name = registry.type_infos[available_components.list]['short_name'] if available_components.list in registry.type_infos else "" + operator.new_name = new_name + col.enabled = registry_has_type_infos and original_name != "" and original_name != new_name + else: + if hasattr(layout,"progress") : # only for Blender > 4.0 + col.progress(factor = components_rename_progress, text=f"updating {components_rename_progress * 100.0:.2f}%") + + col = row.column() + remove_components_progress = context.window_manager.components_remove_progress + if remove_components_progress == -1.0: + operator = row.operator(RemoveComponentFromAllObjectsOperator.bl_idname, text="", icon="X") + operator.component_name = context.window_manager.bevy_component_rename_helper.original_name + col.enabled = registry_has_type_infos and original_name != "" + else: + if hasattr(layout,"progress") : # only for Blender > 4.0 + col.progress(factor = remove_components_progress, text=f"updating {remove_components_progress * 100.0:.2f}%") + + layout.separator() + layout.separator() + row = layout.row() + box= row.box() + box.label(text="Conversions between custom properties and components & vice-versa") + + row = layout.row() + row.label(text="WARNING ! 
The following operations will overwrite your existing custom properties if they have matching types on the bevy side !") + row.alert = True + + ## + row = layout.row() + custom_properties_from_components_progress_current = context.window_manager.custom_properties_from_components_progress + + if custom_properties_from_components_progress_current == -1.0: + row.operator(COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT.bl_idname, text="update custom properties of current object" , icon="LOOP_FORWARDS") + row.enabled = registry_has_type_infos and selected_object is not None + else: + if hasattr(layout,"progress") : # only for Blender > 4.0 + layout.progress(factor = custom_properties_from_components_progress_current, text=f"updating {custom_properties_from_components_progress_current * 100.0:.2f}%") + + layout.separator() + row = layout.row() + custom_properties_from_components_progress_all = context.window_manager.custom_properties_from_components_progress_all + + if custom_properties_from_components_progress_all == -1.0: + row.operator(COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL.bl_idname, text="update custom properties of ALL objects" , icon="LOOP_FORWARDS") + row.enabled = registry_has_type_infos + else: + if hasattr(layout,"progress") : # only for Blender > 4.0 + layout.progress(factor = custom_properties_from_components_progress_all, text=f"updating {custom_properties_from_components_progress_all * 100.0:.2f}%") + + ######################## + + row = layout.row() + row.label(text="WARNING ! 
The following operations will try to overwrite your existing ui values if they have matching types on the bevy side !") + row.alert = True + + components_from_custom_properties_progress_current = context.window_manager.components_from_custom_properties_progress + + row = layout.row() + if components_from_custom_properties_progress_current == -1.0: + row.operator(COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT.bl_idname, text="update UI FROM custom properties of current object" , icon="LOOP_BACK") + row.enabled = registry_has_type_infos and selected_object is not None + else: + if hasattr(layout,"progress") : # only for Blender > 4.0 + layout.progress(factor = components_from_custom_properties_progress_current, text=f"updating {components_from_custom_properties_progress_current * 100.0:.2f}%") + + layout.separator() + row = layout.row() + components_from_custom_properties_progress_all = context.window_manager.components_from_custom_properties_progress_all + + if components_from_custom_properties_progress_all == -1.0: + row.operator(COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL.bl_idname, text="update UI FROM custom properties of ALL objects" , icon="LOOP_BACK") + row.enabled = registry_has_type_infos + else: + if hasattr(layout,"progress") : # only for Blender > 4.0 + layout.progress(factor = components_from_custom_properties_progress_all, text=f"updating {components_from_custom_properties_progress_all * 100.0:.2f}%") + + +class BEVY_COMPONENTS_PT_MissingTypesPanel(bpy.types.Panel): + """panel listing all the missing bevy types in the schema""" + bl_idname = "BEVY_COMPONENTS_PT_MissingTypesPanel" + bl_label = "Missing/Unregistered Types" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "Bevy Components" + bl_context = "objectmode" + bl_parent_id = "BLENVY_PT_SidePanel" + bl_options = {'DEFAULT_CLOSED'} + bl_description = "list of missing/unregistered type from the bevy side" + + @classmethod + def poll(cls, context): 
+ return context.window_manager.blenvy.mode == 'TOOLS' + + def draw(self, context): + layout = self.layout + registry = bpy.context.window_manager.components_registry + + layout.label(text="Missing types ") + layout.template_list("MISSING_TYPES_UL_List", "Missing types list", registry, "missing_types_list", registry, "missing_types_list_index") + + +class MISSING_TYPES_UL_List(UIList): + """Missing components UIList.""" + + use_filter_name_reverse: bpy.props.BoolProperty( + name="Reverse Name", + default=False, + options=set(), + description="Reverse name filtering", + ) # type: ignore + + use_order_name = bpy.props.BoolProperty(name="Name", default=False, options=set(), + description="Sort groups by their name (case-insensitive)") + + def filter_items__(self, context, data, propname): + """Filter and order items in the list.""" + # We initialize filtered and ordered as empty lists. Notice that # if all sorting and filtering is disabled, we will return # these empty. + filtered = [] + ordered = [] + items = getattr(data, propname) + + helper_funcs = bpy.types.UI_UL_list + + + print("filter, order", items, self, dict(self)) + if self.filter_name: + print("ssdfs", self.filter_name) + filtered= helper_funcs.filter_items_by_name(self.filter_name, self.bitflag_filter_item, items, "long_name", reverse=self.use_filter_name_reverse) + + if not filtered: + filtered = [self.bitflag_filter_item] * len(items) + + if self.use_order_name: + ordered = helper_funcs.sort_items_by_name(items, "name") + + + return filtered, ordered + + + def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): + if self.layout_type in {'DEFAULT', 'COMPACT'}: + row = layout.row() + #row.enabled = False + #row.alert = True + row.prop(item, "long_name", text="") + + elif self.layout_type in {'GRID'}: + layout.alignment = 'CENTER' + row = layout.row() + row.prop(item, "long_name", text="") diff --git a/tools/blenvy/blueprints/__init__.py 
b/tools/blenvy/blueprints/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/gltf_auto_export/blueprints/blueprints_registry.py b/tools/blenvy/blueprints/blueprints_registry.py similarity index 100% rename from tools/gltf_auto_export/blueprints/blueprints_registry.py rename to tools/blenvy/blueprints/blueprints_registry.py diff --git a/tools/gltf_auto_export/blueprints/operators.py b/tools/blenvy/blueprints/operators.py similarity index 86% rename from tools/gltf_auto_export/blueprints/operators.py rename to tools/blenvy/blueprints/operators.py index c74145e..dbfda71 100644 --- a/tools/gltf_auto_export/blueprints/operators.py +++ b/tools/blenvy/blueprints/operators.py @@ -28,6 +28,9 @@ class OT_select_blueprint(Operator): bpy.context.window.scene = scene bpy.context.view_layer.objects.active = None bpy.context.view_layer.active_layer_collection = bpy.context.view_layer.layer_collection.children[self.blueprint_collection_name] + #bpy.context.view_layer.collections.active = collection # bpy.context.view_layer.active_layer_collection = collection + """for o in collection.objects: + o.select_set(True)""" return {'FINISHED'} \ No newline at end of file diff --git a/tools/gltf_auto_export/blueprints/ui.py b/tools/blenvy/blueprints/ui.py similarity index 100% rename from tools/gltf_auto_export/blueprints/ui.py rename to tools/blenvy/blueprints/ui.py diff --git a/tools/blenvy/core/__init__.py b/tools/blenvy/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/gltf_auto_export/blenvy/blenvy_manager.py b/tools/blenvy/core/blenvy_manager.py similarity index 75% rename from tools/gltf_auto_export/blenvy/blenvy_manager.py rename to tools/blenvy/core/blenvy_manager.py index f948751..91ab24e 100644 --- a/tools/gltf_auto_export/blenvy/blenvy_manager.py +++ b/tools/blenvy/core/blenvy_manager.py @@ -1,10 +1,6 @@ import bpy -import json -import os -import uuid -from pathlib import Path from bpy_types import (PropertyGroup) -from 
bpy.props import (StringProperty, BoolProperty, FloatProperty, FloatVectorProperty, IntProperty, IntVectorProperty, EnumProperty, PointerProperty, CollectionProperty) +from bpy.props import (EnumProperty, PointerProperty) class BlenvyManager(PropertyGroup): @@ -15,6 +11,7 @@ class BlenvyManager(PropertyGroup): ('BLUEPRINTS', "Blueprints", ""), ('ASSETS', "Assets", ""), ('SETTINGS', "Settings", ""), + ('TOOLS', "Tools", ""), ) ) # type: ignore diff --git a/tools/gltf_auto_export/blenvy/operators.py b/tools/blenvy/core/operators.py similarity index 69% rename from tools/gltf_auto_export/blenvy/operators.py rename to tools/blenvy/core/operators.py index 06e6504..a9815d0 100644 --- a/tools/gltf_auto_export/blenvy/operators.py +++ b/tools/blenvy/core/operators.py @@ -1,7 +1,8 @@ -import os -import bpy from bpy_types import (Operator) -from bpy.props import (StringProperty, EnumProperty) +from bpy.props import (EnumProperty) + + + class OT_switch_bevy_tooling(Operator): """Switch bevy tooling""" @@ -12,15 +13,17 @@ class OT_switch_bevy_tooling(Operator): tool: EnumProperty( items=( - ('COMPONENTS', "Components", ""), + ('COMPONENTS', "Components", "Switch to components"), ('BLUEPRINTS', "Blueprints", ""), ('ASSETS', "Assets", ""), ('SETTINGS', "Settings", ""), - + ('TOOLS', "Tools", ""), ) ) # type: ignore - + @classmethod + def description(cls, context, properties): + return properties.tool def execute(self, context): context.window_manager.blenvy.mode = self.tool diff --git a/tools/gltf_auto_export/blenvy/ui.py b/tools/blenvy/core/ui.py similarity index 85% rename from tools/gltf_auto_export/blenvy/ui.py rename to tools/blenvy/core/ui.py index cebeab3..6ab0ac1 100644 --- a/tools/gltf_auto_export/blenvy/ui.py +++ b/tools/blenvy/core/ui.py @@ -39,9 +39,7 @@ class BLENVY_PT_SidePanel(bpy.types.Panel): world_scene_active = context.scene.name in main_scene_names library_scene_active = context.scene.name in library_scene_names - layout.label(text="Active Blueprint: "+ 
active_collection.name.upper()) - # Now to actual drawing of the UI target = row.box() if active_mode == 'COMPONENTS' else row tool_switch_components = target.operator(operator="bevy.tooling_switch", text="", icon="PROPERTIES") @@ -56,24 +54,20 @@ class BLENVY_PT_SidePanel(bpy.types.Panel): tool_switch_components.tool = "ASSETS" target = row.box() if active_mode == 'SETTINGS' else row - tool_switch_components = target.operator(operator="bevy.tooling_switch", text="", icon="TOOL_SETTINGS") + tool_switch_components = target.operator(operator="bevy.tooling_switch", text="", icon="SETTINGS") tool_switch_components.tool = "SETTINGS" - """row.label(text="", icon="PROPERTIES") - row.label(text="", icon="PACKAGE") - row.label(text="", icon="ASSET_MANAGER") - row.label(text="", icon="TOOL_SETTINGS")""" - - - - + target = row.box() if active_mode == 'TOOLS' else row + tool_switch_components = target.operator(operator="bevy.tooling_switch", text="", icon="TOOL_SETTINGS") + tool_switch_components.tool = "TOOLS" + # Debug stuff + """layout.label(text="Active Blueprint: "+ active_collection.name.upper()) layout.label(text="World scene active: "+ str(world_scene_active)) layout.label(text="Library scene active: "+ str(library_scene_active)) + layout.label(text=blenvy.mode)""" - layout.label(text=blenvy.mode) - - if blenvy.mode == "SETTINGS": + """if blenvy.mode == "SETTINGS": header, panel = layout.panel("auto_export", default_closed=False) header.label(text="Auto Export") if panel: @@ -92,11 +86,11 @@ class BLENVY_PT_SidePanel(bpy.types.Panel): op.gltf_export_id = "gltf_auto_export" # we specify that we are in a special case op = layout.operator("EXPORT_SCENES_OT_auto_gltf", text="Auto Export Settings") - op.auto_export = True + op.auto_export = True""" - header, panel = layout.panel("components", default_closed=False) - header.label(text="Components") - if panel: - panel.label(text="YOOO") + """header, panel = layout.panel("components", default_closed=False) + 
header.label(text="Components") + if panel: + panel.label(text="YOOO")""" diff --git a/tools/blenvy/docs/blender_addon_add_scene.png b/tools/blenvy/docs/blender_addon_add_scene.png new file mode 100644 index 0000000..f18a4dd Binary files /dev/null and b/tools/blenvy/docs/blender_addon_add_scene.png differ diff --git a/tools/blenvy/docs/blender_addon_add_scene2.png b/tools/blenvy/docs/blender_addon_add_scene2.png new file mode 100644 index 0000000..420d4b9 Binary files /dev/null and b/tools/blenvy/docs/blender_addon_add_scene2.png differ diff --git a/tools/blenvy/docs/blender_addon_add_scene3.png b/tools/blenvy/docs/blender_addon_add_scene3.png new file mode 100644 index 0000000..1574e9d Binary files /dev/null and b/tools/blenvy/docs/blender_addon_add_scene3.png differ diff --git a/tools/blenvy/docs/blender_addon_install.png b/tools/blenvy/docs/blender_addon_install.png new file mode 100644 index 0000000..a6c974c Binary files /dev/null and b/tools/blenvy/docs/blender_addon_install.png differ diff --git a/tools/blenvy/docs/blender_addon_install2.png b/tools/blenvy/docs/blender_addon_install2.png new file mode 100644 index 0000000..d530218 Binary files /dev/null and b/tools/blenvy/docs/blender_addon_install2.png differ diff --git a/tools/blenvy/docs/blender_addon_install_zip.png b/tools/blenvy/docs/blender_addon_install_zip.png new file mode 100644 index 0000000..0d7e171 Binary files /dev/null and b/tools/blenvy/docs/blender_addon_install_zip.png differ diff --git a/tools/blenvy/docs/blender_addon_materials.png b/tools/blenvy/docs/blender_addon_materials.png new file mode 100644 index 0000000..1a93124 Binary files /dev/null and b/tools/blenvy/docs/blender_addon_materials.png differ diff --git a/tools/blenvy/docs/blender_addon_materials2.png b/tools/blenvy/docs/blender_addon_materials2.png new file mode 100644 index 0000000..ba196fc Binary files /dev/null and b/tools/blenvy/docs/blender_addon_materials2.png differ diff --git a/tools/blenvy/docs/blender_addon_use.png 
b/tools/blenvy/docs/blender_addon_use.png new file mode 100644 index 0000000..52e533b Binary files /dev/null and b/tools/blenvy/docs/blender_addon_use.png differ diff --git a/tools/blenvy/docs/blender_addon_use2.png b/tools/blenvy/docs/blender_addon_use2.png new file mode 100644 index 0000000..fac555a Binary files /dev/null and b/tools/blenvy/docs/blender_addon_use2.png differ diff --git a/tools/blenvy/docs/blender_addon_use3.png b/tools/blenvy/docs/blender_addon_use3.png new file mode 100644 index 0000000..322cd69 Binary files /dev/null and b/tools/blenvy/docs/blender_addon_use3.png differ diff --git a/tools/blenvy/docs/blender_addon_use4.png b/tools/blenvy/docs/blender_addon_use4.png new file mode 100644 index 0000000..e1d1c13 Binary files /dev/null and b/tools/blenvy/docs/blender_addon_use4.png differ diff --git a/tools/blenvy/docs/combine_override.png b/tools/blenvy/docs/combine_override.png new file mode 100644 index 0000000..ee6929f Binary files /dev/null and b/tools/blenvy/docs/combine_override.png differ diff --git a/tools/blenvy/docs/exported_collections.png b/tools/blenvy/docs/exported_collections.png new file mode 100644 index 0000000..2ae3c45 Binary files /dev/null and b/tools/blenvy/docs/exported_collections.png differ diff --git a/tools/blenvy/docs/exported_library_files.png b/tools/blenvy/docs/exported_library_files.png new file mode 100644 index 0000000..c3c31a6 Binary files /dev/null and b/tools/blenvy/docs/exported_library_files.png differ diff --git a/tools/blenvy/docs/force_export.jpg b/tools/blenvy/docs/force_export.jpg new file mode 100644 index 0000000..4d034ec Binary files /dev/null and b/tools/blenvy/docs/force_export.jpg differ diff --git a/tools/blenvy/docs/nested_blueprints.png b/tools/blenvy/docs/nested_blueprints.png new file mode 100644 index 0000000..47d0aae Binary files /dev/null and b/tools/blenvy/docs/nested_blueprints.png differ diff --git a/tools/blenvy/docs/nested_blueprints2.png b/tools/blenvy/docs/nested_blueprints2.png new 
file mode 100644 index 0000000..224f0aa Binary files /dev/null and b/tools/blenvy/docs/nested_blueprints2.png differ diff --git a/tools/blenvy/docs/nested_blueprints3.png b/tools/blenvy/docs/nested_blueprints3.png new file mode 100644 index 0000000..04b47f8 Binary files /dev/null and b/tools/blenvy/docs/nested_blueprints3.png differ diff --git a/tools/blenvy/docs/options.svg b/tools/blenvy/docs/options.svg new file mode 100644 index 0000000..bb1f7d3 --- /dev/null +++ b/tools/blenvy/docs/options.svg @@ -0,0 +1,1159 @@ + + + + + + + + + + + + + + + + + Original + + + + + * Auto exports to gltf file on every save* Everything is embeded in the gltf file + + Main Scene (world/level) + + + Blueprints Mode + + + + Object C0 (instance) + + + + + + Object C1 (instance) + + + Library Scene + + Object B: collection/ blueprint + + + Object C: collection/ blueprint + + + + Transform Step + + + + + + Object A(unique) + + Temporary Scene (world/level) + + + Object B1 (Empty)+ blueprintName("ObjectB")+ spawnHere + + + + Object C0 (Empty)+ blueprintName("ObjectC")+ spawnHere + + + + + + Object C1 (Empty)+ blueprintName("ObjectC")+ spawnHere + + + + Determine used Collections to export + + Object B + + Object C + + + Result + + + + + Entity A(unique) + + + world/level.gltf + + + Entity B1+ blueprintName("ObjectB")+ spawnHere + + + + Entity C0+ blueprintName("ObjectC")+ spawnHere + + + + + + Entity C1+ blueprintName("ObjectC")+ spawnHere + + + + Library of gltf files (one per Collection/Blueprint) + + Object B.gltf + blueprintName("ObjectB") + + Object C:.gltf + blueprintName("ObjectC") + + + Object D: unused collection/ blueprint + + + + + + + + Single blend file + + + NO + + YES + + + Export nested blueprints + + + NO + + YES + + + Export materials library + + + NO + + YES + + NO + + YES + + + Combine mode + + + EMBED + + SPLIT + + INJECT + + diff --git a/tools/blenvy/docs/process.svg b/tools/blenvy/docs/process.svg new file mode 100644 index 0000000..0bcdeec --- /dev/null +++ 
b/tools/blenvy/docs/process.svg @@ -0,0 +1,881 @@ + + + + + + + + + + + + + + + + + Original + + + + + + Object A(unique) + + Main Scene (world/level) + + + Object B1 (instance) + + + + Object C0 (instance) + + + + + + Object C1 (instance) + + + + Library Scene + + Object B: collection/ blueprint + + + Object C: collection/ blueprint + + + + Transform Step + + + + + + Object A(unique) + + Temporary Scene (world/level) + + + Object B1 (Empty)+ blueprintName("ObjectB")+ spawnHere + + + + Object C0 (Empty)+ blueprintName("ObjectC")+ spawnHere + + + + + + Object C1 (Empty)+ blueprintName("ObjectC")+ spawnHere + + + + Determine used Collections to export + + Object B + + Object C + + + Result + + + + + Entity A(unique) + + + world/level.gltf + + + Entity B1+ blueprintName("ObjectB")+ spawnHere + + + + Entity C0+ blueprintName("ObjectC")+ spawnHere + + + + + + Entity C1+ blueprintName("ObjectC")+ spawnHere + + + + Library of gltf files (one per Collection/Blueprint) + + Object B.gltf + blueprintName("ObjectB") + + Object C:.gltf + blueprintName("ObjectC") + + + Object D: unused collection/ blueprint + + + + + + + diff --git a/tools/blenvy/docs/purge_orphan1_data1.png b/tools/blenvy/docs/purge_orphan1_data1.png new file mode 100644 index 0000000..4c40f3a Binary files /dev/null and b/tools/blenvy/docs/purge_orphan1_data1.png differ diff --git a/tools/blenvy/docs/purge_orphan1_data2.png b/tools/blenvy/docs/purge_orphan1_data2.png new file mode 100644 index 0000000..62f7ab1 Binary files /dev/null and b/tools/blenvy/docs/purge_orphan1_data2.png differ diff --git a/tools/blenvy/docs/purge_orphan1_data3.png b/tools/blenvy/docs/purge_orphan1_data3.png new file mode 100644 index 0000000..2500dff Binary files /dev/null and b/tools/blenvy/docs/purge_orphan1_data3.png differ diff --git a/tools/blenvy/docs/workflow_empties.jpg b/tools/blenvy/docs/workflow_empties.jpg new file mode 100644 index 0000000..cea099e Binary files /dev/null and b/tools/blenvy/docs/workflow_empties.jpg differ 
diff --git a/tools/blenvy/docs/workflow_original.jpg b/tools/blenvy/docs/workflow_original.jpg new file mode 100644 index 0000000..9666ffd Binary files /dev/null and b/tools/blenvy/docs/workflow_original.jpg differ diff --git a/tools/blenvy/gltf_auto_export/__init__.py b/tools/blenvy/gltf_auto_export/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/blenvy/gltf_auto_export/auto_export/__init__.py b/tools/blenvy/gltf_auto_export/auto_export/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/blenvy/gltf_auto_export/auto_export/auto_export.py b/tools/blenvy/gltf_auto_export/auto_export/auto_export.py new file mode 100644 index 0000000..1a561a5 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/auto_export/auto_export.py @@ -0,0 +1,183 @@ +import copy +import json +import os +from types import SimpleNamespace +import bpy +import traceback + + +from .preferences import AutoExportGltfAddonPreferences + +from .get_blueprints_to_export import get_blueprints_to_export +from .get_levels_to_export import get_levels_to_export +from .get_standard_exporter_settings import get_standard_exporter_settings + +from .export_main_scenes import export_main_scene +from .export_blueprints import export_blueprints + +from ..helpers.helpers_scenes import (get_scenes, ) +from ..helpers.helpers_blueprints import blueprints_scan + +from ..modules.export_materials import cleanup_materials, export_materials +from ..modules.bevy_scene_components import remove_scene_components, upsert_scene_components + + +"""this is the main 'central' function for all auto export """ +def auto_export(changes_per_scene, changed_export_parameters, addon_prefs): + # have the export parameters (not auto export, just gltf export) have changed: if yes (for example switch from glb to gltf, compression or not, animations or not etc), we need to re-export everything + print ("changed_export_parameters", changed_export_parameters) + try: + # path to the current blend file + 
file_path = bpy.data.filepath + # Get the folder + blend_file_path = os.path.dirname(file_path) + + # get the preferences for our addon + export_root_folder = getattr(addon_prefs, "export_root_folder") + export_output_folder = getattr(addon_prefs,"export_output_folder") + export_models_path = os.path.join(blend_file_path, export_output_folder) + + #should we use change detection or not + export_change_detection = getattr(addon_prefs, "export_change_detection") + export_scene_settings = getattr(addon_prefs,"export_scene_settings") + + do_export_blueprints = getattr(addon_prefs,"export_blueprints") + export_materials_library = getattr(addon_prefs,"export_materials_library") + print("export_materials_library", export_materials_library) + + # standard gltf export settings are stored differently + standard_gltf_exporter_settings = get_standard_exporter_settings() + gltf_extension = standard_gltf_exporter_settings.get("export_format", 'GLB') + gltf_extension = '.glb' if gltf_extension == 'GLB' else '.gltf' + + # generate the actual complete output path + export_blueprints_path = os.path.join(blend_file_path, export_root_folder, getattr(addon_prefs,"export_blueprints_path")) + export_levels_path = os.path.join(blend_file_path, export_root_folder, getattr(addon_prefs, "export_levels_path")) + + print("export_blueprints_path", export_blueprints_path) + + # here we do a bit of workaround by creating an override # TODO: do this at the "UI" level + print("collection_instances_combine_mode", addon_prefs.collection_instances_combine_mode) + """if hasattr(addon_prefs, "__annotations__") : + tmp = {} + for k in AutoExportGltfAddonPreferences.__annotations__: + item = AutoExportGltfAddonPreferences.__annotations__[k] + #print("tutu",k, item.keywords.get('default', None) ) + default = item.keywords.get('default', None) + tmp[k] = default + + for (k, v) in addon_prefs.properties.items(): + tmp[k] = v + + addon_prefs = SimpleNamespace(**tmp) #copy.deepcopy(addon_prefs) + 
addon_prefs.__annotations__ = tmp""" + addon_prefs.export_blueprints_path = export_blueprints_path + addon_prefs.export_levels_path = export_levels_path + addon_prefs.export_gltf_extension = gltf_extension + addon_prefs.export_models_path = export_models_path + + [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs) + + print("main scenes", main_scene_names, "library_scenes", library_scene_names) + print("export_output_folder", export_output_folder) + + blueprints_data = blueprints_scan(level_scenes, library_scenes, addon_prefs) + blueprints_per_scene = blueprints_data.blueprints_per_scenes + internal_blueprints = [blueprint.name for blueprint in blueprints_data.internal_blueprints] + external_blueprints = [blueprint.name for blueprint in blueprints_data.external_blueprints] + + if export_scene_settings: + # inject/ update scene components + upsert_scene_components(level_scenes) + #inject/ update light shadow information + for light in bpy.data.lights: + enabled = 'true' if light.use_shadow else 'false' + light['BlenderLightShadows'] = f"(enabled: {enabled}, buffer_bias: {light.shadow_buffer_bias})" + + # export + if do_export_blueprints: + print("EXPORTING") + # get blueprints/collections infos + (blueprints_to_export) = get_blueprints_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs) + + # get level/main scenes infos + (main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs) + + # since materials export adds components we need to call this before blueprints are exported + # export materials & inject materials components into relevant objects + if export_materials_library: + export_materials(blueprints_data.blueprint_names, library_scenes, blend_file_path, addon_prefs) + + # update the list of tracked exports + exports_total = len(blueprints_to_export) + len(main_scenes_to_export) + (1 if export_materials_library else 0) + 
bpy.context.window_manager.auto_export_tracker.exports_total = exports_total + bpy.context.window_manager.auto_export_tracker.exports_count = exports_total + + bpy.context.window_manager.exportedCollections.clear() + for blueprint in blueprints_to_export: + bla = bpy.context.window_manager.exportedCollections.add() + bla.name = blueprint.name + print("-------------------------------") + #print("collections: all:", collections) + #print("collections: not found on disk:", collections_not_on_disk) + print("BLUEPRINTS: local/internal:", internal_blueprints) + print("BLUEPRINTS: external:", external_blueprints) + print("BLUEPRINTS: per_scene:", blueprints_per_scene) + print("-------------------------------") + print("BLUEPRINTS: to export:", [blueprint.name for blueprint in blueprints_to_export]) + print("-------------------------------") + print("MAIN SCENES: to export:", main_scenes_to_export) + print("-------------------------------") + # backup current active scene + old_current_scene = bpy.context.scene + # backup current selections + old_selections = bpy.context.selected_objects + + # first export any main/level/world scenes + if len(main_scenes_to_export) > 0: + print("export MAIN scenes") + for scene_name in main_scenes_to_export: + print(" exporting scene:", scene_name) + export_main_scene(bpy.data.scenes[scene_name], blend_file_path, addon_prefs, blueprints_data) + + # now deal with blueprints/collections + do_export_library_scene = not export_change_detection or changed_export_parameters or len(blueprints_to_export) > 0 + if do_export_library_scene: + print("export LIBRARY") + # we only want to go through the library scenes where our blueprints to export are present + """for (scene_name, blueprints_to_export) in blueprints_per_scene.items(): + print(" exporting blueprints from scene:", scene_name) + print(" blueprints to export", blueprints_to_export)""" + export_blueprints(blueprints_to_export, blend_file_path, addon_prefs, blueprints_data) + + # reset 
import json
import bpy


def _settings_text(name):
    """Return the bpy text datablock called *name*, or None if it does not exist."""
    return bpy.data.texts[name] if name in bpy.data.texts else None


def did_export_settings_change():
    """Return True when the export settings differ from the previous export.

    Compares both the auto-export settings and the standard glTF exporter
    settings against the snapshots stored at the previous export.

    This should ONLY be run when actually doing exports (i.e. when calling the
    auto_export function), because we only care about the difference in
    settings between EXPORTS.
    """
    previous_auto_settings = _settings_text(".gltf_auto_export_settings_previous")
    previous_gltf_settings = _settings_text(".gltf_auto_export_gltf_settings_previous")

    current_auto_settings = _settings_text(".gltf_auto_export_settings")
    current_gltf_settings = _settings_text(".gltf_auto_export_gltf_settings")

    # if there were no settings before, everything is new: we need to export
    if previous_auto_settings is None:
        print("previous settings missing, exporting")
        return True
    if previous_gltf_settings is None:
        print("previous gltf settings missing, exporting")
        return True

    def _changed(previous, current):
        # key order is irrelevant, so compare sorted (key, value) lists;
        # a missing "current" snapshot counts as unchanged (matches the original logic)
        if current is None:
            return False
        return sorted(json.loads(previous.as_string()).items()) != sorted(json.loads(current.as_string()).items())

    return _changed(previous_auto_settings, current_auto_settings) or _changed(previous_gltf_settings, current_gltf_settings)
clear_hollow_scene, copy_hollowed_collection_into + + +def export_blueprints(blueprints, blend_file_path, addon_prefs, blueprints_data): + export_blueprints_path = getattr(addon_prefs,"export_blueprints_path") + gltf_export_preferences = generate_gltf_export_preferences(addon_prefs) + + try: + # save current active collection + active_collection = bpy.context.view_layer.active_layer_collection + export_materials_library = getattr(addon_prefs,"export_materials_library") + + for blueprint in blueprints: + print("exporting collection", blueprint.name) + gltf_output_path = os.path.join(export_blueprints_path, blueprint.name) + export_settings = { **gltf_export_preferences, 'use_active_scene': True, 'use_active_collection': True, 'use_active_collection_with_nested':True} + + # if we are using the material library option, do not export materials, use placeholder instead + if export_materials_library: + export_settings['export_materials'] = 'PLACEHOLDER' + + collection = bpy.data.collections[blueprint.name] + generate_and_export( + addon_prefs, + temp_scene_name=TEMPSCENE_PREFIX+collection.name, + export_settings=export_settings, + gltf_output_path=gltf_output_path, + tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(collection, temp_collection, blueprints_data=blueprints_data, addon_prefs=addon_prefs), + tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=collection, temp_scene=temp_scene, **params) + ) + + # reset active collection to the one we save before + bpy.context.view_layer.active_layer_collection = active_collection + + except Exception as error: + print("failed to export collections to gltf: ", error) + raise error + diff --git a/tools/blenvy/gltf_auto_export/auto_export/export_gltf.py b/tools/blenvy/gltf_auto_export/auto_export/export_gltf.py new file mode 100644 index 0000000..72f47f5 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/auto_export/export_gltf.py @@ -0,0 +1,77 @@ +import json +import 
import json
import os
import bpy

from .get_standard_exporter_settings import get_standard_exporter_settings
from .preferences import (AutoExportGltfPreferenceNames)


def generate_gltf_export_preferences(addon_prefs):
    """Build the keyword-argument dict handed to the glTF exporter.

    Layering order: hard-coded defaults, then the add-on preferences (skipping
    auto-export-only options), then the saved standard glTF exporter settings —
    except for a handful of essential keys that must never be overridden.
    """
    # baseline defaults
    export_preferences = dict(
        check_existing=False,
        use_selection=False,
        use_visible=True,        # export visible and hidden objects; see object/batch export to skip
        use_renderable=False,
        use_active_collection=False,
        use_active_collection_with_nested=False,
        use_active_scene=False,
        export_cameras=True,
        export_extras=True,      # for custom exported properties
        export_lights=True,
    )

    # overlay every add-on preference that is not an auto-export-only option
    for pref_name in addon_prefs.__annotations__.keys():
        if str(pref_name) not in AutoExportGltfPreferenceNames:
            export_preferences[pref_name] = getattr(addon_prefs, pref_name)

    standard_gltf_exporter_settings = get_standard_exporter_settings()

    # essential params that must NEVER be overwritten, no matter what the
    # standard exporter settings say
    constant_keys = [
        'use_selection',
        'use_visible',
        'use_active_collection',
        'use_active_collection_with_nested',
        'use_active_scene',
        'export_cameras',
        'export_extras',  # for custom exported properties
        'export_lights',
    ]
    for setting_name in standard_gltf_exporter_settings.keys():
        if str(setting_name) not in constant_keys:
            export_preferences[setting_name] = standard_gltf_exporter_settings.get(setting_name)

    return export_preferences


# https://docs.blender.org/api/current/bpy.ops.export_scene.html#bpy.ops.export_scene.gltf
def export_gltf(path, export_settings):
    """Ensure the output folder exists and run the glTF export to *path*."""
    settings = {**export_settings, "filepath": path}
    os.makedirs(os.path.dirname(path), exist_ok=True)
    # NOTE(review): the actual export call is commented out in the original —
    # presumably a debugging leftover; confirm before re-enabling.
    #bpy.ops.export_scene.gltf(**settings)
def export_main_scene(scene, blend_file_path, addon_prefs, blueprints_data):
    """Export a main (level/world) scene to glTF.

    In blueprints mode the scene is copied (hollowed) into a temporary scene and
    exported to the levels path — optionally split into a static and a dynamic
    file; otherwise the scene is exported as-is to the general output folder.
    """
    gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
    export_root_folder = getattr(addon_prefs, "export_root_folder")
    export_output_folder = getattr(addon_prefs, "export_output_folder")
    export_levels_path = getattr(addon_prefs, "export_levels_path")

    export_blueprints = getattr(addon_prefs, "export_blueprints")
    export_separate_dynamic_and_static_objects = getattr(addon_prefs, "export_separate_dynamic_and_static_objects")

    export_settings = {**gltf_export_preferences,
                       'use_active_scene': True,
                       'use_active_collection': True,
                       'use_active_collection_with_nested': True,
                       'use_visible': False,
                       'use_renderable': False,
                       'export_apply': True
                       }

    if export_blueprints:
        gltf_output_path = os.path.join(export_levels_path, scene.name)

        inject_blueprints_list_into_main_scene(scene, blueprints_data, addon_prefs)
        # FIX: removed a stray `return` here (debugging leftover): it made everything
        # below unreachable, so in blueprints mode no level file was ever exported and
        # the injected blueprints list was never removed from the scene.

        if export_separate_dynamic_and_static_objects:
            # first export static objects
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, filter=is_object_static, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
            )

            # then export all dynamic objects
            gltf_output_path = os.path.join(export_levels_path, scene.name + "_dynamic")
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, filter=is_object_dynamic, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
            )
        else:
            # no static/dynamic split: export the whole scene in one go
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
            )
    else:
        gltf_output_path = os.path.join(export_root_folder, export_output_folder, scene.name)
        print("  exporting gltf to", gltf_output_path, ".gltf/glb")
        export_gltf(gltf_output_path, export_settings)

    remove_blueprints_list_from_main_scene(scene)
changed_export_parameters: + changed_blueprints = [] + + # first check if all collections have already been exported before (if this is the first time the exporter is run + # in your current Blender session for example) + blueprints_not_on_disk = find_blueprints_not_on_disk(internal_blueprints, export_blueprints_path, export_gltf_extension) + + for scene in library_scenes: + if scene.name in changes_per_scene: + changed_objects = list(changes_per_scene[scene.name].keys()) + changed_blueprints = [blueprints_data.blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_data.blueprints_from_objects] + # we only care about local blueprints/collections + changed_local_blueprints = [blueprint for blueprint in changed_blueprints if blueprint.name in blueprints_data.blueprints_per_name.keys() and blueprint.local] + # FIXME: double check this: why are we combining these two ? + changed_blueprints += changed_local_blueprints + + + blueprints_to_export = list(set(changed_blueprints + blueprints_not_on_disk)) + + + # filter out blueprints that are not marked & deal with the different combine modes + # we check for blueprint & object specific overrides ... + filtered_blueprints = [] + for blueprint in blueprints_to_export: + if blueprint.marked: + filtered_blueprints.append(blueprint) + else: + blueprint_instances = blueprints_data.internal_collection_instances.get(blueprint.name, []) + # print("INSTANCES", blueprint_instances, blueprints_data.internal_collection_instances) + # marked blueprints that have changed are always exported, regardless of whether they are in use (have instances) or not + for blueprint_instance in blueprint_instances: + combine_mode = blueprint_instance['_combine'] if '_combine' in blueprint_instance else collection_instances_combine_mode + if combine_mode == "Split": # we only keep changed blueprints if mode is set to split for at least one instance (aka if ALL instances of a blueprint are merged, do not export ? 
# IF collection_instances_combine_mode is not 'split' check for each scene if any
# object in changes_per_scene has an instance in the scene
def changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode):
    """Return True when a changed blueprint instance forces *scene_name* to be re-exported.

    Only the Embed / EmbedExternal combine modes matter here: when an instance is
    merged into the level file, a change to its blueprint means the level itself
    must be regenerated.
    """
    instances_in_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene_name, None)
    if instances_in_scene is None:
        return False

    blueprints_from_objects = blueprints_data.blueprints_from_objects

    # flatten all changed object names (regardless of which scene they changed in),
    # map them to blueprints, and keep only blueprints instanced in this scene
    changed_object_names = [object_name for change in changes_per_scene.values() for object_name in change.keys()]
    changed_blueprints = [blueprints_from_objects[object_name] for object_name in changed_object_names if object_name in blueprints_from_objects]
    instanced_changed_blueprints = [blueprint for blueprint in changed_blueprints if blueprint.name in instances_in_scene.keys()]

    changed_blueprint_instances = [instance for blueprint in instanced_changed_blueprints for instance in instances_in_scene[blueprint.name]]

    for blueprint_instance in changed_blueprint_instances:
        blueprint = blueprints_data.blueprint_name_from_instances[blueprint_instance]
        combine_mode = blueprint_instance['_combine'] if '_combine' in blueprint_instance else collection_instances_combine_mode
        if combine_mode == 'Embed':
            return True
        if combine_mode == 'EmbedExternal' and not blueprint.local:
            return True
    return False


# this also takes the split/embed mode into account: if a collection instance changes
# AND embed is active, its container level/world should also be exported
def get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
    """Return the names of the main/level scenes that need to be (re-)exported."""
    export_change_detection = getattr(addon_prefs, "export_change_detection")
    export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
    export_levels_path = getattr(addon_prefs, "export_levels_path")
    collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")

    [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)

    def needs_export(scene_name):
        # more relaxed rules for main scenes: any change is enough (allows easier
        # handling of changes, render settings, etc.)
        if not export_change_detection or changed_export_parameters:
            return True
        if scene_name in changes_per_scene.keys():
            return True
        if changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode):
            return True
        return not check_if_blueprint_on_disk(scene_name, export_levels_path, export_gltf_extension)

    return [scene_name for scene_name in main_scene_names if needs_export(scene_name)]
b/tools/blenvy/gltf_auto_export/auto_export/get_standard_exporter_settings.py new file mode 100644 index 0000000..c3eaf04 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/auto_export/get_standard_exporter_settings.py @@ -0,0 +1,14 @@ +import bpy +import json + +def get_standard_exporter_settings(): + standard_gltf_exporter_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else None + if standard_gltf_exporter_settings != None: + try: + standard_gltf_exporter_settings = json.loads(standard_gltf_exporter_settings.as_string()) + except: + standard_gltf_exporter_settings = {} + else: + standard_gltf_exporter_settings = {} + + return standard_gltf_exporter_settings \ No newline at end of file diff --git a/tools/blenvy/gltf_auto_export/auto_export/internals.py b/tools/blenvy/gltf_auto_export/auto_export/internals.py new file mode 100644 index 0000000..425b9c2 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/auto_export/internals.py @@ -0,0 +1,22 @@ +import bpy + +class SceneLink(bpy.types.PropertyGroup): + name: bpy.props.StringProperty(name="") # type: ignore + scene: bpy.props.PointerProperty(type=bpy.types.Scene) # type: ignore + +class SceneLinks(bpy.types.PropertyGroup): + name = bpy.props.StringProperty(name="List of scenes to export", default="Unknown") + items: bpy.props.CollectionProperty(type = SceneLink) # type: ignore + +class CUSTOM_PG_sceneName(bpy.types.PropertyGroup): + name: bpy.props.StringProperty() # type: ignore + display: bpy.props.BoolProperty() # type: ignore + +class CollectionToExport(bpy.types.PropertyGroup): + name: bpy.props.StringProperty(name="") # type: ignore + +class BlueprintsToExport(bpy.types.PropertyGroup): + name = bpy.props.StringProperty(name="List of collections to export", default="Unknown") + items: bpy.props.CollectionProperty(type = CollectionToExport) # type: ignore + + diff --git a/tools/blenvy/gltf_auto_export/auto_export/operators.py 
b/tools/blenvy/gltf_auto_export/auto_export/operators.py new file mode 100644 index 0000000..ef43a29 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/auto_export/operators.py @@ -0,0 +1,468 @@ +import json +import bpy +from bpy.types import Operator +from bpy_extras.io_utils import ExportHelper +from bpy.props import (IntProperty, StringProperty, BoolProperty) + +from ..ui.operators import OT_OpenFolderbrowser, draw_folder_browser + +#from ..ui.main import GLTF_PT_auto_export_general, GLTF_PT_auto_export_main, GLTF_PT_auto_export_root + +from .preferences import (AutoExportGltfAddonPreferences, AutoExportGltfPreferenceNames) +from .auto_export import auto_export +from ..helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_auto +from ..helpers.serialize_scene import serialize_scene + +def bubble_up_changes(object, changes_per_scene): + if object.parent: + changes_per_scene[object.parent.name] = bpy.data.objects[object.parent.name] + bubble_up_changes(object.parent, changes_per_scene) + + +class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper): + """auto export gltf""" + #bl_idname = "object.xxx" + bl_idname = "export_scenes.auto_gltf" + bl_label = "Apply settings" + bl_options = {'PRESET'} # we do not add UNDO otherwise it leads to an invisible operation that resets the state of the saved serialized scene, breaking compares for normal undo/redo operations + # ExportHelper mixin class uses this + #filename_ext = '' + #filepath: bpy.props.StringProperty(subtype="FILE_PATH", default="") # type: ignore + + #list of settings (other than purely gltf settings) whose change should trigger a re-generation of gltf files + white_list = [ + 'auto_export', + 'export_root_folder', + 'export_output_folder', + 'export_change_detection', + 'export_scene_settings', + + 'main_scene_names', + 'library_scene_names', + + 'export_blueprints', + 'export_blueprints_path', + 'export_marked_assets', + 
'collection_instances_combine_mode', + + 'export_levels_path', + 'export_separate_dynamic_and_static_objects', + + 'export_materials_library', + 'export_materials_path', + ] + + @classmethod + def register(cls): + bpy.types.WindowManager.main_scene = bpy.props.PointerProperty(type=bpy.types.Scene, name="main scene", description="main_scene_picker", poll=cls.is_scene_ok) + bpy.types.WindowManager.library_scene = bpy.props.PointerProperty(type=bpy.types.Scene, name="library scene", description="library_scene_picker", poll=cls.is_scene_ok) + + bpy.types.WindowManager.main_scenes_list_index = IntProperty(name = "Index for main scenes list", default = 0) + bpy.types.WindowManager.library_scenes_list_index = IntProperty(name = "Index for library scenes list", default = 0) + + cls.main_scenes_index = 0 + cls.library_scenes_index = 0 + + @classmethod + def unregister(cls): + del bpy.types.WindowManager.main_scene + del bpy.types.WindowManager.library_scene + + del bpy.types.WindowManager.main_scenes_list_index + del bpy.types.WindowManager.library_scenes_list_index + + def is_scene_ok(self, scene): + try: + operator = bpy.context.space_data.active_operator + return scene.name not in operator.main_scenes and scene.name not in operator.library_scenes + except: + return True + + def format_settings(self): + # find all props to save + exceptional = [ + # options that don't start with 'export_' + 'collection_instances_combine_mode', + ] + all_props = self.properties + export_props = { + x: getattr(self, x) for x in dir(all_props) + if (x.startswith("export_") or x in exceptional) and all_props.get(x) is not None + } + # we inject all that we need, the above is not sufficient + for (k, v) in self.properties.items(): + if k in self.white_list or k not in AutoExportGltfPreferenceNames: + value = v + # FIXME: really weird having to do this + if k == "collection_instances_combine_mode": + value = self.collection_instances_combine_mode + if k == "export_materials": + value = 
self.export_materials + export_props[k] = value + # we add main & library scene names to our preferences + + export_props['main_scene_names'] = list(map(lambda scene_data: scene_data.name, getattr(self,"main_scenes"))) + export_props['library_scene_names'] = list(map(lambda scene_data: scene_data.name, getattr(self,"library_scenes"))) + return export_props + + def save_settings(self, context): + print("save settings") + auto_export_settings = self.format_settings() + self.properties['main_scene_names'] = auto_export_settings['main_scene_names'] + self.properties['library_scene_names'] = auto_export_settings['library_scene_names'] + + stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings") + stored_settings.clear() + + auto_export_settings = generate_complete_preferences_dict_auto(auto_export_settings) + stored_settings.write(json.dumps(auto_export_settings)) + print("saved settings", auto_export_settings) + #print("saving settings", bpy.data.texts[".gltf_auto_export_settings"].as_string(), "raw", json.dumps(export_props)) + + def load_settings(self, context): + print("loading settings") + settings = None + try: + settings = bpy.data.texts[".gltf_auto_export_settings"].as_string() + settings = json.loads(settings) + except: pass + + self.will_save_settings = False + if settings: + #print("loading settings in invoke AutoExportGLTF", settings) + try: + for (k, v) in settings.items(): + #print("loading setting", k, v) + setattr(self, k, v) + self.will_save_settings = True + + # Update filter if user saved settings + if hasattr(self, 'export_format'): + self.filter_glob = '*.glb' if self.export_format == 'GLB' else '*.gltf' + + # inject scenes data + if hasattr(self, 'main_scene_names'): + main_scenes = self.main_scenes + main_scenes.clear() + for item_name in self.main_scene_names: + item = main_scenes.add() + item.name = item_name + + if hasattr(self, 
'library_scene_names'): + library_scenes = self.library_scenes + library_scenes.clear() + for item_name in self.library_scene_names: + item = library_scenes.add() + item.name = item_name + + except Exception as error: + print("error", error) + self.report({"ERROR"}, "Loading export settings failed. Removed corrupted settings") + bpy.data.texts.remove(bpy.data.texts[".gltf_auto_export_settings"]) + else: + self.will_save_settings = True + + """ + This should ONLY be run when actually doing exports/aka calling auto_export function, because we only care about the difference in settings between EXPORTS + """ + def did_export_settings_change(self): + # compare both the auto export settings & the gltf settings + previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else None + previous_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings_previous"] if ".gltf_auto_export_gltf_settings_previous" in bpy.data.texts else None + + current_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None + current_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else None + + #check if params have changed + + # if there were no setting before, it is new, we need export + changed = False + if previous_auto_settings == None: + #print("previous settings missing, exporting") + changed = True + elif previous_gltf_settings == None: + #print("previous gltf settings missing, exporting") + previous_gltf_settings = bpy.data.texts.new(".gltf_auto_export_gltf_settings_previous") + previous_gltf_settings.write(json.dumps({})) + if current_gltf_settings == None: + current_gltf_settings = bpy.data.texts.new(".gltf_auto_export_gltf_settings") + current_gltf_settings.write(json.dumps({})) + + changed = True + + else: + auto_settings_changed = 
sorted(json.loads(previous_auto_settings.as_string()).items()) != sorted(json.loads(current_auto_settings.as_string()).items()) if current_auto_settings != None else False + gltf_settings_changed = sorted(json.loads(previous_gltf_settings.as_string()).items()) != sorted(json.loads(current_gltf_settings.as_string()).items()) if current_gltf_settings != None else False + + """print("auto settings previous", sorted(json.loads(previous_auto_settings.as_string()).items())) + print("auto settings current", sorted(json.loads(current_auto_settings.as_string()).items())) + print("auto_settings_changed", auto_settings_changed) + + print("gltf settings previous", sorted(json.loads(previous_gltf_settings.as_string()).items())) + print("gltf settings current", sorted(json.loads(current_gltf_settings.as_string()).items())) + print("gltf_settings_changed", gltf_settings_changed)""" + + changed = auto_settings_changed or gltf_settings_changed + # now write the current settings to the "previous settings" + if current_auto_settings != None: + previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings_previous") + previous_auto_settings.clear() + previous_auto_settings.write(current_auto_settings.as_string()) # TODO : check if this is always valid + + if current_gltf_settings != None: + previous_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings_previous"] if ".gltf_auto_export_gltf_settings_previous" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings_previous") + previous_gltf_settings.clear() + previous_gltf_settings.write(current_gltf_settings.as_string()) + + return changed + + def did_objects_change(self): + # sigh... 
you need to save & reset the frame otherwise it saves the values AT THE CURRENT FRAME WHICH CAN DIFFER ACROSS SCENES + current_frames = [scene.frame_current for scene in bpy.data.scenes] + for scene in bpy.data.scenes: + scene.frame_set(0) + + current_scene = bpy.context.window.scene + bpy.context.window.scene = bpy.data.scenes[0] + #serialize scene at frame 0 + """with bpy.context.temp_override(scene=bpy.data.scenes[1]): + bpy.context.scene.frame_set(0)""" + current = serialize_scene() + bpy.context.window.scene = current_scene + + # reset previous frames + for (index, scene) in enumerate(bpy.data.scenes): + scene.frame_set(int(current_frames[index])) + + previous_stored = bpy.data.texts[".TESTING"] if ".TESTING" in bpy.data.texts else None # bpy.data.texts.new(".TESTING") + if previous_stored == None: + previous_stored = bpy.data.texts.new(".TESTING") + previous_stored.write(current) + return {} + previous = json.loads(previous_stored.as_string()) + current = json.loads(current) + + changes_per_scene = {} + # TODO : how do we deal with changed scene names ??? 
+ for scene in current: + # print('scene', scene) + previous_object_names = list(previous[scene].keys()) + current_object_names =list(current[scene].keys()) + #print("previous_object_names", len(previous_object_names), previous_object_names) + #print("current_object_names", len(current_object_names), current_object_names) + + """if len(previous_object_names) > len(current_object_names): + print("removed") + if len(current_object_names) > len(previous_object_names): + print("added")""" + added = list(set(current_object_names) - set(previous_object_names)) + removed = list(set(previous_object_names) - set(current_object_names)) + """print("removed", removed) + print("added",added)""" + for obj in added: + if not scene in changes_per_scene: + changes_per_scene[scene] = {} + changes_per_scene[scene][obj] = bpy.data.objects[obj] + # TODO: how do we deal with this, as we obviously do not have data for removed objects ? + for obj in removed: + if not scene in changes_per_scene: + changes_per_scene[scene] = {} + changes_per_scene[scene][obj] = None # bpy.data.objects[obj] + + for object_name in list(current[scene].keys()): # todo : exclude directly added/removed objects + #print("ob", object_name) + if object_name in previous[scene]: + # print("object", object_name,"in previous scene, comparing") + current_obj = current[scene][object_name] + prev_obj = previous[scene][object_name] + same = str(current_obj) == str(prev_obj) + + if "Camera" in object_name: + pass#print(" current", current_obj, prev_obj) + """if "Fox" in object_name: + print(" current", current_obj) + print(" previou", prev_obj) + print(" same?", same)""" + #print("foo", same) + if not same: + """ print(" current", current_obj) + print(" previou", prev_obj)""" + if not scene in changes_per_scene: + changes_per_scene[scene] = {} + + changes_per_scene[scene][object_name] = bpy.data.objects[object_name] + bubble_up_changes(bpy.data.objects[object_name], changes_per_scene[scene]) + # now bubble up for instances & 
parents + previous_stored.clear() + previous_stored.write(json.dumps(current)) + + print("changes per scene alternative", changes_per_scene) + return changes_per_scene + + + def execute(self, context): + bpy.context.window_manager.auto_export_tracker.disable_change_detection() + if self.direct_mode: + self.load_settings(context) + if self.will_save_settings: + self.save_settings(context) + #print("self", self.auto_export) + if self.auto_export: # only do the actual exporting if auto export is actually enabled + #changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene + + #& do the export + if self.direct_mode: #Do not auto export when applying settings in the menu, do it on save only + # determine changed objects + changes_per_scene = self.did_objects_change() + # determine changed parameters + params_changed = self.did_export_settings_change() + auto_export(changes_per_scene, params_changed, self) + # cleanup + # reset the list of changes in the tracker + bpy.context.window_manager.auto_export_tracker.clear_changes() + print("AUTO EXPORT DONE") + bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=0.1) + else: + print("auto export disabled, skipping") + return {'FINISHED'} + + def invoke(self, context, event): + #print("invoke") + bpy.context.window_manager.auto_export_tracker.disable_change_detection() + self.load_settings(context) + wm = context.window_manager + #wm.fileselect_add(self) + return context.window_manager.invoke_props_dialog(self, title="Auto export", width=640) + return {'RUNNING_MODAL'} + + """def modal(self, context, event): + + if event.type == 'SPACE': + wm = context.window_manager + wm.invoke_popup(self) + #wm.invoke_props_dialog(self) + + if event.type in {'ESC'}: + return {'CANCELLED'} + + return {'RUNNING_MODAL'}""" + + + def draw(self, context): + layout = self.layout + operator = self + + controls_enabled = self.auto_export + + layout.prop(self, 
"auto_export") + layout.separator() + + toggle_icon = "TRIA_DOWN" if self.show_general_settings else "TRIA_RIGHT" + layout.prop(self, "show_general_settings", text="General", icon=toggle_icon) + if self.show_general_settings: + section = layout.box() + section.enabled = controls_enabled + + draw_folder_browser(section, "Export root folder", self.export_root_folder, "export_root_folder") + row = section.row() + draw_folder_browser(row, "Assets Folder (non blueprints mode only)", self.export_root_folder, "export_output_folder") + row.enabled = not self.export_blueprints + section.prop(operator, "export_blueprints") + section.prop(operator, "export_scene_settings") + + """header, panel = layout.panel("my_panel_id", default_closed=False) + header.label(text="Hello World") + if panel: + panel.label(text="Success")""" + + toggle_icon = "TRIA_DOWN" if self.show_change_detection_settings else "TRIA_RIGHT" + layout.prop(operator, "show_change_detection_settings", text="Change Detection", icon=toggle_icon) + if self.show_change_detection_settings: + section = layout.box() + section.enabled = controls_enabled + section.prop(operator, "export_change_detection", text="Use change detection") + + # main/level scenes + toggle_icon = "TRIA_DOWN" if self.show_scene_settings else "TRIA_RIGHT" + layout.prop(operator, "show_scene_settings", text="Scenes", icon=toggle_icon) + if self.show_scene_settings: + section = layout.box() + section.enabled = controls_enabled + rows = 2 + row = section.row() + row.label(text="main scenes") + row.prop(context.window_manager, "main_scene", text='') + + row = section.row() + row.template_list("SCENE_UL_GLTF_auto_export", "level scenes", operator, "main_scenes", operator, "main_scenes_index", rows=rows) + + col = row.column(align=True) + sub_row = col.row() + add_operator = sub_row.operator("scene_list.list_action", icon='ADD', text="") + add_operator.action = 'ADD' + add_operator.scene_type = 'level' + #add_operator.operator = operator + 
sub_row.enabled = context.window_manager.main_scene is not None + + sub_row = col.row() + remove_operator = sub_row.operator("scene_list.list_action", icon='REMOVE', text="") + remove_operator.action = 'REMOVE' + remove_operator.scene_type = 'level' + col.separator() + + # library scenes + row = section.row() + row.label(text="library scenes") + row.prop(context.window_manager, "library_scene", text='') + + row = section.row() + row.template_list("SCENE_UL_GLTF_auto_export", "library scenes", operator, "library_scenes", operator, "library_scenes_index", rows=rows) + + col = row.column(align=True) + sub_row = col.row() + add_operator = sub_row.operator("scene_list.list_action", icon='ADD', text="") + add_operator.action = 'ADD' + add_operator.scene_type = 'library' + sub_row.enabled = context.window_manager.library_scene is not None + + + sub_row = col.row() + remove_operator = sub_row.operator("scene_list.list_action", icon='REMOVE', text="") + remove_operator.action = 'REMOVE' + remove_operator.scene_type = 'library' + col.separator() + + toggle_icon = "TRIA_DOWN" if self.show_blueprint_settings else "TRIA_RIGHT" + layout.prop(operator, "show_blueprint_settings", text="Blueprints", icon=toggle_icon) + if self.show_blueprint_settings: + section = layout.box() + section.enabled = controls_enabled + section = section.box() + section.enabled = controls_enabled and self.export_blueprints + + # collections/blueprints + draw_folder_browser(section, "Blueprints folder", self.export_root_folder, "export_blueprints_path") + #section.prop(operator, "export_blueprints_path") + section.prop(operator, "collection_instances_combine_mode") + section.prop(operator, "export_marked_assets") + section.separator() + + draw_folder_browser(section, "Levels folder", self.export_root_folder, "export_levels_path") + #section.prop(operator, "export_levels_path") + + section.prop(operator, "export_separate_dynamic_and_static_objects") + section.separator() + + # materials + 
section.prop(operator, "export_materials_library") + section = section.box() + section.enabled = controls_enabled and self.export_materials_library + draw_folder_browser(section, 'Materials folder', self.export_root_folder, "export_materials_path") + #section.prop(operator, "export_materials_path") + + + def cancel(self, context): + print("cancel") + #bpy.context.window_manager.auto_export_tracker.enable_change_detection() + bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=1) + diff --git a/tools/blenvy/gltf_auto_export/auto_export/preferences.py b/tools/blenvy/gltf_auto_export/auto_export/preferences.py new file mode 100644 index 0000000..6eccfb0 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/auto_export/preferences.py @@ -0,0 +1,208 @@ + +import os +from bpy.types import AddonPreferences +from bpy.props import (BoolProperty, + IntProperty, + StringProperty, + EnumProperty, + CollectionProperty + ) + +from .internals import (CUSTOM_PG_sceneName) + +AutoExportGltfPreferenceNames = [ + 'will_save_settings', + 'direct_mode',# specific to main auto_export operator + + 'show_general_settings', + 'auto_export', + 'export_root_folder', + 'export_output_folder', + 'export_scene_settings', + + 'show_change_detection_settings', + 'export_change_detection', + + 'show_scene_settings', + 'main_scenes', + 'library_scenes', + 'main_scenes_index', + 'library_scenes_index', + 'main_scene_names', + 'library_scene_names', + + 'show_blueprint_settings', + 'export_blueprints', + 'export_blueprints_path', + 'export_marked_assets', + 'collection_instances_combine_mode', + + 'export_levels_path', + 'export_separate_dynamic_and_static_objects', + + 'export_materials_library', + 'export_materials_path', +] + +def on_export_output_folder_updated(self, context): + #self.export_root_folder = os.path.relpath(self.export_root_folder) + #self.export_output_folder = os.path.join(self.export_root_folder, self.export_output_folder) + 
class AutoExportGltfAddonPreferences(AddonPreferences):
    """Add-on preferences holding every auto-export setting.

    Doubles as the settings store for the auto-export operator. The various
    `show_*` booleans are UI-only workarounds for the lack of collapsible
    sub-panels and have no impact on exports.

    Fixes: corrected user-visible typos in property names/descriptions
    ("seperatly", "embeded", "curent", "Recomended", "specifified").
    """
    # this must match the add-on name, use '__package__'
    # when defining this in a submodule of a python package.
    bl_idname = __package__
    bl_options = {'PRESET'}

    #### these are for the operator
    will_save_settings: BoolProperty(
        name='Remember Export Settings',
        description='Store glTF export settings in the Blender project',
        default=True
    ) # type: ignore

    # used when the operator is called directly; works a bit differently than inside the ui
    direct_mode: BoolProperty(
        default=False
    ) # type: ignore

    auto_export: BoolProperty(
        name='Auto export',
        description='Automatically export to gltf on save',
        default=False
    ) # type: ignore

    #### general
    # for UI only, workaround for lacking panels
    show_general_settings: BoolProperty(
        name="show_general settings",
        description="show/hide general settings (UI only: has no impact on exports)",
        default=True
    ) # type: ignore

    export_root_folder: StringProperty(
        name="Project Root Path",
        description="The root folder of your (Bevy) project (not assets!)",
        # subtype='DIR_PATH',
        default='../'
        # update=on_export_output_folder_updated
    ) # type: ignore

    export_output_folder: StringProperty(
        name='Export folder',
        description='The root folder for all exports (relative to the root folder/path) Defaults to "assets" ',
        default='./assets',
        # subtype='DIR_PATH',
        options={'HIDDEN'}
        # update=on_export_output_folder_updated
    ) # type: ignore

    # for UI only, workaround for lacking panels
    show_change_detection_settings: BoolProperty(
        name="show change detection settings",
        description="show/hide change detection settings (UI only: has no impact on exports)",
        default=True
    ) # type: ignore

    export_change_detection: BoolProperty(
        name='Change detection',
        description='Use change detection to determine what/if should be exported',
        default=True
    ) # type: ignore

    # scenes
    # for UI only, workaround for lacking panels
    show_scene_settings: BoolProperty(
        name="show scene settings",
        description="show/hide scene settings (UI only: has no impact on exports)",
        default=True
    ) # type: ignore

    # scene components
    export_scene_settings: BoolProperty(
        name='Export scene settings',
        description='Export scene settings ie AmbientLighting, Bloom, AO etc',
        default=False
    ) # type: ignore

    # blueprint settings
    # for UI only, workaround for lacking panels
    show_blueprint_settings: BoolProperty(
        name="show blueprint settings",
        description="show/hide blueprint settings (UI only: has no impact on exports)",
        default=True
    ) # type: ignore

    export_blueprints: BoolProperty(
        name='Export Blueprints',
        description='Replaces collection instances with an Empty with a BlueprintName custom property, and enables a lot more features!',
        default=True
    ) # type: ignore

    export_blueprints_path: StringProperty(
        name='Blueprints path',
        description='path to export the blueprints to (relative to the export folder)',
        default='assets/blueprints',
        # subtype='DIR_PATH'
    ) # type: ignore

    export_levels_path: StringProperty(
        name='Levels path',
        description='path to export the levels (main scenes) to (relative to the export folder)',
        default='assets/levels',
        # subtype='DIR_PATH'
    ) # type: ignore

    export_separate_dynamic_and_static_objects: BoolProperty(
        name="Export levels' dynamic and static objects separately",
        description="""For MAIN scenes only (aka levels), toggle this to generate 2 files per level:
        - one with all dynamic data: collection or instances marked as dynamic/ saveable
        - one with all static data: anything else that is NOT marked as dynamic""",
        default=False
    ) # type: ignore

    export_materials_library: BoolProperty(
        name='Export materials library',
        description='remove materials from blueprints and use the material library instead',
        default=False
    ) # type: ignore

    export_materials_path: StringProperty(
        name='Materials path',
        description='path to export the materials libraries to (relative to the export folder)',
        default='assets/materials',
        # subtype='DIR_PATH'
    ) # type: ignore

    """ combine mode can be
    - 'Split' (default): replace with an empty, creating links to sub blueprints
    - 'Embed' : treat it as an embedded object and do not replace it with an empty
    - 'EmbedExternal': embed any instance of a non local collection (ie external assets)

    - 'Inject': inject components from sub collection instances into the current object => this is now a separate custom property that you can apply to a collection instance
    """

    collection_instances_combine_mode: EnumProperty(
        name='Collection instances',
        items=(
            ('Split', 'Split', 'replace collection instances with an empty + blueprint, creating links to sub blueprints (Default, Recommended)'),
            ('Embed', 'Embed', 'treat collection instances as embedded objects and do not replace them with an empty'),
            ('EmbedExternal', 'EmbedExternal', 'treat instances of external (not specified in the current blend file) collections (aka assets etc) as embedded objects and do not replace them with empties'),
            # ('Inject', 'Inject', 'inject components from sub collection instances into the current object')
        ),
        default='Split'
    ) # type: ignore

    export_marked_assets: BoolProperty(
        name='Auto export marked assets',
        description='Collections that have been marked as assets will be systematically exported, even if not in use in another scene',
        default=True
    ) # type: ignore

    main_scenes: CollectionProperty(name="main scenes", type=CUSTOM_PG_sceneName) # type: ignore
    main_scenes_index: IntProperty(name="Index for main scenes list", default=0) # type: ignore

    library_scenes: CollectionProperty(name="library scenes", type=CUSTOM_PG_sceneName) # type: ignore
    library_scenes_index: IntProperty(name="Index for library scenes list", default=0) # type: ignore
    @classmethod
    def save_handler(cls, scene, depsgraph):
        # Runs on file save: triggers the auto-export operator in direct mode,
        # then resets the change bookkeeping for the next save cycle.
        print("-------------")
        print("saved", bpy.data.filepath)
        bpy.ops.export_scenes.auto_gltf(direct_mode= True)

        # (re)set a few things after exporting
        # reset whether the gltf export parameters were changed since the last save
        cls.export_params_changed = False
        # reset whether there have been changed objects since the last save
        cls.changed_objects_per_scene.clear()
        # all our logic is done, mark this as done

    @classmethod
    def deps_post_update_handler(cls, scene, depsgraph):
        # Depsgraph post-update hook: records which objects changed in which scene
        # so the exporter can later export only what is needed.
        active_operator = bpy.context.active_operator
        if active_operator:
            if active_operator.bl_idname == "EXPORT_SCENE_OT_gltf" and active_operator.gltf_export_id == "gltf_auto_export":
                # the standard gltf exporter was launched by us:
                # we backup any existing gltf export settings, if there were any
                # NOTE(review): `scene` (the handler parameter) is deliberately rebound here — confirm intended
                scene = bpy.context.scene
                if "glTF2ExportSettings" in scene:
                    existing_setting = scene["glTF2ExportSettings"]
                    bpy.context.window_manager.gltf_settings_backup = json.dumps(dict(existing_setting))

                # we force saving params
                active_operator.will_save_settings = True
                # we set the last operator here so we can clear the specific settings (yeah for overly complex logic)
                cls.last_operator = active_operator
                return

            if active_operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf":
                # we force saving params
                active_operator.will_save_settings = True
                active_operator.auto_export = True
                # if we are using the operator, bail out for the rest
                print("setting stuff for auto_export")
                return

        # only deal with changes if we are NOT in the midst of saving/exporting
        if cls.change_detection_enabled:
            # ignore anything going on with temporary scenes
            if not scene.name.startswith(TEMPSCENE_PREFIX):
                changed_scene = scene.name or ""
                if not changed_scene in cls.changed_objects_per_scene:
                    cls.changed_objects_per_scene[changed_scene] = {}
                for obj in depsgraph.updates:
                    if isinstance(obj.id, bpy.types.Object):
                        # get the actual object
                        object = bpy.data.objects[obj.id.name]
                        # only track transform / geometry changes
                        if obj.is_updated_transform or obj.is_updated_geometry:
                            cls.changed_objects_per_scene[scene.name][obj.id.name] = object

                    elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
                        # a material changed: mark every object using it as changed
                        material = bpy.data.materials[obj.id.name]
                        # now find which objects are using the material
                        # NOTE(review): the inner loop rebinds `obj`, shadowing the depsgraph update variable
                        for obj in bpy.data.objects:
                            for slot in obj.material_slots:
                                if slot.material == material:
                                    cls.changed_objects_per_scene[scene.name][obj.name] = obj
                # drop the whole map again if nothing was actually recorded
                items = 0
                for scene_name in cls.changed_objects_per_scene:
                    items += len(cls.changed_objects_per_scene[scene_name].keys())
                if items == 0:
                    cls.changed_objects_per_scene.clear()

        # get a list of exportable collections for display
        # keep it simple, just use Simplenamespace for compatibility with the rest of our code
        # TODO: debounce

    def disable_change_detection(self):
        # Set on both the instance and the class: handlers read the class attribute.
        self.change_detection_enabled = False
        self.__class__.change_detection_enabled = False
        return None

    def enable_change_detection(self):
        # Set on both the instance and the class: handlers read the class attribute.
        self.change_detection_enabled = True
        self.__class__.change_detection_enabled = True
        return None

    def clear_changes(self):
        # Forget all recorded per-scene object changes.
        self.changed_objects_per_scene.clear()
        self.__class__.changed_objects_per_scene.clear()

    def export_finished(self):
        # Called per finished export; re-arms change detection once all exports are done
        # (delayed so the export writes themselves are not detected as changes).
        self.exports_count -= 1
        if self.exports_count == 0:
            print("preparing to reset change detection")
            bpy.app.timers.register(self.enable_change_detection, first_interval=0.1)
        return None


def get_auto_exporter_settings():
    # Load the auto-exporter settings stored in the blend file's text datablock,
    # falling back to an empty dict when absent or unparsable.
    auto_exporter_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None
    if auto_exporter_settings != None:
        try:
            auto_exporter_settings = json.loads(auto_exporter_settings.as_string())
        except: # NOTE(review): bare except — should be `except Exception` (json.JSONDecodeError); left as-is
            auto_exporter_settings = {}
    else:
        auto_exporter_settings = {}

    return auto_exporter_settings
def generate_and_export(addon_prefs, export_settings, gltf_output_path, temp_scene_name="__temp_scene", tempScene_filler=None, tempScene_cleaner=None):
    """Generate a temporary scene, fill it with data, export it to gltf, and clean up.

    * named using temp_scene_name
    * filled using the tempScene_filler
    * written on disk to gltf_output_path, with the gltf export parameters in export_settings
    * cleaned up using tempScene_cleaner

    Fix: the original never restored the active scene/collection/mode when the
    export raised; restoration now happens in a `finally` block.
    """
    temp_scene = bpy.data.scenes.new(name=temp_scene_name)
    temp_root_collection = temp_scene.collection

    # save active scene, selected collection and mode so we can restore them afterwards
    original_scene = bpy.context.window.scene
    original_collection = bpy.context.view_layer.active_layer_collection
    original_mode = bpy.context.active_object.mode if bpy.context.active_object != None else None
    # we change the mode to object mode, otherwise the gltf exporter is not happy
    if original_mode != None and original_mode != 'OBJECT':
        print("setting to object mode", original_mode)
        bpy.ops.object.mode_set(mode='OBJECT')
    # we set our active scene to be this one: this is needed otherwise the stand-in empties get generated in the wrong scene
    bpy.context.window.scene = temp_scene

    try:
        area = [area for area in bpy.context.screen.areas if area.type == "VIEW_3D"][0]
        region = [region for region in area.regions if region.type == 'WINDOW'][0]
        with bpy.context.temp_override(scene=temp_scene, area=area, region=region):
            # detect scene mismatch
            scene_mismatch = bpy.context.scene.name != bpy.context.window.scene.name
            if scene_mismatch:
                raise Exception("Context scene mismatch, aborting", bpy.context.scene.name, bpy.context.window.scene.name)

            set_active_collection(bpy.context.scene, temp_root_collection.name)
            # generate contents of temporary scene
            scene_filler_data = tempScene_filler(temp_root_collection)
            # export the temporary scene
            try:
                export_gltf(gltf_output_path, export_settings)
            except Exception as error:
                print("failed to export gltf !", error)
                raise error
            # restore everything
            tempScene_cleaner(temp_scene, scene_filler_data)
    finally:
        # always reset active scene, active collection and mode, even if the export failed
        bpy.context.window.scene = original_scene
        bpy.context.view_layer.active_layer_collection = original_collection
        if original_mode != None:
            bpy.ops.object.mode_set(mode=original_mode)


def generate_complete_preferences_dict_gltf(settings):
    """Given the actual gltf settings, filter out invalid/useless params and params equal to defaults.

    Defaults come from the annotations of the standard gltf exporter's base class,
    which contains all relevant parameters.
    """
    # parameters that are transient/selection-related and must never be persisted
    gltf_parameters_to_ignore = ["use_active_collection", "use_active_collection_with_nested", "use_active_scene", "use_selection", "will_save_settings", "gltf_export_id"]

    defaults = {}
    for k, item in ExportGLTF2_Base.__annotations__.items():
        defaults[k] = item.keywords.get('default', None)

    complete_preferences = {}
    for key in settings.keys():
        if key in gltf_parameters_to_ignore:
            continue
        # only write out values different from defaults
        if key in defaults and settings[key] != defaults[key]:
            complete_preferences[key] = settings[key]
    return complete_preferences


def generate_complete_preferences_dict_auto(settings):
    """Given the actual auto-export settings, keep only keys declared on AutoExportGltfAddonPreferences.

    Fix: the original `if settings[key] != defaults[key] / else` wrote the same value in
    BOTH branches, making the default comparison dead code — collapsed to one assignment
    (behavior unchanged: every known key is kept).
    """
    defaults = {}
    for k, item in AutoExportGltfAddonPreferences.__annotations__.items():
        defaults[k] = item.keywords.get('default', None)

    return {key: settings[key] for key in settings.keys() if key in defaults}


class Blueprint:
    """In-memory description of a 'blueprint' (a Blender collection exported as a reusable asset)."""

    def __init__(self, name):
        self.name = name
        self.local = True  # defined in this blend file (False for external assets)
        self.marked = False  # If marked as asset or with auto_export flag, always export if changed
        self.scene = None  # library scene the collection is defined in; could be useful for tracking

        self.instances = []  # objects instancing this collection in the main scenes
        self.objects = []  # names of non-instance objects contained in the collection
        self.nested_blueprints = []  # names of collections instanced inside this one

        self.collection = None  # the underlying bpy collection; should we just subclass?

    def __repr__(self):
        return f'Name: {self.name} Local: {self.local}, Scene: {self.scene}, Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'

    def __str__(self):
        return f'Name: "{self.name}", Local: {self.local}, Scene: {self.scene}, Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'


def find_blueprints_not_on_disk(blueprints, folder_path, extension):
    """Return the blueprints whose exported file does not exist under folder_path.

    Fix: `os.path.exists and os.path.isfile` was redundant — isfile implies existence.
    """
    not_found_blueprints = []
    for blueprint in blueprints:
        gltf_output_path = os.path.join(folder_path, blueprint.name + extension)
        if not os.path.isfile(gltf_output_path):
            not_found_blueprints.append(blueprint)
    return not_found_blueprints


def check_if_blueprint_on_disk(scene_name, folder_path, extension):
    """Return True if the exported file for scene_name exists under folder_path."""
    gltf_output_path = os.path.join(folder_path, scene_name + extension)
    found = os.path.isfile(gltf_output_path)
    print("level", scene_name, "found", found, "path", gltf_output_path)
    return found
= {} + + # meh + def add_object_to_collection_instances(collection_name, object, internal=True): + collection_category = internal_collection_instances if internal else external_collection_instances + if not collection_name in collection_category.keys(): + #print("ADDING INSTANCE OF", collection_name, "object", object.name, "categ", collection_category) + collection_category[collection_name] = [] #.append(collection_name) + collection_category[collection_name].append(object) + + for scene in main_scenes:# should it only be main scenes ? what about collection instances inside other scenes ? + for object in scene.objects: + #print("object", object.name) + if object.instance_type == 'COLLECTION': + collection = object.instance_collection + collection_name = object.instance_collection.name + #print(" from collection:", collection_name) + + collection_from_library = False + for library_scene in library_scenes: # should be only in library scenes + collection_from_library = library_scene.user_of_id(collection) > 0 # TODO: also check if it is an imported asset + if collection_from_library: + break + + add_object_to_collection_instances(collection_name=collection_name, object=object, internal = collection_from_library) + + # experiment with custom properties from assets stored in other blend files + """if not collection_from_library: + for property_name in object.keys(): + print("stuff", property_name) + for property_name in collection.keys(): + print("OTHER", property_name)""" + + # blueprints[collection_name].instances.append(object) + + # FIXME: this only account for direct instances of blueprints, not for any nested blueprint inside a blueprint + if scene.name not in blueprint_instances_per_main_scene.keys(): + blueprint_instances_per_main_scene[scene.name] = {} + if collection_name not in blueprint_instances_per_main_scene[scene.name].keys(): + blueprint_instances_per_main_scene[scene.name][collection_name] = [] + 
blueprint_instances_per_main_scene[scene.name][collection_name].append(object) + + blueprint_name_from_instances[object] = collection_name + + """# add any indirect ones + # FIXME: needs to be recursive, either here or above + for nested_blueprint in blueprints[collection_name].nested_blueprints: + if not nested_blueprint in blueprint_instances_per_main_scene[scene.name]: + blueprint_instances_per_main_scene[scene.name].append(nested_blueprint)""" + + for collection in bpy.data.collections: + #print("collection", collection, collection.name_full, "users", collection.users) + + collection_from_library = False + defined_in_scene = None + for scene in library_scenes: # should be only in library scenes + collection_from_library = scene.user_of_id(collection) > 0 + if collection_from_library: + defined_in_scene = scene + break + if not collection_from_library: + continue + + + if ( + 'AutoExport' in collection and collection['AutoExport'] == True # get marked collections + or export_marked_assets and collection.asset_data is not None # or if you have marked collections as assets you can auto export them too + or collection.name in list(internal_collection_instances.keys()) # or if the collection has an instance in one of the main scenes + ): + blueprint = Blueprint(collection.name) + blueprint.local = True + blueprint.marked = 'AutoExport' in collection and collection['AutoExport'] == True or export_marked_assets and collection.asset_data is not None + blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop + blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint" + blueprint.collection = collection + blueprint.instances = internal_collection_instances[collection.name] if collection.name in internal_collection_instances else [] + blueprint.scene = 
defined_in_scene + blueprints[collection.name] = blueprint + + # add nested collections to internal/external_collection instances + # FIXME: inneficient, third loop over all_objects + for object in collection.all_objects: + if object.instance_type == 'COLLECTION': + add_object_to_collection_instances(collection_name=object.instance_collection.name, object=object, internal = blueprint.local) + + # now create reverse lookup , so you can find the collection from any of its contained objects + for object in collection.all_objects: + blueprints_from_objects[object.name] = blueprint#collection.name + + # + collections.append(collection) + + # add any collection that has an instance in the main scenes, but is not present in any of the scenes (IE NON LOCAL/ EXTERNAL) + for collection_name in external_collection_instances: + collection = bpy.data.collections[collection_name] + blueprint = Blueprint(collection.name) + blueprint.local = False + blueprint.marked = True #external ones are always marked, as they have to have been marked in their original file #'AutoExport' in collection and collection['AutoExport'] == True + blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop + blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint" + blueprint.collection = collection + blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else [] + blueprints[collection.name] = blueprint + #print("EXTERNAL COLLECTION", collection, dict(collection)) + + # add nested collections to internal/external_collection instances + # FIXME: inneficient, third loop over all_objects + """for object in collection.all_objects: + if object.instance_type == 'COLLECTION': + 
add_object_to_collection_instances(collection_name=object.instance_collection.name, object=object, internal = blueprint.local)""" + + # now create reverse lookup , so you can find the collection from any of its contained objects + for object in collection.all_objects: + blueprints_from_objects[object.name] = blueprint#collection.name + + + # then add any nested collections at root level (so we can have a flat list, regardless of nesting) + # TODO: do this recursively + for blueprint_name in list(blueprints.keys()): + parent_blueprint = blueprints[blueprint_name] + + for nested_blueprint_name in parent_blueprint.nested_blueprints: + if not nested_blueprint_name in blueprints.keys(): + collection = bpy.data.collections[nested_blueprint_name] + blueprint = Blueprint(collection.name) + blueprint.local = parent_blueprint.local + blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inneficient, double loop + blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint" + blueprint.collection = collection + blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else [] + blueprint.scene = parent_blueprint.scene if parent_blueprint.local else None + blueprints[collection.name] = blueprint + + + # now create reverse lookup , so you can find the collection from any of its contained objects + for object in collection.all_objects: + blueprints_from_objects[object.name] = blueprint#collection.name + + + blueprints = dict(sorted(blueprints.items())) + + '''print("BLUEPRINTS") + for blueprint_name in blueprints: + print(" ", blueprints[blueprint_name]) + + """print("BLUEPRINTS LOOKUP") + print(blueprints_from_objects)""" + + print("BLUEPRINT INSTANCES PER MAIN SCENE") + print(blueprint_instances_per_main_scene)''' + + + 
"""changes_test = {'Library': { + 'Blueprint1_mesh': bpy.data.objects['Blueprint1_mesh'], + 'Fox_mesh': bpy.data.objects['Fox_mesh'], + 'External_blueprint2_Cylinder': bpy.data.objects['External_blueprint2_Cylinder']} + } + # which main scene has been impacted by this + # does one of the main scenes contain an INSTANCE of an impacted blueprint + for scene in main_scenes: + changed_objects = list(changes_test["Library"].keys()) # just a hack for testing + #bluprint_instances_in_scene = blueprint_instances_per_main_scene[scene.name] + #print("instances per scene", bluprint_instances_in_scene, "changed_objects", changed_objects) + + changed_blueprints_with_instances_in_scene = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects] + print("changed_blueprints_with_instances_in_scene", changed_blueprints_with_instances_in_scene) + level_needs_export = len(changed_blueprints_with_instances_in_scene) > 0 + if level_needs_export: + print("level needs export", scene.name) + + for scene in library_scenes: + changed_objects = list(changes_test[scene.name].keys()) + changed_blueprints = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects] + # we only care about local blueprints/collections + changed_local_blueprints = [blueprint_name for blueprint_name in changed_blueprints if blueprint_name in blueprints.keys() and blueprints[blueprint_name].local] + print("changed blueprints", changed_local_blueprints)""" + + # additional helper data structures for lookups etc + blueprints_per_name = blueprints + blueprints = [] # flat list + internal_blueprints = [] + external_blueprints = [] + blueprints_per_scenes = {} + + blueprint_instances_per_library_scene = {} + + for blueprint in blueprints_per_name.values(): + blueprints.append(blueprint) + if blueprint.local: + internal_blueprints.append(blueprint) + if blueprint.scene: + if not blueprint.scene.name in blueprints_per_scenes: + 
blueprints_per_scenes[blueprint.scene.name] = [] + blueprints_per_scenes[blueprint.scene.name].append(blueprint.name) # meh + + else: + external_blueprints.append(blueprint) + + # we also need to have blueprint instances for + + data = { + "blueprints": blueprints, + "blueprints_per_name": blueprints_per_name, + "blueprint_names": list(blueprints_per_name.keys()), + "blueprints_from_objects": blueprints_from_objects, + + "internal_blueprints": internal_blueprints, + "external_blueprints": external_blueprints, + "blueprints_per_scenes": blueprints_per_scenes, + + "blueprint_instances_per_main_scene": blueprint_instances_per_main_scene, + "blueprint_instances_per_library_scene": blueprint_instances_per_library_scene, + + # not sure about these two + "internal_collection_instances": internal_collection_instances, + "external_collection_instances": external_collection_instances, + + "blueprint_name_from_instances": blueprint_name_from_instances + } + + return SimpleNamespace(**data) + + +import json +from .object_makers import (make_empty) + + +def add_scene_property(scene, property_name, property_data): + root_collection = scene.collection + scene_property = None + for object in scene.objects: + if object.name == property_name: + scene_property = object + break + + if scene_property is None: + scene_property = make_empty(property_name, [0,0,0], [0,0,0], [0,0,0], root_collection) + + for key in property_data.keys(): + scene_property[key] = property_data[key] + + +def inject_blueprints_list_into_main_scene(scene, blueprints_data, addon_prefs): + export_root_folder = getattr(addon_prefs, "export_root_folder") + export_output_folder = getattr(addon_prefs,"export_output_folder") + export_levels_path = getattr(addon_prefs,"export_levels_path") + export_blueprints_path = getattr(addon_prefs, "export_blueprints_path") + export_gltf_extension = getattr(addon_prefs, "export_gltf_extension") + + # print("injecting assets/blueprints data into scene") + assets_list_name = 
f"assets_list_{scene.name}_components" + assets_list_data = {} + + + # FIXME: temporary hack + for blueprint in blueprints_data.blueprints: + bpy.context.window_manager.blueprints_registry.add_blueprint(blueprint) + + blueprint_instance_names_for_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene.name, None) + # find all blueprints used in a scene + blueprints_in_scene = [] + if blueprint_instance_names_for_scene: # what are the blueprints used in this scene, inject those into the assets list component + children_per_blueprint = {} + for blueprint_name in blueprint_instance_names_for_scene: + blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None) + if blueprint: + children_per_blueprint[blueprint_name] = blueprint.nested_blueprints + blueprints_in_scene += blueprint.nested_blueprints + assets_list_data["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})" + print(blueprint_instance_names_for_scene) + add_scene_property(scene, assets_list_name, assets_list_data) + + + relative_blueprints_path = os.path.relpath(export_blueprints_path, export_root_folder) + + blueprint_assets_list = [] + if blueprint_instance_names_for_scene: + for blueprint_name in blueprint_instance_names_for_scene: + blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None) + if blueprint is not None: + print("BLUEPRINT", blueprint) + blueprint_exported_path = None + if blueprint.local: + blueprint_exported_path = os.path.join(relative_blueprints_path, f"{blueprint.name}{export_gltf_extension}") + else: + # get the injected path of the external blueprints + blueprint_exported_path = blueprint.collection['Export_path'] if 'Export_path' in blueprint.collection else None + print("foo", dict(blueprint.collection)) + if blueprint_exported_path is not None: + blueprint_assets_list.append({"name": blueprint.name, "path": blueprint_exported_path, "type": "MODEL", "internal": True}) + + + # fetch images/textures + # see 
https://blender.stackexchange.com/questions/139859/how-to-get-absolute-file-path-for-linked-texture-image + textures = [] + for ob in bpy.data.objects: + if ob.type == "MESH": + for mat_slot in ob.material_slots: + if mat_slot.material: + if mat_slot.material.node_tree: + textures.extend([x.image.filepath for x in mat_slot.material.node_tree.nodes if x.type=='TEX_IMAGE']) + print("textures", textures) + + assets_list_name = f"assets_{scene.name}" + assets_list_data = {"blueprints": json.dumps(blueprint_assets_list), "sounds":[], "images":[]} + scene["assets"] = json.dumps(blueprint_assets_list) + + print("blueprint assets", blueprint_assets_list) + add_scene_property(scene, assets_list_name, assets_list_data) + for blueprint in blueprint_assets_list: + bpy.context.window_manager.assets_registry.add_asset(**blueprint) + + + '''root_collection = scene.collection + + assets_list = None + for object in scene.objects: + if object.name == assets_list_name: + assets_list = object + break + + if assets_list is None: + assets_list = make_empty(assets_list_name, [0,0,0], [0,0,0], [0,0,0], root_collection) + + blueprint_names_for_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene.name, None) + # find all blueprints used in a scene + if blueprint_names_for_scene: # what are the blueprints used in this scene, inject those into the assets list component + children_per_blueprint = {} + for blueprint_name in blueprint_names_for_scene: + blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None) + if blueprint: + children_per_blueprint[blueprint_name] = blueprint.nested_blueprints + assets_list["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"''' + +def remove_blueprints_list_from_main_scene(scene): + assets_list = None + assets_list_name = f"assets_list_{scene.name}_components" + + for object in scene.objects: + if object.name == assets_list_name: + assets_list = object + if assets_list is not None: + 
bpy.data.objects.remove(assets_list, do_unlink=True) diff --git a/tools/blenvy/gltf_auto_export/helpers/helpers_collections.py b/tools/blenvy/gltf_auto_export/helpers/helpers_collections.py new file mode 100644 index 0000000..ad5c3d6 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/helpers/helpers_collections.py @@ -0,0 +1,23 @@ +import bpy + +# traverse all collections +def traverse_tree(t): + yield t + for child in t.children: + yield from traverse_tree(child) + +#Recursivly transverse layer_collection for a particular name +def recurLayerCollection(layerColl, collName): + found = None + if (layerColl.name == collName): + return layerColl + for layer in layerColl.children: + found = recurLayerCollection(layer, collName) + if found: + return found + +def set_active_collection(scene, collection_name): + layer_collection = bpy.data.scenes[scene.name].view_layers['ViewLayer'].layer_collection + layerColl = recurLayerCollection(layer_collection, collection_name) + # set active collection to the collection + bpy.context.view_layer.active_layer_collection = layerColl diff --git a/tools/blenvy/gltf_auto_export/helpers/helpers_scenes.py b/tools/blenvy/gltf_auto_export/helpers/helpers_scenes.py new file mode 100644 index 0000000..850630a --- /dev/null +++ b/tools/blenvy/gltf_auto_export/helpers/helpers_scenes.py @@ -0,0 +1,222 @@ +import json +import bpy +from .object_makers import (make_empty) + + +# these are mostly for when using this add-on together with the bevy_components add-on +custom_properties_to_filter_out = ['_combine', 'template', 'components_meta'] + +def is_component_valid(object, component_name): + if "components_meta" in object or hasattr(object, "components_meta"): + target_components_metadata = object.components_meta.components + component_meta = next(filter(lambda component: component["long_name"] == component_name, target_components_metadata), None) + if component_meta != None: + return component_meta.enabled and not component_meta.invalid + return 
True + +def remove_unwanted_custom_properties(object): + to_remove = [] + component_names = list(object.keys()) # to avoid 'IDPropertyGroup changed size during iteration' issues + for component_name in component_names: + if not is_component_valid(object, component_name): + to_remove.append(component_name) + for cp in custom_properties_to_filter_out + to_remove: + if cp in object: + del object[cp] + +# TODO: rename actions ? +# reference https://github.com/KhronosGroup/glTF-Blender-IO/blob/main/addons/io_scene_gltf2/blender/exp/animation/gltf2_blender_gather_action.py#L481 +def copy_animation_data(source, target): + if source.animation_data: + ad = source.animation_data + + blender_actions = [] + blender_tracks = {} + + # TODO: this might need to be modified/ adapted to match the standard gltf exporter settings + for track in ad.nla_tracks: + non_muted_strips = [strip for strip in track.strips if strip.action is not None and strip.mute is False] + for strip in non_muted_strips: #t.strips: + # print(" ", source.name,'uses',strip.action.name, "active", strip.active, "action", strip.action) + blender_actions.append(strip.action) + blender_tracks[strip.action.name] = track.name + + # Remove duplicate actions. 
+ blender_actions = list(set(blender_actions)) + # sort animations alphabetically (case insensitive) so they have a defined order and match Blender's Action list + blender_actions.sort(key = lambda a: a.name.lower()) + + markers_per_animation = {} + animations_infos = [] + + for action in blender_actions: + animation_name = blender_tracks[action.name] + animations_infos.append( + f'(name: "{animation_name}", frame_start: {action.frame_range[0]}, frame_end: {action.frame_range[1]}, frames_length: {action.frame_range[1] - action.frame_range[0]}, frame_start_override: {action.frame_start}, frame_end_override: {action.frame_end})' + ) + markers_per_animation[animation_name] = {} + + for marker in action.pose_markers: + if marker.frame not in markers_per_animation[animation_name]: + markers_per_animation[animation_name][marker.frame] = [] + markers_per_animation[animation_name][marker.frame].append(marker.name) + + # best method, using the built-in link animation operator + with bpy.context.temp_override(active_object=source, selected_editable_objects=[target]): + bpy.ops.object.make_links_data(type='ANIMATION') + + """if target.animation_data == None: + target.animation_data_create() + target.animation_data.action = source.animation_data.action.copy() + + print("copying animation data for", source.name, target.animation_data) + properties = [p.identifier for p in source.animation_data.bl_rna.properties if not p.is_readonly] + for prop in properties: + print("copying stuff", prop) + setattr(target.animation_data, prop, getattr(source.animation_data, prop))""" + + # we add an "AnimationInfos" component + target['AnimationInfos'] = f'(animations: {animations_infos})'.replace("'","") + + # and animation markers + markers_formated = '{' + for animation in markers_per_animation.keys(): + markers_formated += f'"{animation}":' + markers_formated += "{" + for frame in markers_per_animation[animation].keys(): + markers = markers_per_animation[animation][frame] + markers_formated 
+= f"{frame}:{markers}, ".replace("'", '"') + markers_formated += '}, ' + markers_formated += '}' + target["AnimationMarkers"] = f'( {markers_formated} )' + + +def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, nester=""): + copy = None + internal_blueprint_names = [blueprint.name for blueprint in blueprints_data.internal_blueprints] + # print("COMBINE MODE", combine_mode) + if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in internal_blueprint_names)) ): + #print("creating empty for", object.name, object.instance_collection.name, internal_blueprint_names, combine_mode) + collection_name = object.instance_collection.name + original_name = object.name + + object.name = original_name + "____bak" + empty_obj = make_empty(original_name, object.location, object.rotation_euler, object.scale, destination_collection) + + """we inject the collection/blueprint name, as a component called 'BlueprintName', but we only do this in the empty, not the original object""" + empty_obj['BlueprintName'] = '("'+collection_name+'")' + empty_obj["BlueprintPath"] = '' + empty_obj['SpawnHere'] = '()' + + # we also inject a list of all sub blueprints, so that the bevy side can preload them + blueprint_name = collection_name + children_per_blueprint = {} + blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None) + if blueprint: + children_per_blueprint[blueprint_name] = blueprint.nested_blueprints + empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})" + + # we copy custom properties over from our original object to our empty + for component_name, component_value in object.items(): + if component_name not in custom_properties_to_filter_out and is_component_valid(object, component_name): #copy only valid properties + empty_obj[component_name] = component_value + copy = empty_obj + else: + # for objects which are NOT 
collection instances or when embeding + # we create a copy of our object and its children, to leave the original one as it is + original_name = object.name + object.name = original_name + "____bak" + copy = object.copy() + copy.name = original_name + + destination_collection.objects.link(copy) + + """if object.parent == None: + if parent_empty is not None: + copy.parent = parent_empty + """ + # do this both for empty replacements & normal copies + if parent is not None: + copy.parent = parent + remove_unwanted_custom_properties(copy) + copy_animation_data(object, copy) + + for child in object.children: + duplicate_object(child, copy, combine_mode, destination_collection, blueprints_data, nester+" ") + +# copies the contents of a collection into another one while replacing library instances with empties +def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, addon_prefs={}): + collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode") + + for object in source_collection.objects: + if object.name.endswith("____bak"): # some objects could already have been handled, ignore them + continue + if filter is not None and filter(object) is False: + continue + #check if a specific collection instance does not have an ovveride for combine_mode + combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode + parent = parent_empty + duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data) + + # for every child-collection of the source, copy its content into a new sub-collection of the destination + for collection in source_collection.children: + original_name = collection.name + collection.name = original_name + "____bak" + collection_placeholder = make_empty(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection) + + if parent_empty is not None: + collection_placeholder.parent = parent_empty + 
copy_hollowed_collection_into( + source_collection = collection, + destination_collection = destination_collection, + parent_empty = collection_placeholder, + filter = filter, + blueprints_data = blueprints_data, + addon_prefs=addon_prefs + ) + + + + return {} + +# clear & remove "hollow scene" +def clear_hollow_scene(temp_scene, original_root_collection): + def restore_original_names(collection): + if collection.name.endswith("____bak"): + collection.name = collection.name.replace("____bak", "") + for object in collection.objects: + if object.instance_type == 'COLLECTION': + if object.name.endswith("____bak"): + object.name = object.name.replace("____bak", "") + else: + if object.name.endswith("____bak"): + object.name = object.name.replace("____bak", "") + for child_collection in collection.children: + restore_original_names(child_collection) + + + # remove any data we created + temp_root_collection = temp_scene.collection + temp_scene_objects = [o for o in temp_root_collection.all_objects] + for object in temp_scene_objects: + #print("removing", object.name) + bpy.data.objects.remove(object, do_unlink=True) + + # remove the temporary scene + bpy.data.scenes.remove(temp_scene, do_unlink=True) + + # reset original names + restore_original_names(original_root_collection) + +# convenience utility to get lists of scenes +def get_scenes(addon_prefs): + level_scene_names= getattr(addon_prefs,"main_scene_names", []) #list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes"))) + library_scene_names = getattr(addon_prefs,"library_scene_names", []) #list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes"))) + + level_scene_names = list(filter(lambda name: name in bpy.data.scenes, level_scene_names)) + library_scene_names = list(filter(lambda name: name in bpy.data.scenes, library_scene_names)) + + level_scenes = list(map(lambda name: bpy.data.scenes[name], level_scene_names)) + library_scenes = list(map(lambda name: bpy.data.scenes[name], 
library_scene_names)) + + return [level_scene_names, level_scenes, library_scene_names, library_scenes] diff --git a/tools/blenvy/gltf_auto_export/helpers/object_makers.py b/tools/blenvy/gltf_auto_export/helpers/object_makers.py new file mode 100644 index 0000000..50827b5 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/helpers/object_makers.py @@ -0,0 +1,47 @@ +import bmesh +import bpy +import mathutils + +# Makes an empty, at the specified location, rotation, scale stores it in existing collection, from https://blender.stackexchange.com/questions/51290/how-to-add-empty-object-not-using-bpy-ops +def make_empty(name, location, rotation, scale, collection): + object_data = None + empty_obj = bpy.data.objects.new( name, object_data ) + + empty_obj.empty_display_size = 2 + empty_obj.empty_display_type = 'PLAIN_AXES' + + empty_obj.name = name + empty_obj.location = location + empty_obj.scale = scale + empty_obj.rotation_euler = rotation + + collection.objects.link( empty_obj ) + #bpy.context.view_layer.update() + return empty_obj + +def make_cube(name, location=[0,0,0], rotation=[0,0,0], scale=[1,1,1], collection=None): + new_mesh = bpy.data.meshes.new(name+"_Mesh") #None + """verts = [( 1.0, 1.0, 0.0), + ( 1.0, -1.0, 0.0), + (-1.0, -1.0, 0.0), + (-1.0, 1.0, 0.0), + ] # 4 verts made with XYZ coords + edges = [] + faces = [[0, 1, 2, 3]] + new_mesh.from_pydata(verts, edges, faces)""" + + + bm = bmesh.new() + bmesh.ops.create_cube(bm, size=0.1, matrix=mathutils.Matrix.Translation(location)) # FIXME: other ways to set position seems to fail ? 
+ bm.to_mesh(new_mesh) + bm.free() + + new_object = bpy.data.objects.new(name, new_mesh) + new_object.name = name + new_object.location = location + new_object.scale = scale + new_object.rotation_euler = rotation + + if collection != None: + collection.objects.link( new_object ) + return new_object \ No newline at end of file diff --git a/tools/blenvy/gltf_auto_export/helpers/ping_depsgraph_update.py b/tools/blenvy/gltf_auto_export/helpers/ping_depsgraph_update.py new file mode 100644 index 0000000..55527b2 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/helpers/ping_depsgraph_update.py @@ -0,0 +1,10 @@ +import bpy +import rna_prop_ui + +# fake way to make our operator's changes be visible to the change/depsgraph update handler in gltf_auto_export +def ping_depsgraph_update(object=None): + if object == None: + object = bpy.data.scenes[0] + rna_prop_ui.rna_idprop_ui_create(object, "________temp", default=0) + rna_prop_ui.rna_idprop_ui_prop_clear(object, "________temp") + return None \ No newline at end of file diff --git a/tools/blenvy/gltf_auto_export/helpers/serialize_scene.py b/tools/blenvy/gltf_auto_export/helpers/serialize_scene.py new file mode 100644 index 0000000..a88314f --- /dev/null +++ b/tools/blenvy/gltf_auto_export/helpers/serialize_scene.py @@ -0,0 +1,233 @@ +import json +from mathutils import Color +import numpy as np +import bpy +from ..constants import TEMPSCENE_PREFIX + +fields_to_ignore_generic = ["tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users", + 'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get', + 'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original', + 'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type', + 'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 
'is_missing', 'is_runtime_data'] + +# possible alternatives https://blender.stackexchange.com/questions/286010/bpy-detect-modified-mesh-data-vertices-edges-loops-or-polygons-for-cachin +def mesh_hash(obj): + # this is incomplete, how about edges ? + vertex_count = len(obj.data.vertices) + vertices_np = np.empty(vertex_count * 3, dtype=np.float32) + obj.data.vertices.foreach_get("co", vertices_np) + h = str(hash(vertices_np.tobytes())) + return h + +# TODO: redo this one, this is essentially modifiec copy & pasted data, not fitting +def animation_hash(obj): + animation_data = obj.animation_data + if not animation_data: + return None + blender_actions = [] + blender_tracks = {} + + # TODO: this might need to be modified/ adapted to match the standard gltf exporter settings + for track in animation_data.nla_tracks: + strips = [strip for strip in track.strips if strip.action is not None] + for strip in strips: + # print(" ", source.name,'uses',strip.action.name, "active", strip.active, "action", strip.action) + blender_actions.append(strip.action) + blender_tracks[strip.action.name] = track.name + + # Remove duplicate actions. 
+ blender_actions = list(set(blender_actions)) + # sort animations alphabetically (case insensitive) so they have a defined order and match Blender's Action list + blender_actions.sort(key = lambda a: a.name.lower()) + + markers_per_animation = {} + animations_infos = [] + + for action in blender_actions: + animation_name = blender_tracks[action.name] + animations_infos.append( + f'(name: "{animation_name}", frame_start: {action.frame_range[0]}, frame_end: {action.frame_range[1]}, frames_length: {action.frame_range[1] - action.frame_range[0]}, frame_start_override: {action.frame_start}, frame_end_override: {action.frame_end})' + ) + markers_per_animation[animation_name] = {} + + for marker in action.pose_markers: + if marker.frame not in markers_per_animation[animation_name]: + markers_per_animation[animation_name][marker.frame] = [] + markers_per_animation[animation_name][marker.frame].append(marker.name) + + compact_result = hash(str((blender_actions, blender_tracks, markers_per_animation, animations_infos))) + return compact_result + + +def camera_hash(obj): + camera_fields = ["angle", "angle_x", "angle_y", "animation_data", "background_images", "clip_end", "clip_start", "display_size", "dof", "fisheye_fov"] + camera_data = obj.data + fields_to_ignore= fields_to_ignore_generic + + all_field_names = dir(camera_data) + fields = [getattr(camera_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + # TODO: the above is not enough, certain fields are left as bpy.data.xx + #print("camera", obj, fields) + return str(fields) + +def light_hash(obj): + light_data = obj.data + fields_to_ignore = fields_to_ignore_generic + + all_field_names = dir(light_data) + fields = [getattr(light_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + return str(fields) + +def bones_hash(bones): + 
fields_to_ignore = fields_to_ignore_generic + ['AxisRollFromMatrix', 'MatrixFromAxisRoll', 'evaluate_envelope', 'convert_local_to_pose', 'foreach_get', 'foreach_set', 'get', 'set', 'find', 'items', 'keys', 'values'] + + bones_result = [] + for bone in bones: + all_field_names = dir(bone) + fields = [getattr(bone, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + bones_result.append(fields) + #print("fields of bone", bones_result) + return str(hash(str(bones_result))) + +# fixme: not good enough ? +def armature_hash(obj): + fields_to_ignore = fields_to_ignore_generic + ['display_type', 'is_editmode', 'pose_position', 'foreach_get', 'get'] + fields_to_convert = {'bones': bones_hash}#, 'collections_all': bones_hash} + armature_data = obj.data + all_field_names = dir(armature_data) + + fields = [getattr(armature_data, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(armature_data, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + #print("ARMATURE", fields) + + """for bone in armature_data.bones: + print("bone", bone, bone_hash(bone))""" + return str(fields) + +def field_value(data): + pass + +def color(color_data): + # print("color", color_data, type(color_data)) + return str(peel_value(color_data)) + +def lineart(lineart_data): + fields_to_ignore = fields_to_ignore_generic + + all_field_names = dir(lineart_data) + fields = [getattr(lineart_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + return str(fields) + +def node_tree(nodetree_data): + fields_to_ignore = fields_to_ignore_generic+ ['contains_tree','get_output_node', 'interface_update', 'override_template_create'] + all_field_names = dir(nodetree_data) + fields = [getattr(nodetree_data, prop, None) for prop in 
all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + + # print("node tree", fields) + return str(fields) + + +def peel_value( value ): + try: + len( value ) + return [ peel_value( x ) for x in value ] + except TypeError: + return value + +def material_hash(material): + fields_to_ignore = fields_to_ignore_generic + fields_to_convert = {'diffuse_color': color, 'line_color': color, 'lineart': lineart, 'node_tree': node_tree} # TODO: perhaps use types rather than names + all_field_names = dir(material) + fields = [getattr(material, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(material, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + + type_of = [type(getattr(material, prop, None)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")] + + tutu = [t == Color for t in type_of] # bpy.types.MaterialLineArt bpy.types.ShaderNodeTree + #print("fields", type_of) + + """for prop in [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]: + bla = getattr(material, prop, None) + if hasattr(bla, "rna_type"): + print("YOLO", prop, bla, peel_value(bla), "type", type(bla), bla.rna_type, bla.rna_type == bpy.types.FloatProperty, type(bla) == bpy.types.bpy_prop_collection) + print("types", type(bla) == bpy.types.bpy_prop_collection, type(bla) == bpy.types.FloatColorAttributeValue)""" + + # print("oooooh", material, material.bl_rna.properties.items()) + + return str(fields)#str(hash(str(fields))) + +# TODO: this is partially taken from export_materials utilities, perhaps we could avoid having to fetch things multiple times 
?
+def materials_hash(obj, cache):
+    # Returns a hash string of all materials referenced by the object's material
+    # slots. Hashes are memoized in cache['materials'] (keyed by material name)
+    # so each material is only hashed once per serialization pass.
+    materials = []
+    for material_slot in obj.material_slots:
+        material = material_slot.material
+        cached_hash = cache['materials'].get(material.name, None)
+        if cached_hash:
+            materials.append(cached_hash)
+        else:
+            mat = material_hash(material)
+            cache['materials'][material.name] = mat
+            materials.append(mat)
+
+    return str(hash(str(materials)))
+
+def custom_properties_hash(obj):
+    # Returns a hash string of the object's custom properties, ignoring Blender's
+    # internal '_RNA_UI' entry and this add-on's own 'components_meta' bookkeeping.
+    custom_properties = {}
+    for property_name in obj.keys():
+        # bugfix: was `property_name not in '_RNA_UI'`, which is a *substring*
+        # test on a string and wrongly skipped any property whose name happens
+        # to be a substring of '_RNA_UI' (e.g. "R", "RNA", "_"). The intent,
+        # matching the 'components_meta' comparison, is an exact-name check.
+        if property_name != '_RNA_UI' and property_name != 'components_meta':
+            custom_properties[property_name] = obj[property_name]
+
+    return str(hash(str(custom_properties)))
+
+
+def serialize_scene():
+    # Serializes a per-scene, per-object summary (raw transforms plus hashes of
+    # the various datablocks) to a json string; used for change detection
+    # between saves. Temporary exporter scenes (TEMPSCENE_PREFIX) are skipped.
+    cache = {"materials": {}}
+    print("serializing scene")
+    data = {}
+    for scene in bpy.data.scenes:
+        # skip the temporary scenes generated by the exporter itself
+        if scene.name.startswith(TEMPSCENE_PREFIX):
+            continue
+        data[scene.name] = {}
+        for object in scene.objects:
+            # re-fetch by name to get the canonical datablock, not a view-layer proxy
+            object = bpy.data.objects[object.name]
+
+            transform = str((object.location, object.rotation_euler, object.scale))
+            visibility = object.visible_get()
+            custom_properties = custom_properties_hash(object) if len(object.keys()) > 0 else None
+            animations = animation_hash(object)
+            mesh = mesh_hash(object) if object.type == 'MESH' else None
+            camera = camera_hash(object) if object.type == 'CAMERA' else None
+            light = light_hash(object) if object.type == 'LIGHT' else None
+            armature = armature_hash(object) if object.type == 'ARMATURE' else None
+            parent = object.parent.name if object.parent else None
+            collections = [collection.name for collection in object.users_collection]
+            materials = materials_hash(object, cache) if len(object.material_slots) > 0 else None
+
+            data[scene.name][object.name] = {
+                "name": object.name,
+                "transforms": transform,
+                "visibility": visibility,
+                "custom_properties": custom_properties,
+                "animations": animations,
+                "mesh": mesh,
+                "camera": camera,
+                "light": light,
+                "armature": armature,
+                "parent": parent,
+                "collections": collections,
+                "materials": materials
+            }
+
+    return json.dumps(data)
+
+
diff --git a/tools/blenvy/gltf_auto_export/helpers/to_remove_later.py b/tools/blenvy/gltf_auto_export/helpers/to_remove_later.py
new file mode 100644
index 0000000..2d69f86
--- /dev/null
+++ b/tools/blenvy/gltf_auto_export/helpers/to_remove_later.py
@@ -0,0 +1,402 @@
+bl_info = {
+    "name": "gltf_auto_export",
+    "author": "kaosigh",
+    "version": (0, 10, 0),
+    "blender": (3, 4, 0),
+    "location": "File > Import-Export",
+    "description": "glTF/glb auto-export",
+    "warning": "",
+    "wiki_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow",
+    "tracker_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow/issues/new",
+    "category": "Import-Export"
+}
+
+import bpy
+from bpy.props import (BoolProperty,
+                   IntProperty,
+                   StringProperty,
+                   EnumProperty,
+                   CollectionProperty
+                   )
+
+
+# glTF extensions are named following a convention with known prefixes.
+# See: https://github.com/KhronosGroup/glTF/tree/main/extensions#about-gltf-extensions
+# also: https://github.com/KhronosGroup/glTF/blob/main/extensions/Prefixes.md
+glTF_extension_name = "EXT_auto_export"
+
+# Support for an extension is "required" if a typical glTF viewer cannot be expected
+# For example, a compression scheme or new image format (with no fallback included) +# would be "required", but physics metadata or app-specific settings could be optional. +extension_is_required = False +from io_scene_gltf2 import (GLTF_PT_export_main, GLTF_PT_export_include) + +class ExampleExtensionProperties(bpy.types.PropertyGroup): + enabled: bpy.props.BoolProperty( + name=bl_info["name"], + description='Include this extension in the exported glTF file.', + default=True + ) + + auto_export_main_scene_name: StringProperty( + name='Main scene', + description='The name of the main scene/level/world to auto export', + default='Scene' + ) + auto_export_output_folder: StringProperty( + name='Export folder (relative)', + description='The root folder for all exports(relative to current file) Defaults to current folder', + default='' + ) + auto_export_library_scene_name: StringProperty( + name='Library scene', + description='The name of the library scene to auto export', + default='Library' + ) + # scene components + auto_export_scene_settings: BoolProperty( + name='Export scene settings', + description='Export scene settings ie AmbientLighting, Bloom, AO etc', + default=False + ) + + # blueprint settings + auto_export_blueprints: BoolProperty( + name='Export Blueprints', + description='Replaces collection instances with an Empty with a BlueprintName custom property', + default=True + ) + auto_export_blueprints_path: StringProperty( + name='Blueprints path', + description='path to export the blueprints to (relative to the Export folder)', + default='library' + ) + + auto_export_materials_library: BoolProperty( + name='Export materials library', + description='remove materials from blueprints and use the material library instead', + default=False + ) + auto_export_materials_path: StringProperty( + name='Materials path', + description='path to export the materials libraries to (relative to the root folder)', + default='materials' + ) + +def register(): + 
bpy.utils.register_class(ExampleExtensionProperties) + bpy.types.Scene.ExampleExtensionProperties = bpy.props.PointerProperty(type=ExampleExtensionProperties) + +def register_panel(): + # Register the panel on demand, we need to be sure to only register it once + # This is necessary because the panel is a child of the extensions panel, + # which may not be registered when we try to register this extension + try: + bpy.utils.register_class(GLTF_PT_UserExtensionPanel) + except Exception: + pass + + # If the glTF exporter is disabled, we need to unregister the extension panel + # Just return a function to the exporter so it can unregister the panel + return unregister_panel + + +def unregister_panel(): + # Since panel is registered on demand, it is possible it is not registered + try: + bpy.utils.unregister_class(GLTF_PT_UserExtensionPanel) + except Exception: + pass + + +def unregister(): + unregister_panel() + bpy.utils.unregister_class(ExampleExtensionProperties) + del bpy.types.Scene.ExampleExtensionProperties + +class GLTF_PT_UserExtensionPanel(bpy.types.Panel): + + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Enabled" + bl_parent_id = "GLTF_PT_export_user_extensions" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + return operator.bl_idname == "EXPORT_SCENE_OT_gltf" + + def draw_header(self, context): + props = bpy.context.scene.ExampleExtensionProperties + self.layout.prop(props, 'enabled') + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False # No animation. 
+ + props = bpy.context.scene.ExampleExtensionProperties + layout.active = props.enabled + + props = bpy.context.scene.ExampleExtensionProperties + for bla in props.__annotations__: + layout.prop(props, bla) + + +class glTF2ExportUserExtension: + + def __init__(self): + # We need to wait until we create the gltf2UserExtension to import the gltf2 modules + # Otherwise, it may fail because the gltf2 may not be loaded yet + from io_scene_gltf2.io.com.gltf2_io_extensions import Extension + self.Extension = Extension + self.properties = bpy.context.scene.ExampleExtensionProperties + + def gather_node_hook(self, gltf2_object, blender_object, export_settings): + if self.properties.enabled: + if gltf2_object.extensions is None: + gltf2_object.extensions = {} + print("bla bla") + gltf2_object.extensions[glTF_extension_name] = self.Extension( + name=glTF_extension_name, + extension={"auto_export_blueprints": self.properties.auto_export_blueprints}, + required=extension_is_required + ) + + +def did_export_parameters_change(current_params, previous_params): + set1 = set(previous_params.items()) + set2 = set(current_params.items()) + difference = dict(set1 ^ set2) + + changed_param_names = list(set(difference.keys())- set(AutoExportGltfPreferenceNames)) + changed_parameters = len(changed_param_names) > 0 + return changed_parameters + +# original in export_blueprints => export_collections + # The part below is not necessary NORMALLY , but blender crashes in the "normal" case when using bpy.context.temp_override, + #if relevant we replace sub collections instances with placeholders too + # this is not needed if a collection/blueprint does not have sub blueprints or sub collections + collection_in_blueprint_hierarchy = collection_name in blueprint_hierarchy and len(blueprint_hierarchy[collection_name]) > 0 + collection_has_child_collections = len(bpy.data.collections[collection_name].children) > 0 + #if collection_in_blueprint_hierarchy or collection_has_child_collections: + + + + 
"""else: + print("standard export") + # set active scene to be the library scene + original_scene = bpy.context.window.scene + bpy.context.window.scene = library_scene + with bpy.context.temp_override(scene=library_scene): + print("active scene", bpy.context.scene) + export_gltf(gltf_output_path, export_settings) + bpy.context.window.scene = original_scene""" + +""" + blueprint_template = object['Template'] if 'Template' in object else False + if blueprint_template and parent_empty is None: # ONLY WORKS AT ROOT LEVEL + print("BLUEPRINT TEMPLATE", blueprint_template, destination_collection, parent_empty) + for object in source_collection.objects: + if object.type == 'EMPTY' and object.name.endswith("components"): + original_collection = bpy.data.collections[collection_name] + components_holder = object + print("WE CAN INJECT into", object, "data from", original_collection) + + # now we look for components inside the collection + components = {} + for object in original_collection.objects: + if object.type == 'EMPTY' and object.name.endswith("components"): + for component_name in object.keys(): + if component_name not in '_RNA_UI': + print( component_name , "-" , object[component_name] ) + components[component_name] = object[component_name] + + # copy template components into target object + for key in components: + print("copying ", key,"to", components_holder) + if not key in components_holder: + components_holder[key] = components[key] + """ + +# potentially useful alternative +def duplicate_object2(object, original_name): + print("copy object", object) + + with bpy.context.temp_override(object=object, active_object = object): + bpy.ops.object.duplicate(linked=False) + new_obj = bpy.context.active_object + + print("new obj", new_obj, "bpy.context.view_layer", bpy.context.view_layer.objects) + for obj in bpy.context.view_layer.objects: + print("obj", obj) + bpy.context.view_layer.update() + new_obj.name = original_name + + if object.animation_data: + print("OJECT 
ANIMATION") + new_obj.animation_data.action = object.animation_data.action.copy() + + return new_obj + + + + + + if active_operator: + # print("Operator", active_operator.bl_label, active_operator.bl_idname, "bla", bpy.context.window_manager.gltf_exporter_running) + if active_operator.bl_idname == "EXPORT_SCENE_OT_gltf" : #and not bpy.context.window_manager.gltf_exporter_running: + # we force saving params + active_operator.will_save_settings = True + if active_operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf": + # we force saving params + active_operator.will_save_settings = True + + + """ + print("matching") + try: + bpy.app.timers.unregister(cls.gltf_exporter_handler) + except:pass + bpy.app.timers.register(cls.gltf_exporter_handler, first_interval=3) + # we backup any existing gltf export settings, if there where any + scene = bpy.context.scene + if "glTF2ExportSettings" in scene: + existing_setting = scene["glTF2ExportSettings"] + cls.existing_gltf_settings = existing_setting + bpy.context.window_manager.gltf_exporter_running = True + + + else: + if bpy.context.window_manager.gltf_exporter_running: + bpy.context.window_manager.gltf_exporter_running = False""" + + + """@classmethod + def gltf_exporter_handler(cls): + # FOr some reason, the active operator here is always None, so using a workaround + # active_operator = bpy.context.active_operator + print("here", bpy.context.window_manager.gltf_exporter_running) + + if bpy.context.window_manager.gltf_exporter_running: + try: + dummy_file_path = "/home/ckaos/projects/bevy/Blender_bevy_components_worklflow/testing/bevy_example/assets/dummy.glb" + + import os + if os.path.exists(dummy_file_path): + print("dummy file exists, assuming it worked") + os.unlink(dummy_file_path) + + # get the parameters + scene = bpy.context.scene + if "glTF2ExportSettings" in scene: + settings = scene["glTF2ExportSettings"] + formatted_settings = dict(settings) + + export_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if 
".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings") + + #check if params have changed + bpy.context.window_manager.gltf_settings_changed = sorted(json.loads(export_settings.as_string()).items()) != sorted(formatted_settings.items()) + + print("gltf NEW settings", formatted_settings, "OLD settings", export_settings, "CHANGED ?", bpy.context.window_manager.gltf_settings_changed) + + # now write new settings + export_settings.clear() + export_settings.write(json.dumps(formatted_settings)) + + + # now reset the original gltf_settings + if getattr(cls, "existing_gltf_settings", None) != None: + print("resetting original gltf settings") + scene["glTF2ExportSettings"] = cls.existing_gltf_settings + else: + print("no pre_existing settings") + if "glTF2ExportSettings" in scene: + del scene["glTF2ExportSettings"] + cls.existing_gltf_settings = None + except:pass + bpy.context.window_manager.gltf_exporter_running = False + return None + + + else: + try: + bpy.app.timers.unregister(cls.gltf_exporter_handler) + except:pass + return None + return 1""" + + +def invoke_override(self, context, event): + settings = context.scene.get(self.scene_key) + self.will_save_settings = False + if settings: + try: + for (k, v) in settings.items(): + setattr(self, k, v) + self.will_save_settings = True + + # Update filter if user saved settings + if hasattr(self, 'export_format'): + self.filter_glob = '*.glb' if self.export_format == 'GLB' else '*.gltf' + + except (AttributeError, TypeError): + self.report({"ERROR"}, "Loading export settings failed. 
Removed corrupted settings") + del context.scene[self.scene_key] + + import sys + preferences = bpy.context.preferences + for addon_name in preferences.addons.keys(): + try: + if hasattr(sys.modules[addon_name], 'glTF2ExportUserExtension') or hasattr(sys.modules[addon_name], 'glTF2ExportUserExtensions'): + pass #exporter_extension_panel_unregister_functors.append(sys.modules[addon_name].register_panel()) + except Exception: + pass + + # self.has_active_exporter_extensions = len(exporter_extension_panel_unregister_functors) > 0 + print("ovverride") + wm = context.window_manager + wm.fileselect_add(self) + return {'RUNNING_MODAL'} + + + +from io_scene_gltf2 import (ExportGLTF2, GLTF_PT_export_main, GLTF_PT_export_include) + + +from io_scene_gltf2 import (ExportGLTF2, GLTF_PT_export_main,ExportGLTF2_Base, GLTF_PT_export_include) +import io_scene_gltf2 as gltf_exporter_original +#import io_scene_gltf2.GLTF_PT_export_data_scene as GLTF_PT_export_data_scene_original +""" +class GLTF_PT_export_data(gltf_exporter_original.GLTF_PT_export_data): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Data" + bl_parent_id = "GLTF_PT_auto_export_gltf" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + + return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" + +class GLTF_PT_export_data_scene(gltf_exporter_original.GLTF_PT_export_data_scene): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Scene Graph" + bl_parent_id = "GLTF_PT_export_data" + bl_options = {'DEFAULT_CLOSED'} + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" + + def draw(self, context): + return super().draw(context)""" \ No newline at end of file diff --git a/tools/blenvy/gltf_auto_export/modules/__init__.py 
b/tools/blenvy/gltf_auto_export/modules/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tools/blenvy/gltf_auto_export/modules/bevy_dynamic.py b/tools/blenvy/gltf_auto_export/modules/bevy_dynamic.py
new file mode 100644
index 0000000..aaa6622
--- /dev/null
+++ b/tools/blenvy/gltf_auto_export/modules/bevy_dynamic.py
@@ -0,0 +1,26 @@
+import bpy
+
+
+# checks if an object is dynamic
+# TODO: for efficiency, it might make sense to write this flag semi automatically at the root level of the object so we can skip the inner loop
+# TODO: we need to recompute these on blueprint changes too
+# even better, keep a list of dynamic objects per scene, updated only when needed ?
+def is_object_dynamic(object):
+    is_dynamic = object.get('Dynamic', False)
+    # only look for data in the original collection if it is not already marked as dynamic at instance level
+    if not is_dynamic and object.type == 'EMPTY' and hasattr(object, 'instance_collection') and object.instance_collection is not None:
+        # scan the collection this empty is an instance of, looking for a
+        # 'Dynamic' flag on any of its "components"-holder empties.
+        # bugfix: use a dedicated loop variable instead of shadowing the
+        # `object` parameter, and return as soon as the flag is found (the old
+        # `break` only exited the innermost loop, so scanning kept going)
+        for inner_object in object.instance_collection.objects:
+            if inner_object.type == 'EMPTY' and inner_object.name.endswith("components"):
+                if 'Dynamic' in inner_object.keys():
+                    return True
+    return is_dynamic
+
+# an object is static exactly when it is not dynamic
+def is_object_static(object):
+    return not is_object_dynamic(object)
\ No newline at end of file
diff --git a/tools/blenvy/gltf_auto_export/modules/bevy_scene_components.py b/tools/blenvy/gltf_auto_export/modules/bevy_scene_components.py
new file mode 100644
index 0000000..b6f384f
--- /dev/null
+++ b/tools/blenvy/gltf_auto_export/modules/bevy_scene_components.py
@@ -0,0 +1,64 @@ + +import bpy +from ..helpers.object_makers import make_empty + +# TODO: replace this with placing scene level custom properties once support for that has been added to bevy_gltf +def upsert_scene_components(main_scenes): + for scene in main_scenes: + lighting_components_name = f"lighting_components_{scene.name}" + lighting_components = bpy.data.objects.get(lighting_components_name, None) + if not lighting_components: + root_collection = scene.collection + lighting_components = make_empty('lighting_components_'+scene.name, [0,0,0], [0,0,0], [0,0,0], root_collection) + + if scene.world is not None: + lighting_components['BlenderBackgroundShader'] = ambient_color_to_component(scene.world) + lighting_components['BlenderShadowSettings'] = scene_shadows_to_component(scene) + + if scene.eevee.use_bloom: + lighting_components['BloomSettings'] = scene_bloom_to_component(scene) + elif 'BloomSettings' in lighting_components: + del lighting_components['BloomSettings'] + + if scene.eevee.use_gtao: + lighting_components['SSAOSettings'] = scene_ao_to_component(scene) + elif 'SSAOSettings' in lighting_components: + del lighting_components['SSAOSettings'] + +def remove_scene_components(main_scenes): + for scene in main_scenes: + lighting_components_name = f"lighting_components_{scene.name}" + lighting_components = bpy.data.objects.get(lighting_components_name, None) + if lighting_components: + bpy.data.objects.remove(lighting_components, do_unlink=True) + + +def ambient_color_to_component(world): + color = None + strength = None + try: + color = world.node_tree.nodes['Background'].inputs[0].default_value + strength = world.node_tree.nodes['Background'].inputs[1].default_value + except Exception as ex: + print("failed to parse ambient color: Only background is supported") + + + if color is not None and strength is not None: + colorRgba = f"Rgba(red: {color[0]}, green: {color[1]}, blue: {color[2]}, alpha: {color[3]})" + component = f"( color: {colorRgba}, strength: 
{strength})" + return component + return None + +def scene_shadows_to_component(scene): + cascade_size = scene.eevee.shadow_cascade_size + component = f"(cascade_size: {cascade_size})" + return component + +def scene_bloom_to_component(scene): + component = f"BloomSettings(intensity: {scene.eevee.bloom_intensity})" + return component + +def scene_ao_to_component(scene): + ssao = scene.eevee.use_gtao + component= "SSAOSettings()" + return component \ No newline at end of file diff --git a/tools/blenvy/gltf_auto_export/modules/export_materials.py b/tools/blenvy/gltf_auto_export/modules/export_materials.py new file mode 100644 index 0000000..4322a4d --- /dev/null +++ b/tools/blenvy/gltf_auto_export/modules/export_materials.py @@ -0,0 +1,127 @@ +import os +import bpy +from pathlib import Path + +from ..helpers.generate_and_export import generate_and_export + +from ..helpers.helpers_collections import (traverse_tree) +from ..auto_export.export_gltf import (export_gltf, generate_gltf_export_preferences) +from ..helpers.object_makers import make_cube + +# get materials per object, and injects the materialInfo component +def get_materials(object): + material_slots = object.material_slots + used_materials_names = [] + #materials_per_object = {} + current_project_name = Path(bpy.context.blend_data.filepath).stem + + for m in material_slots: + material = m.material + # print(" slot", m, "material", material) + used_materials_names.append(material.name) + # TODO:, also respect slots & export multiple materials if applicable ! 
+ object['MaterialInfo'] = '(name: "'+material.name+'", source: "'+current_project_name + '")' + + return used_materials_names + +def clear_material_info(collection_names, library_scenes): + for scene in library_scenes: + root_collection = scene.collection + for cur_collection in traverse_tree(root_collection): + if cur_collection.name in collection_names: + for object in cur_collection.all_objects: + if 'MaterialInfo' in dict(object): # FIXME: hasattr does not work ???? + del object["MaterialInfo"] + + +def get_all_materials(collection_names, library_scenes): + #print("collecton", layerColl, "otot", layerColl.all_objects) #all_objects + used_material_names = [] + for scene in library_scenes: + root_collection = scene.collection + for cur_collection in traverse_tree(root_collection): + if cur_collection.name in collection_names: + for object in cur_collection.all_objects: + used_material_names = used_material_names + get_materials(object) + # we only want unique names + used_material_names = list(set(used_material_names)) + return used_material_names + + +# creates a new object with the applied material, for the material library +def make_material_object(name, location=[0,0,0], rotation=[0,0,0], scale=[1,1,1], material=None, collection=None): + #original_active_object = bpy.context.active_object + #bpy.ops.mesh.primitive_cube_add(size=0.1, location=location) + object = make_cube(name, location=location, rotation=rotation, scale=scale, collection=collection) + if material: + if object.data.materials: + # assign to 1st material slot + object.data.materials[0] = material + else: + # no slots + object.data.materials.append(material) + return object + + +# generates a materials scene: +def generate_materials_scene_content(root_collection, used_material_names): + for index, material_name in enumerate(used_material_names): + material = bpy.data.materials[material_name] + make_material_object("Material_"+material_name, [index * 0.2,0,0], material=material, 
collection=root_collection) + return {} + +def clear_materials_scene(temp_scene): + root_collection = temp_scene.collection + scene_objects = [o for o in root_collection.objects] + for object in scene_objects: + #print("removing ", object) + try: + mesh = bpy.data.meshes[object.name+"_Mesh"] + bpy.data.meshes.remove(mesh, do_unlink=True) + except Exception as error: + pass + #print("could not remove mesh", error) + + try: + bpy.data.objects.remove(object, do_unlink=True) + except:pass + + bpy.data.scenes.remove(temp_scene) + +# exports the materials used inside the current project: +# the name of the output path is /_materials_library.gltf/glb +def export_materials(collections, library_scenes, folder_path, addon_prefs): + gltf_export_preferences = generate_gltf_export_preferences(addon_prefs) + export_materials_path = getattr(addon_prefs,"export_materials_path") + export_root_folder = getattr(addon_prefs, "export_root_folder") + + + used_material_names = get_all_materials(collections, library_scenes) + current_project_name = Path(bpy.context.blend_data.filepath).stem + + export_settings = { **gltf_export_preferences, + 'use_active_scene': True, + 'use_active_collection':True, + 'use_active_collection_with_nested':True, + 'use_visible': False, + 'use_renderable': False, + 'export_apply':True + } + + gltf_output_path = os.path.join(export_root_folder, export_materials_path, current_project_name + "_materials_library") + + print(" exporting Materials to", gltf_output_path, ".gltf/glb") + + generate_and_export( + addon_prefs, + temp_scene_name="__materials_scene", + export_settings=export_settings, + gltf_output_path=gltf_output_path, + tempScene_filler= lambda temp_collection: generate_materials_scene_content(temp_collection, used_material_names), + tempScene_cleaner= lambda temp_scene, params: clear_materials_scene(temp_scene=temp_scene) + ) + + +def cleanup_materials(collections, library_scenes): + # remove temporary components + clear_material_info(collections, 
library_scenes) \ No newline at end of file diff --git a/tools/blenvy/gltf_auto_export/ui/main.py b/tools/blenvy/gltf_auto_export/ui/main.py new file mode 100644 index 0000000..a3b8caa --- /dev/null +++ b/tools/blenvy/gltf_auto_export/ui/main.py @@ -0,0 +1,323 @@ +from typing import Set +import bpy +###################################################### +## ui logic & co + +# side panel that opens auto_export specific gltf settings & the auto export settings themselves +class GLTF_PT_auto_export_SidePanel(bpy.types.Panel): + bl_idname = "GLTF_PT_auto_export_SidePanel" + bl_label = "Auto export" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "Auto Export" + bl_context = "objectmode" + bl_parent_id = "BLENVY_PT_SidePanel" + + + @classmethod + def poll(cls, context): + return context.window_manager.blenvy.mode == 'SETTINGS' + + """def draw_header(self, context): + layout = self.layout + layout.label(text="Auto export ")""" + + def draw(self, context): + layout = self.layout + layout.label(text="MAKE SURE TO KEEP 'REMEMBER EXPORT SETTINGS' TOGGLED !!") + op = layout.operator("EXPORT_SCENE_OT_gltf", text='Gltf Settings')#'glTF 2.0 (.glb/.gltf)') + #op.export_format = 'GLTF_SEPARATE' + op.use_selection=True + op.will_save_settings=True + op.use_visible=True # Export visible and hidden objects. See Object/Batch Export to skip. 
+ op.use_renderable=True + op.use_active_collection = True + op.use_active_collection_with_nested=True + op.use_active_scene = True + op.filepath="____dummy____" + op.gltf_export_id = "gltf_auto_export" # we specify that we are in a special case + + op = layout.operator("EXPORT_SCENES_OT_auto_gltf", text="Auto Export Settings") + op.auto_export = True + +class GLTF_PT_auto_export_changes_list(bpy.types.Panel): + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_label = "Changes per scene since last save " + bl_parent_id = "GLTF_PT_auto_export_SidePanel" + bl_options = {'DEFAULT_CLOSED'} + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False # No animation. + + #if "auto_export_tracker" in context.window_manager: + changed_objects_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene + for scene_name in changed_objects_per_scene: + layout.label(text=f'{scene_name}') + for object_name in list(changed_objects_per_scene[scene_name].keys()): + row = layout.row() + row.label(text=f' {object_name}') + +# main ui in the file => export +class GLTF_PT_auto_export_main(bpy.types.Panel): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "" + bl_parent_id = "FILE_PT_operator" + bl_options = {'HIDE_HEADER'} + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + + return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False # No animation. 
class GLTF_PT_auto_export_root(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Auto export"
    bl_parent_id = "GLTF_PT_auto_export_main"
    #bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        # Only attach this panel to our own exporter operator.
        active_op = context.space_data.active_operator
        return active_op.bl_idname == "EXPORT_SCENES_OT_auto_gltf"

    def draw_header(self, context):
        # Header checkbox toggles auto-export on/off.
        active_op = context.space_data.active_operator
        self.layout.prop(active_op, "auto_export", text="")

    def draw(self, context):
        active_op = context.space_data.active_operator
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False  # No animation.
        # Grey out the body while auto-export is disabled.
        layout.active = active_op.auto_export
        layout.prop(active_op, 'will_save_settings')

class GLTF_PT_auto_export_general(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "General"
    bl_parent_id = "GLTF_PT_auto_export_root"

    @classmethod
    def poll(cls, context):
        active_op = context.space_data.active_operator
        return active_op.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"

    def draw(self, context):
        active_op = context.space_data.active_operator
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False  # No animation.
        layout.active = active_op.auto_export
        layout.prop(active_op, "export_output_folder")
        layout.prop(active_op, "export_scene_settings")


class GLTF_PT_auto_export_change_detection(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Change detection"
    bl_parent_id = "GLTF_PT_auto_export_root"

    @classmethod
    def poll(cls, context):
        active_op = context.space_data.active_operator
        return active_op.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"

    def draw_header(self, context):
        active_op = context.space_data.active_operator
        self.layout.prop(active_op, "export_change_detection", text="")

    def draw(self, context):
        active_op = context.space_data.active_operator
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False  # No animation.
        layout.active = active_op.auto_export
        # NOTE(review): this property is also shown as the header checkbox above.
        layout.prop(active_op, "export_change_detection")



class GLTF_PT_auto_export_scenes(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Scenes"
    bl_parent_id = "GLTF_PT_auto_export_root"

    @classmethod
    def poll(cls, context):
        active_op = context.space_data.active_operator
        return active_op.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"

    def draw_header(self, context):
        # Kept for parity with the sibling panels; currently draws nothing.
        layout = self.layout
        sfile = context.space_data
        operator = sfile.active_operator
        #layout.label(text="export scenes")#layout.prop(operator, "export_blueprints", text="")

    def draw(self, context):
        active_op = context.space_data.active_operator
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False  # No animation.

        # scene selectors
        row = layout.row()
        col = row.column(align=True)
        col.separator()

        layout.active = active_op.auto_export

        def scene_list_section(label, wm_prop, list_id, collection_prop, index_prop, scene_type):
            # One list section: picker row, the template_list, and add/remove buttons.
            row = layout.row()
            row.label(text=label)
            row.prop(context.window_manager, wm_prop, text='')

            row = layout.row()
            row.template_list("SCENE_UL_GLTF_auto_export", list_id, active_op, collection_prop, active_op, index_prop, rows=2)

            col = row.column(align=True)
            sub_row = col.row()
            add_operator = sub_row.operator("scene_list.list_action", icon='ADD', text="")
            add_operator.action = 'ADD'
            add_operator.scene_type = scene_type
            # The add button is only enabled once a scene is picked in the selector.
            sub_row.enabled = getattr(context.window_manager, wm_prop) is not None

            sub_row = col.row()
            remove_operator = sub_row.operator("scene_list.list_action", icon='REMOVE', text="")
            remove_operator.action = 'REMOVE'
            remove_operator.scene_type = scene_type
            col.separator()

        # main/level scenes
        scene_list_section("main scenes", "main_scene", "level scenes", "main_scenes", "main_scenes_index", 'level')

        #up_operator = col.operator("scene_list.list_action", icon='TRIA_UP', text="")
        #up_operator.action = 'UP'
        #col.operator("scene_list.list_action", icon='TRIA_DOWN', text="").action = 'DOWN'

        # library scenes
        scene_list_section("library scenes", "library_scene", "library scenes", "library_scenes", "library_scenes_index", 'library')

+class GLTF_PT_auto_export_blueprints(bpy.types.Panel): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Blueprints" + bl_parent_id = "GLTF_PT_auto_export_root" + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + + return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf" + + + def draw_header(self, context): + layout = self.layout + sfile = context.space_data + operator = sfile.active_operator + layout.prop(operator, "export_blueprints", text="") + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False # No animation. + + sfile = context.space_data + operator = sfile.active_operator + + layout.active = operator.auto_export and operator.export_blueprints + + # collections/blueprints + layout.prop(operator, "export_blueprints_path") + layout.prop(operator, "collection_instances_combine_mode") + layout.prop(operator, "export_marked_assets") + layout.prop(operator, "export_separate_dynamic_and_static_objects") + layout.separator() + # materials + layout.prop(operator, "export_materials_library") + layout.prop(operator, "export_materials_path") + +class SCENE_UL_GLTF_auto_export(bpy.types.UIList): + # The draw_item function is called for each item of the collection that is visible in the list. + # data is the RNA object containing the collection, + # item is the current drawn item of the collection, + # icon is the "computed" icon for the item (as an integer, because some objects like materials or textures + # have custom icons ID, which are not available as enum items). + # active_data is the RNA object containing the active property for the collection (i.e. integer pointing to the + # active item of the collection). + # active_propname is the name of the active property (use 'getattr(active_data, active_propname)'). + # index is index of the current item in the collection. 
+ # flt_flag is the result of the filtering process for this item. + # Note: as index and flt_flag are optional arguments, you do not have to use/declare them here if you don't + # need them. + def draw_item(self, context, layout, data, item, icon, active_data, active_propname): + ob = data + # draw_item must handle the three layout types... Usually 'DEFAULT' and 'COMPACT' can share the same code. + if self.layout_type in {'DEFAULT', 'COMPACT'}: + # You should always start your row layout by a label (icon + text), or a non-embossed text field, + # this will also make the row easily selectable in the list! The later also enables ctrl-click rename. + # We use icon_value of label, as our given icon is an integer value, not an enum ID. + # Note "data" names should never be translated! + #if ma: + # layout.prop(ma, "name", text="", emboss=False, icon_value=icon) + #else: + # layout.label(text="", translate=False, icon_value=icon) + layout.label(text=item.name, icon_value=icon) + #layout.prop(item, "name", text="", emboss=False, icon_value=icon) + # 'GRID' layout type should be as compact as possible (typically a single icon!). 
+ elif self.layout_type == 'GRID': + layout.alignment = 'CENTER' + layout.label(text="", icon_value=icon) diff --git a/tools/blenvy/gltf_auto_export/ui/operators.py b/tools/blenvy/gltf_auto_export/ui/operators.py new file mode 100644 index 0000000..742b2a5 --- /dev/null +++ b/tools/blenvy/gltf_auto_export/ui/operators.py @@ -0,0 +1,175 @@ + +import bpy +from bpy.types import Operator + + +class ASSETS_LIST_OT_actions(Operator): + """Add / remove etc assets""" + bl_idname = "asset_list.list_action" + bl_label = "Asset Actions" + bl_description = "Move items up and down, add and remove" + bl_options = {'REGISTER'} + + +class SCENES_LIST_OT_actions(Operator): + """Move items up and down, add and remove""" + bl_idname = "scene_list.list_action" + bl_label = "List Actions" + bl_description = "Move items up and down, add and remove" + bl_options = {'REGISTER'} + + action: bpy.props.EnumProperty( + items=( + ('UP', "Up", ""), + ('DOWN', "Down", ""), + ('REMOVE', "Remove", ""), + ('ADD', "Add", ""))) # type: ignore + + + scene_type: bpy.props.StringProperty()#TODO: replace with enum + + def invoke(self, context, event): + source = context.active_operator + target_name = "library_scenes" + target_index = "library_scenes_index" + if self.scene_type == "level": + target_name = "main_scenes" + target_index = "main_scenes_index" + + target = getattr(source, target_name) + idx = getattr(source, target_index) + current_index = getattr(source, target_index) + + try: + item = target[idx] + except IndexError: + pass + else: + if self.action == 'DOWN' and idx < len(target) - 1: + target.move(idx, idx + 1) + setattr(source, target_index, current_index +1 ) + info = 'Item "%s" moved to position %d' % (item.name, current_index + 1) + self.report({'INFO'}, info) + + elif self.action == 'UP' and idx >= 1: + target.move(idx, idx - 1) + setattr(source, target_index, current_index -1 ) + info = 'Item "%s" moved to position %d' % (item.name, current_index + 1) + self.report({'INFO'}, info) + 
+ elif self.action == 'REMOVE': + info = 'Item "%s" removed from list' % (target[idx].name) + setattr(source, target_index, current_index -1 ) + target.remove(idx) + self.report({'INFO'}, info) + + if self.action == 'ADD': + new_scene_name = None + if self.scene_type == "level": + if context.window_manager.main_scene: + new_scene_name = context.window_manager.main_scene.name + else: + if context.window_manager.library_scene: + new_scene_name = context.window_manager.library_scene.name + if new_scene_name: + item = target.add() + item.name = new_scene_name#f"Rule {idx +1}" + + if self.scene_type == "level": + context.window_manager.main_scene = None + else: + context.window_manager.library_scene = None + + #name = f"Rule {idx +1}" + #target.append({"name": name}) + setattr(source, target_index, len(target) - 1) + #source[target_index] = len(target) - 1 + info = '"%s" added to list' % (item.name) + self.report({'INFO'}, info) + + return {"FINISHED"} + + +import os +from bpy_extras.io_utils import ImportHelper + +class OT_OpenFolderbrowser(Operator, ImportHelper): + """Browse for registry json file""" + bl_idname = "generic.open_folderbrowser" + bl_label = "Select folder" + + # Define this to tell 'fileselect_add' that we want a directoy + directory: bpy.props.StringProperty( + name="Outdir Path", + description="selected folder" + # subtype='DIR_PATH' is not needed to specify the selection mode. + # But this will be anyway a directory path. 
+ ) # type: ignore + + # Filters folders + filter_folder: bpy.props.BoolProperty( + default=True, + options={"HIDDEN"} + ) # type: ignore + + target_property: bpy.props.StringProperty( + name="target_property", + options={'HIDDEN'} + ) # type: ignore + + def execute(self, context): + """Do something with the selected file(s).""" + operator = context.active_operator + new_path = self.directory + target_path_name = self.target_property + + # path to the current blend file + blend_file_path = bpy.data.filepath + # Get the folder + blend_file_folder_path = os.path.dirname(blend_file_path) + print("blend_file_folder_path", blend_file_folder_path) + + print("new_path", self.directory, self.target_property, operator) + + path_names = ['export_output_folder', 'export_blueprints_path', 'export_levels_path', 'export_materials_path'] + export_root_folder = operator.export_root_folder + #export_root_path_absolute = os.path.join(blend_file_folder_path, export_root_folder) + + if target_path_name == 'export_root_folder': + print("changing root new_path") + # we need to change all other relative paths before setting the new absolute path + for path_name in path_names: + # get absolute path + relative_path = getattr(operator, path_name, None) + if relative_path is not None: + absolute_path = os.path.join(export_root_folder, relative_path) + print("absolute path for", path_name, absolute_path) + relative_path = os.path.relpath(absolute_path, new_path) + setattr(operator, path_name, relative_path) + + # store the root path as relative to the current blend file + setattr(operator, target_path_name, new_path) + + else: + relative_path = os.path.relpath(new_path, export_root_folder) + setattr(operator, target_path_name, relative_path) + + #filename, extension = os.path.splitext(self.filepath) + + + return {'FINISHED'} + +def draw_folder_browser(layout, label, value, target_property): + row = layout.row() + row.label(text=label) + + '''box = row.box() + box.scale_y = 0.5 + 
box.label(text=value)''' + + col = row.column() + col.enabled = False + col.prop(bpy.context.active_operator, target_property, text="") + + folder_selector = row.operator(OT_OpenFolderbrowser.bl_idname, icon="FILE_FOLDER", text="") + folder_selector.target_property = target_property #"export_root_folder" \ No newline at end of file diff --git a/tools/blenvy/tests/__init__.py b/tools/blenvy/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/blenvy/tests/component_values_shuffler.py b/tools/blenvy/tests/component_values_shuffler.py new file mode 100644 index 0000000..9dd4c16 --- /dev/null +++ b/tools/blenvy/tests/component_values_shuffler.py @@ -0,0 +1,217 @@ + +import random +import string +import uuid +from bpy_types import PropertyGroup + +def random_bool(): + return bool(random.getrandbits(1)) + +def rand_int(): + return random.randint(0, 100) + +def rand_float(): + return random.random() + +def random_word(length): + letters = string.ascii_lowercase + return ''.join(random.choice(letters) for i in range(length)) + +def random_vec(length, type,): + value = [] + for i in range(0, length): + if type == 'float': + value.append(rand_float()) + if type == 'int': + value.append(rand_int()) + return value + +type_mappings = { + "bool": random_bool, + + "u8": rand_int, + "u16": rand_int, + "u32": rand_int, + "u64": rand_int, + "u128": rand_int, + "u64": rand_int, + "usize": rand_int, + + "i8": rand_int, + "i16": rand_int, + "i32": rand_int, + "i64": rand_int, + "i128": rand_int, + "isize": rand_int, + + 'f32': rand_float, + 'f64': rand_float, + + "glam::Vec2": lambda : random_vec(2, 'float'), + "glam::DVec2": lambda : random_vec(2, 'float'), + "glam::UVec2": lambda : random_vec(2, 'int'), + + 'glam::Vec3': lambda : random_vec(3, 'float'), + "glam::Vec3A": lambda : random_vec(3, 'float'), + "glam::UVec3": lambda : random_vec(3, 'int'), + + "glam::Vec4": lambda : random_vec(4, 'float'), + "glam::DVec4": lambda : random_vec(4, 'float'), + 
    "glam::UVec4": lambda : random_vec(4, 'int'),

    "glam::Quat": lambda : random_vec(4, 'float'),

    'bevy_render::color::Color': lambda : random_vec(4, 'float'),
    'alloc::string::String': lambda : random_word(8),
    'alloc::borrow::Cow': lambda : random_word(8),

    'bevy_ecs::entity::Entity': lambda: 0, #4294967295, #
    'bevy_utils::Uuid': lambda: '"'+str( uuid.UUID("73b3b118-7d01-4778-8bcc-4e79055f5d22") )+'"'
}
#

def is_def_value_type(definition, registry):
    # A missing definition is treated as a value type: the caller then writes
    # the generated value directly instead of recursing.
    if definition == None:
        return True
    value_types_defaults = registry.value_types_defaults
    long_name = definition["long_name"]
    is_value_type = long_name in value_types_defaults
    return is_value_type

# see https://docs.python.org/3/library/random.html
def component_values_shuffler(seed=1, property_group=None, definition=None, registry=None, parent=None):
    """Fill `property_group` with random values driven by its registry `definition`.

    Recurses through Struct / Tuple / TupleStruct / Enum / List type infos.
    `parent` is None only on the top-level call, where the RNG is seeded for
    reproducibility. Returns the generated value for value types; otherwise
    mutates `property_group` in place (and returns None implicitly).
    """
    # Seed only once, at the root of the recursion, so runs are reproducible.
    if parent == None:
        random.seed(seed)

    value_types_defaults = registry.value_types_defaults
    component_name = definition["short_name"]
    type_info = definition["typeInfo"] if "typeInfo" in definition else None
    type_def = definition["type"] if "type" in definition else None
    properties = definition["properties"] if "properties" in definition else {}
    prefixItems = definition["prefixItems"] if "prefixItems" in definition else []
    has_properties = len(properties.keys()) > 0  # NOTE(review): unused below
    has_prefixItems = len(prefixItems) > 0       # NOTE(review): unused below
    is_enum = type_info == "Enum"                # NOTE(review): unused below
    is_list = type_info == "List"                # NOTE(review): unused below
    long_name = definition["long_name"]

    #is_value_type = type_def in value_types_defaults or long_name in value_types_defaults
    is_value_type = long_name in value_types_defaults

    if is_value_type:
        # Leaf value: generate directly from the per-type generator table.
        fieldValue = type_mappings[long_name]()
        return fieldValue

    elif type_info == "Struct":
        # One named field per entry of property_group.field_names.
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["properties"][field_name]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = component_values_shuffler(seed, child_property_group, item_definition, registry, parent=component_name)
            else:
                value = '""'
            # Only plain value types are written back; nested groups were
            # already mutated in place by the recursive call above.
            is_item_value_type = is_def_value_type(item_definition, registry)
            if is_item_value_type:
                #print("setting attr", field_name , "for", component_name, "to", value, "value type", is_item_value_type)
                setattr(property_group , field_name, value)

    elif type_info == "Tuple":
        #print("tup")
        # Positional fields resolved via prefixItems, same write-back rule as Struct.
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = component_values_shuffler(seed, child_property_group, item_definition, registry, parent=component_name)
            else:
                value = '""'

            is_item_value_type = is_def_value_type(item_definition, registry)
            if is_item_value_type:
                #print("setting attr", field_name , "for", component_name, "to", value, "value type", is_item_value_type)
                setattr(property_group , field_name, value)

    elif type_info == "TupleStruct":
        #print("tupstruct")
        # Identical handling to Tuple: positional prefixItems fields.
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = component_values_shuffler(seed, child_property_group, item_definition, registry, parent=component_name)
            else:
                value = '""'

            is_item_value_type = is_def_value_type(item_definition, registry)
            if is_item_value_type:
                setattr(property_group , field_name, value)

    elif type_info == "Enum":
        # Pick a random variant; "object" enums carry per-variant payload groups.
        available_variants = definition["oneOf"] if type_def != "object" else list(map(lambda x: x["long_name"], definition["oneOf"]))
        selected = random.choice(available_variants)

        # set selected variant
        setattr(property_group , "selection", selected)

        if type_def == "object":
            selection_index = property_group.field_names.index("variant_"+selected)
            variant_name = property_group.field_names[selection_index]
            # NOTE(review): oneOf is indexed with selection_index-1 while
            # field_names uses selection_index — presumably field_names has an
            # extra leading entry; verify against the property-group builder.
            variant_definition = definition["oneOf"][selection_index-1]
            if "prefixItems" in variant_definition:
                value = getattr(property_group, variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = component_values_shuffler(seed, child_property_group, variant_definition, registry, parent=component_name)
                value = selected + str(value,)
            elif "properties" in variant_definition:
                value = getattr(property_group, variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = component_values_shuffler(seed, child_property_group, variant_definition, registry, parent=component_name)
                value = selected + str(value,)
            else:
                value = selected # here the value of the enum is just the name of the variant
        else:
            value = selected



    elif type_info == "List":
        # Replace the list contents with 1-2 freshly generated entries.
        item_list = getattr(property_group, "list")
        item_list.clear()

        item_long_name = getattr(property_group, "long_name")
        number_of_list_items_to_add = random.randint(1, 2)

        for i in range(0, number_of_list_items_to_add):
            new_entry = item_list.add()
            item_long_name = getattr(new_entry, "long_name") # we get the REAL type name

            definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            if definition != None:
                component_values_shuffler(seed, new_entry, definition, registry, parent=component_name)
            else:
                pass
    else:
        # Unknown typeInfo: fall back to the generator table when possible.
        print("something else")
        fieldValue = type_mappings[long_name]() if long_name in type_mappings else 'None'
        return fieldValue

    #return value


\ No newline at end of file diff --git a/tools/blenvy/tests/expected_bevy_hierarchy.json b/tools/blenvy/tests/expected_bevy_hierarchy.json new file mode 100644 index 0000000..150da44 --- /dev/null +++ b/tools/blenvy/tests/expected_bevy_hierarchy.json @@ -0,0 +1 @@ +{"Blueprint7_hierarchy.001":["Blueprint4_nested.001","Cube.001"],"Cylinder":["Cylinder.001","Cylinder.001"],"Blueprint8_animated_no_bones":["Cylinder.002"],"Blueprint7_hierarchy":["Cube.001"],"Collection 2":["Blueprint8_animated_no_bones","Collection 2 1","Empty_in_collection","Spot"],"Fox_mesh":["fox1","fox1"],"_rootJoint":["b_Root_00","b_Root_00"],"b_Root_00":["b_Hip_01","b_Hip_01"],"Blueprint1":["Blueprint1_mesh"],"Fox":["Fox_mesh","_rootJoint","Fox_mesh","_rootJoint"],"Light":["Light","DirectionalLight
Gizmo"],"b_Spine01_02":["b_Spine02_03","b_Spine02_03"],"b_RightLeg01_019":["b_RightLeg02_020","b_RightLeg02_020"],"b_LeftFoot01_017":["b_LeftFoot02_018","b_LeftFoot02_018"],"b_LeftForeArm_010":["b_LeftHand_011","b_LeftHand_011"],"Collection":["Blueprint1.001","Blueprint4_nested","Blueprint6_animated","Blueprint7_hierarchy","Camera","Cube","Empty","External_blueprint","External_blueprint2","Light","Plane"],"Cylinder.001":["Cylinder.002","Blueprint7_hierarchy.001","Empty_as_child"],"b_Hip_01":["b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019","b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019"],"world":["no_name"],"Parent_Object":["Cube.003","Blueprint1","Cylinder.001"],"Blueprint6_animated.001":["Fox"],"Blueprint4_nested":["Blueprint3"],"Blueprint6_animated":["Fox"],"Cube.001":["Cube.002","Cylinder","Cube.002","Cylinder"],"b_Spine02_03":["b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09","b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09"],"b_LeftLeg01_015":["b_LeftLeg02_016","b_LeftLeg02_016"],"Blueprint4_nested.001":["Blueprint3"],"b_Tail02_013":["b_Tail03_014","b_Tail03_014"],"b_RightForeArm_07":["b_RightHand_08","b_RightHand_08"],"External_blueprint2_Cylinder":["Cylinder"],"Blueprint3":["Blueprint3_mesh","Blueprint3_mesh"],"External_blueprint2":["External_blueprint2_Cylinder","External_blueprint3"],"b_LeftUpperArm_09":["b_LeftForeArm_010","b_LeftForeArm_010"],"Cube":["Cube"],"Plane":["Plane"],"no_name":["Parent_Object","Blueprint6_animated.001","lighting_components_World","assets_list_World_components","Collection","Collection 2"],"Collection 2 
1":["Empty_in_sub_collection"],"External_blueprint_mesh":["Cube.001"],"b_LeftLeg02_016":["b_LeftFoot01_017","b_LeftFoot01_017"],"Cylinder.002":["Cylinder.003"],"b_RightLeg02_020":["b_RightFoot01_021","b_RightFoot01_021"],"b_Neck_04":["b_Head_05","b_Head_05"],"b_RightUpperArm_06":["b_RightForeArm_07","b_RightForeArm_07"],"Spot":["Spot"],"External_blueprint3_Cone":["Cone"],"External_blueprint":["External_blueprint_mesh"],"Blueprint3_mesh":["Cylinder","Cylinder"],"External_blueprint3":["External_blueprint3_Cone"],"Camera":["Camera Gizmo"],"Blueprint1_mesh":["Cube.001","Cube.001"],"Blueprint1.001":["Blueprint1_mesh"],"b_Tail01_012":["b_Tail02_013","b_Tail02_013"],"b_RightFoot01_021":["b_RightFoot02_022","b_RightFoot02_022"]} \ No newline at end of file diff --git a/tools/blenvy/tests/expected_component_values.py b/tools/blenvy/tests/expected_component_values.py new file mode 100644 index 0000000..88ec17f --- /dev/null +++ b/tools/blenvy/tests/expected_component_values.py @@ -0,0 +1,555 @@ + + +expected_custom_property_values = {'bevy_animation::AnimationPlayer': '(animation: "", paused: true)', + 'bevy_asset::handle::Handle<()>': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle>': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 
'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_audio::audio::PlaybackSettings': '(mode: Once, paused: true, spatial: true, spatial_scale: "", speed: 0.0, ' + 'volume: (0.0))', + 'bevy_audio::audio::SpatialListener': '(left_ear_offset: Vec3(x:0.0, y:0.0, z:0.0), right_ear_offset: Vec3(x:0.0, ' + 'y:0.0, z:0.0))', + 'bevy_core::name::Name': '(hash: 0, name: " ")', + 'bevy_core_pipeline::bloom::settings::BloomSettings': '(composite_mode: EnergyConserving, high_pass_frequency: 0.0, ' + 'intensity: 0.0, low_frequency_boost: 0.0, ' + 'low_frequency_boost_curvature: 0.0, prefilter_settings: ' + '(threshold: 0.0, threshold_softness: 0.0))', + 'bevy_core_pipeline::contrast_adaptive_sharpening::ContrastAdaptiveSharpeningSettings': '(denoise: true, enabled: ' + 'true, sharpening_strength: ' + '0.0)', + 'bevy_core_pipeline::core_2d::camera_2d::Camera2d': '()', + 'bevy_core_pipeline::core_3d::camera_3d::Camera3d': '(depth_load_op: Clear(0.0), depth_texture_usages: (0), ' + 'screen_space_specular_transmission_quality: Low, ' + 'screen_space_specular_transmission_steps: 0)', + 'bevy_core_pipeline::fxaa::Fxaa': '(edge_threshold: "", edge_threshold_min: "", enabled: true)', + 'bevy_core_pipeline::tonemapping::DebandDither': 'Disabled', + 'bevy_core_pipeline::tonemapping::Tonemapping': 'None', + 'bevy_example::dupe_components::EnumTest': 'Metal', + 'bevy_example::game::animation::Marker1': '()', + 'bevy_example::game::animation::Marker2': '()', + 'bevy_example::game::animation::Marker3': '()', + 'bevy_example::game::animation::MarkerFox': '()', + 'bevy_example::test_components::AComponentWithAnExtremlyExageratedOrMaybeNotButCouldBeNameOrWut': '()', + 'bevy_example::test_components::BasicTest': '(a: 0.0, b: 0, c: " ")', + 'bevy_example::test_components::EnumComplex': 'Float(0.0)', + 
'bevy_example::test_components::EnumTest': 'Metal', + 'bevy_example::test_components::HashmapTestIntColor': '(inner: {})', + 'bevy_example::test_components::HashmapTestIntString': '(named_animations: {})', + 'bevy_example::test_components::HashmapTestSimple': '(named_animations: {})', + 'bevy_example::test_components::HashmapTestStringColor': '(inner: {})', + 'bevy_example::test_components::HashmapTestStringColorFlat': '({})', + 'bevy_example::test_components::HashmapTestStringFloat': '(named_animations: {})', + 'bevy_example::test_components::NestedTupleStuff': '(0.0, 0, (basic: (a: 0.0, b: 0, c: " "), color: (Rgba(red:1.0, ' + 'green:1.0, blue:0.0, alpha:1.0)), colors_list: ([]), enable: ' + 'true, enum_inner: Metal, nested: (vec: (Vec3(x:0.0, y:0.0, ' + 'z:0.0))), text: " ", toggle: (true)))', + 'bevy_example::test_components::NestingTestLevel2': '(basic: (a: 0.0, b: 0, c: " "), color: (Rgba(red:1.0, green:1.0, ' + 'blue:0.0, alpha:1.0)), colors_list: ([]), enable: true, ' + 'enum_inner: Metal, nested: (vec: (Vec3(x:0.0, y:0.0, z:0.0))), ' + 'text: " ", toggle: (true))', + 'bevy_example::test_components::NestingTestLevel3': '(vec: (Vec3(x:0.0, y:0.0, z:0.0)))', + 'bevy_example::test_components::TupleTest2': '(0.0, 0, " ")', + 'bevy_example::test_components::TupleTestBool': '(true)', + 'bevy_example::test_components::TupleTestColor': '(Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0))', + 'bevy_example::test_components::TupleTestF32': '(0.0)', + 'bevy_example::test_components::TupleTestStr': '(" ")', + 'bevy_example::test_components::TupleTestU64': '(0)', + 'bevy_example::test_components::TupleVec': '([])', + 'bevy_example::test_components::TupleVec2': '(Vec2(x:0.0, y:0.0))', + 'bevy_example::test_components::TupleVec3': '(Vec3(x:0.0, y:0.0, z:0.0))', + 'bevy_example::test_components::TupleVecF32F32': '([])', + 'bevy_example::test_components::UnitTest': '()', + 'bevy_example::test_components::VecOfColors': '([])', + 'bevy_example::test_components::VecOfF32s': 
'([])', + 'bevy_example::test_components::VecOfVec3s2': '([])', + 'bevy_gltf::GltfExtras': '(value: " ")', + 'bevy_gltf_blueprints::animation::AnimationInfos': '(animations: [])', + 'bevy_gltf_blueprints::animation::AnimationMarkers': '({})', + 'bevy_gltf_blueprints::animation::BlueprintAnimations': '(named_animations: "")', + 'bevy_gltf_blueprints::animation::SceneAnimations': '(named_animations: "")', + 'bevy_gltf_blueprints::materials::MaterialInfo': '(name: " ", source: " ")', + 'bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName': '(" ")', + 'bevy_gltf_blueprints::spawn_from_blueprints::BlueprintsList': '({})', + 'bevy_gltf_blueprints::spawn_from_blueprints::SpawnHere': '()', + 'bevy_gltf_components::GltfProcessed': '()', + 'bevy_gltf_components::blender_settings::lighting::BlenderBackgroundShader': '(color: Rgba(red:1.0, green:1.0, ' + 'blue:0.0, alpha:1.0), strength: 0.0)', + 'bevy_gltf_components::blender_settings::lighting::BlenderLightShadows': '(buffer_bias: 0.0, enabled: true)', + 'bevy_gltf_components::blender_settings::lighting::BlenderShadowSettings': '(cascade_size: 0)', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_replace_proxies::SSAOSettings': '()', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_tracking::CameraTrackable': '()', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_tracking::CameraTracking': '(offset: Vec3(x:0.0, y:0.0, ' + 'z:0.0))', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_tracking::CameraTrackingOffset': '(Vec3(x:0.0, y:0.0, ' + 'z:0.0))', + 'bevy_gltf_worlflow_examples_common::game::picking::Pickable': '()', + 'bevy_gltf_worlflow_examples_common::game::player::Player': '()', + 'bevy_gltf_worlflow_examples_common_rapier::physics::physics_replace_proxies::AutoAABBCollider': 'Cuboid', + 'bevy_gltf_worlflow_examples_common_rapier::physics::physics_replace_proxies::Collider': 'Ball(0.0)', + 'bevy_hierarchy::components::children::Children': '([])', + 
'bevy_hierarchy::components::parent::Parent': '(0)', + 'bevy_pbr::bundle::CascadesVisibleEntities': '()', + 'bevy_pbr::bundle::CubemapVisibleEntities': '()', + 'bevy_pbr::fog::FogSettings': '(color: Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), directional_light_color: ' + 'Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), directional_light_exponent: 0.0, ' + 'falloff: Linear(end: 0.0, start: 0.0))', + 'bevy_pbr::light::CascadeShadowConfig': '(bounds: [], minimum_distance: 0.0, overlap_proportion: 0.0)', + 'bevy_pbr::light::Cascades': '(cascades: "")', + 'bevy_pbr::light::ClusterConfig': 'None', + 'bevy_pbr::light::DirectionalLight': '(color: Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), illuminance: 0.0, ' + 'shadow_depth_bias: 0.0, shadow_normal_bias: 0.0, shadows_enabled: true)', + 'bevy_pbr::light::NotShadowCaster': '()', + 'bevy_pbr::light::NotShadowReceiver': '()', + 'bevy_pbr::light::PointLight': '(color: Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), intensity: 0.0, radius: 0.0, ' + 'range: 0.0, shadow_depth_bias: 0.0, shadow_normal_bias: 0.0, shadows_enabled: true)', + 'bevy_pbr::light::ShadowFilteringMethod': 'Hardware2x2', + 'bevy_pbr::light::SpotLight': '(color: Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), inner_angle: 0.0, intensity: ' + '0.0, outer_angle: 0.0, radius: 0.0, range: 0.0, shadow_depth_bias: 0.0, ' + 'shadow_normal_bias: 0.0, shadows_enabled: true)', + 'bevy_pbr::light_probe::LightProbe': '()', + 'bevy_pbr::ssao::ScreenSpaceAmbientOcclusionSettings': '(quality_level: "")', + 'bevy_pbr::wireframe::NoWireframe': '()', + 'bevy_pbr::wireframe::Wireframe': '()', + 'bevy_pbr::wireframe::WireframeColor': '(color: Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0))', + 'bevy_rapier3d::dynamics::rigid_body::AdditionalMassProperties': 'Mass(0.0)', + 'bevy_rapier3d::dynamics::rigid_body::Ccd': '(enabled: true)', + 'bevy_rapier3d::dynamics::rigid_body::Damping': '(angular_damping: 0.0, linear_damping: 0.0)', + 
'bevy_rapier3d::dynamics::rigid_body::Dominance': '(groups: 0)', + 'bevy_rapier3d::dynamics::rigid_body::ExternalForce': '(force: Vec3(x:0.0, y:0.0, z:0.0), torque: Vec3(x:0.0, y:0.0, ' + 'z:0.0))', + 'bevy_rapier3d::dynamics::rigid_body::ExternalImpulse': '(impulse: Vec3(x:0.0, y:0.0, z:0.0), torque_impulse: ' + 'Vec3(x:0.0, y:0.0, z:0.0))', + 'bevy_rapier3d::dynamics::rigid_body::GravityScale': '(0.0)', + 'bevy_rapier3d::dynamics::rigid_body::LockedAxes': '(0)', + 'bevy_rapier3d::dynamics::rigid_body::RigidBody': 'Dynamic', + 'bevy_rapier3d::dynamics::rigid_body::Sleeping': '(angular_threshold: 0.0, linear_threshold: 0.0, sleeping: true)', + 'bevy_rapier3d::dynamics::rigid_body::Velocity': '(angvel: Vec3(x:0.0, y:0.0, z:0.0), linvel: Vec3(x:0.0, y:0.0, ' + 'z:0.0))', + 'bevy_rapier3d::geometry::collider::CollidingEntities': '("")', + 'bevy_rapier3d::geometry::collider::CollisionGroups': '(filters: (0), memberships: (0))', + 'bevy_rapier3d::geometry::collider::ContactForceEventThreshold': '(0.0)', + 'bevy_rapier3d::geometry::collider::Friction': '(coefficient: 0.0, combine_rule: "")', + 'bevy_rapier3d::geometry::collider::Group': '(0)', + 'bevy_rapier3d::geometry::collider::Restitution': '(coefficient: 0.0, combine_rule: "")', + 'bevy_rapier3d::geometry::collider::Sensor': '()', + 'bevy_rapier3d::geometry::collider::SolverGroups': '(filters: (0), memberships: (0))', + 'bevy_render::camera::camera::Camera': '(clear_color: Default, hdr: true, is_active: true, msaa_writeback: true, ' + 'order: 0, viewport: None)', + 'bevy_render::camera::camera::CameraMainTextureUsages': 'None', + 'bevy_render::camera::camera::CameraRenderGraph': 'None', + 'bevy_render::camera::camera::Exposure': 'None', + 'bevy_render::camera::projection::OrthographicProjection': '(area: (max: Vec2(x:0.0, y:0.0), min: Vec2(x:0.0, ' + 'y:0.0)), far: 0.0, near: 0.0, scale: 0.0, scaling_mode: ' + 'Fixed(height: 0.0, width: 0.0), viewport_origin: ' + 'Vec2(x:0.0, y:0.0))', + 
'bevy_render::camera::projection::PerspectiveProjection': '(aspect_ratio: 0.0, far: 0.0, fov: 0.0, near: 0.0)', + 'bevy_render::camera::projection::Projection': 'Perspective((aspect_ratio: 0.0, far: 0.0, fov: 0.0, near: 0.0))', + 'bevy_render::mesh::mesh::skinning::SkinnedMesh': '(inverse_bindposes: Strong(""), joints: [])', + 'bevy_render::mesh::morph::MeshMorphWeights': '(weights: [])', + 'bevy_render::mesh::morph::MorphWeights': '(first_mesh: "", weights: [])', + 'bevy_render::primitives::Aabb': '(center: Vec3A(x:0.0, y:0.0, z:0.0), half_extents: Vec3A(x:0.0, y:0.0, z:0.0))', + 'bevy_render::primitives::CascadesFrusta': '()', + 'bevy_render::primitives::CubemapFrusta': '()', + 'bevy_render::primitives::Frustum': '()', + 'bevy_render::view::ColorGrading': '(exposure: 0.0, gamma: 0.0, post_saturation: 0.0, pre_saturation: 0.0)', + 'bevy_render::view::visibility::InheritedVisibility': '(true)', + 'bevy_render::view::visibility::NoFrustumCulling': '()', + 'bevy_render::view::visibility::ViewVisibility': '(true)', + 'bevy_render::view::visibility::Visibility': 'Inherited', + 'bevy_render::view::visibility::VisibleEntities': '()', + 'bevy_render::view::visibility::render_layers::RenderLayers': '(0)', + 'bevy_sprite::mesh2d::mesh::Mesh2dHandle': '(Strong(""))', + 'bevy_sprite::sprite::ImageScaleMode': 'Sliced((border: "", center_scale_mode: "", max_corner_scale: 0.0, ' + 'sides_scale_mode: ""))', + 'bevy_sprite::sprite::Sprite': '(anchor: Center, color: Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), custom_size: ' + '"", flip_x: true, flip_y: true, rect: "")', + 'bevy_text::pipeline::TextLayoutInfo': '(glyphs: "", logical_size: Vec2(x:0.0, y:0.0))', + 'bevy_text::text2d::Text2dBounds': '(size: Vec2(x:0.0, y:0.0))', + 'bevy_text::text::Text': '(justify: Left, linebreak_behavior: WordBoundary, sections: [])', + 'bevy_transform::components::global_transform::GlobalTransform': '((matrix3: (x_axis: Vec3A(x:0.0, y:0.0, z:0.0), ' + 'y_axis: Vec3A(x:0.0, y:0.0, z:0.0), z_axis: 
' + 'Vec3A(x:0.0, y:0.0, z:0.0)), translation: ' + 'Vec3A(x:0.0, y:0.0, z:0.0)))', + 'bevy_transform::components::transform::Transform': '(rotation: Quat(x:0.0, y:0.0, z:0.0, w:0.0), scale: Vec3(x:0.0, ' + 'y:0.0, z:0.0), translation: Vec3(x:0.0, y:0.0, z:0.0))', + 'bevy_ui::focus::FocusPolicy': 'Block', + 'bevy_ui::focus::Interaction': 'Pressed', + 'bevy_ui::focus::RelativeCursorPosition': '(normalized: "", normalized_visible_node_rect: (max: Vec2(x:0.0, y:0.0), ' + 'min: Vec2(x:0.0, y:0.0)))', + 'bevy_ui::measurement::ContentSize': '()', + 'bevy_ui::ui_node::BackgroundColor': '(Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0))', + 'bevy_ui::ui_node::BorderColor': '(Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0))', + 'bevy_ui::ui_node::CalculatedClip': '(clip: (max: Vec2(x:0.0, y:0.0), min: Vec2(x:0.0, y:0.0)))', + 'bevy_ui::ui_node::Node': '(calculated_size: Vec2(x:0.0, y:0.0), outline_offset: 0.0, outline_width: 0.0, ' + 'stack_index: 0, unrounded_size: Vec2(x:0.0, y:0.0))', + 'bevy_ui::ui_node::Outline': '(color: Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), offset: Auto, width: Auto)', + 'bevy_ui::ui_node::Style': '(align_content: Default, align_items: Default, align_self: Auto, aspect_ratio: None, ' + 'border: (bottom: Auto, left: Auto, right: Auto, top: Auto), bottom: Auto, column_gap: ' + 'Auto, direction: Inherit, display: Flex, flex_basis: Auto, flex_direction: Row, ' + 'flex_grow: 0.0, flex_shrink: 0.0, flex_wrap: NoWrap, grid_auto_columns: "", ' + 'grid_auto_flow: Row, grid_auto_rows: "", grid_column: (end: "", span: "", start: ""), ' + 'grid_row: (end: "", span: "", start: ""), grid_template_columns: "", grid_template_rows: ' + '"", height: Auto, justify_content: Default, justify_items: Default, justify_self: Auto, ' + 'left: Auto, margin: (bottom: Auto, left: Auto, right: Auto, top: Auto), max_height: Auto, ' + 'max_width: Auto, min_height: Auto, min_width: Auto, overflow: (x: Visible, y: Visible), ' + 'padding: (bottom: Auto, left: Auto, right: Auto, 
top: Auto), position_type: Relative, ' + 'right: Auto, row_gap: Auto, top: Auto, width: Auto)', + 'bevy_ui::ui_node::UiImage': '(flip_x: true, flip_y: true, texture: Strong(""))', + 'bevy_ui::ui_node::ZIndex': 'Local(0)', + 'bevy_ui::widget::button::Button': '()', + 'bevy_ui::widget::image::UiImageSize': '(size: Vec2(x:0.0, y:0.0))', + 'bevy_ui::widget::label::Label': '()', + 'bevy_ui::widget::text::TextFlags': '(needs_new_measure_func: true, needs_recompute: true)', + 'bevy_window::window::PrimaryWindow': '()', + 'bevy_window::window::Window': '(canvas: None, composite_alpha_mode: Auto, cursor: (grab_mode: None, hit_test: true, ' + 'icon: Default, visible: true), decorations: true, enabled_buttons: (close: true, ' + 'maximize: true, minimize: true), focused: true, ime_enabled: true, ime_position: ' + 'Vec2(x:0.0, y:0.0), internal: (maximize_request: None, minimize_request: None, ' + 'physical_cursor_position: None), mode: Windowed, name: None, position: Automatic, ' + 'present_mode: AutoVsync, prevent_default_event_handling: true, resizable: true, ' + 'resize_constraints: (max_height: 0.0, max_width: 0.0, min_height: 0.0, min_width: ' + '0.0), resolution: (physical_height: 0, physical_width: 0, scale_factor: 0.0, ' + 'scale_factor_override: None), title: " ", transparent: true, visible: true, ' + 'window_level: AlwaysOnBottom, window_theme: "")'} + + + +expected_custom_property_values_randomized = {'bevy_animation::AnimationPlayer': '(animation: "", paused: true)', + 'bevy_asset::handle::Handle<()>': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 
'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle>': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_asset::handle::Handle': 'Strong("")', + 'bevy_audio::audio::PlaybackSettings': '(mode: Once, paused: false, spatial: false, spatial_scale: "", speed: ' + '0.5780913233757019, volume: (0.20609822869300842))', + 'bevy_audio::audio::SpatialListener': '(left_ear_offset: Vec3(x:0.5714026093482971, y:0.42888906598091125, ' + 'z:0.5780913233757019), right_ear_offset: Vec3(x:0.20609822869300842, ' + 'y:0.8133212327957153, z:0.8235888481140137))', + 'bevy_core::name::Name': '(hash: 73, name: "bnpsagop")', + 'bevy_core_pipeline::bloom::settings::BloomSettings': '(composite_mode: EnergyConserving, high_pass_frequency: ' + '0.42888906598091125, intensity: 0.5780913233757019, ' + 'low_frequency_boost: 0.20609822869300842, ' + 'low_frequency_boost_curvature: 0.8133212327957153, ' + 'prefilter_settings: (threshold: 0.8235888481140137, ' + 'threshold_softness: 0.6534725427627563))', + 'bevy_core_pipeline::contrast_adaptive_sharpening::ContrastAdaptiveSharpeningSettings': '(denoise: true, enabled: ' + 'false, sharpening_strength: ' + '0.42888906598091125)', + 'bevy_core_pipeline::core_2d::camera_2d::Camera2d': '()', + 'bevy_core_pipeline::core_3d::camera_3d::Camera3d': '(depth_load_op: Clear(0.42888906598091125), ' + 'depth_texture_usages: (73), ' + 'screen_space_specular_transmission_quality: Low, ' + 'screen_space_specular_transmission_steps: 26)', + 'bevy_core_pipeline::fxaa::Fxaa': '(edge_threshold: "", 
edge_threshold_min: "", enabled: true)', + 'bevy_core_pipeline::tonemapping::DebandDither': 'Disabled', + 'bevy_core_pipeline::tonemapping::Tonemapping': 'None', + 'bevy_example::dupe_components::EnumTest': 'Squishy', + 'bevy_example::game::animation::Marker1': '()', + 'bevy_example::game::animation::Marker2': '()', + 'bevy_example::game::animation::Marker3': '()', + 'bevy_example::game::animation::MarkerFox': '()', + 'bevy_example::test_components::AComponentWithAnExtremlyExageratedOrMaybeNotButCouldBeNameOrWut': '()', + 'bevy_example::test_components::BasicTest': '(a: 0.5714026093482971, b: 54, c: "psagopiu")', + 'bevy_example::test_components::EnumComplex': 'StructLike(a: 0.03258506581187248, b: 61, c: "sagopiuz")', + 'bevy_example::test_components::EnumTest': 'Squishy', + 'bevy_example::test_components::HashmapTestIntColor': '(inner: {})', + 'bevy_example::test_components::HashmapTestIntString': '(named_animations: {})', + 'bevy_example::test_components::HashmapTestSimple': '(named_animations: {})', + 'bevy_example::test_components::HashmapTestStringColor': '(inner: {})', + 'bevy_example::test_components::HashmapTestStringColorFlat': '({})', + 'bevy_example::test_components::HashmapTestStringFloat': '(named_animations: {})', + 'bevy_example::test_components::NestedTupleStuff': '(0.5714026093482971, 54, (basic: (a: 0.4825616776943207, b: 1, c: ' + '"gopiuzfb"), color: (Rgba(red:0.5206693410873413, ' + 'green:0.3277728259563446, blue:0.24999667704105377, ' + 'alpha:0.952816903591156)), colors_list: ' + '([Rgba(red:0.0445563830435276, green:0.8601610660552979, ' + 'blue:0.6031906008720398, alpha:0.38160598278045654), ' + 'Rgba(red:0.2836182117462158, green:0.6749648451805115, ' + 'blue:0.456831157207489, alpha:0.6858614683151245)]), enable: ' + 'true, enum_inner: Rock, nested: (vec: (Vec3(x:0.1329781413078308, ' + 'y:0.7678378224372864, z:0.9824132323265076))), text: "otmbsahe", ' + 'toggle: (false)))', + 'bevy_example::test_components::NestingTestLevel2': 
'(basic: (a: 0.5714026093482971, b: 54, c: "psagopiu"), color: ' + '(Rgba(red:0.8106188178062439, green:0.03440357372164726, ' + 'blue:0.49008557200431824, alpha:0.07608934491872787)), ' + 'colors_list: ([Rgba(red:0.0445563830435276, ' + 'green:0.8601610660552979, blue:0.6031906008720398, ' + 'alpha:0.38160598278045654), Rgba(red:0.2836182117462158, ' + 'green:0.6749648451805115, blue:0.456831157207489, ' + 'alpha:0.6858614683151245)]), enable: true, enum_inner: Rock, ' + 'nested: (vec: (Vec3(x:0.1329781413078308, y:0.7678378224372864, ' + 'z:0.9824132323265076))), text: "otmbsahe", toggle: (false))', + 'bevy_example::test_components::NestingTestLevel3': '(vec: (Vec3(x:0.5714026093482971, y:0.42888906598091125, ' + 'z:0.5780913233757019)))', + 'bevy_example::test_components::TupleTest2': '(0.5714026093482971, 54, "psagopiu")', + 'bevy_example::test_components::TupleTestBool': '(true)', + 'bevy_example::test_components::TupleTestColor': '(Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842))', + 'bevy_example::test_components::TupleTestF32': '(0.5714026093482971)', + 'bevy_example::test_components::TupleTestStr': '("sbnpsago")', + 'bevy_example::test_components::TupleTestU64': '(73)', + 'bevy_example::test_components::TupleVec': '(["npsagopi"])', + 'bevy_example::test_components::TupleVec2': '(Vec2(x:0.5714026093482971, y:0.42888906598091125))', + 'bevy_example::test_components::TupleVec3': '(Vec3(x:0.5714026093482971, y:0.42888906598091125, ' + 'z:0.5780913233757019))', + 'bevy_example::test_components::TupleVecF32F32': '([(0.42888906598091125, 0.5780913233757019)])', + 'bevy_example::test_components::UnitTest': '()', + 'bevy_example::test_components::VecOfColors': '([Rgba(red:0.42888906598091125, green:0.5780913233757019, ' + 'blue:0.20609822869300842, alpha:0.8133212327957153)])', + 'bevy_example::test_components::VecOfF32s': '([0.42888906598091125])', + 'bevy_example::test_components::VecOfVec3s2': 
'([(Vec3(x:0.42888906598091125, y:0.5780913233757019, ' + 'z:0.20609822869300842))])', + 'bevy_gltf::GltfExtras': '(value: "sbnpsago")', + 'bevy_gltf_blueprints::animation::AnimationInfos': '(animations: [(frame_end: 0.42888906598091125, ' + 'frame_end_override: 0.5780913233757019, frame_start: ' + '0.20609822869300842, frame_start_override: 0.8133212327957153, ' + 'frames_length: 0.8235888481140137, name: "uzfbqpkc")])', + 'bevy_gltf_blueprints::animation::AnimationMarkers': '({})', + 'bevy_gltf_blueprints::animation::BlueprintAnimations': '(named_animations: "")', + 'bevy_gltf_blueprints::animation::SceneAnimations': '(named_animations: "")', + 'bevy_gltf_blueprints::materials::MaterialInfo': '(name: "sbnpsago", source: "piuzfbqp")', + 'bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName': '("sbnpsago")', + 'bevy_gltf_blueprints::spawn_from_blueprints::BlueprintsList': '({})', + 'bevy_gltf_blueprints::spawn_from_blueprints::SpawnHere': '()', + 'bevy_gltf_components::GltfProcessed': '()', + 'bevy_gltf_components::blender_settings::lighting::BlenderBackgroundShader': '(color: Rgba(red:0.5714026093482971, ' + 'green:0.42888906598091125, ' + 'blue:0.5780913233757019, ' + 'alpha:0.20609822869300842), strength: ' + '0.8133212327957153)', + 'bevy_gltf_components::blender_settings::lighting::BlenderLightShadows': '(buffer_bias: 0.5714026093482971, enabled: ' + 'false)', + 'bevy_gltf_components::blender_settings::lighting::BlenderShadowSettings': '(cascade_size: 73)', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_replace_proxies::SSAOSettings': '()', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_tracking::CameraTrackable': '()', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_tracking::CameraTracking': '(offset: ' + 'Vec3(x:0.5714026093482971, ' + 'y:0.42888906598091125, ' + 'z:0.5780913233757019))', + 'bevy_gltf_worlflow_examples_common::core::camera::camera_tracking::CameraTrackingOffset': '(Vec3(x:0.5714026093482971, ' + 
'y:0.42888906598091125, ' + 'z:0.5780913233757019))', + 'bevy_gltf_worlflow_examples_common::game::picking::Pickable': '()', + 'bevy_gltf_worlflow_examples_common::game::player::Player': '()', + 'bevy_gltf_worlflow_examples_common_rapier::physics::physics_replace_proxies::AutoAABBCollider': 'Capsule', + 'bevy_gltf_worlflow_examples_common_rapier::physics::physics_replace_proxies::Collider': 'Ball(0.42888906598091125)', + 'bevy_hierarchy::components::children::Children': '([0])', + 'bevy_hierarchy::components::parent::Parent': '(0)', + 'bevy_pbr::bundle::CascadesVisibleEntities': '()', + 'bevy_pbr::bundle::CubemapVisibleEntities': '()', + 'bevy_pbr::fog::FogSettings': '(color: Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842), directional_light_color: ' + 'Rgba(red:0.8133212327957153, green:0.8235888481140137, blue:0.6534725427627563, ' + 'alpha:0.16022956371307373), directional_light_exponent: 0.5206693410873413, falloff: ' + 'ExponentialSquared(density: 0.07608934491872787))', + 'bevy_pbr::light::CascadeShadowConfig': '(bounds: [0.42888906598091125], minimum_distance: 0.5780913233757019, ' + 'overlap_proportion: 0.20609822869300842)', + 'bevy_pbr::light::Cascades': '(cascades: "")', + 'bevy_pbr::light::ClusterConfig': 'None', + 'bevy_pbr::light::DirectionalLight': '(color: Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842), illuminance: ' + '0.8133212327957153, shadow_depth_bias: 0.8235888481140137, shadow_normal_bias: ' + '0.6534725427627563, shadows_enabled: false)', + 'bevy_pbr::light::NotShadowCaster': '()', + 'bevy_pbr::light::NotShadowReceiver': '()', + 'bevy_pbr::light::PointLight': '(color: Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842), intensity: 0.8133212327957153, ' + 'radius: 0.8235888481140137, range: 0.6534725427627563, shadow_depth_bias: ' + 
'0.16022956371307373, shadow_normal_bias: 0.5206693410873413, shadows_enabled: false)', + 'bevy_pbr::light::ShadowFilteringMethod': 'Jimenez14', + 'bevy_pbr::light::SpotLight': '(color: Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842), inner_angle: 0.8133212327957153, ' + 'intensity: 0.8235888481140137, outer_angle: 0.6534725427627563, radius: ' + '0.16022956371307373, range: 0.5206693410873413, shadow_depth_bias: 0.3277728259563446, ' + 'shadow_normal_bias: 0.24999667704105377, shadows_enabled: true)', + 'bevy_pbr::light_probe::LightProbe': '()', + 'bevy_pbr::ssao::ScreenSpaceAmbientOcclusionSettings': '(quality_level: "")', + 'bevy_pbr::wireframe::NoWireframe': '()', + 'bevy_pbr::wireframe::Wireframe': '()', + 'bevy_pbr::wireframe::WireframeColor': '(color: Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842))', + 'bevy_rapier3d::dynamics::rigid_body::AdditionalMassProperties': 'Mass(0.42888906598091125)', + 'bevy_rapier3d::dynamics::rigid_body::Ccd': '(enabled: true)', + 'bevy_rapier3d::dynamics::rigid_body::Damping': '(angular_damping: 0.5714026093482971, linear_damping: ' + '0.42888906598091125)', + 'bevy_rapier3d::dynamics::rigid_body::Dominance': '(groups: 73)', + 'bevy_rapier3d::dynamics::rigid_body::ExternalForce': '(force: Vec3(x:0.5714026093482971, y:0.42888906598091125, ' + 'z:0.5780913233757019), torque: Vec3(x:0.20609822869300842, ' + 'y:0.8133212327957153, z:0.8235888481140137))', + 'bevy_rapier3d::dynamics::rigid_body::ExternalImpulse': '(impulse: Vec3(x:0.5714026093482971, y:0.42888906598091125, ' + 'z:0.5780913233757019), torque_impulse: ' + 'Vec3(x:0.20609822869300842, y:0.8133212327957153, ' + 'z:0.8235888481140137))', + 'bevy_rapier3d::dynamics::rigid_body::GravityScale': '(0.5714026093482971)', + 'bevy_rapier3d::dynamics::rigid_body::LockedAxes': '(73)', + 'bevy_rapier3d::dynamics::rigid_body::RigidBody': 'Dynamic', + 
'bevy_rapier3d::dynamics::rigid_body::Sleeping': '(angular_threshold: 0.5714026093482971, linear_threshold: ' + '0.42888906598091125, sleeping: true)', + 'bevy_rapier3d::dynamics::rigid_body::Velocity': '(angvel: Vec3(x:0.5714026093482971, y:0.42888906598091125, ' + 'z:0.5780913233757019), linvel: Vec3(x:0.20609822869300842, ' + 'y:0.8133212327957153, z:0.8235888481140137))', + 'bevy_rapier3d::geometry::collider::CollidingEntities': '("")', + 'bevy_rapier3d::geometry::collider::CollisionGroups': '(filters: (73), memberships: (4))', + 'bevy_rapier3d::geometry::collider::ContactForceEventThreshold': '(0.5714026093482971)', + 'bevy_rapier3d::geometry::collider::Friction': '(coefficient: 0.5714026093482971, combine_rule: "")', + 'bevy_rapier3d::geometry::collider::Group': '(73)', + 'bevy_rapier3d::geometry::collider::Restitution': '(coefficient: 0.5714026093482971, combine_rule: "")', + 'bevy_rapier3d::geometry::collider::Sensor': '()', + 'bevy_rapier3d::geometry::collider::SolverGroups': '(filters: (73), memberships: (4))', + 'bevy_render::camera::camera::Camera': '(clear_color: None, hdr: false, is_active: false, msaa_writeback: false, ' + 'order: 73, viewport: None)', + 'bevy_render::camera::camera::CameraMainTextureUsages': 'None', + 'bevy_render::camera::camera::CameraRenderGraph': 'None', + 'bevy_render::camera::camera::Exposure': 'None', + 'bevy_render::camera::projection::OrthographicProjection': '(area: (max: Vec2(x:0.5714026093482971, ' + 'y:0.42888906598091125), min: Vec2(x:0.5780913233757019, ' + 'y:0.20609822869300842)), far: 0.8133212327957153, near: ' + '0.8235888481140137, scale: 0.6534725427627563, ' + 'scaling_mode: WindowSize(0.03440357372164726), ' + 'viewport_origin: Vec2(x:0.49008557200431824, ' + 'y:0.07608934491872787))', + 'bevy_render::camera::projection::PerspectiveProjection': '(aspect_ratio: 0.5714026093482971, far: ' + '0.42888906598091125, fov: 0.5780913233757019, near: ' + '0.20609822869300842)', + 
'bevy_render::camera::projection::Projection': 'Perspective((aspect_ratio: 0.42888906598091125, far: ' + '0.5780913233757019, fov: 0.20609822869300842, near: ' + '0.8133212327957153))', + 'bevy_render::mesh::mesh::skinning::SkinnedMesh': '(inverse_bindposes: Strong(""), joints: [0, 0])', + 'bevy_render::mesh::morph::MeshMorphWeights': '(weights: [0.42888906598091125])', + 'bevy_render::mesh::morph::MorphWeights': '(first_mesh: "", weights: [0.42888906598091125])', + 'bevy_render::primitives::Aabb': '(center: Vec3A(x:0.5714026093482971, y:0.42888906598091125, z:0.5780913233757019), ' + 'half_extents: Vec3A(x:0.20609822869300842, y:0.8133212327957153, ' + 'z:0.8235888481140137))', + 'bevy_render::primitives::CascadesFrusta': '()', + 'bevy_render::primitives::CubemapFrusta': '()', + 'bevy_render::primitives::Frustum': '()', + 'bevy_render::view::ColorGrading': '(exposure: 0.5714026093482971, gamma: 0.42888906598091125, post_saturation: ' + '0.5780913233757019, pre_saturation: 0.20609822869300842)', + 'bevy_render::view::visibility::InheritedVisibility': '(true)', + 'bevy_render::view::visibility::NoFrustumCulling': '()', + 'bevy_render::view::visibility::ViewVisibility': '(true)', + 'bevy_render::view::visibility::Visibility': 'Visible', + 'bevy_render::view::visibility::VisibleEntities': '()', + 'bevy_render::view::visibility::render_layers::RenderLayers': '(73)', + 'bevy_sprite::mesh2d::mesh::Mesh2dHandle': '(Strong(""))', + 'bevy_sprite::sprite::ImageScaleMode': 'Sliced((border: "", center_scale_mode: "", max_corner_scale: ' + '0.42888906598091125, sides_scale_mode: ""))', + 'bevy_sprite::sprite::Sprite': '(anchor: Custom(Vec2(x:0.03258506581187248, y:0.4825616776943207)), color: ' + 'Rgba(red:0.014832446351647377, green:0.46258050203323364, blue:0.4912964105606079, ' + 'alpha:0.27752065658569336), custom_size: "", flip_x: true, flip_y: false, rect: "")', + 'bevy_text::pipeline::TextLayoutInfo': '(glyphs: "", logical_size: Vec2(x:0.5714026093482971, 
y:0.42888906598091125))', + 'bevy_text::text2d::Text2dBounds': '(size: Vec2(x:0.5714026093482971, y:0.42888906598091125))', + 'bevy_text::text::Text': '(justify: Right, linebreak_behavior: WordBoundary, sections: [(style: (color: ' + 'Rgba(red:0.4825616776943207, green:0.014832446351647377, blue:0.46258050203323364, ' + 'alpha:0.4912964105606079), font: Weak(Index(index: "")), font_size: 0.03440357372164726), ' + 'value: "pkchxlbn"), (style: (color: Rgba(red:0.8601610660552979, green:0.6031906008720398, ' + 'blue:0.38160598278045654, alpha:0.2836182117462158), font: Weak(Uuid(uuid: ' + '"73b3b118-7d01-4778-8bcc-4e79055f5d22")), font_size: 0.17467059195041656), value: ' + '"jvleoyho")])', + 'bevy_transform::components::global_transform::GlobalTransform': '((matrix3: (x_axis: Vec3A(x:0.5714026093482971, ' + 'y:0.42888906598091125, z:0.5780913233757019), ' + 'y_axis: Vec3A(x:0.20609822869300842, ' + 'y:0.8133212327957153, z:0.8235888481140137), ' + 'z_axis: Vec3A(x:0.6534725427627563, ' + 'y:0.16022956371307373, z:0.5206693410873413)), ' + 'translation: Vec3A(x:0.3277728259563446, ' + 'y:0.24999667704105377, z:0.952816903591156)))', + 'bevy_transform::components::transform::Transform': '(rotation: Quat(x:0.5714026093482971, y:0.42888906598091125, ' + 'z:0.5780913233757019, w:0.20609822869300842), scale: ' + 'Vec3(x:0.8133212327957153, y:0.8235888481140137, ' + 'z:0.6534725427627563), translation: Vec3(x:0.16022956371307373, ' + 'y:0.5206693410873413, z:0.3277728259563446))', + 'bevy_ui::focus::FocusPolicy': 'Block', + 'bevy_ui::focus::Interaction': 'None', + 'bevy_ui::focus::RelativeCursorPosition': '(normalized: "", normalized_visible_node_rect: (max: ' + 'Vec2(x:0.5714026093482971, y:0.42888906598091125), min: ' + 'Vec2(x:0.5780913233757019, y:0.20609822869300842)))', + 'bevy_ui::measurement::ContentSize': '()', + 'bevy_ui::ui_node::BackgroundColor': '(Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842))', 
+ 'bevy_ui::ui_node::BorderColor': '(Rgba(red:0.5714026093482971, green:0.42888906598091125, blue:0.5780913233757019, ' + 'alpha:0.20609822869300842))', + 'bevy_ui::ui_node::CalculatedClip': '(clip: (max: Vec2(x:0.5714026093482971, y:0.42888906598091125), min: ' + 'Vec2(x:0.5780913233757019, y:0.20609822869300842)))', + 'bevy_ui::ui_node::Node': '(calculated_size: Vec2(x:0.5714026093482971, y:0.42888906598091125), outline_offset: ' + '0.5780913233757019, outline_width: 0.20609822869300842, stack_index: 62, unrounded_size: ' + 'Vec2(x:0.8235888481140137, y:0.6534725427627563))', + 'bevy_ui::ui_node::Outline': '(color: Rgba(red:0.5714026093482971, green:0.42888906598091125, ' + 'blue:0.5780913233757019, alpha:0.20609822869300842), offset: VMax(0.4912964105606079), ' + 'width: Percent(0.6534725427627563))', + 'bevy_ui::ui_node::Style': '(align_content: SpaceAround, align_items: Default, align_self: Baseline, aspect_ratio: ' + 'Some(0.5780913233757019), border: (bottom: Px(0.46258050203323364), left: ' + 'Vw(0.8235888481140137), right: VMin(0.8106188178062439), top: Auto), bottom: ' + 'Vh(0.49008557200431824), column_gap: Auto, direction: Inherit, display: None, flex_basis: ' + 'Percent(0.0445563830435276), flex_direction: Column, flex_grow: 0.6031906008720398, ' + 'flex_shrink: 0.38160598278045654, flex_wrap: Wrap, grid_auto_columns: "", grid_auto_flow: ' + 'RowDense, grid_auto_rows: "", grid_column: (end: "", span: "", start: ""), grid_row: ' + '(end: "", span: "", start: ""), grid_template_columns: "", grid_template_rows: "", ' + 'height: Vw(0.17467059195041656), justify_content: FlexEnd, justify_items: Stretch, ' + 'justify_self: End, left: Px(0.45692843198776245), margin: (bottom: ' + 'VMax(0.9824132323265076), left: Vw(0.6133268475532532), right: Auto, top: ' + 'Vh(0.004055144265294075)), max_height: Px(0.1949533075094223), max_width: ' + 'Percent(0.5363451838493347), min_height: VMax(0.8981962203979492), min_width: ' + 'Percent(0.666689932346344), overflow: (x: 
Clip, y: Clip), padding: (bottom: ' + 'Vw(0.06499417871236801), left: Vh(0.32468828558921814), right: Vh(0.15641891956329346), ' + 'top: Px(0.9697836637496948)), position_type: Relative, right: Auto, row_gap: Auto, top: ' + 'Vw(0.3011642396450043), width: Vh(0.6578909158706665))', + 'bevy_ui::ui_node::UiImage': '(flip_x: true, flip_y: false, texture: Weak(Uuid(uuid: ' + '"73b3b118-7d01-4778-8bcc-4e79055f5d22")))', + 'bevy_ui::ui_node::ZIndex': 'Local(54)', + 'bevy_ui::widget::button::Button': '()', + 'bevy_ui::widget::image::UiImageSize': '(size: Vec2(x:0.5714026093482971, y:0.42888906598091125))', + 'bevy_ui::widget::label::Label': '()', + 'bevy_ui::widget::text::TextFlags': '(needs_new_measure_func: true, needs_recompute: false)', + 'bevy_window::window::PrimaryWindow': '()', + 'bevy_window::window::Window': '(canvas: None, composite_alpha_mode: PostMultiplied, cursor: (grab_mode: Confined, ' + 'hit_test: true, icon: Default, visible: false), decorations: false, enabled_buttons: ' + '(close: true, maximize: false, minimize: true), focused: false, ime_enabled: true, ' + 'ime_position: Vec2(x:0.8106188178062439, y:0.03440357372164726), internal: ' + '(maximize_request: Some(false), minimize_request: None, physical_cursor_position: ' + 'None), mode: SizedFullscreen, name: None, position: Centered(Current), present_mode: ' + 'Immediate, prevent_default_event_handling: false, resizable: false, ' + 'resize_constraints: (max_height: 0.42126399278640747, max_width: 0.8268482089042664, ' + 'min_height: 0.2623211145401001, min_width: 0.17467059195041656), resolution: ' + '(physical_height: 38, physical_width: 84, scale_factor: 0.36258742213249207, ' + 'scale_factor_override: Some(0.7678378224372864)), title: "hotmbsah", transparent: ' + 'false, visible: false, window_level: Normal, window_theme: "")'} \ No newline at end of file diff --git a/tools/blenvy/tests/expected_screenshot.png b/tools/blenvy/tests/expected_screenshot.png new file mode 100644 index 0000000..040ba94 
import bpy
import pytest


@pytest.fixture
def setup_data(request):
    """Yield the registry schema path; strip test components on teardown.

    Yields:
        dict with a single key ``"schema_path"`` pointing at the Bevy
        ``registry.json`` used by the tests.

    Teardown removes every known Bevy component custom property from the
    active object so that one test's components do not leak into the next.
    NOTE(review): the original defined and registered an ``addfinalizer``
    callback *after* the ``yield`` — i.e. during fixture finalization — which
    is too late for pytest to honour, so the cleanup presumably never ran.
    The teardown logic now lives directly after the ``yield``, which pytest
    executes as the fixture's finalization step.
    """
    print("\nSetting up resources...")

    schema_path = "../../testing/bevy_example/assets/registry.json"

    yield {"schema_path": schema_path}

    # ---- teardown: runs when the generator resumes after the test ----
    print("\nPerforming teardown...")
    registry = bpy.context.window_manager.components_registry

    type_infos = registry.type_infos
    active_object = bpy.context.object  # renamed: don't shadow builtin `object`
    remove_component_operator = bpy.ops.object.remove_bevy_component

    for long_name in type_infos:
        definition = type_infos[long_name]
        component_name = definition["short_name"]
        if component_name in active_object:
            try:
                remove_component_operator(component_name=component_name)
            except Exception:
                # best effort: one component failing to remove must not
                # abort the rest of the cleanup
                pass
"levels_path": levels_path, + "materials_path":materials_path + } + + def finalizer(): + + #other_materials_path = os.path.join("../../testing", "other_materials") + + print("\nPerforming teardown...") + if os.path.exists(blueprints_path): + shutil.rmtree(blueprints_path) + + if os.path.exists(levels_path): + shutil.rmtree(levels_path) + + if os.path.exists(models_path): + shutil.rmtree(models_path) + + if os.path.exists(materials_path): + shutil.rmtree(materials_path) + + diagnostics_file_path = os.path.join(root_path, "bevy_diagnostics.json") + if os.path.exists(diagnostics_file_path): + os.remove(diagnostics_file_path) + + hierarchy_file_path = os.path.join(root_path, "bevy_hierarchy.json") + if os.path.exists(hierarchy_file_path): + os.remove(hierarchy_file_path) + + screenshot_observed_path = os.path.join(root_path, "screenshot.png") + if os.path.exists(screenshot_observed_path): + os.remove(screenshot_observed_path) + + request.addfinalizer(finalizer) + + return None + + +""" +- calls exporter on the testing scene +- launches bevy app & checks for output +- checks screenshot, hierarchy & diagnostics files generated on the bevy side against reference files +- if all worked => test is a-ok +- removes generated files +""" +def test_export_complex(setup_data): + root_path = setup_data["root_path"] + auto_export_operator = bpy.ops.export_scenes.auto_gltf + + # with change detection + # first, configure things + # we use the global settings for that + export_props = { + "main_scene_names" : ['World'], + "library_scene_names": ['Library'], + } + gltf_settings = { + "export_animations": True, + "export_optimize_animation_size": False + } + + # store settings for the auto_export part + stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings") + stored_auto_settings.clear() + stored_auto_settings.write(json.dumps(export_props)) + + # and store settings for 
the gltf part + stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings") + stored_gltf_settings.clear() + stored_gltf_settings.write(json.dumps(gltf_settings)) + + # move the main cube + bpy.data.objects["Cube"].location = [1, 0, 0] + # move the cube in the library + bpy.data.objects["Blueprint1_mesh"].location = [1, 2, 1] + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(root_path), + #export_blueprints_path = os.path.join("assets", "models", "library"), + export_output_folder = os.path.join("assets", "models"), #"./models", + #export_levels_path = os.path.join("assets", "models"), + + export_scene_settings=True, + export_blueprints=True, + export_materials_library=True + ) + # blueprint1 => has an instance, got changed, should export + # blueprint2 => has NO instance, but marked as asset, should export + # blueprint3 => has NO instance, not marked as asset, used inside blueprint 4: should export + # blueprint4 => has an instance, with nested blueprint3, should export + # blueprint5 => has NO instance, not marked as asset, should NOT export + + assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint2.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint3.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint4_nested.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint5.glb")) == False + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint6_animated.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], 
"Blueprint7_hierarchy.glb")) == True + + # 'assets_list_'+scene.name+"_components" should have been removed after the export + assets_list_object_name = "assets_list_"+"World"+"_components" + assets_list_object_present = assets_list_object_name in bpy.data.objects + assert assets_list_object_present == False + + # now run bevy + command = "cargo run --features bevy/dynamic_linking" + FNULL = open(os.devnull, 'w') #use this if you want to suppress output to stdout from the subprocess + return_code = subprocess.call(["cargo", "run", "--features", "bevy/dynamic_linking"], cwd=root_path) + print("RETURN CODE OF BEVY APP", return_code) + assert return_code == 0 + + with open(os.path.join(root_path, "bevy_diagnostics.json")) as diagnostics_file: + diagnostics = json.load(diagnostics_file) + print("diagnostics", diagnostics) + assert diagnostics["animations"] == True + assert diagnostics["empty_found"] == True + assert diagnostics["blueprints_list_found"] == True + assert diagnostics["exported_names_correct"] == True + + with open(os.path.join(root_path, "bevy_hierarchy.json")) as hierarchy_file: + with open(os.path.join(os.path.dirname(__file__), "expected_bevy_hierarchy.json")) as expexted_hierarchy_file: + hierarchy = json.load(hierarchy_file) + expected = json.load(expexted_hierarchy_file) + assert sorted(hierarchy.items()) == sorted(expected.items()) + + # last but not least, do a visual compare + screenshot_expected_path = os.path.join(os.path.dirname(__file__), "expected_screenshot.png") + screenshot_observed_path = os.path.join(root_path, "screenshot.png") + img_a = Image.open(screenshot_expected_path) + img_b = Image.open(screenshot_observed_path) + img_diff = Image.new("RGBA", img_a.size) + mismatch = pixelmatch(img_a, img_b, img_diff, includeAA=True) + print("image mismatch", mismatch) + assert mismatch < 50 + + + diff --git a/tools/blenvy/tests/test_bevy_integration_prepare.py b/tools/blenvy/tests/test_bevy_integration_prepare.py new file mode 100644 index 
0000000..6501f8d --- /dev/null +++ b/tools/blenvy/tests/test_bevy_integration_prepare.py @@ -0,0 +1,69 @@ +import os +import json +import pytest +import bpy + +@pytest.fixture +def setup_data(request): + print("\nSetting up resources...") + + root_path = "../../testing/bevy_example" + assets_root_path = os.path.join(root_path, "assets") + blueprints_path = os.path.join(assets_root_path, "blueprints") + levels_path = os.path.join(assets_root_path, "levels") + + models_path = os.path.join(assets_root_path, "models") + materials_path = os.path.join(assets_root_path, "materials") + yield { + "root_path": root_path, + "models_path": models_path, + "blueprints_path": blueprints_path, + "levels_path": levels_path, + "materials_path":materials_path + } + +# this runs the external blueprints file +def test_export_external_blueprints(setup_data): + root_path = setup_data["root_path"] + auto_export_operator = bpy.ops.export_scenes.auto_gltf + + # with change detection + # first, configure things + # we use the global settings for that + export_props = { + "main_scene_names" : [], + "library_scene_names": ['Library'], + } + gltf_settings = { + "export_animations": True, + "export_optimize_animation_size": False + } + + # store settings for the auto_export part + stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings") + stored_auto_settings.clear() + stored_auto_settings.write(json.dumps(export_props)) + + # and store settings for the gltf part + stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings") + stored_gltf_settings.clear() + stored_gltf_settings.write(json.dumps(gltf_settings)) + + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(root_path), + #export_blueprints_path = 
os.path.join("assets", "models", "library"), + #export_output_folder = os.path.join("assets", "models"), #"./models", + #export_levels_path = os.path.join("assets", "models"), + + export_scene_settings=False, + export_blueprints=True, + export_materials_library=True, + export_marked_assets= True + ) + + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "External_blueprint.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "External_blueprint2.glb")) == True + assert os.path.exists(os.path.join(setup_data["blueprints_path"], "External_blueprint3.glb")) == True \ No newline at end of file diff --git a/tools/blenvy/tests/test_change_tracking.py b/tools/blenvy/tests/test_change_tracking.py new file mode 100644 index 0000000..1f82697 --- /dev/null +++ b/tools/blenvy/tests/test_change_tracking.py @@ -0,0 +1,270 @@ +import bpy +import os +import json +import pytest +import shutil +import pathlib +import mathutils + +from .test_helpers import prepare_auto_export, run_auto_export_and_compare + +@pytest.fixture +def setup_data(request): + print("\nSetting up resources...") + #other_materials_path = os.path.join("../../testing", "other_materials") + root_path = "../../testing/bevy_example" + assets_root_path = os.path.join(root_path, "assets") + blueprints_path = os.path.join(assets_root_path, "blueprints") + levels_path = os.path.join(assets_root_path, "levels") + + models_path = os.path.join(assets_root_path, "models") + materials_path = os.path.join(assets_root_path, "materials") + + yield { + "root_path": root_path, + "models_path": models_path, + "blueprints_path": blueprints_path, + "levels_path": levels_path, + "materials_path":materials_path + } + + def finalizer(): + print("\nPerforming teardown...") + if os.path.exists(blueprints_path): + shutil.rmtree(blueprints_path) + + if os.path.exists(levels_path): + shutil.rmtree(levels_path) + + if os.path.exists(models_path): + shutil.rmtree(models_path) + + if 
os.path.exists(materials_path): + shutil.rmtree(materials_path) + + diagnostics_file_path = os.path.join(root_path, "bevy_diagnostics.json") + if os.path.exists(diagnostics_file_path): + os.remove(diagnostics_file_path) + + hierarchy_file_path = os.path.join(root_path, "bevy_hierarchy.json") + if os.path.exists(hierarchy_file_path): + os.remove(hierarchy_file_path) + + screenshot_observed_path = os.path.join(root_path, "screenshot.png") + if os.path.exists(screenshot_observed_path): + os.remove(screenshot_observed_path) + + request.addfinalizer(finalizer) + + return None + +def test_export_change_tracking_custom_properties(setup_data): + # set things up + prepare_auto_export() + + def first_change(): + # now add a custom property to the cube in the main scene & export again + print("----------------") + print("main scene change (custom property)") + print("----------------") + bpy.data.objects["Cube"]["test_property"] = 42 + + run_auto_export_and_compare( + setup_data=setup_data, + changes=[first_change], + expected_changed_files = [["World"]] # only the "world" file should have changed + ) + +def test_export_change_tracking_custom_properties_collection_instances_combine_mode_embed(setup_data): + # set things up + prepare_auto_export({"collection_instances_combine_mode": "Embed"}) + + def first_change(): + # we have no change, but we also have no blueprints exported, because of the embed mode + blueprint1_file_path = os.path.join(setup_data["blueprints_path"], "Blueprint1.glb") + assert os.path.exists(blueprint1_file_path) == False + + def second_change(): + # add a custom property to the cube in the library scene & export again + # this should trigger changes in the main scene as well since the mode is embed & this blueprints has an instance in the main scene + print("----------------") + print("library change (custom property)") + print("----------------") + bpy.data.objects["Blueprint1_mesh"]["test_property"] = 42 + + def third_change(): + # now we set the 
_combine mode of the instance to "split", so auto_export should: + # * not take the changes into account in the main scene + # * export the blueprint (so file for Blueprint1 will be changed) + bpy.data.objects["Blueprint1"]["_combine"] = "Split" + + def fourth_change(): + print("----------------") + print("library change (custom property, forced 'Split' combine mode )") + print("----------------") + + bpy.data.objects["Blueprint1_mesh"]["test_property"] = 151 + + run_auto_export_and_compare( + setup_data=setup_data, + changes=[first_change, second_change, third_change, fourth_change], + expected_changed_files = [[], ["World"], ["World","Blueprint1"], ["World"]] # only the "world" file should have changed + ) + + +def test_export_change_tracking_light_properties(setup_data): + # set things up + prepare_auto_export() + + def first_change(): + # now add a custom property to the cube in the main scene & export again + print("----------------") + print("main scene change (light, energy)") + print("----------------") + + bpy.data.lights["Light"].energy = 100 + #world_file_path = os.path.join(setup_data["levels_path"], "World.glb") + #assert os.path.exists(world_file_path) == True + + def second_change(): + print("----------------") + print("main scene change (light, shadow_cascade_count)") + print("----------------") + + bpy.data.lights["Light"].shadow_cascade_count = 2 + + def third_change(): + print("----------------") + print("main scene change (light, use_shadow)") + print("----------------") + + bpy.data.lights["Light"].use_shadow = False + + run_auto_export_and_compare( + setup_data=setup_data, + changes=[first_change, second_change, third_change], + expected_changed_files = [["World"], ["World"], ["World"]] # only the "world" file should have changed + ) + + +def test_export_change_tracking_camera_properties(setup_data): + # set things up + prepare_auto_export() + + def first_change(): + print("----------------") + print("main scene change (camera)") + 
print("----------------") + + bpy.data.cameras["Camera"].angle = 0.5 + + run_auto_export_and_compare( + setup_data=setup_data, + changes=[first_change], + expected_changed_files = [["World"]] # only the "world" file should have changed + ) + +def test_export_change_tracking_material_properties(setup_data): + # set things up + prepare_auto_export() + + def first_change(): + print("----------------") + print("main scene change (material, clip)") + print("----------------") + + bpy.data.materials["Material.001"].blend_method = 'CLIP' + + def second_change(): + print("----------------") + print("main scene change (material, alpha_threshold)") + print("----------------") + bpy.data.materials["Material.001"].alpha_threshold = 0.2 + + def third_change(): + print("----------------") + print("main scene change (material, diffuse_color)") + print("----------------") + bpy.data.materials["Material.001"].diffuse_color[0] = 0.2 + + run_auto_export_and_compare( + setup_data=setup_data, + changes=[first_change, second_change, third_change], + expected_changed_files = [["Blueprint1", "Blueprint7_hierarchy"], ["Blueprint1", "Blueprint7_hierarchy"], ["Blueprint1", "Blueprint7_hierarchy"]] + # the material is assigned to Blueprint 1 so in normal (split mode) only the "Blueprint1" file should have changed + # the same material is assigned to Blueprint 7 so in normal (split mode) only the "Blueprint1" file should have changed + ) + + +""" +- setup gltf parameters & auto_export parameters +- calls exporter on the testing scene +- saves timestamps of generated files +- changes things in the main scene and/or library +- checks if timestamps have changed +- if all worked => test is a-ok +- removes generated files + +""" +def test_export_various_chained_changes(setup_data): + + def first_change(): + # export again with no changes + print("----------------") + print("no changes") + print("----------------") + world_file_path = os.path.join(setup_data["levels_path"], "World.glb") + assert 
os.path.exists(world_file_path) == True + + def second_change(): + # now move the main cube & export again + print("----------------") + print("main scene change") + print("----------------") + + bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but .. + bpy.data.objects["Cube"].location = [1, 0, 0] + + def third_change(): + # now same, but move the cube in the library + print("----------------") + print("library change (blueprint) ") + print("----------------") + bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but .. + + bpy.data.objects["Blueprint1_mesh"].location = [1, 2, 1] + + def fourth_change(): + # now change something in a nested blueprint + print("----------------") + print("library change (nested blueprint) ") + print("----------------") + + bpy.data.objects["Blueprint3_mesh"].location= [0, 0.1 ,2] + + def fifth_change(): + # now same, but using an operator + print("----------------") + print("change using operator") + print("----------------") + + with bpy.context.temp_override(active_object=bpy.data.objects["Cube"], selected_objects=[bpy.data.objects["Cube"]], scene=bpy.data.scenes["World"]): + print("translate using operator") + bpy.ops.transform.translate(value=mathutils.Vector((2.0, 1.0, -5.0))) + bpy.ops.transform.rotate(value=0.378874, constraint_axis=(False, False, True), mirror=False, proportional_edit_falloff='SMOOTH', proportional_size=1) + bpy.ops.object.transform_apply() + bpy.ops.transform.translate(value=(3.5, 0, 0), constraint_axis=(True, False, False)) + + run_auto_export_and_compare( + setup_data=setup_data, + changes=[first_change, second_change, third_change, fourth_change, fifth_change], + expected_changed_files = [ + [], + ["World"], # only the "world" file should have changed + ["Blueprint1"],# The blueprint1 file should have changed, since that is the collection we changed, not the world, since we are in "split mode by 
default" + ["Blueprint3"],# The blueprint3 file should have changed, since that is the collection we changed # the blueprint4 file NOT, since, while it contains an instance of the collection we changed, the default export mode is "split" + ["World"] + ] + ) + + #bpy.context.window_manager.auto_export_tracker.enable_change_detection() # FIXME: should not be needed, but .. diff --git a/tools/blenvy/tests/test_changed_parameters.py b/tools/blenvy/tests/test_changed_parameters.py new file mode 100644 index 0000000..4a38869 --- /dev/null +++ b/tools/blenvy/tests/test_changed_parameters.py @@ -0,0 +1,257 @@ +import bpy +import os +import json +import pytest +import shutil + +from .test_helpers import prepare_auto_export + +@pytest.fixture +def setup_data(request): + print("\nSetting up resources...") + root_path = "../../testing/bevy_example" + assets_root_path = os.path.join(root_path, "assets") + blueprints_path = os.path.join(assets_root_path, "blueprints") + levels_path = os.path.join(assets_root_path, "levels") + + models_path = os.path.join(assets_root_path, "models") + materials_path = os.path.join(assets_root_path, "materials") + + #other_materials_path = os.path.join("../../testing", "other_materials") + yield { + "root_path": root_path, + "models_path": models_path, + "blueprints_path": blueprints_path, + "levels_path": levels_path, + "materials_path":materials_path + } + + def finalizer(): + + + print("\nPerforming teardown...") + if os.path.exists(blueprints_path): + shutil.rmtree(blueprints_path) + + if os.path.exists(levels_path): + shutil.rmtree(levels_path) + + if os.path.exists(models_path): + shutil.rmtree(models_path) + + if os.path.exists(materials_path): + shutil.rmtree(materials_path) + + diagnostics_file_path = os.path.join(root_path, "bevy_diagnostics.json") + if os.path.exists(diagnostics_file_path): + os.remove(diagnostics_file_path) + + hierarchy_file_path = os.path.join(root_path, "bevy_hierarchy.json") + if 
os.path.exists(hierarchy_file_path): + os.remove(hierarchy_file_path) + + screenshot_observed_path = os.path.join(root_path, "screenshot.png") + if os.path.exists(screenshot_observed_path): + os.remove(screenshot_observed_path) + + request.addfinalizer(finalizer) + + return None + + +""" +- setup gltf parameters & auto_export parameters +- calls exporter on the testing scene +- saves timestamps of generated files +- changes exporter parameters +- checks if timestamps have changed +- if all worked => test is a-ok +- removes generated files +""" + +def test_export_no_parameters(setup_data): + auto_export_operator = bpy.ops.export_scenes.auto_gltf + + # make sure to clear any parameters first + stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings") + stored_auto_settings.clear() + stored_auto_settings.write(json.dumps({})) + + # first test exporting without any parameters set, this should not export anything + auto_export_operator( + auto_export=True, + direct_mode=True, + export_materials_library=True, + export_root_folder = os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + ) + + world_file_path = os.path.join(setup_data["levels_path"], "World.glb") + assert os.path.exists(world_file_path) != True + +def test_export_auto_export_parameters_only(setup_data): + auto_export_operator = bpy.ops.export_scenes.auto_gltf + export_props = { + "main_scene_names" : ['World'], + "library_scene_names": ['Library'], + } + + # store settings for the auto_export part + stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings") + stored_auto_settings.clear() + stored_auto_settings.write(json.dumps(export_props)) + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = 
os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + export_materials_library=True + ) + + world_file_path = os.path.join(setup_data["levels_path"], "World.glb") + assert os.path.exists(world_file_path) == True + +def test_export_changed_parameters(setup_data): + auto_export_operator = bpy.ops.export_scenes.auto_gltf + + # with change detection + # first, configure things + # we use the global settings for that + export_props = { + "main_scene_names" : ['World'], + "library_scene_names": ['Library'], + } + + # store settings for the auto_export part + stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings") + stored_auto_settings.clear() + stored_auto_settings.write(json.dumps(export_props)) + + gltf_settings = { + "export_animations": True, + "export_optimize_animation_size": False + } + # and store settings for the gltf part + stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings") + stored_gltf_settings.clear() + stored_gltf_settings.write(json.dumps(gltf_settings)) + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + export_scene_settings=True, + export_blueprints=True, + export_materials_library=True + ) + + world_file_path = os.path.join(setup_data["levels_path"], "World.glb") + assert os.path.exists(world_file_path) == True + + blueprints_path = setup_data["blueprints_path"] + model_library_file_paths = list(map(lambda file_name: os.path.join(blueprints_path, file_name), sorted(os.listdir(blueprints_path)))) + modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths)) + + # export again, with no param changes: this should NOT 
export anything again, ie, modification times should be the same + print("second export") + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + export_scene_settings=True, + export_blueprints=True, + export_materials_library=True + ) + + modification_times_no_change = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths)) + assert modification_times_no_change == modification_times_first + + # export again, this time changing the gltf settings + print("third export, changed gltf parameters") + + gltf_settings = { + "export_animations": True, + "export_optimize_animation_size": True + } + + stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings") + stored_gltf_settings.clear() + stored_gltf_settings.write(json.dumps(gltf_settings)) + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + export_scene_settings=True, + export_blueprints=True, + export_materials_library=True + ) + + modification_times_changed_gltf = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths)) + assert modification_times_changed_gltf != modification_times_first + modification_times_first = modification_times_changed_gltf + + # now run it again, without changes, timestamps should be identical + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + export_scene_settings=True, + export_blueprints=True, + export_materials_library=True + ) + + modification_times_changed_gltf = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths)) + assert 
modification_times_changed_gltf == modification_times_first + modification_times_first = modification_times_changed_gltf + + + # export again, this time changing the auto_export settings + print("fourth export, changed auto parameters") + + export_props = { + "main_scene_names" : ['World'], + "library_scene_names": ['Library'], + "export_materials_library": False # we need to add it here, as the direct settings set on the operator will only be used for the NEXT run + } + + # store settings for the auto_export part + stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings") + stored_auto_settings.clear() + stored_auto_settings.write(json.dumps(export_props)) + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + export_scene_settings=True, + export_blueprints=True, + export_materials_library=False + ) + + modification_times_changed_auto = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths)) + assert modification_times_changed_auto != modification_times_first + modification_times_first = modification_times_changed_auto + + # now run it again, withouth changes, timestamps should be identical + + auto_export_operator( + auto_export=True, + direct_mode=True, + export_root_folder = os.path.abspath(setup_data["root_path"]), + export_output_folder="./models", + export_scene_settings=True, + export_blueprints=True, + export_materials_library=False + ) + + modification_times_changed_gltf = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths)) + assert modification_times_changed_gltf == modification_times_first + modification_times_first = modification_times_changed_gltf diff --git a/tools/blenvy/tests/test_components.py b/tools/blenvy/tests/test_components.py new file mode 100644 index 
# --- (diff metadata) 0000000..ac3295e, new file tools/blenvy/tests/test_components.py ---
import bpy
import pprint

from ..propGroups.conversions_to_prop_group import property_group_value_from_custom_property_value
from ..propGroups.conversions_from_prop_group import property_group_value_to_custom_property_value
from .component_values_shuffler import component_values_shuffler
from .expected_component_values import (expected_custom_property_values, expected_custom_property_values_randomized)
from ..components.metadata import get_bevy_component_value_by_long_name, get_bevy_components, upsert_bevy_component

from .setup_data import setup_data

def test_components_should_generate_correct_custom_properties(setup_data):
    """Adding every component from the registry must serialize each one into
    the expected custom-property value on the active object."""
    registry = bpy.context.window_manager.components_registry
    registry.schemaPath = setup_data["schema_path"]
    bpy.ops.object.reload_registry()

    type_infos = registry.type_infos
    obj = bpy.context.object  # renamed from `object` (shadowed the builtin)

    add_component_operator = bpy.ops.object.add_bevy_component
    errors = []
    addable_components = []
    added_components = []

    custom_property_values = {}

    for long_name in type_infos:
        definition = type_infos[long_name]
        long_name = definition["long_name"]
        # only actual components are addable
        if not definition.get('isComponent', False):
            continue

        addable_components.append(long_name)

        try:
            add_component_operator(component_type=long_name)

            property_group_name = registry.get_propertyGroupName_from_longName(long_name)

            target_components_metadata = obj.components_meta.components
            component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None)
            propertyGroup = getattr(component_meta, property_group_name, None)
            added_components.append(long_name)
            custom_property_values[long_name] = get_bevy_component_value_by_long_name(obj, long_name)
            assert get_bevy_component_value_by_long_name(obj, long_name) == expected_custom_property_values[long_name]

        except Exception as error:
            errors.append(error)

    pp = pprint.PrettyPrinter(depth=14, width=120)
    print("CUSTOM PROPERTY VALUES")
    pp.pprint(custom_property_values)

    assert len(errors) == 0
    assert len(added_components) == 173  # total component count in the test schema


def test_components_should_generate_correct_custom_properties_with_randomized_values(setup_data):
    """Same as above, but with (seeded, deterministic) randomized field values."""
    registry = bpy.context.window_manager.components_registry
    registry.schemaPath = setup_data["schema_path"]
    bpy.ops.object.reload_registry()

    type_infos = registry.type_infos
    obj = bpy.context.object

    add_component_operator = bpy.ops.object.add_bevy_component
    errors = []
    error_components = []
    addable_components = []
    added_components = []

    custom_property_values = {}

    for long_name in type_infos:
        definition = type_infos[long_name]
        long_name = definition["long_name"]
        if not definition.get('isComponent', False):
            continue

        addable_components.append(long_name)

        try:
            add_component_operator(component_type=long_name)
            property_group_name = registry.get_propertyGroupName_from_longName(long_name)

            target_components_metadata = obj.components_meta.components
            component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None)
            propertyGroup = getattr(component_meta, property_group_name, None)
            component_values_shuffler(seed=10, property_group=propertyGroup, definition=definition, registry=registry)

            added_components.append(long_name)
            custom_property_values[long_name] = get_bevy_component_value_by_long_name(obj, long_name)
            assert get_bevy_component_value_by_long_name(obj, long_name) == expected_custom_property_values_randomized[long_name]

        except Exception as error:
            errors.append(error)
            error_components.append(long_name)

    pp = pprint.PrettyPrinter(depth=14, width=120)
    print("CUSTOM PROPERTY VALUES")
    pp.pprint(custom_property_values)

    print("error_components", error_components)
    assert len(errors) == 0
    assert len(added_components) == 173

def test_components_should_generate_correct_propertyGroup_values_from_custom_properties(setup_data):
    """Round-trip: custom property -> property group -> custom property must
    reproduce the original value for every component."""
    registry = bpy.context.window_manager.components_registry
    registry.schemaPath = setup_data["schema_path"]
    bpy.ops.object.reload_registry()

    type_infos = registry.type_infos
    obj = bpy.context.object

    add_component_operator = bpy.ops.object.add_bevy_component
    errors = []
    addable_components = []
    added_components = []
    failing_components = []

    for long_name in type_infos:
        definition = type_infos[long_name]
        long_name = definition["long_name"]
        if not definition.get('isComponent', False):
            continue

        addable_components.append(long_name)

        try:
            add_component_operator(component_type=long_name)
            property_group_name = registry.get_propertyGroupName_from_longName(long_name)

            target_components_metadata = obj.components_meta.components
            component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None)
            propertyGroup = getattr(component_meta, property_group_name, None)
            added_components.append(long_name)
            # randomise values
            component_values_shuffler(seed=10, property_group=propertyGroup, definition=definition, registry=registry)
            custom_property_value = get_bevy_component_value_by_long_name(obj, long_name)

            # first check if custom property value matches what we expect
            assert custom_property_value == expected_custom_property_values_randomized[long_name]

            # we update propgroup values from custom property values
            property_group_value_from_custom_property_value(propertyGroup, definition, registry, custom_property_value, nesting=[])
            # and then generate it back: must round-trip exactly
            custom_property_value_regen = property_group_value_to_custom_property_value(propertyGroup, definition, registry, None)
            assert custom_property_value_regen == expected_custom_property_values_randomized[long_name]

        except Exception as error:
            errors.append(error)
            failing_components.append(long_name)

    for index, error in enumerate(errors):
        print("ERROR", error, failing_components[index])
    assert len(errors) == 0
    assert len(added_components) == 173


def test_remove_components(setup_data):
    """Every addable component must also be removable without error."""
    registry = bpy.context.window_manager.components_registry
    registry.schemaPath = setup_data["schema_path"]
    bpy.ops.object.reload_registry()

    type_infos = registry.type_infos

    add_component_operator = bpy.ops.object.add_bevy_component
    errors = []
    addable_components = []
    added_components = []

    for long_name in type_infos:
        definition = type_infos[long_name]
        long_name = definition["long_name"]
        if not definition.get('isComponent', False):
            continue

        addable_components.append(long_name)

        try:
            add_component_operator(component_type=long_name)
            added_components.append(long_name)
        except Exception as error:
            errors.append(error)
    assert len(errors) == 0

    # now test component removal
    errors.clear()
    remove_component_operator = bpy.ops.object.remove_bevy_component
    for long_name in added_components:
        try:
            remove_component_operator(component_name=long_name)
        except Exception as error:
            errors.append(error)
    assert len(errors) == 0

def test_copy_paste_components(setup_data):
    """Copying a component from one object and pasting it on a fresh object
    must carry the property-group values over."""
    context = bpy.context
    registry = context.window_manager.components_registry
    registry.schemaPath = setup_data["schema_path"]
    bpy.ops.object.reload_registry()

    long_name = "bevy_example::test_components::BasicTest"

    # SOURCE object setup
    add_component_operator = bpy.ops.object.add_bevy_component
    add_component_operator(component_type=long_name)

    property_group_name = registry.get_propertyGroupName_from_longName(long_name)
    obj = context.object

    target_components_metadata = obj.components_meta.components
    component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None)
    propertyGroup = getattr(component_meta, property_group_name, None)

    setattr(propertyGroup, propertyGroup.field_names[0], 25.0)

    copy_component_operator = bpy.ops.object.copy_bevy_component
    copy_component_operator(source_component_name=long_name, source_object_name=obj.name)

    # ---------------------------------------
    # TARGET object
    bpy.ops.mesh.primitive_cube_add()
    new_cube = bpy.context.selected_objects[0]
    # change name
    new_cube.name = "TargetCube"
    target_components_metadata = new_cube.components_meta.components
    component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None)

    # first check that there is no component currently (idiom fix: `is None`)
    assert component_meta is None

    paste_component_operator = bpy.ops.object.paste_bevy_component
    paste_component_operator()

    target_components_metadata = new_cube.components_meta.components
    component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None)

    # now after pasting to the new object, it should have component meta
    assert component_meta is not None

    # and then check if the propertyGroup of the target object is correct
    propertyGroup = getattr(component_meta, property_group_name, None)
    assert propertyGroup.field_names == ['a', 'b', 'c']

    a_fieldValue = getattr(propertyGroup, propertyGroup.field_names[0])
    assert a_fieldValue == 25.0

# --- (diff metadata) new file tools/blenvy/tests/test_conversions.py ---
# --- tools/blenvy/tests/test_conversions.py ---
from ..propGroups.conversions_to_prop_group import parse_struct_string, parse_tuplestruct_string


def test_parse_tuplestruct_string():
    """Exercise tuple-struct parsing.

    `start_nesting` is the number of bracket levels to descend before
    splitting the content into top-level item strings; nested values are
    returned unparsed.
    """
    assert parse_tuplestruct_string("(A)", start_nesting=1) == ['A']
    assert parse_tuplestruct_string("[(A)]", start_nesting=1) == ['(A)']

    assert parse_tuplestruct_string("(a: 45, b: 65)", start_nesting=1) == ['a: 45', 'b: 65']
    assert parse_tuplestruct_string("[(a: 45, b: 65)]", start_nesting=1) == ['(a: 45, b: 65)']
    assert parse_tuplestruct_string("45, 65, 'bla'", start_nesting=0) == ['45', '65', "'bla'"]

    assert parse_tuplestruct_string("[(A), (B)]", start_nesting=1) == ['(A)', '(B)']

    assert parse_tuplestruct_string("([(-1.8, 2.9), (0.0, -62)])", start_nesting=1) == ['[(-1.8, 2.9), (0.0, -62)]']
    assert parse_tuplestruct_string("([(-1.8, 2.9), (0.0, -62)])", start_nesting=2) == ['(-1.8, 2.9)', '(0.0, -62)']
    assert parse_tuplestruct_string("([(-1.8, 2.9), (0.0, -62), (25)])", start_nesting=2) == ['(-1.8, 2.9)', '(0.0, -62)', '(25)']

    assert parse_tuplestruct_string("(Vec3(x:-2.0, y:120.0, z:1.0))", start_nesting=2) == ['x:-2.0', 'y:120.0', 'z:1.0']

    assert parse_tuplestruct_string("(9)", start_nesting=1) == ['9']
    assert parse_tuplestruct_string('("toto")', start_nesting=1) == ['"toto"']

    assert parse_tuplestruct_string("(Rgba(red:0.0, green:0.2, blue:0.9, alpha:1.0))", start_nesting=1) == ['Rgba(red:0.0, green:0.2, blue:0.9, alpha:1.0)']
    assert parse_tuplestruct_string("(Rgba(red:0.0, green:0.2, blue:0.9, alpha:1.0))", start_nesting=2) == ['red:0.0', 'green:0.2', 'blue:0.9', 'alpha:1.0']

    assert parse_tuplestruct_string("([(-1.2, 2.9), (0.0, -62)])", start_nesting=2) == ['(-1.2, 2.9)', '(0.0, -62)']

    assert parse_tuplestruct_string("([Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0), Rgba(red:1.0, green:0.0, blue:0.5, alpha:1.0)])", start_nesting=2) == ['Rgba(red:1.0, green:1.0, blue:0.0, alpha:1.0)', 'Rgba(red:1.0, green:0.0, blue:0.5, alpha:1.0)']
    assert parse_tuplestruct_string('(7.2, 2607, "sdf")', start_nesting=1) == ['7.2', '2607', '"sdf"']

    assert parse_tuplestruct_string('[a, b]', start_nesting=1) == ['a', 'b']
    assert parse_tuplestruct_string('[]', start_nesting=1) == []


def test_parse_struct_string():
    """Exercise struct parsing into a {field_name: raw_value_string} dict;
    nested structs/tuples are kept as unparsed strings."""
    assert parse_struct_string("a: 45, b:65") == {'a': '45', 'b': '65'}
    assert parse_struct_string("x:-2.0, y:120.0, z:1.0") == {'x': '-2.0', 'y': '120.0', 'z': '1.0'}

    assert parse_struct_string("enabled: true") == {'enabled': 'true'}
    assert parse_struct_string("(enabled: true)", start_nesting=1) == {'enabled': 'true'}

    assert parse_struct_string("(filters: (25), memberships: (5))", start_nesting=1) == {'filters': '(25)', 'memberships': '(5)'}
    assert parse_struct_string("groups: 0", start_nesting=0) == {'groups': '0'}
    assert parse_struct_string("(groups: 0)", start_nesting=1) == {'groups': '0'}

    assert parse_struct_string("(composite_mode: EnergyConserving, high_pass_frequency: 4.0, intensity: 0.0, low_frequency_boost: -6.0, low_frequency_boost_curvature: 4.1, prefilter_settings: (threshold: -5.1, threshold_softness: 2.1))", start_nesting=1) == {'composite_mode': 'EnergyConserving', 'high_pass_frequency': '4.0', 'intensity': '0.0', 'low_frequency_boost': '-6.0', 'low_frequency_boost_curvature': '4.1', 'prefilter_settings': '(threshold: -5.1, threshold_softness: 2.1)'}

    assert parse_struct_string("dimensions: UVec3(x:0.0, y:0.0, z:0.0), dynamic_resizing: true, z_config: (far_z_mode: MaxLightRange, first_slice_depth: 0.0)") == {'dimensions': 'UVec3(x:0.0, y:0.0, z:0.0)', 'dynamic_resizing': 'true', 'z_config': '(far_z_mode: MaxLightRange, first_slice_depth: 0.0)'}

    assert parse_struct_string('(inverse_bindposes: Strong(""), joints: [4294967295, 4294967295, 4294967295])', start_nesting=1) == {'inverse_bindposes': 'Strong("")', 'joints': '[4294967295, 4294967295, 4294967295]'}


# --- tools/blenvy/tests/test_export_parameters.py ---
import bpy
import os
import subprocess
import json
import pytest
import shutil


@pytest.fixture
def setup_data(request):
    """Yield the bevy_example asset folder paths; delete every generated output
    folder on teardown."""
    print("\nSetting up resources...")
    root_path = "../../testing/bevy_example"
    assets_root_path = os.path.join(root_path, "assets")

    paths = {
        "root_path": root_path,
        "models_path": os.path.join(assets_root_path, "models"),
        "blueprints_path": os.path.join(assets_root_path, "blueprints"),
        "levels_path": os.path.join(assets_root_path, "levels"),
        "materials_path": os.path.join(assets_root_path, "materials"),
        "other_materials_path": os.path.join(assets_root_path, "other_materials"),
        "other_blueprints_path": os.path.join(assets_root_path, "other_blueprints"),
    }

    yield paths

    # Teardown: code after `yield` runs once the test is done.
    # (The original defined and registered a finalizer *after* the yield, i.e.
    # during teardown itself, which is too late for request.addfinalizer to
    # take effect - the cleanup now runs directly instead.)
    print("\nPerforming teardown...")
    for key in ("blueprints_path", "levels_path", "models_path", "materials_path",
                "other_materials_path", "other_blueprints_path"):
        if os.path.exists(paths[key]):
            shutil.rmtree(paths[key])


def get_orphan_data():
    """Return the names of all meshes and objects left behind with zero users."""
    orphan_meshes = [mesh.name for mesh in bpy.data.meshes if mesh.users == 0]
    orphan_objects = [obj.name for obj in bpy.data.objects if obj.users == 0]
    return orphan_meshes + orphan_objects


def _store_global_export_settings():
    """Write the shared main/library scene configuration into the
    ".gltf_auto_export_settings" text block the exporter reads.

    Extracted because every test below performed this identical setup inline.
    """
    export_props = {
        "main_scene_names": ['World'],
        "library_scene_names": ['Library'],
    }
    name = ".gltf_auto_export_settings"
    stored = bpy.data.texts[name] if name in bpy.data.texts else bpy.data.texts.new(name)
    stored.clear()
    stored.write(json.dumps(export_props))


def test_export_do_not_export_blueprints(setup_data):
    """With export_blueprints disabled only the main scene file is produced."""
    _store_global_export_settings()

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="assets/models",
        export_scene_settings=True,
        export_blueprints=False,
    )
    assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb"))
    assert not os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb"))
    assert len(get_orphan_data()) == 0


def test_export_custom_blueprints_path(setup_data):
    """Blueprints land in the folder given by export_blueprints_path."""
    _store_global_export_settings()

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_blueprints_path="assets/other_blueprints",
    )
    assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb"))
    assert os.path.exists(os.path.join(setup_data["root_path"], "assets", "other_blueprints", "Blueprint1.glb"))
    assert len(get_orphan_data()) == 0


def test_export_materials_library(setup_data):
    """export_materials_library produces the shared materials glb."""
    _store_global_export_settings()

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_materials_library=True,
    )
    assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb"))
    assert os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb"))
    assert len(get_orphan_data()) == 0


def test_export_materials_library_custom_path(setup_data):
    """export_materials_path redirects the materials library to another folder."""
    _store_global_export_settings()

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_materials_library=True,
        export_materials_path="assets/other_materials",
    )
    assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb"))
    assert not os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb"))
    assert os.path.exists(os.path.join(setup_data["other_materials_path"], "testing_materials_library.glb"))
    assert len(get_orphan_data()) == 0


def test_export_collection_instances_combine_mode(setup_data):  # There is more in depth testing of this in the "change_tracking" tests
    """'Embed' combine mode must not emit a separate dynamic-objects file."""
    _store_global_export_settings()

    bpy.data.objects["Cube"]["dynamic"] = True

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_blueprints=True,
        collection_instances_combine_mode='Embed',
    )
    assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb"))
    assert not os.path.exists(os.path.join(setup_data["levels_path"], "World_dynamic.glb"))
    assert len(get_orphan_data()) == 0


def test_export_do_not_export_marked_assets(setup_data):
    """With export_marked_assets disabled, only blueprints actually used by the
    main scene (directly or nested) are exported."""
    _store_global_export_settings()

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_marked_assets=False,
    )
    assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb"))
    assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb"))
    assert not os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint2.glb"))
    assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint3.glb"))
    assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint4_nested.glb"))
    assert not os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint5.glb"))
    assert len(get_orphan_data()) == 0


def test_export_separate_dynamic_and_static_objects(setup_data):
    """Dynamic-flagged objects go into a separate <scene>_dynamic.glb file."""
    _store_global_export_settings()

    bpy.data.objects["Cube"]["dynamic"] = True

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_separate_dynamic_and_static_objects=True,
    )
    assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb"))
    assert os.path.exists(os.path.join(setup_data["levels_path"], "World_dynamic.glb"))
    assert len(get_orphan_data()) == 0


def test_export_should_not_generate_orphan_data(setup_data):
    """A plain export must not leave orphaned meshes/objects in bpy.data."""
    _store_global_export_settings()

    bpy.ops.export_scenes.auto_gltf(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
    )
    assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb"))
    assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb"))
    assert len(get_orphan_data()) == 0


# --- tools/blenvy/tests/test_helpers.py ---
import pathlib


def prepare_auto_export(auto_export_overrides=None, gltf_export_settings=None):
    """Store the auto-export and gltf exporter settings in their text blocks.

    auto_export_overrides: extra keys merged over the default main/library
    scene configuration. gltf_export_settings: full gltf exporter settings
    (defaults disable animation export). Both parameters were mutable dict
    defaults in the original signature; None sentinels avoid the shared
    mutable default pitfall while keeping the call-compatible behavior.
    """
    if auto_export_overrides is None:
        auto_export_overrides = {}
    if gltf_export_settings is None:
        gltf_export_settings = {"export_animations": False, "export_optimize_animation_size": False}

    export_props = {
        "main_scene_names": ['World'],
        "library_scene_names": ['Library'],
        **auto_export_overrides
    }

    # store settings for the auto_export part
    stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_auto_settings.clear()
    stored_auto_settings.write(json.dumps(export_props))

    # and store settings for the gltf part
    stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
    stored_gltf_settings.clear()
    stored_gltf_settings.write(json.dumps(gltf_export_settings))
def run_auto_export(setup_data):
    """Run the auto-export operator and snapshot the output files' mtimes.

    Returns (modification_times, mapping) where `mapping` goes from file stem
    to (mtime, index into modification_times). Blueprint files come first,
    then level files, each sorted by name, so indices are stable between runs.
    """
    auto_export_operator = bpy.ops.export_scenes.auto_gltf
    auto_export_operator(
        auto_export=True,
        direct_mode=True,
        export_root_folder=os.path.abspath(setup_data["root_path"]),
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_materials_library=False
    )

    def _sorted_files(folder):
        # empty when the folder does not exist yet (e.g. blueprints disabled)
        if not os.path.exists(folder):
            return []
        return [os.path.join(folder, name) for name in sorted(os.listdir(folder))]

    all_files = _sorted_files(setup_data["blueprints_path"]) + _sorted_files(setup_data["levels_path"])
    modification_times = [os.path.getmtime(path) for path in all_files]

    mapped_files_to_timestamps_and_index = {}
    for index, file_path in enumerate(all_files):
        stem = pathlib.Path(file_path).stem
        mapped_files_to_timestamps_and_index[stem] = (modification_times[index], index)

    return (modification_times, mapped_files_to_timestamps_and_index)


def run_auto_export_and_compare(setup_data, changes, expected_changed_files=None):
    """Apply each change callback, re-export, and assert that exactly the
    expected files (by stem) were re-written and nothing else changed.

    changes: list of zero-argument callables mutating the scene.
    expected_changed_files: per-change list of file stems expected to change.
    """
    expected_changed_files = expected_changed_files if expected_changed_files is not None else []
    (modification_times_first, mapped) = run_auto_export(setup_data)
    for index, change in enumerate(changes):
        change()
        (modification_times, mapped) = run_auto_export(setup_data)

        changed_files = expected_changed_files[index]
        changed_file_indices = [mapped[changed_file][1] for changed_file in changed_files]
        print("changed files", changed_files, changed_file_indices, "mapped", mapped)

        other_files_modification_times = [value for i, value in enumerate(modification_times) if i not in changed_file_indices]
        other_files_modification_times_first = [value for i, value in enumerate(modification_times_first) if i not in changed_file_indices]

        print("other_files_modification_times_new ", other_files_modification_times)
        print("other_files_modification_times_first", other_files_modification_times_first)
        for changed_file_index in changed_file_indices:
            # BUGFIX: the original guard was `changed_file_index in
            # modification_times_first` - membership of an *index* among
            # *timestamps*, which is almost always False and silently skipped
            # the assertion. A bounds check on both snapshots is what was
            # intended.
            if changed_file_index < len(modification_times_first) and changed_file_index < len(modification_times):
                assert modification_times[changed_file_index] != modification_times_first[changed_file_index], f"failure in change: {index}, at file {changed_file_index}"
            # TODO: we should throw an error in the "else" case ?
        assert other_files_modification_times == other_files_modification_times_first, f"failure in change: {index}"

        # reset the comparison baseline for the next change
        modification_times_first = modification_times


# --- tools/blenvy/tests/test_registry.py ---
import bpy
from .setup_data import setup_data


def test_blend(setup_data):
    """Smoke test: after loading the schema, a basic component's property
    group exposes the expected field names."""
    registry = bpy.context.window_manager.components_registry
    registry.schemaPath = setup_data["schema_path"]
    bpy.ops.object.reload_registry()

    long_name = "bevy_example::test_components::BasicTest"

    bpy.ops.object.add_bevy_component(component_type=long_name)

    property_group_name = registry.get_propertyGroupName_from_longName(long_name)
    obj = bpy.context.object

    components_metadata = obj.components_meta.components
    component_meta = next(
        (component for component in components_metadata if component["long_name"] == long_name),
        None,
    )
    propertyGroup = getattr(component_meta, property_group_name, None)

    assert propertyGroup.field_names == ['a', 'b', 'c']


# --- tools/blenvy/tests/test_rename_components.py ---
import json
import re
import bpy
import pprint
import pytest

from ..components.metadata import get_bevy_component_value_by_long_name, get_bevy_components, is_bevy_component_in_object, upsert_bevy_component

from .setup_data import setup_data


# small helpers
def get_component_metadata(object, component_name):
    """Return the components_meta entry for `component_name` on `object`, or None."""
    target_components_metadata = object.components_meta.components
    return next(
        (component for component in target_components_metadata if component["long_name"] == component_name),
        None,
    )
target_components_metadata), None) + return component_meta + +def get_component_propGroup(registry, component_name, component_meta): + # component_type = registry.short_names_to_long_names[component_name] + # add_component_operator = bpy.ops.object.add_bevy_component + property_group_name = registry.get_propertyGroupName_from_longName(component_name) + propertyGroup = getattr(component_meta, property_group_name, None) + return propertyGroup + + +def test_rename_component_single_unit_struct(setup_data): + registry = bpy.context.window_manager.components_registry + registry.schemaPath = setup_data["schema_path"] + bpy.ops.object.reload_registry() + + rename_component_operator = bpy.ops.object.rename_bevy_component + object = bpy.context.object + + + source_component_name = "bevy_example::test_components::SomeOldUnitStruct" + target_component_name = "bevy_example::test_components::UnitTest" + upsert_bevy_component(object, source_component_name, '()') + + rename_component_operator(original_name=source_component_name, new_name=target_component_name, target_objects=json.dumps([object.name])) + + is_old_component_in_object = is_bevy_component_in_object(object, source_component_name) + is_new_component_in_object = is_bevy_component_in_object(object, target_component_name) + assert is_old_component_in_object == False + assert is_new_component_in_object == True + assert get_bevy_component_value_by_long_name(object, target_component_name) == '()' + assert get_component_propGroup(registry, target_component_name, get_component_metadata(object, target_component_name)) != None + + +def test_rename_component_single_complex_struct(setup_data): + registry = bpy.context.window_manager.components_registry + registry.schemaPath = setup_data["schema_path"] + bpy.ops.object.reload_registry() + + rename_component_operator = bpy.ops.object.rename_bevy_component + object = bpy.context.object + + + source_component_name = "bevy_example::test_components::ProxyCollider" + target_component_name 
= "bevy_gltf_worlflow_examples_common_rapier::physics::physics_replace_proxies::Collider" + upsert_bevy_component(object, source_component_name, 'Capsule(Vec3(x:1.0, y:2.0, z:0.0), Vec3(x:0.0, y:0.0, z:0.0), 3.0)') + + rename_component_operator(original_name=source_component_name, new_name=target_component_name, target_objects=json.dumps([object.name])) + + is_old_component_in_object = is_bevy_component_in_object(object, source_component_name) + is_new_component_in_object = is_bevy_component_in_object(object, target_component_name) + assert is_old_component_in_object == False + assert is_new_component_in_object == True + assert get_bevy_component_value_by_long_name(object, target_component_name) == 'Capsule(Vec3(x:1.0, y:2.0, z:0.0), Vec3(x:0.0, y:0.0, z:0.0), 3.0)' + assert get_component_propGroup(registry, target_component_name, get_component_metadata(object, target_component_name)) != None + + +def test_rename_component_bulk(setup_data): + registry = bpy.context.window_manager.components_registry + registry.schemaPath = setup_data["schema_path"] + bpy.ops.object.reload_registry() + + rename_component_operator = bpy.ops.object.rename_bevy_component + + source_component_name = "bevy_example::test_components::SomeOldUnitStruct" + target_component_name = "bevy_example::test_components::UnitTest" + objects_names = [] + for object in bpy.data.objects: + upsert_bevy_component(object, source_component_name, '()') + objects_names.append(object.name) + + # bulk rename + rename_component_operator(original_name=source_component_name, new_name=target_component_name, target_objects=json.dumps(objects_names)) + + for object in bpy.data.objects: + is_old_component_in_object = is_bevy_component_in_object(object, source_component_name) + is_new_component_in_object = is_bevy_component_in_object(object, target_component_name) + assert is_old_component_in_object == False + assert is_new_component_in_object == True + assert get_bevy_component_value_by_long_name(object, 
target_component_name) == '()' + assert get_component_propGroup(registry, target_component_name, get_component_metadata(object, target_component_name)) != None + +def test_rename_component_single_error_handling(setup_data): + registry = bpy.context.window_manager.components_registry + registry.schemaPath = setup_data["schema_path"] + bpy.ops.object.reload_registry() + + rename_component_operator = bpy.ops.object.rename_bevy_component + object = bpy.context.object + + + source_component_name = "bevy_example::test_components::SomeOldUnitStruct" + target_component_name = "bevy_example::test_components::UnitTest" + upsert_bevy_component(object, source_component_name, 'Capsule(Vec3(x:1.0, y:2.0, z:0.0), Vec3(x:0.0, y:0.0, z:0.0), 3.0)') + + expected_error = f'Error: Failed to rename component: Errors:["wrong custom property values to generate target component: object: \'{object.name}\', error: input string too big for a unit struct"]\n' + expected_error = re.escape(expected_error) + with pytest.raises(Exception, match=expected_error): + rename_component_operator(original_name=source_component_name, new_name=target_component_name, target_objects=json.dumps([object.name])) + + target_component_metadata = get_component_metadata(object, target_component_name) + + is_old_component_in_object = is_bevy_component_in_object(object, source_component_name) + is_new_component_in_object = is_bevy_component_in_object(object, target_component_name) + assert is_old_component_in_object == False + assert is_new_component_in_object == True + assert get_bevy_component_value_by_long_name(object, target_component_name) == 'Capsule(Vec3(x:1.0, y:2.0, z:0.0), Vec3(x:0.0, y:0.0, z:0.0), 3.0)' + assert get_component_propGroup(registry, target_component_name, target_component_metadata) != None + assert target_component_metadata.invalid == True + + assert target_component_metadata.invalid_details == 'wrong custom property value, overwrite them by changing the values in the ui or change them & 
regenerate' + +def test_rename_component_single_error_handling_clean_errors(setup_data): + registry = bpy.context.window_manager.components_registry + registry.schemaPath = setup_data["schema_path"] + bpy.ops.object.reload_registry() + + rename_component_operator = bpy.ops.object.rename_bevy_component + object = bpy.context.object + + + source_component_name = "bevy_example::test_components::SomeOldUnitStruct" + target_component_name = "bevy_example::test_components::UnitTest" + upsert_bevy_component(object, source_component_name, 'Capsule(Vec3(x:1.0, y:2.0, z:0.0), Vec3(x:0.0, y:0.0, z:0.0), 3.0)') + + expected_error = f'Error: Failed to rename component: Errors:["wrong custom property values to generate target component: object: \'{object.name}\', error: input string too big for a unit struct"]\n' + expected_error = re.escape(expected_error) + with pytest.raises(Exception, match=expected_error): + rename_component_operator(original_name=source_component_name, new_name=target_component_name, target_objects=json.dumps([object.name])) + + target_component_metadata = get_component_metadata(object, target_component_name) + + is_old_component_in_object = is_bevy_component_in_object(object, source_component_name) + is_new_component_in_object = is_bevy_component_in_object(object, target_component_name) + assert is_old_component_in_object == False + assert is_new_component_in_object == True + assert get_bevy_component_value_by_long_name(object, target_component_name) == 'Capsule(Vec3(x:1.0, y:2.0, z:0.0), Vec3(x:0.0, y:0.0, z:0.0), 3.0)' + assert get_component_propGroup(registry, target_component_name, target_component_metadata) != None + assert target_component_metadata.invalid == True + + assert target_component_metadata.invalid_details == 'wrong custom property value, overwrite them by changing the values in the ui or change them & regenerate' + + # if we fix the custom property value & regen the ui, it should be all good + regen_component_operator = 
import bpy
from .component_values_shuffler import component_values_shuffler
from ..components.metadata import get_bevy_component_value_by_long_name, get_bevy_components, upsert_bevy_component
from .setup_data import setup_data


def _add_and_shuffle(registry, type_infos, object, long_name, seed):
    """Add the component `long_name` to `object`, randomize its values with
    `component_values_shuffler` using `seed`, and return the property group
    that backs the component (or None if the metadata lookup fails).

    This factors out the stanza that was previously copy-pasted once per
    component type under test.
    """
    bpy.ops.object.add_bevy_component(component_type=long_name)

    property_group_name = registry.get_propertyGroupName_from_longName(long_name)
    components_metadata = object.components_meta.components
    component_meta = next(
        filter(lambda component: component["long_name"] == long_name, components_metadata),
        None,
    )
    property_group = getattr(component_meta, property_group_name, None)

    component_values_shuffler(
        seed=seed,
        property_group=property_group,
        definition=type_infos[long_name],
        registry=registry,
    )
    return property_group


def test_shuffler(setup_data):
    """End-to-end check that `component_values_shuffler` is deterministic:
    for a fixed seed, every supported component type must receive the exact
    same randomized values on every run.

    The simple `BasicTest` component is checked directly on its property
    group; for nested/complex types we compare against the generated custom
    property string instead of walking the nested property groups
    ("cheating / making things easier for us for complex types").
    """
    registry = bpy.context.window_manager.components_registry
    registry.schemaPath = setup_data["schema_path"]
    bpy.ops.object.reload_registry()

    type_infos = registry.type_infos
    object = bpy.context.object

    # Simple component: values are readable directly off the property group.
    property_group = _add_and_shuffle(
        registry, type_infos, object,
        "bevy_example::test_components::BasicTest", seed=10)
    assert getattr(property_group, 'a') == 0.5714026093482971
    assert getattr(property_group, 'b') == 54
    assert getattr(property_group, 'c') == "psagopiu"

    # Complex components, all shuffled with seed 17: expected values are the
    # serialized custom-property strings. Insertion order is preserved from
    # the original test (dicts keep insertion order in Python 3.7+).
    expected_custom_property_values = {
        "bevy_example::test_components::NestingTestLevel2":
            '(basic: (a: 0.5219839215278625, b: 38, c: "ljfywwrv"), color: (Rgba(red:0.2782765030860901, green:0.9174930453300476, blue:0.24890311062335968, alpha:0.815186083316803)), colors_list: ([Rgba(red:0.2523837685585022, green:0.5016026496887207, blue:0.317435085773468, alpha:0.8463277816772461), Rgba(red:0.945193886756897, green:0.4015909433364868, blue:0.9984470009803772, alpha:0.06219279021024704)]), enable: true, enum_inner: Wood, nested: (vec: (Vec3(x:0.1509154736995697, y:0.7055686116218567, z:0.5588918924331665))), text: "vgkrdwuc", toggle: (false))',
        "bevy_example::test_components::EnumComplex":
            'StructLike(a: 0.41416797041893005, b: 38, c: "ljfywwrv")',
        "bevy_animation::AnimationPlayer":
            '(animation: "", paused: true)',
        "bevy_example::test_components::VecOfColors":
            '([Rgba(red:0.8066907525062561, green:0.9604947566986084, blue:0.2896253764629364, alpha:0.766107439994812), Rgba(red:0.7042198777198792, green:0.6613830327987671, blue:0.11016204953193665, alpha:0.02693677879869938)])',
        "bevy_example::test_components::VecOfF32s":
            '([0.8066907525062561, 0.9604947566986084])',
        "bevy_render::mesh::mesh::skinning::SkinnedMesh":
            '(inverse_bindposes: Weak(Uuid(uuid: "73b3b118-7d01-4778-8bcc-4e79055f5d22")), joints: [0, 0])',
        "bevy_render::camera::camera::CameraRenderGraph":
            'None',
    }
    for long_name, expected_value in expected_custom_property_values.items():
        _add_and_shuffle(registry, type_infos, object, long_name, seed=17)
        assert get_bevy_component_value_by_long_name(object, long_name) == expected_value
.blenvy.ui import BLENVY_PT_SidePanel -from .blenvy.blenvy_manager import BlenvyManager -from .blenvy.operators import OT_switch_bevy_tooling + ###################################################### @@ -129,23 +119,6 @@ classes = [ GLTF_PT_auto_export_blueprints, GLTF_PT_auto_export_SidePanel, AutoExportTracker, - - - - - BlenvyManager, - OT_switch_bevy_tooling, - BLENVY_PT_SidePanel, - - - AssetsRegistry, - OT_add_bevy_asset, - OT_remove_bevy_asset, - GLTF_PT_auto_export_assets, - - BlueprintsRegistry, - OT_select_blueprint, - GLTF_PT_auto_export_blueprints_list, ] def glTF2_pre_export_callback(data):