feat(blenvy): added most of the code from both add-ons
* adapted most of the UI to the new structure
tools/blenvy/__init__.py (new file, 185 lines)
@@ -0,0 +1,185 @@
bl_info = {
    "name": "blenvy",
    "author": "kaosigh",
    "version": (0, 1, 0),
    "blender": (3, 4, 0),
    "location": "File > Import-Export",
    "description": "tooling for the Bevy engine",
    "warning": "",
    "wiki_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow",
    "tracker_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow/issues/new",
    "category": "Import-Export"
}

import bpy
from bpy.app.handlers import persistent
from bpy.props import (StringProperty)

# components management
from .bevy_components.components.operators import CopyComponentOperator, Fix_Component_Operator, OT_rename_component, RemoveComponentFromAllObjectsOperator, RemoveComponentOperator, GenerateComponent_From_custom_property_Operator, PasteComponentOperator, AddComponentOperator, RenameHelper, Toggle_ComponentVisibility

from .bevy_components.registry.registry import ComponentsRegistry, MissingBevyType
from .bevy_components.registry.operators import (COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL, COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT, COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL, COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT, OT_select_component_name_to_replace, OT_select_object, ReloadRegistryOperator, OT_OpenFilebrowser)
from .bevy_components.registry.ui import (BEVY_COMPONENTS_PT_Configuration, BEVY_COMPONENTS_PT_AdvancedToolsPanel, BEVY_COMPONENTS_PT_MissingTypesPanel, MISSING_TYPES_UL_List)

from .bevy_components.components.metadata import (ComponentMetadata, ComponentsMeta)
from .bevy_components.components.lists import GENERIC_LIST_OT_actions, Generic_LIST_OT_AddItem, Generic_LIST_OT_RemoveItem, Generic_LIST_OT_SelectItem
from .bevy_components.components.maps import GENERIC_MAP_OT_actions
from .bevy_components.components.definitions_list import (ComponentDefinitionsList, ClearComponentDefinitionsList)
from .bevy_components.components.ui import (BEVY_COMPONENTS_PT_ComponentsPanel)

# auto export
from .gltf_auto_export.auto_export.operators import AutoExportGLTF
from .gltf_auto_export.auto_export.tracker import AutoExportTracker
from .gltf_auto_export.auto_export.preferences import (AutoExportGltfAddonPreferences)

from .gltf_auto_export.auto_export.internals import (SceneLink,
                                                     SceneLinks,
                                                     CollectionToExport,
                                                     BlueprintsToExport,
                                                     CUSTOM_PG_sceneName
                                                     )
from .gltf_auto_export.ui.main import (GLTF_PT_auto_export_change_detection, GLTF_PT_auto_export_changes_list, GLTF_PT_auto_export_main,
                                       GLTF_PT_auto_export_root,
                                       GLTF_PT_auto_export_general,
                                       GLTF_PT_auto_export_scenes,
                                       GLTF_PT_auto_export_blueprints,
                                       SCENE_UL_GLTF_auto_export,

                                       GLTF_PT_auto_export_SidePanel
                                       )
from .gltf_auto_export.ui.operators import (OT_OpenFolderbrowser, SCENES_LIST_OT_actions)

# asset management
from .assets.ui import GLTF_PT_auto_export_assets
from .assets.assets_registry import AssetsRegistry
from .assets.operators import OT_add_bevy_asset, OT_remove_bevy_asset

# blueprints management
from .blueprints.ui import GLTF_PT_auto_export_blueprints_list
from .blueprints.blueprints_registry import BlueprintsRegistry
from .blueprints.operators import OT_select_blueprint

# blenvy core
from .core.ui import BLENVY_PT_SidePanel
from .core.blenvy_manager import BlenvyManager
from .core.operators import OT_switch_bevy_tooling

classes = [
    # blenvy
    BLENVY_PT_SidePanel,

    # bevy components
    AddComponentOperator,
    CopyComponentOperator,
    PasteComponentOperator,
    RemoveComponentOperator,
    RemoveComponentFromAllObjectsOperator,
    Fix_Component_Operator,
    OT_rename_component,
    RenameHelper,
    GenerateComponent_From_custom_property_Operator,
    Toggle_ComponentVisibility,

    ComponentDefinitionsList,
    ClearComponentDefinitionsList,

    ComponentMetadata,
    ComponentsMeta,
    MissingBevyType,
    ComponentsRegistry,

    OT_OpenFilebrowser,
    ReloadRegistryOperator,
    COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL,
    COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT,

    COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL,
    COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT,

    OT_select_object,
    OT_select_component_name_to_replace,

    BEVY_COMPONENTS_PT_ComponentsPanel,
    BEVY_COMPONENTS_PT_AdvancedToolsPanel,
    BEVY_COMPONENTS_PT_Configuration,
    MISSING_TYPES_UL_List,
    BEVY_COMPONENTS_PT_MissingTypesPanel,

    Generic_LIST_OT_SelectItem,
    Generic_LIST_OT_AddItem,
    Generic_LIST_OT_RemoveItem,
    GENERIC_LIST_OT_actions,

    GENERIC_MAP_OT_actions,

    # gltf auto export
    SceneLink,
    SceneLinks,
    CUSTOM_PG_sceneName,
    SCENE_UL_GLTF_auto_export,
    SCENES_LIST_OT_actions,

    OT_OpenFolderbrowser,
    AutoExportGLTF,

    CollectionToExport,
    BlueprintsToExport,

    GLTF_PT_auto_export_main,
    GLTF_PT_auto_export_root,
    GLTF_PT_auto_export_general,
    GLTF_PT_auto_export_change_detection,
    GLTF_PT_auto_export_scenes,
    GLTF_PT_auto_export_blueprints,
    GLTF_PT_auto_export_SidePanel,
    AutoExportTracker,

    # blenvy
    BlenvyManager,
    OT_switch_bevy_tooling,

    AssetsRegistry,
    OT_add_bevy_asset,
    OT_remove_bevy_asset,
    GLTF_PT_auto_export_assets,

    BlueprintsRegistry,
    OT_select_blueprint,
    GLTF_PT_auto_export_blueprints_list,
]


@persistent
def post_update(scene, depsgraph):
    bpy.context.window_manager.auto_export_tracker.deps_post_update_handler(scene, depsgraph)

@persistent
def post_save(scene, depsgraph):
    bpy.context.window_manager.auto_export_tracker.save_handler(scene, depsgraph)

@persistent
def post_load(file_name):
    registry = bpy.context.window_manager.components_registry
    if registry != None:
        registry.load_settings()

def register():
    for cls in classes:
        bpy.utils.register_class(cls)

    bpy.app.handlers.load_post.append(post_load)
    # for some reason, adding these directly to the tracker class in register() do not work reliably
    bpy.app.handlers.depsgraph_update_post.append(post_update)
    bpy.app.handlers.save_post.append(post_save)

def unregister():
    for cls in classes:
        bpy.utils.unregister_class(cls)
    bpy.app.handlers.load_post.remove(post_load)
    bpy.app.handlers.depsgraph_update_post.remove(post_update)
    bpy.app.handlers.save_post.remove(post_save)


print("TOTO")
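The three handlers above are plain module-level functions appended in register(); the @persistent decorator is what keeps them installed across .blend loads. A minimal, illustrative sketch (not taken from the commit) of the same wiring, with a guard so that running register() twice cannot append the same handler a second time:

import bpy
from bpy.app.handlers import persistent

@persistent
def post_load(file_name):
    # mirrors the handler above: reload registry settings after a .blend is opened
    registry = getattr(bpy.context.window_manager, "components_registry", None)
    if registry is not None:
        registry.load_settings()

def register_handlers():
    if post_load not in bpy.app.handlers.load_post:
        bpy.app.handlers.load_post.append(post_load)

def unregister_handlers():
    if post_load in bpy.app.handlers.load_post:
        bpy.app.handlers.load_post.remove(post_load)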
tools/blenvy/assets/operators.py (modified)
@@ -15,7 +15,7 @@ class OT_add_bevy_asset(Operator):
         description="name of asset to add",
     ) # type: ignore

-    asset_type: bpy.types.WindowManager.asset_type_selector = EnumProperty(
+    asset_type: EnumProperty(
         items=(
             ('MODEL', "Model", ""),
             ('AUDIO', "Audio", ""),
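The removed line tried to both annotate the operator property with an existing WindowManager property and assign an EnumProperty; Blender collects operator properties from plain class annotations, which is what the added line does. A self-contained sketch of that declaration style (the operator name and fields here are hypothetical, not part of the add-on):

import bpy
from bpy.props import EnumProperty, StringProperty

class EXAMPLE_OT_add_asset(bpy.types.Operator):
    """Hypothetical operator, only to illustrate annotation-based properties"""
    bl_idname = "example.add_asset"
    bl_label = "Add asset (example)"

    asset_name: StringProperty(name="asset name")  # type: ignore
    asset_type: EnumProperty(
        items=(
            ('MODEL', "Model", ""),
            ('AUDIO', "Audio", ""),
        )
    )  # type: ignore

    def execute(self, context):
        self.report({'INFO'}, self.asset_type + ": " + self.asset_name)
        return {'FINISHED'}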
tools/blenvy/assets/ui.py (modified)
@@ -4,7 +4,7 @@ import json
 class GLTF_PT_auto_export_assets(bpy.types.Panel):
     bl_space_type = 'VIEW_3D'
     bl_region_type = 'UI'
-    bl_label = "Assets"
+    bl_label = ""
     bl_parent_id = "BLENVY_PT_SidePanel"
     bl_options = {'DEFAULT_CLOSED'}

@@ -12,6 +12,15 @@ class GLTF_PT_auto_export_assets(bpy.types.Panel):
     def poll(cls, context):
         return context.window_manager.blenvy.mode == 'ASSETS'

+    def draw_header(self, context):
+        layout = self.layout
+        name = ""
+        if context.collection is not None and context.collection.name == 'Scene Collection':
+            name = f"WORLD/LEVEL: {context.scene.name}"
+        else:
+            name = f"BLUEPRINT: {context.collection.name}"
+        layout.label(text=f"Assets For {name}")
+
     def draw(self, context):
         layout = self.layout
         layout.use_property_split = True
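The static "Assets" label is replaced by a draw_header that names the panel after what is being edited: the scene's root "Scene Collection" is treated as the world/level, any other active collection as a blueprint. The same decision, isolated into a helper for illustration (the helper name and the None handling are assumptions, not part of the commit):

import bpy

def asset_owner_label(context):
    collection = context.collection
    # treat a missing active collection like the scene root so the label never reads None
    if collection is None or collection.name == 'Scene Collection':
        return f"WORLD/LEVEL: {context.scene.name}"
    return f"BLUEPRINT: {collection.name}"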
tools/blenvy/bevy_components/components/definitions_list.py (new file, 57 lines)
@@ -0,0 +1,57 @@
import bpy
from bpy.props import (StringProperty)

# this one is for UI only, and its inner list contains a useable list of shortnames of components
class ComponentDefinitionsList(bpy.types.PropertyGroup):

    # FIXME: not sure, hard coded exclude list, feels wrong
    exclude = ['Parent', 'Children']

    def add_component_to_ui_list(self, context):
        #print("add components to ui_list")
        items = []
        type_infos = context.window_manager.components_registry.type_infos
        for long_name in type_infos.keys():
            definition = type_infos[long_name]
            short_name = definition["short_name"]
            is_component = definition['isComponent'] if "isComponent" in definition else False

            if self.filter in short_name and is_component:
                if not 'Handle' in short_name and not "Cow" in short_name and not "AssetId" in short_name and short_name not in self.exclude: # FIXME: hard coded, seems wrong
                    items.append((long_name, short_name, long_name))

        items.sort(key=lambda a: a[1])
        return items

    @classmethod
    def register(cls):
        bpy.types.WindowManager.components_list = bpy.props.PointerProperty(type=ComponentDefinitionsList)

    @classmethod
    def unregister(cls):
        del bpy.types.WindowManager.components_list

    list : bpy.props.EnumProperty(
        name="list",
        description="list",
        # items argument required to initialize, just filled with empty values
        items = add_component_to_ui_list,
    ) # type: ignore
    filter: StringProperty(
        name="component filter",
        description="filter for the components list",
        options={'TEXTEDIT_UPDATE'}
    ) # type: ignore


class ClearComponentDefinitionsList(bpy.types.Operator):
    ''' clear list of bpy.context.collection.component_definitions '''
    bl_label = "clear component definitions"
    bl_idname = "components.clear_component_definitions"

    def execute(self, context):
        # create a new item, assign its properties
        bpy.context.collection.component_definitions.clear()

        return {'FINISHED'}
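ComponentDefinitionsList feeds its dropdown through the EnumProperty items callback, so the entries are rebuilt on every draw and the filter string applies live. A standalone sketch of that pattern with a hard-coded catalog (all names here are illustrative; note that Blender's API docs warn that strings returned from an items callback should stay referenced on the Python side):

import bpy
from bpy.props import EnumProperty, StringProperty

class ExampleDefinitionsList(bpy.types.PropertyGroup):
    def build_items(self, context):
        # rebuilt every time the dropdown is drawn, so typing in `filter` narrows the list live
        catalog = {"my_game::Health": "Health", "my_game::Player": "Player"}
        return [(long_name, short_name, long_name)
                for long_name, short_name in catalog.items()
                if self.filter.lower() in short_name.lower()]

    list: EnumProperty(name="list", items=build_items)  # type: ignore
    filter: StringProperty(name="filter", options={'TEXTEDIT_UPDATE'})  # type: ignore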
tools/blenvy/bevy_components/components/helpers.py (new file, 6 lines)
@@ -0,0 +1,6 @@
import rna_prop_ui

# fake way to make our operator's changes be visible to the change/depsgraph update handler in gltf_auto_export
def ping_depsgraph_update(object):
    rna_prop_ui.rna_idprop_ui_create(object, "________temp", default=0)
    rna_prop_ui.rna_idprop_ui_prop_clear(object, "________temp")
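Creating and immediately clearing a throwaway ID property forces a depsgraph update, which is how scripted edits that would otherwise go unnoticed become visible to gltf_auto_export's change tracker. A usage sketch (the custom property name below is made up):

import bpy
import rna_prop_ui

def ping_depsgraph_update(object):
    # same trick as the helper above: add and clear a temporary ID property
    rna_prop_ui.rna_idprop_ui_create(object, "________temp", default=0)
    rna_prop_ui.rna_idprop_ui_prop_clear(object, "________temp")

obj = bpy.context.object
obj["some_marker"] = 42     # a plain scripted property edit does not run the depsgraph handlers
ping_depsgraph_update(obj)  # this does, so the auto-export tracker sees the change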
tools/blenvy/bevy_components/components/lists.py (new file, 170 lines)
@@ -0,0 +1,170 @@
import json
from bpy_types import Operator, UIList
from bpy.props import (StringProperty, EnumProperty, PointerProperty, FloatVectorProperty, IntProperty)

class Generic_LIST_OT_AddItem(Operator):
    """Add a new item to the list."""
    bl_idname = "generic_list.add_item"
    bl_label = "Add a new item"

    property_group_path: StringProperty(
        name="property group path",
        description="",
    ) # type: ignore

    component_name: StringProperty(
        name="component name",
        description="",
    ) # type: ignore

    def execute(self, context):
        print("")
        object = context.object
        # information is stored in component meta
        components_in_object = object.components_meta.components
        component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None)

        propertyGroup = component_meta
        for path_item in json.loads(self.property_group_path):
            propertyGroup = getattr(propertyGroup, path_item)

        print("list container", propertyGroup, dict(propertyGroup))
        target_list = getattr(propertyGroup, "list")
        index = getattr(propertyGroup, "list_index")
        item = target_list.add()
        propertyGroup.list_index = index + 1 # we use this to force the change detection

        print("added item", item, item.field_names, getattr(item, "field_names"))
        print("")
        return{'FINISHED'}


class Generic_LIST_OT_RemoveItem(Operator):
    """Remove an item from the list."""
    bl_idname = "generic_list.remove_item"
    bl_label = "Remove selected item"

    property_group_path: StringProperty(
        name="property group path",
        description="",
    ) # type: ignore

    component_name: StringProperty(
        name="component name",
        description="",
    ) # type: ignore
    def execute(self, context):
        print("remove from list", context.object)

        object = context.object
        # information is stored in component meta
        components_in_object = object.components_meta.components
        component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None)

        propertyGroup = component_meta
        for path_item in json.loads(self.property_group_path):
            propertyGroup = getattr(propertyGroup, path_item)

        target_list = getattr(propertyGroup, "list")
        index = getattr(propertyGroup, "list_index")
        target_list.remove(index)
        propertyGroup.list_index = min(max(0, index - 1), len(target_list) - 1)
        return{'FINISHED'}


class Generic_LIST_OT_SelectItem(Operator):
    """Select an item in the list."""
    bl_idname = "generic_list.select_item"
    bl_label = "select an item"


    property_group_path: StringProperty(
        name="property group path",
        description="",
    ) # type: ignore

    component_name: StringProperty(
        name="component name",
        description="",
    ) # type: ignore

    selection_index: IntProperty() # type: ignore

    def execute(self, context):
        print("select in list", context.object)

        object = context.object
        # information is stored in component meta
        components_in_object = object.components_meta.components
        component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None)

        propertyGroup = component_meta
        for path_item in json.loads(self.property_group_path):
            propertyGroup = getattr(propertyGroup, path_item)

        target_list = getattr(propertyGroup, "list")
        index = getattr(propertyGroup, "list_index")

        propertyGroup.list_index = self.selection_index
        return{'FINISHED'}


class GENERIC_LIST_OT_actions(Operator):
    """Move items up and down, add and remove"""
    bl_idname = "generic_list.list_action"
    bl_label = "List Actions"
    bl_description = "Move items up and down, add and remove"
    bl_options = {'REGISTER', 'UNDO'}

    action: EnumProperty(
        items=(
            ('UP', "Up", ""),
            ('DOWN', "Down", ""),
            ('REMOVE', "Remove", ""),
            ('ADD', "Add", ""))) # type: ignore

    property_group_path: StringProperty(
        name="property group path",
        description="",
    ) # type: ignore

    component_name: StringProperty(
        name="component name",
        description="",
    ) # type: ignore

    def invoke(self, context, event):
        object = context.object
        # information is stored in component meta
        components_in_object = object.components_meta.components
        component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None)

        propertyGroup = component_meta
        for path_item in json.loads(self.property_group_path):
            propertyGroup = getattr(propertyGroup, path_item)

        target_list = getattr(propertyGroup, "list")
        index = getattr(propertyGroup, "list_index")


        if self.action == 'DOWN' and index < len(target_list) - 1:
            #item_next = scn.rule_list[index + 1].name
            target_list.move(index, index + 1)
            propertyGroup.list_index += 1

        elif self.action == 'UP' and index >= 1:
            #item_prev = scn.rule_list[index - 1].name
            target_list.move(index, index - 1)
            propertyGroup.list_index -= 1

        elif self.action == 'REMOVE':
            target_list.remove(index)
            propertyGroup.list_index = min(max(0, index - 1), len(target_list) - 1)

        if self.action == 'ADD':
            item = target_list.add()
            propertyGroup.list_index = index + 1 # we use this to force the change detection
            #info = '"%s" added to list' % (item.name)
            #self.report({'INFO'}, info)

        return {"FINISHED"}
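All four operators locate their target the same way: the component's long name selects the right entry in components_meta, and property_group_path is a JSON-encoded list of attribute names that is walked with getattr down to the nested property group. The lookup, pulled out as a standalone helper for clarity (the helper name is illustrative, not from the add-on):

import json

def resolve_property_group(object, component_name, property_group_path):
    # find the metadata entry for the component, then walk the encoded attribute path,
    # e.g. '["variant_SomeVariant", "list"]'
    components_in_object = object.components_meta.components
    property_group = next(
        (comp for comp in components_in_object if comp["long_name"] == component_name), None)
    for path_item in json.loads(property_group_path):
        property_group = getattr(property_group, path_item)
    return property_group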
tools/blenvy/bevy_components/components/maps.py (new file, 121 lines)
@@ -0,0 +1,121 @@
import json
from bpy_types import Operator, UIList
from bpy.props import (StringProperty, EnumProperty, PointerProperty, FloatVectorProperty, IntProperty)

from ..propGroups.conversions_from_prop_group import property_group_value_to_custom_property_value

class GENERIC_MAP_OT_actions(Operator):
    """Move items up and down, add and remove"""
    bl_idname = "generic_map.map_action"
    bl_label = "Map Actions"
    bl_description = "Move items up and down, add and remove"
    bl_options = {'REGISTER', 'UNDO'}

    action: EnumProperty(
        items=(
            ('UP', "Up", ""),
            ('DOWN', "Down", ""),
            ('REMOVE', "Remove", ""),
            ('ADD', "Add", ""))) # type: ignore

    property_group_path: StringProperty(
        name="property group path",
        description="",
    ) # type: ignore

    component_name: StringProperty(
        name="component name",
        description="",
    ) # type: ignore

    target_index: IntProperty(name="target index", description="index of item to manipulate") # type: ignore

    def invoke(self, context, event):
        object = context.object
        # information is stored in component meta
        components_in_object = object.components_meta.components
        component_meta = next(filter(lambda component: component["long_name"] == self.component_name, components_in_object), None)

        propertyGroup = component_meta
        for path_item in json.loads(self.property_group_path):
            propertyGroup = getattr(propertyGroup, path_item)

        keys_list = getattr(propertyGroup, "list")
        index = getattr(propertyGroup, "list_index")

        values_list = getattr(propertyGroup, "values_list")
        values_index = getattr(propertyGroup, "values_list_index")

        key_setter = getattr(propertyGroup, "keys_setter")
        value_setter = getattr(propertyGroup, "values_setter")

        if self.action == 'DOWN' and index < len(keys_list) - 1:
            #item_next = scn.rule_list[index + 1].name
            keys_list.move(index, index + 1)
            propertyGroup.list_index += 1

        elif self.action == 'UP' and index >= 1:
            #item_prev = scn.rule_list[index - 1].name
            keys_list.move(index, index - 1)
            propertyGroup.list_index -= 1

        elif self.action == 'REMOVE':
            index = self.target_index
            keys_list.remove(index)
            values_list.remove(index)
            propertyGroup.list_index = min(max(0, index - 1), len(keys_list) - 1)
            propertyGroup.values_index = min(max(0, index - 1), len(keys_list) - 1)

        if self.action == 'ADD':
            print("keys_list", keys_list)

            # first we gather all key/value pairs
            hashmap = {}
            for index, key in enumerate(keys_list):
                key_entry = {}
                for field_name in key.field_names:
                    key_entry[field_name] = getattr(key, field_name, None)
                value_entry = {}
                for field_name in values_list[index].field_names:
                    value_entry[field_name] = values_list[index][field_name]
                hashmap[json.dumps(key_entry)] = index
            print("hashmap", hashmap)

            # then we need to find the index of a specific value if it exists
            key_entry = {}
            for field_name in key_setter.field_names:
                key_entry[field_name] = getattr(key_setter, field_name, None)
            key_to_add = json.dumps(key_entry)
            existing_index = hashmap.get(key_to_add, None)
            print("existing_index", existing_index)

            if existing_index is None:
                print("adding new value")
                key = keys_list.add()
                # copy the values over
                for field_name in key_setter.field_names:
                    val = getattr(key_setter, field_name, None)
                    if val is not None:
                        key[field_name] = val
                    # TODO: add error handling

                value = values_list.add()
                # copy the values over
                for field_name in value_setter.field_names:
                    val = getattr(value_setter, field_name, None)
                    if val is not None:
                        value[field_name] = val
                    # TODO: add error handling

                propertyGroup.list_index = index + 1 # we use this to force the change detection
                propertyGroup.values_index = index + 1 # we use this to force the change detection
            else:
                print("overriding value")
                for field_name in value_setter.field_names:
                    values_list[existing_index][field_name] = value_setter[field_name]


        #info = '"%s" added to list' % (item.name)
        #self.report({'INFO'}, info)

        return {"FINISHED"}
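The ADD branch keeps map keys unique by serialising each existing key's fields to a JSON string and indexing them in a plain dict; if the key being added serialises to the same string, its value is overwritten instead of appended. The core of that check in isolation (sample data is made up):

import json

existing_keys = [{"name": "hp", "slot": 0}, {"name": "mana", "slot": 1}]
hashmap = {json.dumps(entry): index for index, entry in enumerate(existing_keys)}

candidate = {"name": "hp", "slot": 0}
existing_index = hashmap.get(json.dumps(candidate), None)
# existing_index == 0 here, so the operator overwrites that entry's value
# instead of adding a second "hp" key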
tools/blenvy/bevy_components/components/metadata.py (new file, 344 lines)
@@ -0,0 +1,344 @@
import bpy
from bpy.props import (StringProperty, BoolProperty, PointerProperty)
from bpy_types import (PropertyGroup)

from ..propGroups.conversions_from_prop_group import property_group_value_to_custom_property_value
from ..propGroups.conversions_to_prop_group import property_group_value_from_custom_property_value

class ComponentMetadata(bpy.types.PropertyGroup):
    short_name : bpy.props.StringProperty(
        name = "name",
        default = ""
    ) # type: ignore

    long_name : bpy.props.StringProperty(
        name = "long name",
        default = ""
    ) # type: ignore

    values: bpy.props.StringProperty(
        name = "Value",
        default = ""
    ) # type: ignore

    enabled: BoolProperty(
        name="enabled",
        description="component enabled",
        default=True
    ) # type: ignore

    invalid: BoolProperty(
        name="invalid",
        description="component is invalid, because of missing registration/ other issues",
        default=False
    ) # type: ignore

    invalid_details: StringProperty(
        name="invalid details",
        description="detailed information about why the component is invalid",
        default=""
    ) # type: ignore

    visible: BoolProperty( # REALLY dislike doing this for UI control, but ok hack for now
        default=True
    ) # type: ignore

class ComponentsMeta(PropertyGroup):
    infos_per_component: StringProperty(
        name="infos per component",
        description="component"
    ) # type: ignore
    components: bpy.props.CollectionProperty(type = ComponentMetadata) # type: ignore

    @classmethod
    def register(cls):
        bpy.types.Object.components_meta = PointerProperty(type=ComponentsMeta)

    @classmethod
    def unregister(cls):
        del bpy.types.Object.components_meta

# remove no longer valid metadata from object
def cleanup_invalid_metadata(object):
    bevy_components = get_bevy_components(object)
    if len(bevy_components.keys()) == 0: # no components, bail out
        return
    components_metadata = object.components_meta.components
    to_remove = []
    for index, component_meta in enumerate(components_metadata):
        long_name = component_meta.long_name
        if long_name not in bevy_components.keys():
            print("component:", long_name, "present in metadata, but not in object")
            to_remove.append(index)
    for index in to_remove:
        components_metadata.remove(index)


# returns a component definition ( an entry in registry's type_infos) with matching long name or None if nothing has been found
def find_component_definition_from_long_name(long_name):
    registry = bpy.context.window_manager.components_registry
    return registry.type_infos.get(long_name, None)

# FIXME: feels a bit heavy duty, should only be done
# if the components panel is active ?
def ensure_metadata_for_all_objects():
    for object in bpy.data.objects:
        add_metadata_to_components_without_metadata(object)

# returns whether an object has custom properties without matching metadata
def do_object_custom_properties_have_missing_metadata(object):
    components_metadata = getattr(object, "components_meta", None)
    if components_metadata == None:
        return True

    components_metadata = components_metadata.components

    missing_metadata = False
    for component_name in get_bevy_components(object) :
        if component_name == "components_meta":
            continue
        component_meta = next(filter(lambda component: component["long_name"] == component_name, components_metadata), None)
        if component_meta == None:
            # current component has no metadata but is there even a compatible type in the registry ?
            # if not ignore it
            component_definition = find_component_definition_from_long_name(component_name)
            if component_definition != None:
                missing_metadata = True
                break

    return missing_metadata


import json

def upsert_bevy_component(object, long_name, value):
    if not 'bevy_components' in object:
        object['bevy_components'] = '{}'
    bevy_components = json.loads(object['bevy_components'])
    bevy_components[long_name] = value
    object['bevy_components'] = json.dumps(bevy_components)
    #object['bevy_components'][long_name] = value # Sigh, this does not work, hits Blender's 63 char length limit

def remove_bevy_component(object, long_name):
    if 'bevy_components' in object:
        bevy_components = json.loads(object['bevy_components'])
        if long_name in bevy_components:
            del bevy_components[long_name]
            object['bevy_components'] = json.dumps(bevy_components)
    if long_name in object:
        del object[long_name]

def get_bevy_components(object):
    if 'bevy_components' in object:
        bevy_components = json.loads(object['bevy_components'])
        return bevy_components
    return {}

def get_bevy_component_value_by_long_name(object, long_name):
    bevy_components = get_bevy_components(object)
    if len(bevy_components.keys()) == 0 :
        return None
    return bevy_components.get(long_name, None)

def is_bevy_component_in_object(object, long_name):
    return get_bevy_component_value_by_long_name(object, long_name) is not None

# adds metadata to object only if it is missing
def add_metadata_to_components_without_metadata(object):
    registry = bpy.context.window_manager.components_registry

    for component_name in get_bevy_components(object) :
        if component_name == "components_meta":
            continue
        upsert_component_in_object(object, component_name, registry)

# adds a component to an object (including metadata) using the provided component definition & optional value
def add_component_to_object(object, component_definition, value=None):
    cleanup_invalid_metadata(object)
    if object is not None:
        # print("add_component_to_object", component_definition)
        long_name = component_definition["long_name"]
        registry = bpy.context.window_manager.components_registry
        if not registry.has_type_infos():
            raise Exception('registry type infos have not been loaded yet or are missing !')
        definition = registry.type_infos[long_name]
        # now we use our pre_generated property groups to set the initial value of our custom property
        (_, propertyGroup) = upsert_component_in_object(object, long_name=long_name, registry=registry)
        if value == None:
            value = property_group_value_to_custom_property_value(propertyGroup, definition, registry, None)
        else: # we have provided a value, that is a raw , custom property value, to set the value of the propertyGroup
            object["__disable__update"] = True # disable update callback while we set the values of the propertyGroup "tree" (as a propertyGroup can contain other propertyGroups)
            property_group_value_from_custom_property_value(propertyGroup, definition, registry, value)
            del object["__disable__update"]

        upsert_bevy_component(object, long_name, value)

def upsert_component_in_object(object, long_name, registry):
    # print("upsert_component_in_object", object, "component name", component_name)
    # TODO: upsert this part too ?
    target_components_metadata = object.components_meta.components
    component_definition = registry.type_infos.get(long_name, None)
    if component_definition != None:
        short_name = component_definition["short_name"]
        long_name = component_definition["long_name"]
        property_group_name = registry.get_propertyGroupName_from_longName(long_name)
        propertyGroup = None

        component_meta = next(filter(lambda component: component["long_name"] == long_name, target_components_metadata), None)
        if not component_meta:
            component_meta = target_components_metadata.add()
            component_meta.short_name = short_name
            component_meta.long_name = long_name
            propertyGroup = getattr(component_meta, property_group_name, None)
        else: # this one has metadata but we check that the relevant property group is present
            propertyGroup = getattr(component_meta, property_group_name, None)

        # try to inject propertyGroup if not present
        if propertyGroup == None:
            #print("propertygroup not found in metadata attempting to inject")
            if property_group_name in registry.component_propertyGroups:
                # we have found a matching property_group, so try to inject it
                # now inject property group
                setattr(ComponentMetadata, property_group_name, registry.component_propertyGroups[property_group_name]) # FIXME: not ideal as all ComponentMetadata get the propGroup, but have not found a way to assign it per instance
                propertyGroup = getattr(component_meta, property_group_name, None)

        # now deal with property groups details
        if propertyGroup != None:
            if long_name in registry.invalid_components:
                component_meta.enabled = False
                component_meta.invalid = True
                component_meta.invalid_details = "component contains fields that are not in the schema, disabling"
        else:
            # if we still have not found the property group, mark it as invalid
            component_meta.enabled = False
            component_meta.invalid = True
            component_meta.invalid_details = "component not present in the schema, possibly renamed? Disabling for now"
        # property_group_value_from_custom_property_value(propertyGroup, component_definition, registry, object[component_name])

        return (component_meta, propertyGroup)
    else:
        return (None, None)


def copy_propertyGroup_values_to_another_object(source_object, target_object, component_name, registry):
    if source_object == None or target_object == None or component_name == None:
        raise Exception('missing input data, cannot copy component propertyGroup')

    component_definition = find_component_definition_from_long_name(component_name)
    long_name = component_name
    property_group_name = registry.get_propertyGroupName_from_longName(long_name)

    registry = bpy.context.window_manager.components_registry

    source_components_metadata = source_object.components_meta.components
    source_componentMeta = next(filter(lambda component: component["long_name"] == long_name, source_components_metadata), None)
    # matching component means we already have this type of component
    source_propertyGroup = getattr(source_componentMeta, property_group_name)

    # now deal with the target object
    (_, target_propertyGroup) = upsert_component_in_object(target_object, component_name, registry)
    # add to object
    value = property_group_value_to_custom_property_value(target_propertyGroup, component_definition, registry, None)
    upsert_bevy_component(target_object, long_name, value)

    # copy the values over
    for field_name in source_propertyGroup.field_names:
        if field_name in source_propertyGroup:
            target_propertyGroup[field_name] = source_propertyGroup[field_name]
    apply_propertyGroup_values_to_object_customProperties(target_object)


# TODO: move to propgroups ?
def apply_propertyGroup_values_to_object_customProperties(object):
    cleanup_invalid_metadata(object)
    registry = bpy.context.window_manager.components_registry
    for component_name in get_bevy_components(object) :
        """if component_name == "components_meta":
            continue"""
        (_, propertyGroup) = upsert_component_in_object(object, component_name, registry)
        component_definition = find_component_definition_from_long_name(component_name)
        if component_definition != None:
            value = property_group_value_to_custom_property_value(propertyGroup, component_definition, registry, None)
            upsert_bevy_component(object=object, long_name=component_name, value=value)

# apply component value(s) to custom property of a single component
def apply_propertyGroup_values_to_object_customProperties_for_component(object, component_name):
    registry = bpy.context.window_manager.components_registry
    (_, propertyGroup) = upsert_component_in_object(object, component_name, registry)
    component_definition = find_component_definition_from_long_name(component_name)
    if component_definition != None:
        value = property_group_value_to_custom_property_value(propertyGroup, component_definition, registry, None)
        object[component_name] = value

    components_metadata = object.components_meta.components
    componentMeta = next(filter(lambda component: component["long_name"] == component_name, components_metadata), None)
    if componentMeta:
        componentMeta.invalid = False
        componentMeta.invalid_details = ""


def apply_customProperty_values_to_object_propertyGroups(object):
    print("apply custom properties to ", object.name)
    registry = bpy.context.window_manager.components_registry
    for component_name in get_bevy_components(object) :
        if component_name == "components_meta":
            continue
        component_definition = find_component_definition_from_long_name(component_name)
        if component_definition != None:
            property_group_name = registry.get_propertyGroupName_from_longName(component_name)
            components_metadata = object.components_meta.components
            source_componentMeta = next(filter(lambda component: component["long_name"] == component_name, components_metadata), None)
            # matching component means we already have this type of component
            propertyGroup = getattr(source_componentMeta, property_group_name, None)
            customProperty_value = get_bevy_component_value_by_long_name(object, component_name)
            #value = property_group_value_to_custom_property_value(propertyGroup, component_definition, registry, None)

            object["__disable__update"] = True # disable update callback while we set the values of the propertyGroup "tree" (as a propertyGroup can contain other propertyGroups)
            property_group_value_from_custom_property_value(propertyGroup, component_definition, registry, customProperty_value)
            del object["__disable__update"]
            source_componentMeta.invalid = False
            source_componentMeta.invalid_details = ""

# removes the given component from the object: removes both the custom property and the matching metadata from the object
def remove_component_from_object(object, component_name):
    # remove the component value
    remove_bevy_component(object, component_name)

    # now remove the component's metadata
    components_metadata = getattr(object, "components_meta", None)
    if components_metadata == None:
        return False

    components_metadata = components_metadata.components
    to_remove = []
    for index, component_meta in enumerate(components_metadata):
        long_name = component_meta.long_name
        if long_name == component_name:
            to_remove.append(index)
            break
    for index in to_remove:
        components_metadata.remove(index)
    return True

def add_component_from_custom_property(object):
    add_metadata_to_components_without_metadata(object)
    apply_customProperty_values_to_object_propertyGroups(object)

def rename_component(object, original_long_name, new_long_name):
    registry = bpy.context.window_manager.components_registry
    type_infos = registry.type_infos
    component_definition = type_infos[new_long_name]

    component_ron_value = get_bevy_component_value_by_long_name(object=object, long_name=original_long_name)
    if component_ron_value is None and original_long_name in object:
        component_ron_value = object[original_long_name]

    remove_component_from_object(object, original_long_name)
    add_component_to_object(object, component_definition, component_ron_value)


def toggle_component(object, component_name):
    components_in_object = object.components_meta.components
    component_meta = next(filter(lambda component: component["long_name"] == component_name, components_in_object), None)
    if component_meta != None:
        component_meta.visible = not component_meta.visible
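All component values live in a single 'bevy_components' custom property holding a JSON dictionary of long name to value string, a workaround for the 63-character limit mentioned in the comment inside upsert_bevy_component. A console usage sketch, assuming the helpers above are importable and using a made-up component long name and RON value:

import bpy

obj = bpy.context.object
upsert_bevy_component(obj, "my_game::Health", "(value: 42.0)")
print(get_bevy_components(obj))
# {'my_game::Health': '(value: 42.0)'}
print(get_bevy_component_value_by_long_name(obj, "my_game::Health"))
# (value: 42.0)
remove_bevy_component(obj, "my_game::Health")
print(is_bevy_component_in_object(obj, "my_game::Health"))
# False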
tools/blenvy/bevy_components/components/operators.py (new file, 321 lines)
@@ -0,0 +1,321 @@
import ast
import json
import bpy
from bpy_types import Operator
from bpy.props import (StringProperty)

from .metadata import add_component_from_custom_property, add_component_to_object, apply_propertyGroup_values_to_object_customProperties_for_component, copy_propertyGroup_values_to_another_object, get_bevy_component_value_by_long_name, get_bevy_components, is_bevy_component_in_object, remove_component_from_object, rename_component, toggle_component

class AddComponentOperator(Operator):
    """Add Bevy component to object"""
    bl_idname = "object.add_bevy_component"
    bl_label = "Add component to object Operator"
    bl_options = {"UNDO"}

    component_type: StringProperty(
        name="component_type",
        description="component type to add",
    ) # type: ignore

    def execute(self, context):
        object = context.object
        print("adding component ", self.component_type, "to object '"+object.name+"'")

        has_component_type = self.component_type != ""
        if has_component_type and object != None:
            type_infos = context.window_manager.components_registry.type_infos
            component_definition = type_infos[self.component_type]
            add_component_to_object(object, component_definition)

        return {'FINISHED'}

class CopyComponentOperator(Operator):
    """Copy Bevy component from object"""
    bl_idname = "object.copy_bevy_component"
    bl_label = "Copy component Operator"
    bl_options = {"UNDO"}

    source_component_name: StringProperty(
        name="source component_name (long)",
        description="name of the component to copy",
    ) # type: ignore

    source_object_name: StringProperty(
        name="source object name",
        description="name of the object to copy the component from",
    ) # type: ignore

    @classmethod
    def register(cls):
        bpy.types.WindowManager.copied_source_component_name = StringProperty()
        bpy.types.WindowManager.copied_source_object = StringProperty()

    @classmethod
    def unregister(cls):
        del bpy.types.WindowManager.copied_source_component_name
        del bpy.types.WindowManager.copied_source_object


    def execute(self, context):
        if self.source_component_name != '' and self.source_object_name != "":
            context.window_manager.copied_source_component_name = self.source_component_name
            context.window_manager.copied_source_object = self.source_object_name
        else:
            self.report({"ERROR"}, "The source object name / component name to copy a component from have not been specified")

        return {'FINISHED'}


class PasteComponentOperator(Operator):
    """Paste Bevy component to object"""
    bl_idname = "object.paste_bevy_component"
    bl_label = "Paste component to object Operator"
    bl_options = {"UNDO"}

    def execute(self, context):
        source_object_name = context.window_manager.copied_source_object
        source_object = bpy.data.objects.get(source_object_name, None)
        print("source object", source_object)
        if source_object == None:
            self.report({"ERROR"}, "The source object to copy a component from does not exist")
        else:
            component_name = context.window_manager.copied_source_component_name
            component_value = get_bevy_component_value_by_long_name(source_object, component_name)
            if component_value is None:
                self.report({"ERROR"}, "The source component to copy from does not exist")
            else:
                print("pasting component to object: component name:", str(component_name), "component value:" + str(component_value))
                print(context.object)
                registry = context.window_manager.components_registry
                copy_propertyGroup_values_to_another_object(source_object, context.object, component_name, registry)

        return {'FINISHED'}

class RemoveComponentOperator(Operator):
    """Remove Bevy component from object"""
    bl_idname = "object.remove_bevy_component"
    bl_label = "Remove component from object Operator"
    bl_options = {"UNDO"}

    component_name: StringProperty(
        name="component name",
        description="component to delete",
    ) # type: ignore

    object_name: StringProperty(
        name="object name",
        description="object whose component to delete",
        default=""
    ) # type: ignore

    def execute(self, context):
        if self.object_name == "":
            object = context.object
        else:
            object = bpy.data.objects[self.object_name]
        print("removing component ", self.component_name, "from object '"+object.name+"'")

        if object is not None and 'bevy_components' in object :
            component_value = get_bevy_component_value_by_long_name(object, self.component_name)
            if component_value is not None:
                remove_component_from_object(object, self.component_name)
            else :
                self.report({"ERROR"}, "The component to remove ("+ self.component_name +") does not exist")
        else:
            self.report({"ERROR"}, "The object to remove ("+ self.component_name +") from does not exist")
        return {'FINISHED'}


class RemoveComponentFromAllObjectsOperator(Operator):
    """Remove Bevy component from all objects"""
    bl_idname = "object.remove_bevy_component_all"
    bl_label = "Remove component from all objects Operator"
    bl_options = {"UNDO"}

    component_name: StringProperty(
        name="component name (long name)",
        description="component to delete",
    ) # type: ignore

    @classmethod
    def register(cls):
        bpy.types.WindowManager.components_remove_progress = bpy.props.FloatProperty(default=-1.0)

    @classmethod
    def unregister(cls):
        del bpy.types.WindowManager.components_remove_progress

    def execute(self, context):
        print("removing component ", self.component_name, "from all objects")
        total = len(bpy.data.objects)
        for index, object in enumerate(bpy.data.objects):
            if len(object.keys()) > 0:
                if object is not None and is_bevy_component_in_object(object, self.component_name):
                    remove_component_from_object(object, self.component_name)

            progress = index / total
            context.window_manager.components_remove_progress = progress
            # now force refresh the ui
            bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
        context.window_manager.components_remove_progress = -1.0

        return {'FINISHED'}


class RenameHelper(bpy.types.PropertyGroup):
    original_name: bpy.props.StringProperty(name="") # type: ignore
    new_name: bpy.props.StringProperty(name="") # type: ignore

    #object: bpy.props.PointerProperty(type=bpy.types.Object)
    @classmethod
    def register(cls):
        bpy.types.WindowManager.bevy_component_rename_helper = bpy.props.PointerProperty(type=RenameHelper)

    @classmethod
    def unregister(cls):
        # remove handlers & co
        del bpy.types.WindowManager.bevy_component_rename_helper

class OT_rename_component(Operator):
    """Rename Bevy component"""
    bl_idname = "object.rename_bevy_component"
    bl_label = "rename component"
    bl_options = {"UNDO"}

    original_name: bpy.props.StringProperty(default="") # type: ignore
    new_name: StringProperty(
        name="new_name",
        description="new name of component",
    ) # type: ignore

    target_objects: bpy.props.StringProperty() # type: ignore

    @classmethod
    def register(cls):
        bpy.types.WindowManager.components_rename_progress = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper)

    @classmethod
    def unregister(cls):
        del bpy.types.WindowManager.components_rename_progress

    def execute(self, context):
        registry = context.window_manager.components_registry
        type_infos = registry.type_infos
        settings = context.window_manager.bevy_component_rename_helper
        original_name = settings.original_name if self.original_name == "" else self.original_name
        new_name = self.new_name


        print("renaming components: original name", original_name, "new_name", self.new_name, "targets", self.target_objects)
        target_objects = json.loads(self.target_objects)
        errors = []
        total = len(target_objects)

        if original_name != '' and new_name != '' and original_name != new_name and len(target_objects) > 0:
            for index, object_name in enumerate(target_objects):
                object = bpy.data.objects[object_name]
                if object and original_name in get_bevy_components(object) or original_name in object:
                    try:
                        # attempt conversion
                        rename_component(object=object, original_long_name=original_name, new_long_name=new_name)
                    except Exception as error:
                        if '__disable__update' in object:
                            del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure
                        components_metadata = getattr(object, "components_meta", None)
                        if components_metadata:
                            components_metadata = components_metadata.components
                            component_meta = next(filter(lambda component: component["long_name"] == new_name, components_metadata), None)
                            if component_meta:
                                component_meta.invalid = True
                                component_meta.invalid_details = "wrong custom property value, overwrite them by changing the values in the ui or change them & regenerate"

                        errors.append("wrong custom property values to generate target component: object: '" + object.name + "', error: " + str(error))

                progress = index / total
                context.window_manager.components_rename_progress = progress

                try:
                    # now force refresh the ui
                    bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
                except: pass # this is to allow this to run in cli/headless mode

        if len(errors) > 0:
            self.report({'ERROR'}, "Failed to rename component: Errors:" + str(errors))
        else:
            self.report({'INFO'}, "Successfully renamed component")

        #clear data after we are done
        self.original_name = ""
        context.window_manager.bevy_component_rename_helper.original_name = ""
        context.window_manager.components_rename_progress = -1.0

        return {'FINISHED'}


class GenerateComponent_From_custom_property_Operator(Operator):
    """Generate Bevy components from custom property"""
    bl_idname = "object.generate_bevy_component_from_custom_property"
    bl_label = "Generate component from custom_property Operator"
    bl_options = {"UNDO"}

    component_name: StringProperty(
        name="component name",
        description="component to generate custom properties for",
    ) # type: ignore

    def execute(self, context):
        object = context.object

        error = False
        try:
            add_component_from_custom_property(object)
        except Exception as error:
            del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure
            error = True
            self.report({'ERROR'}, "Failed to update propertyGroup values from custom property: Error:" + str(error))
        if not error:
            self.report({'INFO'}, "Successfully generated UI values for custom properties for selected object")
        return {'FINISHED'}


class Fix_Component_Operator(Operator):
    """Attempt to fix Bevy component"""
    bl_idname = "object.fix_bevy_component"
    bl_label = "Fix component (attempts to)"
    bl_options = {"UNDO"}

    component_name: StringProperty(
        name="component name",
        description="component to fix",
    ) # type: ignore

    def execute(self, context):
        object = context.object
        error = False
        try:
            apply_propertyGroup_values_to_object_customProperties_for_component(object, self.component_name)
        except Exception as error:
            if "__disable__update" in object:
                del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure
            error = True
            self.report({'ERROR'}, "Failed to fix component: Error:" + str(error))
        if not error:
            self.report({'INFO'}, "Successfully fixed component (please double check component & its custom property value)")
        return {'FINISHED'}

class Toggle_ComponentVisibility(Operator):
    """Toggle Bevy component's visibility"""
    bl_idname = "object.toggle_bevy_component_visibility"
    bl_label = "Toggle component visibility"
    bl_options = {"UNDO"}

    component_name: StringProperty(
        name="component name",
        description="component to toggle",
    ) # type: ignore

    def execute(self, context):
        object = context.object
        toggle_component(object, self.component_name)
        return {'FINISHED'}
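Each operator registers a bl_idname under bpy.ops.object, so the same actions can also be driven from a script or the console. A short usage sketch; the component long name below is a placeholder and must exist in the loaded registry schema:

import bpy

# add, toggle and remove a component on the active object
bpy.ops.object.add_bevy_component(component_type="my_game::Health")
bpy.ops.object.toggle_bevy_component_visibility(component_name="my_game::Health")
bpy.ops.object.remove_bevy_component(component_name="my_game::Health")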
279
tools/blenvy/bevy_components/components/ui.py
Normal file
@ -0,0 +1,279 @@
|
||||
import json
|
||||
import bpy
|
||||
|
||||
from ..registry.operators import COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT
|
||||
from .metadata import do_object_custom_properties_have_missing_metadata, get_bevy_components
|
||||
from .operators import AddComponentOperator, CopyComponentOperator, Fix_Component_Operator, RemoveComponentOperator, GenerateComponent_From_custom_property_Operator, PasteComponentOperator, Toggle_ComponentVisibility

def draw_propertyGroup( propertyGroup, layout, nesting =[], rootName=None):
    is_enum = getattr(propertyGroup, "with_enum")
    is_list = getattr(propertyGroup, "with_list")
    is_map = getattr(propertyGroup, "with_map")
    # item in our components hierarchy can get the correct propertyGroup by STRINGS because of course, we cannot pass objects to operators...sigh

    # if it is an enum, the first field name is always the list of enum variants, the others are the variants
    field_names = propertyGroup.field_names
    #print("")
    #print("drawing", propertyGroup, nesting, "component_name", rootName)
    if is_enum:
        subrow = layout.row()
        display_name = field_names[0] if propertyGroup.tupple_or_struct == "struct" else ""
        subrow.prop(propertyGroup, field_names[0], text=display_name)
        subrow.separator()
        selection = getattr(propertyGroup, "selection")

        for fname in field_names[1:]:
            if fname == "variant_" + selection:
                subrow = layout.row()
                display_name = fname if propertyGroup.tupple_or_struct == "struct" else ""

                nestedPropertyGroup = getattr(propertyGroup, fname)
                nested = getattr(nestedPropertyGroup, "nested", False)
                #print("nestedPropertyGroup", nestedPropertyGroup, fname, nested)
                if nested:
                    draw_propertyGroup(nestedPropertyGroup, subrow.column(), nesting + [fname], rootName )
                # if an enum variant is not a propertyGroup
                break
    elif is_list:
        item_list = getattr(propertyGroup, "list")
        list_index = getattr(propertyGroup, "list_index")
        box = layout.box()
        split = box.split(factor=0.9)
        list_column, buttons_column = (split.column(), split.column())

        list_column = list_column.box()
        for index, item in enumerate(item_list):
            row = list_column.row()
            draw_propertyGroup(item, row, nesting, rootName)
            icon = 'CHECKBOX_HLT' if list_index == index else 'CHECKBOX_DEHLT'
            op = row.operator('generic_list.select_item', icon=icon, text="")
            op.component_name = rootName
            op.property_group_path = json.dumps(nesting)
            op.selection_index = index

        # various control buttons
        buttons_column.separator()
        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='ADD', text="")
        op.action = 'ADD'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)

        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='REMOVE', text="")
        op.action = 'REMOVE'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)

        buttons_column.separator()
        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='TRIA_UP', text="")
        op.action = 'UP'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)

        row = buttons_column.row()
        op = row.operator('generic_list.list_action', icon='TRIA_DOWN', text="")
        op.action = 'DOWN'
        op.component_name = rootName
        op.property_group_path = json.dumps(nesting)

    elif is_map:
        root = layout.row().column()
        if hasattr(propertyGroup, "list"): # TODO: improve handling of non drawable UI
            keys_list = getattr(propertyGroup, "list")
            values_list = getattr(propertyGroup, "values_list")
            box = root.box()
            row = box.row()
            row.label(text="Add entry:")
            keys_setter = getattr(propertyGroup, "keys_setter")
            draw_propertyGroup(keys_setter, row, nesting, rootName)

            values_setter = getattr(propertyGroup, "values_setter")
            draw_propertyGroup(values_setter, row, nesting, rootName)

            op = row.operator('generic_map.map_action', icon='ADD', text="")
            op.action = 'ADD'
            op.component_name = rootName
            op.property_group_path = json.dumps(nesting)

            box = root.box()
            split = box.split(factor=0.9)
            list_column, buttons_column = (split.column(), split.column())
            list_column = list_column.box()

            for index, item in enumerate(keys_list):
                row = list_column.row()
                draw_propertyGroup(item, row, nesting, rootName)

                value = values_list[index]
                draw_propertyGroup(value, row, nesting, rootName)

                op = row.operator('generic_map.map_action', icon='REMOVE', text="")
                op.action = 'REMOVE'
                op.component_name = rootName
                op.property_group_path = json.dumps(nesting)
                op.target_index = index

            # various control buttons
            buttons_column.separator()
            row = buttons_column.row()

    else:
        for fname in field_names:
            #subrow = layout.row()
            nestedPropertyGroup = getattr(propertyGroup, fname)
            nested = getattr(nestedPropertyGroup, "nested", False)
            display_name = fname if propertyGroup.tupple_or_struct == "struct" else ""

            if nested:
                layout.separator()
                layout.separator()

                layout.label(text=display_name) # this is the name of the field/sub field
                layout.separator()
                subrow = layout.row()
                draw_propertyGroup(nestedPropertyGroup, subrow, nesting + [fname], rootName )
            else:
                subrow = layout.row()
                subrow.prop(propertyGroup, fname, text=display_name)
                subrow.separator()

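# Note on the string plumbing above (illustrative, component name made up): the list/map operators are driven
# entirely by strings, e.g. op.component_name = "my_crate::MyComponent" and
# op.property_group_path = json.dumps(["some_field", "some_nested_field"]), because Blender operators cannot
# be handed Python object references directly; the operator re-resolves the target property group from these.
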
class BEVY_COMPONENTS_PT_ComponentsPanel(bpy.types.Panel):
    bl_idname = "BEVY_COMPONENTS_PT_ComponentsPanel"
    bl_label = ""
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = "Bevy Components"
    bl_context = "objectmode"
    bl_parent_id = "BLENVY_PT_SidePanel"

    @classmethod
    def poll(cls, context):
        return context.window_manager.blenvy.mode == 'COMPONENTS'

    def draw_header(self, context):
        layout = self.layout
        name = context.object.name if context.object is not None else ''
        layout.label(text="Components For "+ name)

    def draw(self, context):
        object = context.object
        layout = self.layout

        # we get & load our component registry
        registry = bpy.context.window_manager.components_registry
        available_components = bpy.context.window_manager.components_list
        registry_has_type_infos = registry.has_type_infos()

        if object is not None:
            row = layout.row(align=True)
            row.prop(available_components, "list", text="Component")
            row.prop(available_components, "filter", text="Filter")

            # add components
            row = layout.row(align=True)
            op = row.operator(AddComponentOperator.bl_idname, text="Add", icon="ADD")
            op.component_type = available_components.list
            row.enabled = available_components.list != ''

            layout.separator()

            # paste components
            row = layout.row(align=True)
            row.operator(PasteComponentOperator.bl_idname, text="Paste component ("+bpy.context.window_manager.copied_source_component_name+")", icon="PASTEDOWN")
            row.enabled = registry_has_type_infos and context.window_manager.copied_source_object != ''

            layout.separator()

            # upgrade custom props to components
            upgradeable_customProperties = registry.has_type_infos() and do_object_custom_properties_have_missing_metadata(context.object)
            if upgradeable_customProperties:
                row = layout.row(align=True)
                op = row.operator(GenerateComponent_From_custom_property_Operator.bl_idname, text="generate components from custom properties", icon="LOOP_FORWARDS")
                layout.separator()

            components_in_object = object.components_meta.components
            #print("components_names", dict(components_bla).keys())

            for component_name in sorted(get_bevy_components(object)): # sorted by component name, practical
                #print("component_name", component_name)
                if component_name == "components_meta":
                    continue
                # anything without metadata gets skipped, we only want to see real components, not all custom props
                component_meta = next(filter(lambda component: component["long_name"] == component_name, components_in_object), None)
                if component_meta == None:
                    continue

                component_invalid = getattr(component_meta, "invalid")
                invalid_details = getattr(component_meta, "invalid_details")
                component_visible = getattr(component_meta, "visible")
                single_field = False

                # our whole row
                box = layout.box()
                row = box.row(align=True)
                # "header"
                row.alert = component_invalid
                row.prop(component_meta, "enabled", text="")
                row.label(text=component_name)

                # we fetch the matching ui property group
                root_propertyGroup_name = registry.get_propertyGroupName_from_longName(component_name)
                """print("root_propertyGroup_name", root_propertyGroup_name)"""
                #print("component_meta", component_meta, component_invalid)

                if root_propertyGroup_name:
                    propertyGroup = getattr(component_meta, root_propertyGroup_name, None)
                    """print("propertyGroup", propertyGroup)"""
                    if propertyGroup:
                        # if the component has only 0 or 1 field names, display inline, otherwise change layout
                        single_field = len(propertyGroup.field_names) < 2
                        prop_group_location = box.row(align=True).column()
                        """if single_field:
                            prop_group_location = row.column(align=True)#.split(factor=0.9)#layout.row(align=False)"""

                        if component_visible:
                            if component_invalid:
                                error_message = invalid_details if component_invalid else "Missing component UI data, please reload registry !"
                                prop_group_location.label(text=error_message)
                            draw_propertyGroup(propertyGroup, prop_group_location, [root_propertyGroup_name], component_name)
                        else:
                            row.label(text="details hidden, click on toggle to display")
                    else:
                        error_message = invalid_details if component_invalid else "Missing component UI data, please reload registry !"
                        row.label(text=error_message)

                # "footer" with additional controls
                if component_invalid:
                    if root_propertyGroup_name:
                        propertyGroup = getattr(component_meta, root_propertyGroup_name, None)
                        if propertyGroup:
                            unit_struct = len(propertyGroup.field_names) == 0
                            if unit_struct:
                                op = row.operator(Fix_Component_Operator.bl_idname, text="", icon="SHADERFX")
                                op.component_name = component_name
                                row.separator()

                op = row.operator(RemoveComponentOperator.bl_idname, text="", icon="X")
                op.component_name = component_name
                row.separator()

                op = row.operator(CopyComponentOperator.bl_idname, text="", icon="COPYDOWN")
                op.source_component_name = component_name
                op.source_object_name = object.name
                row.separator()

                #if not single_field:
                toggle_icon = "TRIA_DOWN" if component_visible else "TRIA_RIGHT"
                op = row.operator(Toggle_ComponentVisibility.bl_idname, text="", icon=toggle_icon)
                op.component_name = component_name
                #row.separator()

        else:
            layout.label(text="Select an object to edit its components")
30
tools/blenvy/bevy_components/helpers.py
Normal file
@ -0,0 +1,30 @@
import bpy
import json

# Makes an empty at the specified location, rotation, scale and stores it in an existing collection, from https://blender.stackexchange.com/questions/51290/how-to-add-empty-object-not-using-bpy-ops
def make_empty(name, location, rotation, scale, collection):
    object_data = None
    empty_obj = bpy.data.objects.new( name, object_data )

    empty_obj.empty_display_size = 2
    empty_obj.empty_display_type = 'PLAIN_AXES'

    empty_obj.name = name
    empty_obj.location = location
    empty_obj.scale = scale
    empty_obj.rotation_euler = rotation

    collection.objects.link( empty_obj )
    #bpy.context.view_layer.update()
    return empty_obj

def upsert_settings(name, data):
    stored_settings = bpy.data.texts[name] if name in bpy.data.texts else bpy.data.texts.new(name)
    stored_settings.clear()
    stored_settings.write(json.dumps(data))

def load_settings(name):
    stored_settings = bpy.data.texts[name] if name in bpy.data.texts else None
    if stored_settings is not None:
        return json.loads(stored_settings.as_string())
    return None
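
# Illustrative usage of the settings helpers (the text-block name below is made up, not one used by the add-on):
#   upsert_settings("example_settings", {"mode": "COMPONENTS"})   # stored as JSON in a Blender text datablock
#   load_settings("example_settings")                             # -> {"mode": "COMPONENTS"}, or None if absent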
0
tools/blenvy/bevy_components/propGroups/__init__.py
Normal file
@ -0,0 +1,178 @@
from bpy_types import PropertyGroup

conversion_tables = {
    "bool": lambda value: value,

    "char": lambda value: '"'+value+'"',
    "str": lambda value: '"'+value+'"',
    "alloc::string::String": lambda value: '"'+str(value)+'"',
    "alloc::borrow::Cow<str>": lambda value: '"'+str(value)+'"',

    "glam::Vec2": lambda value: "Vec2(x:"+str(value[0])+ ", y:"+str(value[1])+")",
    "glam::DVec2": lambda value: "DVec2(x:"+str(value[0])+ ", y:"+str(value[1])+")",
    "glam::UVec2": lambda value: "UVec2(x:"+str(value[0])+ ", y:"+str(value[1])+")",

    "glam::Vec3": lambda value: "Vec3(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+")",
    "glam::Vec3A": lambda value: "Vec3A(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+")",
    "glam::UVec3": lambda value: "UVec3(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+")",

    "glam::Vec4": lambda value: "Vec4(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",
    "glam::DVec4": lambda value: "DVec4(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",
    "glam::UVec4": lambda value: "UVec4(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",

    "glam::Quat": lambda value: "Quat(x:"+str(value[0])+ ", y:"+str(value[1])+ ", z:"+str(value[2])+ ", w:"+str(value[3])+")",

    "bevy_render::color::Color": lambda value: "Rgba(red:"+str(value[0])+ ", green:"+str(value[1])+ ", blue:"+str(value[2])+ ", alpha:"+str(value[3])+ ")",
}

# converts the value of a property group (no matter its complexity) into a single custom property value
# this is more or less a glorified "to_ron()" method (not quite, but close)
def property_group_value_to_custom_property_value(property_group, definition, registry, parent=None, value=None):
    long_name = definition["long_name"]
    type_info = definition["typeInfo"] if "typeInfo" in definition else None
    type_def = definition["type"] if "type" in definition else None
    is_value_type = long_name in conversion_tables
    # print("computing custom property: component name:", long_name, "type_info", type_info, "type_def", type_def, "value", value)

    if is_value_type:
        value = conversion_tables[long_name](value)
    elif type_info == "Struct":
        values = {}
        if len(property_group.field_names) == 0:
            value = '()'
        else:
            for index, field_name in enumerate(property_group.field_names):
                item_long_name = definition["properties"][field_name]["type"]["$ref"].replace("#/$defs/", "")
                item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

                value = getattr(property_group, field_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None
                if item_definition != None:
                    value = property_group_value_to_custom_property_value(child_property_group, item_definition, registry, parent=long_name, value=value)
                else:
                    value = '""'
                values[field_name] = value
            value = values
    elif type_info == "Tuple":
        values = {}
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = property_group_value_to_custom_property_value(child_property_group, item_definition, registry, parent=long_name, value=value)
            else:
                value = '""'
            values[field_name] = value
        value = tuple(e for e in list(values.values()))

    elif type_info == "TupleStruct":
        values = {}
        for index, field_name in enumerate(property_group.field_names):
            #print("toto", index, definition["prefixItems"][index]["type"]["$ref"])
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = property_group_value_to_custom_property_value(child_property_group, item_definition, registry, parent=long_name, value=value)
            else:
                value = '""'
            values[field_name] = value

        value = tuple(e for e in list(values.values()))
    elif type_info == "Enum":
        selected = getattr(property_group, "selection")
        if type_def == "object":
            selection_index = property_group.field_names.index("variant_"+selected)
            variant_name = property_group.field_names[selection_index]
            variant_definition = definition["oneOf"][selection_index-1]
            if "prefixItems" in variant_definition:
                value = getattr(property_group, variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = property_group_value_to_custom_property_value(child_property_group, variant_definition, registry, parent=long_name, value=value)
                value = selected + str(value,) #"{}{},".format(selected ,value)
            elif "properties" in variant_definition:
                value = getattr(property_group, variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = property_group_value_to_custom_property_value(child_property_group, variant_definition, registry, parent=long_name, value=value)
                value = selected + str(value,)
            else:
                value = getattr(property_group, variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None
                if child_property_group:
                    value = property_group_value_to_custom_property_value(child_property_group, variant_definition, registry, parent=long_name, value=value)
                    value = selected + str(value,)
                else:
                    value = selected # here the value of the enum is just the name of the variant
        else:
            value = selected

    elif type_info == "List":
        item_list = getattr(property_group, "list")
        value = []
        for item in item_list:
            item_long_name = getattr(item, "long_name")
            definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None
            if definition != None:
                item_value = property_group_value_to_custom_property_value(item, definition, registry, long_name, None)
                if item_long_name.startswith("wrapper_"): # if we have a "fake" tupple, aka for value types, we need to remove one nesting level
                    item_value = item_value[0]
            else:
                item_value = '""'
            value.append(item_value)

    elif type_info == "Map":
        keys_list = getattr(property_group, "list", {})
        values_list = getattr(property_group, "values_list")
        value = {}
        for index, key in enumerate(keys_list):
            # first get the keys
            key_long_name = getattr(key, "long_name")
            definition = registry.type_infos[key_long_name] if key_long_name in registry.type_infos else None
            if definition != None:
                key_value = property_group_value_to_custom_property_value(key, definition, registry, long_name, None)
                if key_long_name.startswith("wrapper_"): # if we have a "fake" tupple, aka for value types, we need to remove one nesting level
                    key_value = key_value[0]
            else:
                key_value = '""'
            # and then the values
            val = values_list[index]
            value_long_name = getattr(val, "long_name")
            definition = registry.type_infos[value_long_name] if value_long_name in registry.type_infos else None
            if definition != None:
                val_value = property_group_value_to_custom_property_value(val, definition, registry, long_name, None)
                if value_long_name.startswith("wrapper_"): # if we have a "fake" tupple, aka for value types, we need to remove one nesting level
                    val_value = val_value[0]
            else:
                val_value = '""'

            value[key_value] = val_value
        value = str(value).replace('{','@').replace('}','²') # FIXME: eeek !!
    else:
        value = conversion_tables[long_name](value) if is_value_type else value
        value = '""' if isinstance(value, PropertyGroup) else value

    #print("generating custom property value", value, type(value))
    if isinstance(value, str):
        value = value.replace("'", "")

    if parent == None:
        value = str(value).replace("'", "")
        value = value.replace(",)",")")
        value = value.replace("{", "(").replace("}", ")") # FIXME: deal with hashmaps
        value = value.replace("True", "true").replace("False", "false")
        value = value.replace('@', '{').replace('²', '}')
    return value
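
# Illustrative output of the serialization above (component and field names are made up): a registered struct
# component with fields enabled=True and speed=2.0 is written to the custom property as the RON-like string
# "(enabled: true, speed: 2.0)", and a TupleStruct wrapping a single f32 of 42.0 becomes "(42.0)".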
@ -0,0 +1,312 @@
from bpy_types import PropertyGroup
import re

def parse_struct_string(string, start_nesting=0):
    #print("processing struct string", string, "start_nesting", start_nesting)
    fields = {}
    buff = []
    current_fieldName = None
    nesting_level = 0

    start_offset = 0
    end_offset = 0

    for index, char in enumerate(string):
        buff.append(char)
        if char == "," and nesting_level == start_nesting:
            #print("first case", end_offset)
            end_offset = index
            end_offset = len(string) if end_offset == 0 else end_offset

            val = "".join(string[start_offset:end_offset])
            fields[current_fieldName] = val.strip()
            start_offset = index + 1
            #print("done with field name", current_fieldName, "value", fields[current_fieldName])

        if char == "[" or char == "(":
            nesting_level += 1
            if nesting_level == start_nesting:
                start_offset = index + 1
                #print("nesting & setting start offset", start_offset)
            #print("nesting down", nesting_level)

        if char == "]" or char == ")":
            #print("nesting up", nesting_level)
            if nesting_level == start_nesting:
                end_offset = index
                #print("unesting & setting end offset", end_offset)
            nesting_level -= 1

        if char == ":" and nesting_level == start_nesting:
            end_offset = index
            fieldName = "".join(string[start_offset:end_offset])
            current_fieldName = fieldName.strip()
            start_offset = index + 1
            end_offset = 0 # hack
            #print("starting field name", fieldName, "index", index)
            buff = []

    end_offset = len(string) if end_offset == 0 else end_offset
    #print("final start and end offset", start_offset, end_offset, "total length", len(string))

    val = "".join(string[start_offset:end_offset])

    fields[current_fieldName] = val.strip()
    #print("done with all fields", fields)
    return fields

def parse_tuplestruct_string(string, start_nesting=0):
    #print("processing tuppleStruct", string, "start_nesting", start_nesting)
    fields = []
    buff = []
    nesting_level = 0
    field_index = 0

    start_offset = 0
    end_offset = 0
    # todo: strip all stuff before start_nesting

    for index, char in enumerate(string):
        buff.append(char)
        if char == "," and nesting_level == start_nesting:
            end_offset = index
            end_offset = len(string) if end_offset == 0 else end_offset

            val = "".join(string[start_offset:end_offset])
            fields.append(val.strip())
            field_index += 1
            #print("start and end offset", start_offset, end_offset, "total length", len(string))
            #print("done with field name", field_index, "value", fields)
            start_offset = index + 1
            end_offset = 0 # hack

        if char == "[" or char == "(":
            nesting_level += 1
            if nesting_level == start_nesting:
                start_offset = index + 1
                #print("nesting & setting start offset", start_offset)
            #print("nesting down", nesting_level)

        if char == "]" or char == ")":
            if nesting_level == start_nesting:
                end_offset = index
                #print("unesting & setting end offset", end_offset)
            #print("nesting up", nesting_level)
            nesting_level -= 1

    end_offset = len(string) if end_offset == 0 else end_offset
    #print("final start and end offset", start_offset, end_offset, "total length", len(string))

    val = "".join(string[start_offset:end_offset]) #if end_offset != 0 else buff)
    fields.append(val.strip())
    fields = list(filter(lambda entry: entry != '', fields))
    #print("done with all fields", fields)
    return fields
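
# Illustrative behaviour of the parsers above (input strings are made-up values):
#   parse_struct_string("x: 1.0, y: 2.0")              # -> {'x': '1.0', 'y': '2.0'}
#   parse_tuplestruct_string("(1.0, 2.0, 3.0)", 1)     # -> ['1.0', '2.0', '3.0']
# the returned pieces are still strings; casting to Python types happens via type_mappings below.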

def parse_vec2(value, caster, typeName):
    parsed = parse_struct_string(value.replace(typeName, "").replace("(", "").replace(")", ""))
    return [caster(parsed['x']), caster(parsed['y'])]

def parse_vec3(value, caster, typeName):
    parsed = parse_struct_string(value.replace(typeName, "").replace("(", "").replace(")", ""))
    return [caster(parsed['x']), caster(parsed['y']), caster(parsed['z'])]

def parse_vec4(value, caster, typeName):
    parsed = parse_struct_string(value.replace(typeName, "").replace("(", "").replace(")", ""))
    return [caster(parsed['x']), caster(parsed['y']), caster(parsed['z']), caster(parsed['w'])]

def parse_color(value, caster, typeName):
    parsed = parse_struct_string(value.replace(typeName, "").replace("(", "").replace(")", ""))
    return [caster(parsed['red']), caster(parsed['green']), caster(parsed['blue']), caster(parsed['alpha'])]

def to_int(input):
    return int(float(input))

type_mappings = {
    "bool": lambda value: True if value == "true" else False,

    "u8": lambda value: int(value),
    "u16": lambda value: int(value),
    "u32": lambda value: int(value),
    "u64": lambda value: int(value),
    "u128": lambda value: int(value),
    "usize": lambda value: int(value),

    "i8": lambda value: int(value),
    "i16": lambda value: int(value),
    "i32": lambda value: int(value),
    "i64": lambda value: int(value),
    "i128": lambda value: int(value),
    "isize": lambda value: int(value),

    'f32': lambda value: float(value),
    'f64': lambda value: float(value),

    "glam::Vec2": lambda value: parse_vec2(value, float, "Vec2"),
    "glam::DVec2": lambda value: parse_vec2(value, float, "DVec2"),
    "glam::UVec2": lambda value: parse_vec2(value, to_int, "UVec2"),

    'glam::Vec3': lambda value: parse_vec3(value, float, "Vec3"),
    "glam::Vec3A": lambda value: parse_vec3(value, float, "Vec3A"),
    "glam::UVec3": lambda value: parse_vec3(value, to_int, "UVec3"),

    "glam::Vec4": lambda value: parse_vec4(value, float, "Vec4"),
    "glam::DVec4": lambda value: parse_vec4(value, float, "DVec4"),
    "glam::UVec4": lambda value: parse_vec4(value, to_int, "UVec4"),

    "glam::Quat": lambda value: parse_vec4(value, float, "Quat"),

    'alloc::string::String': lambda value: str(value.replace('"', "")),
    'alloc::borrow::Cow<str>': lambda value: str(value.replace('"', "")),

    'bevy_render::color::Color': lambda value: parse_color(value, float, "Rgba"),
    'bevy_ecs::entity::Entity': lambda value: int(value),
}
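
# Illustrative conversions (made-up input strings):
#   type_mappings["bool"]("true")                       # -> True
#   type_mappings["glam::Vec2"]("Vec2(x:1.0, y:2.0)")   # -> [1.0, 2.0]
#   type_mappings["bevy_render::color::Color"]("Rgba(red:1.0, green:0.5, blue:0.0, alpha:1.0)")  # -> [1.0, 0.5, 0.0, 1.0]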

def is_def_value_type(definition, registry):
    if definition == None:
        return True
    value_types_defaults = registry.value_types_defaults
    long_name = definition["long_name"]
    is_value_type = long_name in value_types_defaults
    return is_value_type

# converts the value of a single custom property into a value (values) of a property group
def property_group_value_from_custom_property_value(property_group, definition, registry, value, nesting = []):
    value_types_defaults = registry.value_types_defaults
    type_info = definition["typeInfo"] if "typeInfo" in definition else None
    type_def = definition["type"] if "type" in definition else None
    properties = definition["properties"] if "properties" in definition else {}
    prefixItems = definition["prefixItems"] if "prefixItems" in definition else []
    long_name = definition["long_name"]

    #is_value_type = type_def in value_types_defaults or long_name in value_types_defaults
    is_value_type = long_name in value_types_defaults
    nesting = nesting + [definition["short_name"]]

    if is_value_type:
        value = value.replace("(", "").replace(")", "") # FIXME: temporary, incoherent use of nesting levels between parse_tuplestruct_string & parse_struct_string
        value = type_mappings[long_name](value) if long_name in type_mappings else value
        return value
    elif type_info == "Struct":
        if len(property_group.field_names) != 0:
            custom_property_values = parse_struct_string(value, start_nesting=1 if value.startswith("(") else 0)
            for index, field_name in enumerate(property_group.field_names):
                item_long_name = definition["properties"][field_name]["type"]["$ref"].replace("#/$defs/", "")
                item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

                custom_prop_value = custom_property_values[field_name]
                #print("field name", field_name, "value", custom_prop_value)
                propGroup_value = getattr(property_group, field_name)
                is_property_group = isinstance(propGroup_value, PropertyGroup)
                child_property_group = propGroup_value if is_property_group else None
                if item_definition != None:
                    custom_prop_value = property_group_value_from_custom_property_value(child_property_group, item_definition, registry, value=custom_prop_value, nesting=nesting)
                else:
                    custom_prop_value = custom_prop_value

                if is_def_value_type(item_definition, registry):
                    setattr(property_group, field_name, custom_prop_value)

        else:
            if len(value) > 2: # a unit struct should be two chars long: "()"
                #print("struct with zero fields")
                raise Exception("input string too big for a unit struct")

    elif type_info == "Tuple":
        custom_property_values = parse_tuplestruct_string(value, start_nesting=1 if len(nesting) == 1 else 1)

        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            custom_property_value = custom_property_values[index]

            propGroup_value = getattr(property_group, field_name)
            is_property_group = isinstance(propGroup_value, PropertyGroup)
            child_property_group = propGroup_value if is_property_group else None
            if item_definition != None:
                custom_property_value = property_group_value_from_custom_property_value(child_property_group, item_definition, registry, value=custom_property_value, nesting=nesting)
            if is_def_value_type(item_definition, registry):
                setattr(property_group, field_name, custom_property_value)

    elif type_info == "TupleStruct":
        custom_property_values = parse_tuplestruct_string(value, start_nesting=1 if len(nesting) == 1 else 0)
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            custom_prop_value = custom_property_values[index]

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                custom_prop_value = property_group_value_from_custom_property_value(child_property_group, item_definition, registry, value=custom_prop_value, nesting=nesting)

            if is_def_value_type(item_definition, registry):
                setattr(property_group, field_name, custom_prop_value)

    elif type_info == "Enum":
        field_names = property_group.field_names
        if type_def == "object":
            regexp = re.search(r'(^[^\(]+)(\((.*)\))', value)
            try:
                chosen_variant_raw = regexp.group(1)
                chosen_variant_value = regexp.group(3)
                chosen_variant_name = "variant_" + chosen_variant_raw
            except:
                chosen_variant_raw = value
                chosen_variant_value = ""
                chosen_variant_name = "variant_" + chosen_variant_raw
            selection_index = property_group.field_names.index(chosen_variant_name)
            variant_definition = definition["oneOf"][selection_index-1]
            # first we set WHAT variant is selected
            setattr(property_group, "selection", chosen_variant_raw)

            # and then we set the value of the variant
            if "prefixItems" in variant_definition:
                value = getattr(property_group, chosen_variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                chosen_variant_value = "(" + chosen_variant_value + ")" # needed to handle nesting correctly
                value = property_group_value_from_custom_property_value(child_property_group, variant_definition, registry, value=chosen_variant_value, nesting=nesting)

            elif "properties" in variant_definition:
                value = getattr(property_group, chosen_variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = property_group_value_from_custom_property_value(child_property_group, variant_definition, registry, value=chosen_variant_value, nesting=nesting)

        else:
            chosen_variant_raw = value
            setattr(property_group, field_names[0], chosen_variant_raw)

    elif type_info == "List":
        item_list = getattr(property_group, "list")
        item_long_name = getattr(property_group, "long_name")
        custom_property_values = parse_tuplestruct_string(value, start_nesting=2 if item_long_name.startswith("wrapper_") and value.startswith('(') else 1) # TODO: the additional check here is wrong, there is an issue somewhere in higher level stuff
        # clear list first
        item_list.clear()
        for raw_value in custom_property_values:
            new_entry = item_list.add()
            item_long_name = getattr(new_entry, "long_name") # we get the REAL type name
            definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            if definition != None:
                property_group_value_from_custom_property_value(new_entry, definition, registry, value=raw_value, nesting=nesting)
    else:
        try:
            value = value.replace("(", "").replace(")", "") # FIXME: temporary, incoherent use of nesting levels between parse_tuplestruct_string & parse_struct_string
            value = type_mappings[long_name](value) if long_name in type_mappings else value
            return value
        except:
            pass
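
# Illustrative round trip (hypothetical struct component with fields enabled/speed): given the custom property
# string "(enabled: true, speed: 2.0)", the Struct branch above splits it with parse_struct_string and writes
# each parsed leaf value back onto the matching property group field via setattr.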
95
tools/blenvy/bevy_components/propGroups/process_component.py
Normal file
@ -0,0 +1,95 @@
import bpy
from bpy_types import PropertyGroup
from bpy.props import (PointerProperty)
from . import process_structs
from . import process_tupples
from . import process_enum
from . import process_list
from . import process_map

def process_component(registry, definition, update, extras=None, nesting = [], nesting_long_names = []):
    long_name = definition['long_name']
    short_name = definition["short_name"]
    type_info = definition["typeInfo"] if "typeInfo" in definition else None
    type_def = definition["type"] if "type" in definition else None
    properties = definition["properties"] if "properties" in definition else {}
    prefixItems = definition["prefixItems"] if "prefixItems" in definition else []

    has_properties = len(properties.keys()) > 0
    has_prefixItems = len(prefixItems) > 0
    is_enum = type_info == "Enum"
    is_list = type_info == "List"
    is_map = type_info == "Map"

    __annotations__ = {}
    tupple_or_struct = None

    with_properties = False
    with_items = False
    with_enum = False
    with_list = False
    with_map = False

    if has_properties:
        __annotations__ = __annotations__ | process_structs.process_structs(registry, definition, properties, update, nesting, nesting_long_names)
        with_properties = True
        tupple_or_struct = "struct"

    if has_prefixItems:
        __annotations__ = __annotations__ | process_tupples.process_tupples(registry, definition, prefixItems, update, nesting, nesting_long_names)
        with_items = True
        tupple_or_struct = "tupple"

    if is_enum:
        __annotations__ = __annotations__ | process_enum.process_enum(registry, definition, update, nesting, nesting_long_names)
        with_enum = True

    if is_list:
        __annotations__ = __annotations__ | process_list.process_list(registry, definition, update, nesting, nesting_long_names)
        with_list = True

    if is_map:
        __annotations__ = __annotations__ | process_map.process_map(registry, definition, update, nesting, nesting_long_names)
        with_map = True

    field_names = []
    for a in __annotations__:
        field_names.append(a)

    extras = extras if extras is not None else {
        "long_name": long_name
    }
    root_component = nesting_long_names[0] if len(nesting_long_names) > 0 else long_name
    # print("")
    property_group_params = {
        **extras,
        '__annotations__': __annotations__,
        'tupple_or_struct': tupple_or_struct,
        'field_names': field_names,
        **dict(with_properties = with_properties, with_items= with_items, with_enum= with_enum, with_list= with_list, with_map = with_map, short_name= short_name, long_name=long_name),
        'root_component': root_component
    }
    # FIXME: YIKES, but have not found another way:
    """ Without this, the following does not work:
        - BasicTest
        - NestingTestLevel2
            - BasicTest => the registration & update callback of this one overwrites the first "BasicTest"
    have not found a cleaner workaround so far
    """
    property_group_name = registry.generate_propGroup_name(nesting, long_name)
    (property_group_pointer, property_group_class) = property_group_from_infos(property_group_name, property_group_params)
    # add our component propertyGroup to the registry
    registry.register_component_propertyGroup(property_group_name, property_group_pointer)

    return (property_group_pointer, property_group_class)

def property_group_from_infos(property_group_name, property_group_parameters):
    # print("creating property group", property_group_name)
    property_group_class = type(property_group_name, (PropertyGroup,), property_group_parameters)

    bpy.utils.register_class(property_group_class)
    property_group_pointer = PointerProperty(type=property_group_class)

    return (property_group_pointer, property_group_class)
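
# Illustrative outcome (hypothetical registry entry): for a definition like
#   {"long_name": "my_crate::Speed", "short_name": "Speed", "typeInfo": "TupleStruct", "prefixItems": [...]}
# process_component assembles the matching __annotations__, builds a PropertyGroup subclass via
# property_group_from_infos, registers it with bpy.utils.register_class, and returns the
# (PointerProperty, class) pair that the registry stores for later lookup.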
67
tools/blenvy/bevy_components/propGroups/process_enum.py
Normal file
@ -0,0 +1,67 @@
from bpy.props import (StringProperty)
from . import process_component

def process_enum(registry, definition, update, nesting, nesting_long_names):
    blender_property_mapping = registry.blender_property_mapping
    short_name = definition["short_name"]
    long_name = definition["long_name"]

    type_def = definition["type"] if "type" in definition else None
    variants = definition["oneOf"]

    nesting = nesting + [short_name]
    nesting_long_names = nesting_long_names + [long_name]

    __annotations__ = {}
    original_type_name = "enum"

    # print("processing enum", short_name, long_name, definition)

    if type_def == "object":
        labels = []
        additional_annotations = {}
        for variant in variants:
            variant_name = variant["long_name"]
            variant_prefixed_name = "variant_" + variant_name
            labels.append(variant_name)

            if "prefixItems" in variant:
                #print("tupple variant in enum", variant)
                registry.add_custom_type(variant_name, variant)
                (sub_component_group, _) = process_component.process_component(registry, variant, update, {"nested": True}, nesting, nesting_long_names)
                additional_annotations[variant_prefixed_name] = sub_component_group
            elif "properties" in variant:
                #print("struct variant in enum", variant)
                registry.add_custom_type(variant_name, variant)
                (sub_component_group, _) = process_component.process_component(registry, variant, update, {"nested": True}, nesting, nesting_long_names)
                additional_annotations[variant_prefixed_name] = sub_component_group
            else: # for the cases where it's neither a tupple nor a struct; FIXME: not 100% sure of this
                #print("other variant in enum")
                annotations = {"variant_" + variant_name: StringProperty(default="----<ignore_field>----")}
                additional_annotations = additional_annotations | annotations

        items = tuple((e, e, e) for e in labels)

        blender_property_def = blender_property_mapping[original_type_name]
        blender_property = blender_property_def["type"](
            **blender_property_def["presets"], # we inject presets first
            items=items, # this is needed by Blender's EnumProperty, which we are using here
            update=update
        )
        __annotations__["selection"] = blender_property

        for a in additional_annotations:
            __annotations__[a] = additional_annotations[a]
        # enum_value => what field to display
        # a second field + property for the "content" of the enum
    else:
        items = tuple((e, e, "") for e in variants)
        blender_property_def = blender_property_mapping[original_type_name]
        blender_property = blender_property_def["type"](
            **blender_property_def["presets"], # we inject presets first
            items=items,
            update=update
        )
        __annotations__["selection"] = blender_property

    return __annotations__
37
tools/blenvy/bevy_components/propGroups/process_list.py
Normal file
@ -0,0 +1,37 @@
from bpy.props import (StringProperty, IntProperty, CollectionProperty)
from .utils import generate_wrapper_propertyGroup
from . import process_component

def process_list(registry, definition, update, nesting=[], nesting_long_names=[]):
    value_types_defaults = registry.value_types_defaults
    type_infos = registry.type_infos

    short_name = definition["short_name"]
    long_name = definition["long_name"]
    ref_name = definition["items"]["type"]["$ref"].replace("#/$defs/", "")

    nesting = nesting + [short_name]
    nesting_long_names = nesting_long_names + [long_name]

    item_definition = type_infos[ref_name]
    item_long_name = item_definition["long_name"]
    is_item_value_type = item_long_name in value_types_defaults

    property_group_class = None
    # if the content of the list is a unit type, we need to generate a fake wrapper, otherwise we cannot use layout.prop(group, "propertyName") as there is no propertyName!
    if is_item_value_type:
        property_group_class = generate_wrapper_propertyGroup(long_name, item_long_name, definition["items"]["type"]["$ref"], registry, update)
    else:
        (_, list_content_group_class) = process_component.process_component(registry, item_definition, update, {"nested": True, "long_name": item_long_name}, nesting)
        property_group_class = list_content_group_class

    item_collection = CollectionProperty(type=property_group_class)

    item_long_name = item_long_name if not is_item_value_type else "wrapper_" + item_long_name
    __annotations__ = {
        "list": item_collection,
        "list_index": IntProperty(name="Index for list", default=0, update=update),
        "long_name": StringProperty(default=item_long_name)
    }

    return __annotations__
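
# Illustrative example (hypothetical list types): for a component wrapping Vec<f32>, the f32 items are value
# types, so the collection is built from a generated "wrapper_..." property group whose single field "0" holds
# the float; for Vec<SomeStruct>, the item property group produced by process_component is used directly.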
85
tools/blenvy/bevy_components/propGroups/process_map.py
Normal file
@ -0,0 +1,85 @@
from bpy.props import (StringProperty, IntProperty, CollectionProperty, PointerProperty)
from .utils import generate_wrapper_propertyGroup
from . import process_component

def process_map(registry, definition, update, nesting=[], nesting_long_names=[]):
    value_types_defaults = registry.value_types_defaults
    type_infos = registry.type_infos

    short_name = definition["short_name"]
    long_name = definition["long_name"]

    nesting = nesting + [short_name]
    nesting_long_names = nesting_long_names + [long_name]

    value_ref_name = definition["valueType"]["type"]["$ref"].replace("#/$defs/", "")
    key_ref_name = definition["keyType"]["type"]["$ref"].replace("#/$defs/", "")

    #print("definition", definition)
    __annotations__ = {}
    if key_ref_name in type_infos:
        key_definition = type_infos[key_ref_name]
        original_long_name = key_definition["long_name"]
        is_key_value_type = original_long_name in value_types_defaults
        definition_link = definition["keyType"]["type"]["$ref"]

        # if the content of the list is a unit type, we need to generate a fake wrapper, otherwise we cannot use layout.prop(group, "propertyName") as there is no propertyName!
        if is_key_value_type:
            keys_property_group_class = generate_wrapper_propertyGroup(f"{long_name}_keys", original_long_name, definition_link, registry, update)
        else:
            (_, list_content_group_class) = process_component.process_component(registry, key_definition, update, {"nested": True, "long_name": original_long_name}, nesting, nesting_long_names)
            keys_property_group_class = list_content_group_class

        keys_collection = CollectionProperty(type=keys_property_group_class)
        keys_property_group_pointer = PointerProperty(type=keys_property_group_class)
    else:
        __annotations__["list"] = StringProperty(default="N/A")
        registry.add_missing_typeInfo(key_ref_name)
        # the root component also becomes invalid (in practice it is not always a component, but good enough)
        registry.add_invalid_component(nesting_long_names[0])

    if value_ref_name in type_infos:
        value_definition = type_infos[value_ref_name]
        original_long_name = value_definition["long_name"]
        is_value_value_type = original_long_name in value_types_defaults
        definition_link = definition["valueType"]["type"]["$ref"]

        # if the content of the list is a unit type, we need to generate a fake wrapper, otherwise we cannot use layout.prop(group, "propertyName") as there is no propertyName!
        if is_value_value_type:
            values_property_group_class = generate_wrapper_propertyGroup(f"{long_name}_values", original_long_name, definition_link, registry, update)
        else:
            (_, list_content_group_class) = process_component.process_component(registry, value_definition, update, {"nested": True, "long_name": original_long_name}, nesting, nesting_long_names)
            values_property_group_class = list_content_group_class

        values_collection = CollectionProperty(type=values_property_group_class)
        values_property_group_pointer = PointerProperty(type=values_property_group_class)

    else:
        #__annotations__["list"] = StringProperty(default="N/A")
        registry.add_missing_typeInfo(value_ref_name)
        # the root component also becomes invalid (in practice it is not always a component, but good enough)
        registry.add_invalid_component(nesting_long_names[0])


    if key_ref_name in type_infos and value_ref_name in type_infos:
        __annotations__ = {
            "list": keys_collection,
            "list_index": IntProperty(name="Index for keys", default=0, update=update),
            "keys_setter": keys_property_group_pointer,

            "values_list": values_collection,
            "values_list_index": IntProperty(name="Index for values", default=0, update=update),
            "values_setter": values_property_group_pointer,
        }

    """__annotations__["list"] = StringProperty(default="N/A")
    __annotations__["values_list"] = StringProperty(default="N/A")
    __annotations__["keys_setter"] = StringProperty(default="N/A")"""

    """registry.add_missing_typeInfo(key_ref_name)
    registry.add_missing_typeInfo(value_ref_name)
    # the root component also becomes invalid (in practice it is not always a component, but good enough)
    registry.add_invalid_component(nesting_long_names[0])
    print("setting invalid flag for", nesting_long_names[0])"""

    return __annotations__
48
tools/blenvy/bevy_components/propGroups/process_structs.py
Normal file
@ -0,0 +1,48 @@
from bpy.props import (StringProperty)
from . import process_component

def process_structs(registry, definition, properties, update, nesting, nesting_long_names):
    value_types_defaults = registry.value_types_defaults
    blender_property_mapping = registry.blender_property_mapping
    type_infos = registry.type_infos
    long_name = definition["long_name"]
    short_name = definition["short_name"]

    __annotations__ = {}
    default_values = {}
    nesting = nesting + [short_name]
    nesting_long_names = nesting_long_names + [long_name]

    for property_name in properties.keys():
        ref_name = properties[property_name]["type"]["$ref"].replace("#/$defs/", "")

        if ref_name in type_infos:
            original = type_infos[ref_name]
            original_long_name = original["long_name"]
            is_value_type = original_long_name in value_types_defaults
            value = value_types_defaults[original_long_name] if is_value_type else None
            default_values[property_name] = value

            if is_value_type:
                if original_long_name in blender_property_mapping:
                    blender_property_def = blender_property_mapping[original_long_name]
                    blender_property = blender_property_def["type"](
                        **blender_property_def["presets"], # we inject presets first
                        name = property_name,
                        default = value,
                        update = update
                    )
                    __annotations__[property_name] = blender_property
            else:
                original_long_name = original["long_name"]
                (sub_component_group, _) = process_component.process_component(registry, original, update, {"nested": True, "long_name": original_long_name}, nesting, nesting_long_names)
                __annotations__[property_name] = sub_component_group
            # if there are sub fields, add an attribute "sub_fields", possibly a pointer property? or add a standard field to the type, that is stored under "attributes" and not __annotations__ (better)
        else:
            # component not found in type_infos, generating placeholder
            __annotations__[property_name] = StringProperty(default="N/A")
            registry.add_missing_typeInfo(ref_name)
            # the root component also becomes invalid (in practice it is not always a component, but good enough)
            registry.add_invalid_component(nesting_long_names[0])

    return __annotations__
55
tools/blenvy/bevy_components/propGroups/process_tupples.py
Normal file
@ -0,0 +1,55 @@
from bpy.props import (StringProperty)
from . import process_component

def process_tupples(registry, definition, prefixItems, update, nesting=[], nesting_long_names=[]):
    value_types_defaults = registry.value_types_defaults
    blender_property_mapping = registry.blender_property_mapping
    type_infos = registry.type_infos
    long_name = definition["long_name"]
    short_name = definition["short_name"]

    nesting = nesting + [short_name]
    nesting_long_names = nesting_long_names + [long_name]
    __annotations__ = {}

    default_values = []
    prefix_infos = []
    for index, item in enumerate(prefixItems):
        ref_name = item["type"]["$ref"].replace("#/$defs/", "")

        property_name = str(index) # we cheat a bit, property names are numbers here, as we do not have a real property name

        if ref_name in type_infos:
            original = type_infos[ref_name]
            original_long_name = original["long_name"]
            is_value_type = original_long_name in value_types_defaults

            value = value_types_defaults[original_long_name] if is_value_type else None
            default_values.append(value)
            prefix_infos.append(original)

            if is_value_type:
                if original_long_name in blender_property_mapping:
                    blender_property_def = blender_property_mapping[original_long_name]
                    blender_property = blender_property_def["type"](
                        **blender_property_def["presets"], # we inject presets first
                        name = property_name,
                        default = value,
                        update = update
                    )

                    __annotations__[property_name] = blender_property
            else:
                original_long_name = original["long_name"]
                (sub_component_group, _) = process_component.process_component(registry, original, update, {"nested": True, "long_name": original_long_name}, nesting)
                __annotations__[property_name] = sub_component_group
        else:
            # component not found in type_infos, generating placeholder
            __annotations__[property_name] = StringProperty(default="N/A")
            registry.add_missing_typeInfo(ref_name)
            # the root component also becomes invalid (in practice it is not always a component, but good enough)
            registry.add_invalid_component(nesting_long_names[0])

    return __annotations__
44
tools/blenvy/bevy_components/propGroups/prop_groups.py
Normal file
@ -0,0 +1,44 @@
import bpy
from .conversions_from_prop_group import property_group_value_to_custom_property_value
from .process_component import process_component
from .utils import update_calback_helper

import json

## main callback function, fired whenever any property changes, no matter the nesting level
def update_component(self, context, definition, component_name):
    registry = bpy.context.window_manager.components_registry
    current_object = bpy.context.object
    update_disabled = current_object["__disable__update"] if "__disable__update" in current_object else False
    update_disabled = registry.disable_all_object_updates or update_disabled # global settings
    if update_disabled:
        return
    print("")
    print("update in component", component_name, self, "current_object", current_object.name)
    components_in_object = current_object.components_meta.components
    component_meta = next(filter(lambda component: component["long_name"] == component_name, components_in_object), None)

    if component_meta != None:
        property_group_name = registry.get_propertyGroupName_from_longName(component_name)
        property_group = getattr(component_meta, property_group_name)
        # we use our helper to set the values
        object = context.object
        previous = json.loads(object['bevy_components'])
        previous[component_name] = property_group_value_to_custom_property_value(property_group, definition, registry, None)
        object['bevy_components'] = json.dumps(previous)


def generate_propertyGroups_for_components():
    registry = bpy.context.window_manager.components_registry
    if not registry.has_type_infos():
        registry.load_type_infos()

    type_infos = registry.type_infos

    for component_name in type_infos:
        definition = type_infos[component_name]
        is_component = definition['isComponent'] if "isComponent" in definition else False
        root_property_name = component_name if is_component else None
        process_component(registry, definition, update_calback_helper(definition, update_component, root_property_name), None, [])

    # if we had to add any wrapper types on the fly, process them now
    registry.process_custom_types()
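
# Illustrative shape of the 'bevy_components' custom property updated above (component names are examples):
#   object['bevy_components'] == '{"my_crate::Speed": "(2.0)", "my_crate::Player": "()"}'
# i.e. a JSON dict mapping each component's long name to its RON-like value string.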
63
tools/blenvy/bevy_components/propGroups/utils.py
Normal file
@ -0,0 +1,63 @@
# helper function that returns a lambda, used for the PropertyGroups update function
def update_calback_helper(definition, update, component_name_override):
    return lambda self, context: update(self, context, definition, component_name_override)

import bpy
from bpy.props import (StringProperty)
from bpy_types import PropertyGroup

# this helper creates a "fake"/wrapper property group that is NOT a real type in the registry
# useful for things like value types in list items etc
def generate_wrapper_propertyGroup(wrapped_type_long_name_name, item_long_name, definition_link, registry, update):
    value_types_defaults = registry.value_types_defaults
    blender_property_mapping = registry.blender_property_mapping
    is_item_value_type = item_long_name in value_types_defaults

    wrapper_name = "wrapper_" + wrapped_type_long_name_name

    wrapper_definition = {
        "isComponent": False,
        "isResource": False,
        "items": False,
        "prefixItems": [
            {
                "type": {
                    "$ref": definition_link
                }
            }
        ],
        "short_name": wrapper_name, # FIXME !!!
        "long_name": wrapper_name,
        "type": "array",
        "typeInfo": "TupleStruct"
    }

    # we generate a very small 'hash' for the component name
    property_group_name = registry.generate_propGroup_name(nesting=[], longName=wrapper_name)
    registry.add_custom_type(wrapper_name, wrapper_definition)

    blender_property = StringProperty(default="", update=update)
    if item_long_name in blender_property_mapping:
        value = value_types_defaults[item_long_name] if is_item_value_type else None
        blender_property_def = blender_property_mapping[item_long_name]
        blender_property = blender_property_def["type"](
            **blender_property_def["presets"], # we inject presets first
            name = "property_name",
            default = value,
            update = update
        )

    wrapper_annotations = {
        '0' : blender_property
    }
    property_group_params = {
        '__annotations__': wrapper_annotations,
        'tupple_or_struct': "tupple",
        'field_names': ['0'],
        **dict(with_properties = False, with_items= True, with_enum= False, with_list= False, with_map =False, short_name=wrapper_name, long_name=wrapper_name),
    }
    property_group_class = type(property_group_name, (PropertyGroup,), property_group_params)
    bpy.utils.register_class(property_group_class)

    return property_group_class
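Why the wrapper exists, in short: CollectionProperty items must be PropertyGroup subclasses, so a bare value type (for example a Vec<f32> field on the Bevy side) needs a one-field wrapper before it can live in a Blender list. A simplified hand-written equivalent, with illustrative names and a plain value field instead of the generated '0' field:

import bpy
from bpy.props import FloatProperty, CollectionProperty
from bpy.types import PropertyGroup

class F32Wrapper(PropertyGroup):
    # stands in for a bare f32 inside a list
    value: FloatProperty(default=0.0)

bpy.utils.register_class(F32Wrapper)
bpy.types.Object.float_list = CollectionProperty(type=F32Wrapper)

# usage (assuming an active object): items are added and read like any generated wrapper group
obj = bpy.context.object
item = obj.float_list.add()
item.value = 3.14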
0
tools/blenvy/bevy_components/registry/__init__.py
Normal file
237
tools/blenvy/bevy_components/registry/operators.py
Normal file
@ -0,0 +1,237 @@
|
||||
import os
|
||||
import bpy
|
||||
from bpy_types import (Operator)
|
||||
from bpy.props import (StringProperty)
|
||||
from bpy_extras.io_utils import ImportHelper
|
||||
|
||||
from ..helpers import upsert_settings
|
||||
from ..components.metadata import apply_customProperty_values_to_object_propertyGroups, apply_propertyGroup_values_to_object_customProperties, ensure_metadata_for_all_objects
|
||||
from ..propGroups.prop_groups import generate_propertyGroups_for_components
|
||||
|
||||
class ReloadRegistryOperator(Operator):
|
||||
"""Reloads registry (schema file) from disk, generates propertyGroups for components & ensures all objects have metadata """
|
||||
bl_idname = "object.reload_registry"
|
||||
bl_label = "Reload Registry"
|
||||
bl_options = {"UNDO"}
|
||||
|
||||
component_type: StringProperty(
|
||||
name="component_type",
|
||||
description="component type to add",
|
||||
) # type: ignore
|
||||
|
||||
def execute(self, context):
|
||||
print("reload registry")
|
||||
context.window_manager.components_registry.load_schema()
|
||||
generate_propertyGroups_for_components()
|
||||
print("")
|
||||
print("")
|
||||
print("")
|
||||
ensure_metadata_for_all_objects()
|
||||
|
||||
# now force refresh the ui
|
||||
for area in context.screen.areas:
|
||||
for region in area.regions:
|
||||
if region.type == "UI":
|
||||
region.tag_redraw()
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
class COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL(Operator):
|
||||
"""Apply registry to ALL objects: update the custom property values of all objects based on their definition, if any"""
|
||||
bl_idname = "object.refresh_custom_properties_all"
|
||||
bl_label = "Apply Registry to all objects"
|
||||
bl_options = {"UNDO"}
|
||||
|
||||
@classmethod
|
||||
def register(cls):
|
||||
bpy.types.WindowManager.custom_properties_from_components_progress_all = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper)
|
||||
|
||||
@classmethod
|
||||
def unregister(cls):
|
||||
del bpy.types.WindowManager.custom_properties_from_components_progress_all
|
||||
|
||||
def execute(self, context):
|
||||
print("apply registry to all")
|
||||
#context.window_manager.components_registry.load_schema()
|
||||
total = len(bpy.data.objects)
|
||||
|
||||
for index, object in enumerate(bpy.data.objects):
|
||||
apply_propertyGroup_values_to_object_customProperties(object)
|
||||
progress = index / total
|
||||
context.window_manager.custom_properties_from_components_progress_all = progress
|
||||
# now force refresh the ui
|
||||
bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
|
||||
context.window_manager.custom_properties_from_components_progress_all = -1.0
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
class COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT(Operator):
|
||||
"""Apply registry to CURRENT object: update the custom property values of current object based on their definition, if any"""
|
||||
bl_idname = "object.refresh_custom_properties_current"
|
||||
bl_label = "Apply Registry to current object"
|
||||
bl_options = {"UNDO"}
|
||||
|
||||
@classmethod
|
||||
def register(cls):
|
||||
bpy.types.WindowManager.custom_properties_from_components_progress = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper)
|
||||
|
||||
@classmethod
|
||||
def unregister(cls):
|
||||
del bpy.types.WindowManager.custom_properties_from_components_progress
|
||||
|
||||
def execute(self, context):
|
||||
print("apply registry to current object")
|
||||
object = context.object
|
||||
context.window_manager.custom_properties_from_components_progress = 0.5
|
||||
# now force refresh the ui
|
||||
bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
|
||||
apply_propertyGroup_values_to_object_customProperties(object)
|
||||
|
||||
context.window_manager.custom_properties_from_components_progress = -1.0
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT(Operator):
|
||||
"""Update UI values from custom properties to CURRENT object"""
|
||||
bl_idname = "object.refresh_ui_from_custom_properties_current"
|
||||
bl_label = "Apply custom_properties to current object"
|
||||
bl_options = {"UNDO"}
|
||||
|
||||
@classmethod
|
||||
def register(cls):
|
||||
bpy.types.WindowManager.components_from_custom_properties_progress = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper)
|
||||
|
||||
@classmethod
|
||||
def unregister(cls):
|
||||
del bpy.types.WindowManager.components_from_custom_properties_progress
|
||||
|
||||
def execute(self, context):
|
||||
print("apply custom properties to current object")
|
||||
object = context.object
|
||||
error = False
|
||||
try:
|
||||
apply_customProperty_values_to_object_propertyGroups(object)
|
||||
progress = 0.5
|
||||
context.window_manager.components_from_custom_properties_progress = progress
|
||||
try:
|
||||
# now force refresh the ui
|
||||
bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
|
||||
except Exception: pass # only works when running with a UI
|
||||
|
||||
except Exception as error_message:
|
||||
del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure
|
||||
error = True
|
||||
self.report({'ERROR'}, "Failed to update propertyGroup values from custom property: Error:" + str(error_message))
|
||||
if not error:
|
||||
self.report({'INFO'}, "Sucessfully generated UI values for custom properties for selected object")
|
||||
context.window_manager.components_from_custom_properties_progress = -1.0
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL(Operator):
|
||||
"""Update UI values from custom properties to ALL object"""
|
||||
bl_idname = "object.refresh_ui_from_custom_properties_all"
|
||||
bl_label = "Apply custom_properties to all objects"
|
||||
bl_options = {"UNDO"}
|
||||
|
||||
@classmethod
|
||||
def register(cls):
|
||||
bpy.types.WindowManager.components_from_custom_properties_progress_all = bpy.props.FloatProperty(default=-1.0) #bpy.props.PointerProperty(type=RenameHelper)
|
||||
|
||||
@classmethod
|
||||
def unregister(cls):
|
||||
del bpy.types.WindowManager.components_from_custom_properties_progress_all
|
||||
|
||||
def execute(self, context):
|
||||
print("apply custom properties to all object")
|
||||
bpy.context.window_manager.components_registry.disable_all_object_updates = True
|
||||
errors = []
|
||||
total = len(bpy.data.objects)
|
||||
|
||||
for index, object in enumerate(bpy.data.objects):
|
||||
|
||||
try:
|
||||
apply_customProperty_values_to_object_propertyGroups(object)
|
||||
except Exception as error:
|
||||
del object["__disable__update"] # make sure custom properties are updateable afterwards, even in the case of failure
|
||||
errors.append( "object: '" + object.name + "', error: " + str(error))
|
||||
|
||||
progress = index / total
|
||||
context.window_manager.components_from_custom_properties_progress_all = progress
|
||||
# now force refresh the ui
|
||||
bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
|
||||
|
||||
|
||||
|
||||
if len(errors) > 0:
|
||||
self.report({'ERROR'}, "Failed to update propertyGroup values from custom property: Errors:" + str(errors))
|
||||
else:
|
||||
self.report({'INFO'}, "Sucessfully generated UI values for custom properties for all objects")
|
||||
bpy.context.window_manager.components_registry.disable_all_object_updates = False
|
||||
context.window_manager.components_from_custom_properties_progress_all = -1.0
|
||||
return {'FINISHED'}
|
||||
|
||||
class OT_OpenFilebrowser(Operator, ImportHelper):
|
||||
"""Browse for registry json file"""
|
||||
bl_idname = "generic.open_filebrowser"
|
||||
bl_label = "Open the file browser"
|
||||
|
||||
filter_glob: StringProperty(
|
||||
default='*.json',
|
||||
options={'HIDDEN'}
|
||||
) # type: ignore
|
||||
|
||||
def execute(self, context):
|
||||
"""Do something with the selected file(s)."""
|
||||
#filename, extension = os.path.splitext(self.filepath)
|
||||
file_path = bpy.data.filepath
|
||||
# Get the folder
|
||||
folder_path = os.path.dirname(file_path)
|
||||
relative_path = os.path.relpath(self.filepath, folder_path)
|
||||
|
||||
registry = context.window_manager.components_registry
|
||||
registry.schemaPath = relative_path
|
||||
|
||||
upsert_settings(registry.settings_save_path, {"schemaPath": relative_path})
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class OT_select_object(Operator):
|
||||
"""Select object by name"""
|
||||
bl_idname = "object.select"
|
||||
bl_label = "Select object"
|
||||
bl_options = {"UNDO"}
|
||||
|
||||
object_name: StringProperty(
|
||||
name="object_name",
|
||||
description="object to select's name ",
|
||||
) # type: ignore
|
||||
|
||||
def execute(self, context):
|
||||
if self.object_name:
|
||||
object = bpy.data.objects[self.object_name]
|
||||
scenes_of_object = list(object.users_scene)
|
||||
if len(scenes_of_object) > 0:
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
bpy.context.window.scene = scenes_of_object[0]
|
||||
object.select_set(True)
|
||||
bpy.context.view_layer.objects.active = object
|
||||
return {'FINISHED'}
|
||||
|
||||
class OT_select_component_name_to_replace(Operator):
|
||||
"""Select component name to replace"""
|
||||
bl_idname = "object.select_component_name_to_replace"
|
||||
bl_label = "Select component name for bulk replace"
|
||||
bl_options = {"UNDO"}
|
||||
|
||||
component_name: StringProperty(
|
||||
name="component_name",
|
||||
description="component name to replace",
|
||||
) # type: ignore
|
||||
|
||||
def execute(self, context):
|
||||
context.window_manager.bevy_component_rename_helper.original_name = self.component_name
|
||||
return {'FINISHED'}
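All of the refresh operators above share one progress-reporting idiom: a WindowManager float property registered in the operator's register() hook drives the progress bar (-1.0 meaning idle), and wm.redraw_timer forces a repaint between iterations. A condensed sketch of just that idiom; the operator id, label and property name are illustrative:

import bpy
from bpy.types import Operator

class EXAMPLE_OT_long_task(Operator):
    bl_idname = "example.long_task"
    bl_label = "Example long task"

    @classmethod
    def register(cls):
        bpy.types.WindowManager.example_progress = bpy.props.FloatProperty(default=-1.0)

    @classmethod
    def unregister(cls):
        del bpy.types.WindowManager.example_progress

    def execute(self, context):
        total = max(len(bpy.data.objects), 1)
        for index, obj in enumerate(bpy.data.objects):
            # ... per-object work goes here ...
            context.window_manager.example_progress = index / total
            bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)  # force a UI repaint
        context.window_manager.example_progress = -1.0  # back to "idle", which hides the progress bar
        return {'FINISHED'}

bpy.utils.register_class(EXAMPLE_OT_long_task)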
363
tools/blenvy/bevy_components/registry/registry.py
Normal file
@ -0,0 +1,363 @@
|
||||
import bpy
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from bpy_types import (PropertyGroup)
|
||||
from bpy.props import (StringProperty, BoolProperty, FloatProperty, FloatVectorProperty, IntProperty, IntVectorProperty, EnumProperty, PointerProperty, CollectionProperty)
|
||||
|
||||
from ..helpers import load_settings
|
||||
from ..propGroups.prop_groups import generate_propertyGroups_for_components
|
||||
from ..components.metadata import ComponentMetadata, ensure_metadata_for_all_objects
|
||||
|
||||
# helper class to store missing bevy types information
|
||||
class MissingBevyType(bpy.types.PropertyGroup):
|
||||
long_name: bpy.props.StringProperty(
|
||||
name="type",
|
||||
) # type: ignore
|
||||
|
||||
# helper function to deal with timer
|
||||
def toggle_watcher(self, context):
|
||||
#print("toggling watcher", self.watcher_enabled, watch_schema, self, bpy.app.timers)
|
||||
if not self.watcher_enabled:
|
||||
try:
|
||||
bpy.app.timers.unregister(watch_schema)
|
||||
except Exception as error:
|
||||
pass
|
||||
else:
|
||||
self.watcher_active = True
|
||||
bpy.app.timers.register(watch_schema)
|
||||
|
||||
def watch_schema():
|
||||
self = bpy.context.window_manager.components_registry
|
||||
# print("watching schema file for changes")
|
||||
try:
|
||||
stamp = os.stat(self.schemaFullPath).st_mtime
|
||||
stamp = str(stamp)
|
||||
if stamp != self.schemaTimeStamp and self.schemaTimeStamp != "":
|
||||
print("FILE CHANGED !!", stamp, self.schemaTimeStamp)
|
||||
# see here for better ways : https://stackoverflow.com/questions/11114492/check-if-a-file-is-not-open-nor-being-used-by-another-process
|
||||
"""try:
|
||||
os.rename(path, path)
|
||||
#return False
|
||||
except OSError: # file is in use
|
||||
print("in use")
|
||||
#return True"""
|
||||
#bpy.ops.object.reload_registry()
|
||||
# we need to add an additional delay as the file might not have loaded yet
|
||||
bpy.app.timers.register(lambda: bpy.ops.object.reload_registry(), first_interval=1)
|
||||
|
||||
self.schemaTimeStamp = stamp
|
||||
except Exception as error:
|
||||
pass
|
||||
return self.watcher_poll_frequency if self.watcher_enabled else None
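The watcher relies on bpy.app.timers: a registered function is called again after the number of seconds it returns and stops once it returns None. A stripped-down version of the same mtime-polling idea; the path and interval are placeholders:

import os
import bpy

WATCHED_PATH = "registry.json"  # placeholder path
_last_stamp = None

def poll_file():
    global _last_stamp
    try:
        stamp = str(os.stat(WATCHED_PATH).st_mtime)
        if _last_stamp is not None and stamp != _last_stamp:
            print("schema file changed, reloading")
        _last_stamp = stamp
    except OSError:
        pass  # file missing or unreadable: keep polling anyway
    return 1.0  # call again in one second; returning None would stop the timer

bpy.app.timers.register(poll_file)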
# this is where we store the information for all available components
|
||||
class ComponentsRegistry(PropertyGroup):
|
||||
|
||||
settings_save_path = ".bevy_components_settings" # where to store data in bpy.texts
|
||||
|
||||
schemaPath: bpy.props.StringProperty(
|
||||
name="schema path",
|
||||
description="path to the registry schema file",
|
||||
default="registry.json"
|
||||
)# type: ignore
|
||||
schemaFullPath : bpy.props.StringProperty(
|
||||
name="schema full path",
|
||||
description="path to the registry schema file",
|
||||
)# type: ignore
|
||||
|
||||
registry: bpy.props.StringProperty(
|
||||
name="registry",
|
||||
description="component registry"
|
||||
)# type: ignore
|
||||
|
||||
missing_type_infos: StringProperty(
|
||||
name="missing type infos",
|
||||
description="unregistered/missing type infos"
|
||||
)# type: ignore
|
||||
|
||||
disable_all_object_updates: BoolProperty(name="disable_object_updates", default=False) # type: ignore
|
||||
|
||||
## file watcher
|
||||
watcher_enabled: BoolProperty(name="Watcher_enabled", default=True, update=toggle_watcher)# type: ignore
|
||||
watcher_active: BoolProperty(name = "Flag for watcher status", default = False)# type: ignore
|
||||
|
||||
watcher_poll_frequency: IntProperty(
|
||||
name="watcher poll frequency",
|
||||
description="frequency (s) at wich to poll for changes to the registry file",
|
||||
min=1,
|
||||
max=10,
|
||||
default=1
|
||||
)# type: ignore
|
||||
|
||||
schemaTimeStamp: StringProperty(
|
||||
name="last timestamp of schema file",
|
||||
description="",
|
||||
default=""
|
||||
)# type: ignore
|
||||
|
||||
|
||||
missing_types_list: CollectionProperty(name="missing types list", type=MissingBevyType)# type: ignore
|
||||
missing_types_list_index: IntProperty(name = "Index for missing types list", default = 0)# type: ignore
|
||||
|
||||
blender_property_mapping = {
|
||||
"bool": dict(type=BoolProperty, presets=dict()),
|
||||
|
||||
"u8": dict(type=IntProperty, presets=dict(min=0, max=255)),
|
||||
"u16": dict(type=IntProperty, presets=dict(min=0, max=65535)),
|
||||
"u32": dict(type=IntProperty, presets=dict(min=0)),
|
||||
"u64": dict(type=IntProperty, presets=dict(min=0)),
|
||||
"u128": dict(type=IntProperty, presets=dict(min=0)),
|
||||
"u64": dict(type=IntProperty, presets=dict(min=0)),
|
||||
"usize": dict(type=IntProperty, presets=dict(min=0)),
|
||||
|
||||
"i8": dict(type=IntProperty, presets=dict()),
|
||||
"i16":dict(type=IntProperty, presets=dict()),
|
||||
"i32":dict(type=IntProperty, presets=dict()),
|
||||
"i64":dict(type=IntProperty, presets=dict()),
|
||||
"i128":dict(type=IntProperty, presets=dict()),
|
||||
"isize": dict(type=IntProperty, presets=dict()),
|
||||
|
||||
"f32": dict(type=FloatProperty, presets=dict()),
|
||||
"f64": dict(type=FloatProperty, presets=dict()),
|
||||
|
||||
"glam::Vec2": {"type": FloatVectorProperty, "presets": dict(size = 2) },
|
||||
"glam::DVec2": {"type": FloatVectorProperty, "presets": dict(size = 2) },
|
||||
"glam::UVec2": {"type": FloatVectorProperty, "presets": dict(size = 2) },
|
||||
|
||||
"glam::Vec3": {"type": FloatVectorProperty, "presets": {"size":3} },
|
||||
"glam::Vec3A":{"type": FloatVectorProperty, "presets": {"size":3} },
|
||||
"glam::DVec3":{"type": FloatVectorProperty, "presets": {"size":3} },
|
||||
"glam::UVec3":{"type": FloatVectorProperty, "presets": {"size":3} },
|
||||
|
||||
"glam::Vec4": {"type": FloatVectorProperty, "presets": {"size":4} },
|
||||
"glam::Vec4A": {"type": FloatVectorProperty, "presets": {"size":4} },
|
||||
"glam::DVec4": {"type": FloatVectorProperty, "presets": {"size":4} },
|
||||
"glam::UVec4":{"type": FloatVectorProperty, "presets": {"size":4, "min":0.0} },
|
||||
|
||||
"glam::Quat": {"type": FloatVectorProperty, "presets": {"size":4} },
|
||||
|
||||
"bevy_render::color::Color": dict(type = FloatVectorProperty, presets=dict(subtype='COLOR', size=4)),
|
||||
|
||||
"char": dict(type=StringProperty, presets=dict()),
|
||||
"str": dict(type=StringProperty, presets=dict()),
|
||||
"alloc::string::String": dict(type=StringProperty, presets=dict()),
|
||||
"alloc::borrow::Cow<str>": dict(type=StringProperty, presets=dict()),
|
||||
|
||||
|
||||
"enum": dict(type=EnumProperty, presets=dict()),
|
||||
|
||||
'bevy_ecs::entity::Entity': {"type": IntProperty, "presets": {"min":0} },
|
||||
'bevy_utils::Uuid': dict(type=StringProperty, presets=dict()),
|
||||
|
||||
}
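This mapping is what turns a primitive type name coming from the registry into an actual bpy.props property. A hypothetical helper (not part of the add-on) shows the lookup pattern: the presets are splatted in first, then the per-field name, default and update callback.

def build_blender_property(registry, value_type, field_name, update):
    # returns None for non-primitive types, which are handled elsewhere
    mapping = registry.blender_property_mapping.get(value_type)
    if mapping is None:
        return None
    params = dict(mapping["presets"], name=field_name, update=update)
    if value_type in registry.value_types_defaults:
        params["default"] = registry.value_types_defaults[value_type]
    return mapping["type"](**params)

# e.g. build_blender_property(registry, "glam::Vec3", "translation", update_fn)
# yields FloatVectorProperty(size=3, name="translation", default=[0.0, 0.0, 0.0], update=update_fn)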
value_types_defaults = {
|
||||
"string":" ",
|
||||
"boolean": True,
|
||||
"float": 0.0,
|
||||
"uint": 0,
|
||||
"int":0,
|
||||
|
||||
# todo : we are re-doing the work of the bevy/rust side here, but it seems more practical to always look for the same field name on the blender side for matches
|
||||
"bool": True,
|
||||
|
||||
"u8": 0,
|
||||
"u16":0,
|
||||
"u32":0,
|
||||
"u64":0,
|
||||
"u128":0,
|
||||
"usize":0,
|
||||
|
||||
"i8": 0,
|
||||
"i16":0,
|
||||
"i32":0,
|
||||
"i64":0,
|
||||
"i128":0,
|
||||
"isize":0,
|
||||
|
||||
"f32": 0.0,
|
||||
"f64":0.0,
|
||||
|
||||
"char": " ",
|
||||
"str": " ",
|
||||
"alloc::string::String": " ",
|
||||
"alloc::borrow::Cow<str>": " ",
|
||||
|
||||
"glam::Vec2": [0.0, 0.0],
|
||||
"glam::DVec2": [0.0, 0.0],
|
||||
"glam::UVec2": [0, 0],
|
||||
|
||||
"glam::Vec3": [0.0, 0.0, 0.0],
|
||||
"glam::Vec3A":[0.0, 0.0, 0.0],
|
||||
"glam::UVec3": [0, 0, 0],
|
||||
|
||||
"glam::Vec4": [0.0, 0.0, 0.0, 0.0],
|
||||
"glam::DVec4": [0.0, 0.0, 0.0, 0.0],
|
||||
"glam::UVec4": [0, 0, 0, 0],
|
||||
|
||||
"glam::Quat": [0.0, 0.0, 0.0, 0.0],
|
||||
|
||||
"bevy_render::color::Color": [1.0, 1.0, 0.0, 1.0],
|
||||
|
||||
'bevy_ecs::entity::Entity': 0,#4294967295, # this is the same as Bevy's Entity::Placeholder, too big for Blender..sigh
|
||||
'bevy_utils::Uuid': '"'+str(uuid.uuid4())+'"'
|
||||
|
||||
}
|
||||
|
||||
type_infos = {}
|
||||
type_infos_missing = []
|
||||
component_propertyGroups = {}
|
||||
custom_types_to_add = {}
|
||||
invalid_components = []
|
||||
|
||||
@classmethod
|
||||
def register(cls):
|
||||
bpy.types.WindowManager.components_registry = PointerProperty(type=ComponentsRegistry)
|
||||
bpy.context.window_manager.components_registry.watcher_active = False
|
||||
|
||||
@classmethod
|
||||
def unregister(cls):
|
||||
bpy.context.window_manager.components_registry.watcher_active = False
|
||||
|
||||
for propgroup_name in cls.component_propertyGroups.keys():
|
||||
try:
|
||||
delattr(ComponentMetadata, propgroup_name)
|
||||
#print("unregistered propertyGroup", propgroup_name)
|
||||
except Exception as error:
|
||||
pass
|
||||
#print("failed to remove", error, "ComponentMetadata")
|
||||
|
||||
try:
|
||||
bpy.app.timers.unregister(watch_schema)
|
||||
except Exception as error:
|
||||
pass
|
||||
|
||||
del bpy.types.WindowManager.components_registry
|
||||
|
||||
def load_schema(self):
|
||||
print("load schema", self)
|
||||
# cleanup previous data if any
|
||||
self.propGroupIdCounter = 0
|
||||
self.long_names_to_propgroup_names.clear()
|
||||
self.missing_types_list.clear()
|
||||
self.type_infos.clear()
|
||||
self.type_infos_missing.clear()
|
||||
self.component_propertyGroups.clear()
|
||||
self.custom_types_to_add.clear()
|
||||
self.invalid_components.clear()
|
||||
|
||||
# now prepare paths to load data
|
||||
file_path = bpy.data.filepath
|
||||
# Get the folder
|
||||
folder_path = os.path.dirname(file_path)
|
||||
path = os.path.join(folder_path, self.schemaPath)
|
||||
self.schemaFullPath = path
|
||||
|
||||
f = Path(bpy.path.abspath(path)) # make a path object of abs path
|
||||
with open(path) as f:
|
||||
data = json.load(f)
|
||||
defs = data["$defs"]
|
||||
self.registry = json.dumps(defs) # FIXME:meh ?
|
||||
|
||||
# start timer
|
||||
if not self.watcher_active and self.watcher_enabled:
|
||||
self.watcher_active = True
|
||||
print("registering function", watch_schema)
|
||||
bpy.app.timers.register(watch_schema)
|
||||
|
||||
|
||||
# we load the json once, so we do not need to do it over & over again
|
||||
def load_type_infos(self):
|
||||
print("load type infos")
|
||||
ComponentsRegistry.type_infos = json.loads(self.registry)
|
||||
|
||||
def has_type_infos(self):
|
||||
return len(self.type_infos.keys()) != 0
|
||||
|
||||
def load_settings(self):
|
||||
print("loading settings")
|
||||
settings = load_settings(self.settings_save_path)
|
||||
|
||||
if settings!= None:
|
||||
print("settings", settings)
|
||||
self.schemaPath = settings["schemaPath"]
|
||||
self.load_schema()
|
||||
generate_propertyGroups_for_components()
|
||||
ensure_metadata_for_all_objects()
|
||||
|
||||
|
||||
# we keep a list of component propertyGroup around
|
||||
def register_component_propertyGroup(self, name, propertyGroup):
|
||||
self.component_propertyGroups[name] = propertyGroup
|
||||
|
||||
# to be able to give the user more feedback on any missing/unregistered types in their schema file
|
||||
def add_missing_typeInfo(self, long_name):
|
||||
if not long_name in self.type_infos_missing:
|
||||
self.type_infos_missing.append(long_name)
|
||||
setattr(self, "missing_type_infos", str(self.type_infos_missing))
|
||||
item = self.missing_types_list.add()
|
||||
item.long_name = long_name
|
||||
|
||||
def add_custom_type(self, long_name, type_definition):
|
||||
self.custom_types_to_add[long_name] = type_definition
|
||||
|
||||
def process_custom_types(self):
|
||||
for long_name in self.custom_types_to_add:
|
||||
self.type_infos[long_name] = self.custom_types_to_add[long_name]
|
||||
self.custom_types_to_add.clear()
|
||||
|
||||
# add an invalid component to the list (long name)
|
||||
def add_invalid_component(self, component_name):
|
||||
self.invalid_components.append(component_name)
|
||||
|
||||
|
||||
###########
|
||||
|
||||
propGroupIdCounter: IntProperty(
|
||||
name="propGroupIdCounter",
|
||||
description="",
|
||||
min=0,
|
||||
max=1000000000,
|
||||
default=0
|
||||
) # type: ignore
|
||||
|
||||
long_names_to_propgroup_names = {}
|
||||
|
||||
# generate propGroup name from nesting level & longName: each longName + nesting combination is unique
|
||||
def generate_propGroup_name(self, nesting, longName):
|
||||
#print("gen propGroup name for", shortName, nesting)
|
||||
self.propGroupIdCounter += 1
|
||||
|
||||
propGroupIndex = str(self.propGroupIdCounter)
|
||||
propGroupName = propGroupIndex + "_ui"
|
||||
|
||||
key = str(nesting) + longName if len(nesting) > 0 else longName
|
||||
self.long_names_to_propgroup_names[key] = propGroupName
|
||||
return propGroupName
|
||||
|
||||
def get_propertyGroupName_from_longName(self, longName):
|
||||
return self.long_names_to_propgroup_names.get(longName, None)
|
||||
|
||||
def long_name_to_key():
|
||||
pass
|
||||
|
||||
###########
|
||||
|
||||
"""
|
||||
object[component_definition.name] = 0.5
|
||||
property_manager = object.id_properties_ui(component_definition.name)
|
||||
property_manager.update(min=-10, max=10, soft_min=-5, soft_max=5)
|
||||
|
||||
print("property_manager", property_manager)
|
||||
|
||||
object[component_definition.name] = [0.8,0.2,1.0]
|
||||
property_manager = object.id_properties_ui(component_definition.name)
|
||||
property_manager.update(subtype='COLOR')
|
||||
|
||||
#IDPropertyUIManager
|
||||
#rna_ui = object[component_definition.name].get('_RNA_UI')
|
||||
"""
339
tools/blenvy/bevy_components/registry/ui.py
Normal file
@ -0,0 +1,339 @@
|
||||
import json
|
||||
import bpy
|
||||
from bpy_types import (UIList)
|
||||
from bpy.props import (StringProperty)
|
||||
|
||||
from ..components.operators import OT_rename_component, RemoveComponentFromAllObjectsOperator, RemoveComponentOperator
|
||||
from .operators import(
|
||||
COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL,
|
||||
COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT,
|
||||
OT_OpenFilebrowser,
|
||||
OT_select_component_name_to_replace,
|
||||
OT_select_object, ReloadRegistryOperator,
|
||||
COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL,
|
||||
COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT)
|
||||
|
||||
class BEVY_COMPONENTS_PT_Configuration(bpy.types.Panel):
|
||||
bl_idname = "BEVY_COMPONENTS_PT_Configuration"
|
||||
bl_label = "Components"
|
||||
bl_space_type = 'VIEW_3D'
|
||||
bl_region_type = 'UI'
|
||||
bl_category = "Bevy Components"
|
||||
bl_context = "objectmode"
|
||||
bl_parent_id = "BLENVY_PT_SidePanel"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
bl_description = "list of missing/unregistered type from the bevy side"
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return context.window_manager.blenvy.mode == 'SETTINGS'
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
registry = context.window_manager.components_registry
|
||||
|
||||
|
||||
row = layout.row()
|
||||
col = row.column()
|
||||
col.enabled = False
|
||||
col.prop(registry, "schemaPath", text="Registry Schema path")
|
||||
col = row.column()
|
||||
col.operator(OT_OpenFilebrowser.bl_idname, text="Browse for registry schema file (json)")
|
||||
|
||||
layout.separator()
|
||||
layout.operator(ReloadRegistryOperator.bl_idname, text="reload registry" , icon="FILE_REFRESH")
|
||||
|
||||
layout.separator()
|
||||
row = layout.row()
|
||||
|
||||
row.prop(registry, "watcher_enabled", text="enable registry file polling")
|
||||
row.prop(registry, "watcher_poll_frequency", text="registry file poll frequency (s)")
|
||||
|
||||
layout.separator()
|
||||
layout.separator()
|
||||
|
||||
|
||||
class BEVY_COMPONENTS_PT_AdvancedToolsPanel(bpy.types.Panel):
|
||||
"""panel listing all the missing bevy types in the schema"""
|
||||
bl_idname = "BEVY_COMPONENTS_PT_AdvancedToolsPanel"
|
||||
bl_label = "Advanced tools"
|
||||
bl_space_type = 'VIEW_3D'
|
||||
bl_region_type = 'UI'
|
||||
bl_category = "Bevy Components"
|
||||
bl_context = "objectmode"
|
||||
bl_parent_id = "BLENVY_PT_SidePanel"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
bl_description = "advanced tooling"
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return context.window_manager.blenvy.mode == 'TOOLS'
|
||||
|
||||
def draw_invalid_or_unregistered_header(self, layout, items):
|
||||
row = layout.row()
|
||||
|
||||
for item in items:
|
||||
col = row.column()
|
||||
col.label(text=item)
|
||||
|
||||
|
||||
def draw_invalid_or_unregistered(self, layout, status, component_name, object):
|
||||
available_components = bpy.context.window_manager.components_list
|
||||
registry = bpy.context.window_manager.components_registry
|
||||
registry_has_type_infos = registry.has_type_infos()
|
||||
|
||||
row = layout.row()
|
||||
|
||||
col = row.column()
|
||||
col.label(text=component_name)
|
||||
|
||||
col = row.column()
|
||||
operator = col.operator(OT_select_object.bl_idname, text=object.name)
|
||||
operator.object_name = object.name
|
||||
|
||||
col = row.column()
|
||||
col.label(text=status)
|
||||
|
||||
col = row.column()
|
||||
col.prop(available_components, "list", text="")
|
||||
|
||||
col = row.column()
|
||||
operator = col.operator(OT_rename_component.bl_idname, text="", icon="SHADERFX") #rename
|
||||
new_name = registry.type_infos[available_components.list]['long_name'] if available_components.list in registry.type_infos else ""
|
||||
operator.original_name = component_name
|
||||
operator.target_objects = json.dumps([object.name])
|
||||
operator.new_name = new_name
|
||||
col.enabled = registry_has_type_infos and component_name != "" and component_name != new_name
|
||||
|
||||
|
||||
col = row.column()
|
||||
operator = col.operator(RemoveComponentOperator.bl_idname, text="", icon="X")
|
||||
operator.object_name = object.name
|
||||
operator.component_name = component_name
|
||||
|
||||
col = row.column()
|
||||
col = row.column()
|
||||
operator = col.operator(OT_select_component_name_to_replace.bl_idname, text="", icon="EYEDROPPER") #text="select for rename",
|
||||
operator.component_name = component_name
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
registry = bpy.context.window_manager.components_registry
|
||||
registry_has_type_infos = registry.has_type_infos()
|
||||
selected_object = context.selected_objects[0] if len(context.selected_objects) > 0 else None
|
||||
available_components = bpy.context.window_manager.components_list
|
||||
|
||||
row = layout.row()
|
||||
box= row.box()
|
||||
box.label(text="Invalid/ unregistered components")
|
||||
|
||||
objects_with_invalid_components = []
|
||||
invalid_component_names = []
|
||||
|
||||
self.draw_invalid_or_unregistered_header(layout, ["Component", "Object", "Status", "Target"])
|
||||
|
||||
for object in bpy.data.objects: # TODO: very inefficient
|
||||
if len(object.keys()) > 0:
|
||||
if "components_meta" in object:
|
||||
components_metadata = object.components_meta.components
|
||||
comp_names = []
|
||||
for index, component_meta in enumerate(components_metadata):
|
||||
long_name = component_meta.long_name
|
||||
if component_meta.invalid:
|
||||
self.draw_invalid_or_unregistered(layout, "Invalid", long_name, object)
|
||||
|
||||
if not object.name in objects_with_invalid_components:
|
||||
objects_with_invalid_components.append(object.name)
|
||||
|
||||
if not long_name in invalid_component_names:
|
||||
invalid_component_names.append(long_name)
|
||||
|
||||
|
||||
comp_names.append(long_name)
|
||||
|
||||
for custom_property in object.keys():
|
||||
if custom_property != 'components_meta' and custom_property != 'bevy_components' and custom_property not in comp_names:
|
||||
self.draw_invalid_or_unregistered(layout, "Unregistered", custom_property, object)
|
||||
|
||||
if not object.name in objects_with_invalid_components:
|
||||
objects_with_invalid_components.append(object.name)
|
||||
"""if not long_name in invalid_component_names:
|
||||
invalid_component_names.append(custom_property)""" # FIXME
|
||||
layout.separator()
|
||||
layout.separator()
|
||||
original_name = bpy.context.window_manager.bevy_component_rename_helper.original_name
|
||||
|
||||
row = layout.row()
|
||||
col = row.column()
|
||||
col.label(text="Original")
|
||||
col = row.column()
|
||||
col.label(text="New")
|
||||
col = row.column()
|
||||
col.label(text="------")
|
||||
|
||||
row = layout.row()
|
||||
col = row.column()
|
||||
box = col.box()
|
||||
box.label(text=original_name)
|
||||
|
||||
col = row.column()
|
||||
col.prop(available_components, "list", text="")
|
||||
#row.prop(available_components, "filter",text="Filter")
|
||||
|
||||
col = row.column()
|
||||
components_rename_progress = context.window_manager.components_rename_progress
|
||||
|
||||
if components_rename_progress == -1.0:
|
||||
operator = col.operator(OT_rename_component.bl_idname, text="apply", icon="SHADERFX")
|
||||
operator.target_objects = json.dumps(objects_with_invalid_components)
|
||||
new_name = registry.type_infos[available_components.list]['short_name'] if available_components.list in registry.type_infos else ""
|
||||
operator.new_name = new_name
|
||||
col.enabled = registry_has_type_infos and original_name != "" and original_name != new_name
|
||||
else:
|
||||
if hasattr(layout,"progress") : # only for Blender > 4.0
|
||||
col.progress(factor = components_rename_progress, text=f"updating {components_rename_progress * 100.0:.2f}%")
|
||||
|
||||
col = row.column()
|
||||
remove_components_progress = context.window_manager.components_remove_progress
|
||||
if remove_components_progress == -1.0:
|
||||
operator = row.operator(RemoveComponentFromAllObjectsOperator.bl_idname, text="", icon="X")
|
||||
operator.component_name = context.window_manager.bevy_component_rename_helper.original_name
|
||||
col.enabled = registry_has_type_infos and original_name != ""
|
||||
else:
|
||||
if hasattr(layout,"progress") : # only for Blender > 4.0
|
||||
col.progress(factor = remove_components_progress, text=f"updating {remove_components_progress * 100.0:.2f}%")
|
||||
|
||||
layout.separator()
|
||||
layout.separator()
|
||||
row = layout.row()
|
||||
box= row.box()
|
||||
box.label(text="Conversions between custom properties and components & vice-versa")
|
||||
|
||||
row = layout.row()
|
||||
row.label(text="WARNING ! The following operations will overwrite your existing custom properties if they have matching types on the bevy side !")
|
||||
row.alert = True
|
||||
|
||||
##
|
||||
row = layout.row()
|
||||
custom_properties_from_components_progress_current = context.window_manager.custom_properties_from_components_progress
|
||||
|
||||
if custom_properties_from_components_progress_current == -1.0:
|
||||
row.operator(COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_CURRENT.bl_idname, text="update custom properties of current object" , icon="LOOP_FORWARDS")
|
||||
row.enabled = registry_has_type_infos and selected_object is not None
|
||||
else:
|
||||
if hasattr(layout,"progress") : # only for Blender > 4.0
|
||||
layout.progress(factor = custom_properties_from_components_progress_current, text=f"updating {custom_properties_from_components_progress_current * 100.0:.2f}%")
|
||||
|
||||
layout.separator()
|
||||
row = layout.row()
|
||||
custom_properties_from_components_progress_all = context.window_manager.custom_properties_from_components_progress_all
|
||||
|
||||
if custom_properties_from_components_progress_all == -1.0:
|
||||
row.operator(COMPONENTS_OT_REFRESH_CUSTOM_PROPERTIES_ALL.bl_idname, text="update custom properties of ALL objects" , icon="LOOP_FORWARDS")
|
||||
row.enabled = registry_has_type_infos
|
||||
else:
|
||||
if hasattr(layout,"progress") : # only for Blender > 4.0
|
||||
layout.progress(factor = custom_properties_from_components_progress_all, text=f"updating {custom_properties_from_components_progress_all * 100.0:.2f}%")
|
||||
|
||||
########################
|
||||
|
||||
row = layout.row()
|
||||
row.label(text="WARNING ! The following operations will try to overwrite your existing ui values if they have matching types on the bevy side !")
|
||||
row.alert = True
|
||||
|
||||
components_from_custom_properties_progress_current = context.window_manager.components_from_custom_properties_progress
|
||||
|
||||
row = layout.row()
|
||||
if components_from_custom_properties_progress_current == -1.0:
|
||||
row.operator(COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_CURRENT.bl_idname, text="update UI FROM custom properties of current object" , icon="LOOP_BACK")
|
||||
row.enabled = registry_has_type_infos and selected_object is not None
|
||||
else:
|
||||
if hasattr(layout,"progress") : # only for Blender > 4.0
|
||||
layout.progress(factor = components_from_custom_properties_progress_current, text=f"updating {components_from_custom_properties_progress_current * 100.0:.2f}%")
|
||||
|
||||
layout.separator()
|
||||
row = layout.row()
|
||||
components_from_custom_properties_progress_all = context.window_manager.components_from_custom_properties_progress_all
|
||||
|
||||
if components_from_custom_properties_progress_all == -1.0:
|
||||
row.operator(COMPONENTS_OT_REFRESH_PROPGROUPS_FROM_CUSTOM_PROPERTIES_ALL.bl_idname, text="update UI FROM custom properties of ALL objects" , icon="LOOP_BACK")
|
||||
row.enabled = registry_has_type_infos
|
||||
else:
|
||||
if hasattr(layout,"progress") : # only for Blender > 4.0
|
||||
layout.progress(factor = components_from_custom_properties_progress_all, text=f"updating {components_from_custom_properties_progress_all * 100.0:.2f}%")
|
||||
|
||||
|
||||
class BEVY_COMPONENTS_PT_MissingTypesPanel(bpy.types.Panel):
|
||||
"""panel listing all the missing bevy types in the schema"""
|
||||
bl_idname = "BEVY_COMPONENTS_PT_MissingTypesPanel"
|
||||
bl_label = "Missing/Unregistered Types"
|
||||
bl_space_type = 'VIEW_3D'
|
||||
bl_region_type = 'UI'
|
||||
bl_category = "Bevy Components"
|
||||
bl_context = "objectmode"
|
||||
bl_parent_id = "BLENVY_PT_SidePanel"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
bl_description = "list of missing/unregistered type from the bevy side"
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return context.window_manager.blenvy.mode == 'TOOLS'
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
registry = bpy.context.window_manager.components_registry
|
||||
|
||||
layout.label(text="Missing types ")
|
||||
layout.template_list("MISSING_TYPES_UL_List", "Missing types list", registry, "missing_types_list", registry, "missing_types_list_index")
|
||||
|
||||
|
||||
class MISSING_TYPES_UL_List(UIList):
|
||||
"""Missing components UIList."""
|
||||
|
||||
use_filter_name_reverse: bpy.props.BoolProperty(
|
||||
name="Reverse Name",
|
||||
default=False,
|
||||
options=set(),
|
||||
description="Reverse name filtering",
|
||||
) # type: ignore
|
||||
|
||||
use_order_name = bpy.props.BoolProperty(name="Name", default=False, options=set(),
|
||||
description="Sort groups by their name (case-insensitive)")
|
||||
|
||||
def filter_items__(self, context, data, propname):
|
||||
"""Filter and order items in the list."""
|
||||
# We initialize filtered and ordered as empty lists. Notice that if all sorting and filtering is disabled, we will return these empty.
|
||||
filtered = []
|
||||
ordered = []
|
||||
items = getattr(data, propname)
|
||||
|
||||
helper_funcs = bpy.types.UI_UL_list
|
||||
|
||||
|
||||
print("filter, order", items, self, dict(self))
|
||||
if self.filter_name:
|
||||
print("ssdfs", self.filter_name)
|
||||
filtered= helper_funcs.filter_items_by_name(self.filter_name, self.bitflag_filter_item, items, "long_name", reverse=self.use_filter_name_reverse)
|
||||
|
||||
if not filtered:
|
||||
filtered = [self.bitflag_filter_item] * len(items)
|
||||
|
||||
if self.use_order_name:
|
||||
ordered = helper_funcs.sort_items_by_name(items, "name")
|
||||
|
||||
|
||||
return filtered, ordered
|
||||
|
||||
|
||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
||||
if self.layout_type in {'DEFAULT', 'COMPACT'}:
|
||||
row = layout.row()
|
||||
#row.enabled = False
|
||||
#row.alert = True
|
||||
row.prop(item, "long_name", text="")
|
||||
|
||||
elif self.layout_type in {'GRID'}:
|
||||
layout.alignment = 'CENTER'
|
||||
row = layout.row()
|
||||
row.prop(item, "long_name", text="")
0
tools/blenvy/blueprints/__init__.py
Normal file
@ -28,6 +28,9 @@ class OT_select_blueprint(Operator):
|
||||
bpy.context.window.scene = scene
|
||||
bpy.context.view_layer.objects.active = None
|
||||
bpy.context.view_layer.active_layer_collection = bpy.context.view_layer.layer_collection.children[self.blueprint_collection_name]
|
||||
#bpy.context.view_layer.collections.active = collection
|
||||
# bpy.context.view_layer.active_layer_collection = collection
|
||||
"""for o in collection.objects:
|
||||
o.select_set(True)"""
|
||||
|
||||
return {'FINISHED'}
|
0
tools/blenvy/core/__init__.py
Normal file
@ -1,10 +1,6 @@
|
||||
import bpy
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from bpy_types import (PropertyGroup)
|
||||
from bpy.props import (StringProperty, BoolProperty, FloatProperty, FloatVectorProperty, IntProperty, IntVectorProperty, EnumProperty, PointerProperty, CollectionProperty)
|
||||
from bpy.props import (EnumProperty, PointerProperty)
|
||||
|
||||
|
||||
class BlenvyManager(PropertyGroup):
|
||||
@ -15,6 +11,7 @@ class BlenvyManager(PropertyGroup):
|
||||
('BLUEPRINTS', "Blueprints", ""),
|
||||
('ASSETS', "Assets", ""),
|
||||
('SETTINGS', "Settings", ""),
|
||||
('TOOLS', "Tools", ""),
|
||||
)
|
||||
) # type: ignore
|
||||
|
@ -1,7 +1,8 @@
|
||||
import os
|
||||
import bpy
|
||||
from bpy_types import (Operator)
|
||||
from bpy.props import (StringProperty, EnumProperty)
|
||||
from bpy.props import (EnumProperty)
|
||||
|
||||
|
||||
|
||||
|
||||
class OT_switch_bevy_tooling(Operator):
|
||||
"""Switch bevy tooling"""
|
||||
@ -12,15 +13,17 @@ class OT_switch_bevy_tooling(Operator):
|
||||
|
||||
tool: EnumProperty(
|
||||
items=(
|
||||
('COMPONENTS', "Components", ""),
|
||||
('COMPONENTS', "Components", "Switch to components"),
|
||||
('BLUEPRINTS', "Blueprints", ""),
|
||||
('ASSETS', "Assets", ""),
|
||||
('SETTINGS', "Settings", ""),
|
||||
|
||||
('TOOLS', "Tools", ""),
|
||||
)
|
||||
) # type: ignore
|
||||
|
||||
|
||||
@classmethod
|
||||
def description(cls, context, properties):
|
||||
return properties.tool
|
||||
|
||||
def execute(self, context):
|
||||
context.window_manager.blenvy.mode = self.tool
|
@ -39,8 +39,6 @@ class BLENVY_PT_SidePanel(bpy.types.Panel):
|
||||
world_scene_active = context.scene.name in main_scene_names
|
||||
library_scene_active = context.scene.name in library_scene_names
|
||||
|
||||
layout.label(text="Active Blueprint: "+ active_collection.name.upper())
|
||||
|
||||
|
||||
# Now to actual drawing of the UI
|
||||
target = row.box() if active_mode == 'COMPONENTS' else row
|
||||
@ -56,24 +54,20 @@ class BLENVY_PT_SidePanel(bpy.types.Panel):
|
||||
tool_switch_components.tool = "ASSETS"
|
||||
|
||||
target = row.box() if active_mode == 'SETTINGS' else row
|
||||
tool_switch_components = target.operator(operator="bevy.tooling_switch", text="", icon="TOOL_SETTINGS")
|
||||
tool_switch_components = target.operator(operator="bevy.tooling_switch", text="", icon="SETTINGS")
|
||||
tool_switch_components.tool = "SETTINGS"
|
||||
|
||||
"""row.label(text="", icon="PROPERTIES")
|
||||
row.label(text="", icon="PACKAGE")
|
||||
row.label(text="", icon="ASSET_MANAGER")
|
||||
row.label(text="", icon="TOOL_SETTINGS")"""
|
||||
|
||||
|
||||
|
||||
|
||||
target = row.box() if active_mode == 'TOOLS' else row
|
||||
tool_switch_components = target.operator(operator="bevy.tooling_switch", text="", icon="TOOL_SETTINGS")
|
||||
tool_switch_components.tool = "TOOLS"
|
||||
|
||||
# Debug stuff
|
||||
"""layout.label(text="Active Blueprint: "+ active_collection.name.upper())
|
||||
layout.label(text="World scene active: "+ str(world_scene_active))
|
||||
layout.label(text="Library scene active: "+ str(library_scene_active))
|
||||
layout.label(text=blenvy.mode)"""
|
||||
|
||||
layout.label(text=blenvy.mode)
|
||||
|
||||
if blenvy.mode == "SETTINGS":
|
||||
"""if blenvy.mode == "SETTINGS":
|
||||
header, panel = layout.panel("auto_export", default_closed=False)
|
||||
header.label(text="Auto Export")
|
||||
if panel:
|
||||
@ -92,11 +86,11 @@ class BLENVY_PT_SidePanel(bpy.types.Panel):
|
||||
op.gltf_export_id = "gltf_auto_export" # we specify that we are in a special case
|
||||
|
||||
op = layout.operator("EXPORT_SCENES_OT_auto_gltf", text="Auto Export Settings")
|
||||
op.auto_export = True
|
||||
op.auto_export = True"""
|
||||
|
||||
header, panel = layout.panel("components", default_closed=False)
|
||||
header.label(text="Components")
|
||||
if panel:
|
||||
panel.label(text="YOOO")
|
||||
"""header, panel = layout.panel("components", default_closed=False)
|
||||
header.label(text="Components")
|
||||
if panel:
|
||||
panel.label(text="YOOO")"""
|
||||
|
||||
|
BIN
tools/blenvy/docs/blender_addon_add_scene.png
Normal file
After Width: | Height: | Size: 9.8 KiB |
BIN
tools/blenvy/docs/blender_addon_add_scene2.png
Normal file
After Width: | Height: | Size: 12 KiB |
BIN
tools/blenvy/docs/blender_addon_add_scene3.png
Normal file
After Width: | Height: | Size: 10 KiB |
BIN
tools/blenvy/docs/blender_addon_install.png
Normal file
After Width: | Height: | Size: 13 KiB |
BIN
tools/blenvy/docs/blender_addon_install2.png
Normal file
After Width: | Height: | Size: 16 KiB |
BIN
tools/blenvy/docs/blender_addon_install_zip.png
Normal file
After Width: | Height: | Size: 8.8 KiB |
BIN
tools/blenvy/docs/blender_addon_materials.png
Normal file
After Width: | Height: | Size: 5.3 KiB |
BIN
tools/blenvy/docs/blender_addon_materials2.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
tools/blenvy/docs/blender_addon_use.png
Normal file
After Width: | Height: | Size: 43 KiB |
BIN
tools/blenvy/docs/blender_addon_use2.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
tools/blenvy/docs/blender_addon_use3.png
Normal file
After Width: | Height: | Size: 17 KiB |
BIN
tools/blenvy/docs/blender_addon_use4.png
Normal file
After Width: | Height: | Size: 4.2 KiB |
BIN
tools/blenvy/docs/combine_override.png
Normal file
After Width: | Height: | Size: 2.7 KiB |
BIN
tools/blenvy/docs/exported_collections.png
Normal file
After Width: | Height: | Size: 9.0 KiB |
BIN
tools/blenvy/docs/exported_library_files.png
Normal file
After Width: | Height: | Size: 4.6 KiB |
BIN
tools/blenvy/docs/force_export.jpg
Normal file
After Width: | Height: | Size: 44 KiB |
BIN
tools/blenvy/docs/nested_blueprints.png
Normal file
After Width: | Height: | Size: 85 KiB |
BIN
tools/blenvy/docs/nested_blueprints2.png
Normal file
After Width: | Height: | Size: 29 KiB |
BIN
tools/blenvy/docs/nested_blueprints3.png
Normal file
After Width: | Height: | Size: 23 KiB |
1159
tools/blenvy/docs/options.svg
Normal file
After Width: | Height: | Size: 51 KiB |
881
tools/blenvy/docs/process.svg
Normal file
@ -0,0 +1,881 @@
(SVG markup omitted: an Inkscape-authored diagram of the export process. It shows an "Original" section with a "Main Scene (world/level)" containing Object A (unique) plus Object B1 / Object C0 / Object C1 instances, a "Library Scene" containing "Object B: collection/ blueprint" and "Object C: collection/ blueprint", and a "Transform Step" section below.)
id="tspan31"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583;stroke-opacity:1"
|
||||
x="124.65015"
|
||||
y="97.640045">Object A</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583;stroke-opacity:1"
|
||||
x="124.65015"
|
||||
y="101.76033"
|
||||
id="tspan73">(unique)</tspan></text>
|
||||
</g>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583"
|
||||
x="126.33414"
|
||||
y="14.70156"
|
||||
id="text32"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan32"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:'Ubuntu Bold';fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="126.33414"
|
||||
y="14.70156">Temporary Scene (world/level)</tspan></text>
|
||||
<g
|
||||
id="g33"
|
||||
transform="translate(-19.939658,-65.832448)"
|
||||
style="fill:#81ffc7;fill-opacity:1">
|
||||
<rect
|
||||
style="fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.505434;stroke-dasharray:none"
|
||||
id="rect32"
|
||||
width="52.860485"
|
||||
height="16.052649"
|
||||
x="157.90318"
|
||||
y="84.256462"
|
||||
ry="1.5573175" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="162.26337"
|
||||
y="90.613953"
|
||||
id="text33"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan33"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="162.26337"
|
||||
y="90.613953">Object B1 (Empty)</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="162.26337"
|
||||
y="94.734238"
|
||||
id="tspan44">+ blueprintName("ObjectB")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="fill:#81ffc7;fill-opacity:1;stroke-width:0.264583"
|
||||
x="162.26337"
|
||||
y="98.854515"
|
||||
id="tspan47"><tspan
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none"
|
||||
id="tspan78">+ spawnHere</tspan> </tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g34"
|
||||
transform="translate(-15.528392,-70.796885)">
|
||||
<rect
|
||||
style="fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.528181;stroke-dasharray:none"
|
||||
id="rect33"
|
||||
width="51.925056"
|
||||
height="17.845793"
|
||||
x="153.81953"
|
||||
y="107.17587"
|
||||
ry="1.7312757" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="158.16835"
|
||||
y="113.52198"
|
||||
id="text34"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan34"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="113.52198">Object C0 (Empty)</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="117.64227"
|
||||
id="tspan45">+ blueprintName("ObjectC")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="121.76254"
|
||||
id="tspan48"><tspan
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none"
|
||||
id="tspan79">+ spawnHere</tspan> </tspan></text>
|
||||
</g>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:2.11666, 0.264583;stroke-dashoffset:0;marker-end:url(#ArrowWide)"
|
||||
d="M 226.2693,25.28543 H 183.09695"
|
||||
id="path34" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:2.11666, 0.264583;stroke-dashoffset:0;marker-end:url(#ArrowWide)"
|
||||
d="M 226.2693,42.010871 H 183.09695"
|
||||
id="path35" />
|
||||
<g
|
||||
id="g35"
|
||||
transform="translate(-15.952034,-50.670754)">
|
||||
<rect
|
||||
style="fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.526025;stroke-dasharray:none"
|
||||
id="rect35"
|
||||
width="52.478481"
|
||||
height="17.513748"
|
||||
x="153.81845"
|
||||
y="107.17479"
|
||||
ry="1.6990631" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="158.16835"
|
||||
y="113.52198"
|
||||
id="text35"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan35"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="113.52198">Object C1 (Empty)</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="117.64227"
|
||||
id="tspan46">+ blueprintName("ObjectC")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="121.76254"
|
||||
id="tspan49"><tspan
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none"
|
||||
id="tspan80">+ spawnHere</tspan> </tspan></text>
|
||||
</g>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:2.11666, 0.264583;stroke-dashoffset:0;marker-end:url(#ArrowWide)"
|
||||
d="m 226.2693,42.010871 -42.64565,15.18405"
|
||||
id="path36"
|
||||
sodipodi:nodetypes="cc" />
|
||||
</g>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="124.89855"
|
||||
y="112.52866"
|
||||
id="text36"><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:'Ubuntu Bold';fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="124.89855"
|
||||
y="112.52866"
|
||||
id="tspan39">Determine used Collections to export</tspan></text>
|
||||
<rect
|
||||
style="fill:#ff9342;fill-opacity:1;stroke:#000000;stroke-width:0.32101;stroke-dasharray:none"
|
||||
id="rect36"
|
||||
width="56.190514"
|
||||
height="15.891233"
|
||||
x="138.2263"
|
||||
y="116.5061"
|
||||
ry="1.5416578" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="142.59734"
|
||||
y="122.87442"
|
||||
id="text37"><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="122.87442"
|
||||
id="tspan40">Object B</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="126.84317"
|
||||
id="tspan41" /></text>
|
||||
<rect
|
||||
style="fill:#ff9342;fill-opacity:1;stroke:#000000;stroke-width:0.321404;stroke-dasharray:none"
|
||||
id="rect37"
|
||||
width="56.190121"
|
||||
height="15.930397"
|
||||
x="138.2265"
|
||||
y="134.55496"
|
||||
ry="1.5454572" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="142.59734"
|
||||
y="140.92308"
|
||||
id="text38"><tspan
|
||||
sodipodi:role="line"
|
||||
style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="140.92308"
|
||||
id="tspan42">Object C</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="144.89183"
|
||||
id="tspan43" /></text>
|
||||
<path
|
||||
style="fill:none;fill-opacity:1;stroke:#000000;stroke-width:1;stroke-dasharray:none;stroke-dashoffset:0;marker-end:url(#RoundedArrow)"
|
||||
d="m 108.69996,177.9456 v 9.30451"
|
||||
id="path49"
|
||||
sodipodi:nodetypes="cc" />
|
||||
<rect
|
||||
style="fill:none;stroke:#000000;stroke-width:0.336158;stroke-dasharray:none"
|
||||
id="rect49"
|
||||
width="201.28026"
|
||||
height="86.298004"
|
||||
x="4.9929576"
|
||||
y="193.0849"
|
||||
ry="3.0587275" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:#000000;stroke:none;stroke-width:0.264583;fill-opacity:1"
|
||||
x="106.60861"
|
||||
y="201.20203"
|
||||
id="text50"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan50"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:'Ubuntu Bold';fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="106.60861"
|
||||
y="201.20203">Result</tspan></text>
|
||||
<rect
|
||||
style="fill:none;stroke:#000000;stroke-width:0.272675;stroke-dasharray:none"
|
||||
id="rect50"
|
||||
width="82.256355"
|
||||
height="48.138226"
|
||||
x="115.07077"
|
||||
y="207.5529"
|
||||
ry="4.6700392" />
|
||||
<g
|
||||
id="g63"
|
||||
transform="translate(-90.659631,198.60285)">
|
||||
<g
|
||||
id="g52"
|
||||
transform="translate(-8.7898623,-68.268248)"
|
||||
style="fill:#efff81;fill-opacity:1">
|
||||
<rect
|
||||
style="fill:#efff81;fill-opacity:1;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
id="rect52"
|
||||
width="21.240173"
|
||||
height="10.947481"
|
||||
x="120.16953"
|
||||
y="92.749634"
|
||||
ry="1.0620492" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:#efff81;fill-opacity:1;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="124.65015"
|
||||
y="97.110878"
|
||||
id="text52"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan52"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="124.65015"
|
||||
y="97.110878">Entity A</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="124.65015"
|
||||
y="101.23116"
|
||||
id="tspan74">(unique)</tspan></text>
|
||||
</g>
|
||||
<rect
|
||||
style="fill:none;stroke:#000000;stroke-width:0.336001;stroke-dasharray:none"
|
||||
id="rect51"
|
||||
width="89.327034"
|
||||
height="67.307678"
|
||||
x="104.25188"
|
||||
y="9.3385649"
|
||||
ry="6.5297275" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:#0090ff;fill-opacity:1;stroke:#0090ff;stroke-width:0.264583;stroke-opacity:1"
|
||||
x="135.33"
|
||||
y="14.70156"
|
||||
id="text53"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan53"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:'Ubuntu Bold';fill:#0090ff;fill-opacity:1;stroke:none;stroke-width:0.264583;stroke-opacity:1"
|
||||
x="135.33"
|
||||
y="14.70156">world/level.gltf</tspan></text>
|
||||
<g
|
||||
id="g56"
|
||||
transform="translate(-19.939658,-65.832448)"
|
||||
style="fill:#81ffc7;fill-opacity:1">
|
||||
<rect
|
||||
style="fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.505434;stroke-dasharray:none"
|
||||
id="rect53"
|
||||
width="52.860485"
|
||||
height="16.052649"
|
||||
x="157.90318"
|
||||
y="84.256462"
|
||||
ry="1.5573175" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="162.26337"
|
||||
y="90.613953"
|
||||
id="text56"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan54"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="162.26337"
|
||||
y="90.613953">Entity B1</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="162.26337"
|
||||
y="94.734238"
|
||||
id="tspan55">+ blueprintName("ObjectB")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="162.26337"
|
||||
y="98.854515"
|
||||
id="tspan56">+ spawnHere </tspan></text>
|
||||
</g>
|
||||
<g
|
||||
id="g59"
|
||||
transform="translate(-15.528392,-70.796885)">
|
||||
<rect
|
||||
style="fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.528181;stroke-dasharray:none"
|
||||
id="rect56"
|
||||
width="51.925056"
|
||||
height="17.845793"
|
||||
x="153.81953"
|
||||
y="107.17587"
|
||||
ry="1.7312757" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="158.16835"
|
||||
y="113.52198"
|
||||
id="text59"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan57"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="113.52198">Entity C0</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="117.64227"
|
||||
id="tspan58">+ blueprintName("ObjectC")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="121.76254"
|
||||
id="tspan59">+ spawnHere </tspan></text>
|
||||
</g>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:2.11666, 0.264583;stroke-dashoffset:0;marker-end:url(#ArrowWide)"
|
||||
d="M 226.2693,25.28543 H 183.09695"
|
||||
id="path59" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:2.11666, 0.264583;stroke-dashoffset:0;marker-end:url(#ArrowWide)"
|
||||
d="M 226.2693,42.010871 H 183.09695"
|
||||
id="path60" />
|
||||
<g
|
||||
id="g62"
|
||||
transform="translate(-15.952034,-50.670754)">
|
||||
<rect
|
||||
style="fill:#81ffc7;fill-opacity:1;stroke:#000000;stroke-width:0.526025;stroke-dasharray:none"
|
||||
id="rect60"
|
||||
width="52.478481"
|
||||
height="17.513748"
|
||||
x="153.81845"
|
||||
y="107.17479"
|
||||
ry="1.6990631" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="158.16835"
|
||||
y="113.52198"
|
||||
id="text62"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan60"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="113.52198">Entity C1</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="117.64227"
|
||||
id="tspan61">+ blueprintName("ObjectC")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="158.16835"
|
||||
y="121.76254"
|
||||
id="tspan62">+ spawnHere </tspan></text>
|
||||
</g>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:2.11666, 0.264583;stroke-dashoffset:0;marker-end:url(#ArrowWide)"
|
||||
d="m 226.2693,42.010871 -42.64565,15.18405"
|
||||
id="path62"
|
||||
sodipodi:nodetypes="cc" />
|
||||
</g>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:#0090ff;fill-opacity:1;stroke:#0090ff;stroke-width:0.264583;stroke-opacity:1"
|
||||
x="117.87988"
|
||||
y="214.18282"
|
||||
id="text63"><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:'Ubuntu Bold';fill:#0090ff;fill-opacity:1;stroke:none;stroke-width:0.264583;stroke-opacity:1"
|
||||
x="117.87988"
|
||||
y="214.18282"
|
||||
id="tspan63">Library of gltf files (one per Collection/Blueprint)</tspan></text>
|
||||
<rect
|
||||
style="fill:#ff9342;fill-opacity:1;stroke:#000000;stroke-width:0.32101;stroke-dasharray:none"
|
||||
id="rect63"
|
||||
width="56.190514"
|
||||
height="15.891233"
|
||||
x="138.2263"
|
||||
y="218.15141"
|
||||
ry="1.5416578" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="142.59734"
|
||||
y="224.51973"
|
||||
id="text66"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan64"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="224.51973">Object B.gltf </tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="228.64001"
|
||||
id="tspan65"> + blueprintName("ObjectB")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="232.60876"
|
||||
id="tspan66" /></text>
|
||||
<rect
|
||||
style="fill:#ff9342;fill-opacity:1;stroke:#000000;stroke-width:0.321404;stroke-dasharray:none"
|
||||
id="rect66"
|
||||
width="56.190121"
|
||||
height="15.930397"
|
||||
x="138.2265"
|
||||
y="236.20027"
|
||||
ry="1.5454572" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="142.59734"
|
||||
y="242.56839"
|
||||
id="text69"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan67"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="242.56839">Object C:.gltf</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="246.68867"
|
||||
id="tspan68"> + blueprintName("ObjectC")</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="250.65742"
|
||||
id="tspan69" /></text>
|
||||
<g
|
||||
id="g70"
|
||||
transform="translate(0,11.775225)">
|
||||
<rect
|
||||
style="fill:#ff9342;fill-opacity:1;stroke:#000000;stroke-width:0.27923;stroke-dasharray:none"
|
||||
id="rect69"
|
||||
width="56.232292"
|
||||
height="12.014979"
|
||||
x="138.20541"
|
||||
y="46.766541"
|
||||
ry="1.1656103" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;fill:none;stroke:#000000;stroke-width:0.264583;stroke-dasharray:none"
|
||||
x="142.59734"
|
||||
y="52.097412"
|
||||
id="text70"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan70"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="52.097412">Object D: unused collection/ </tspan><tspan
|
||||
sodipodi:role="line"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Ubuntu;-inkscape-font-specification:Ubuntu;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583"
|
||||
x="142.59734"
|
||||
y="56.217693"
|
||||
id="tspan71">blueprint</tspan></text>
|
||||
</g>
|
||||
<path
|
||||
style="fill:none;fill-opacity:1;stroke:#000000;stroke-width:1;stroke-dasharray:none;stroke-dashoffset:0;marker-end:url(#RoundedArrow)"
|
||||
d="M 56.761407,72.285855 V 101.91653"
|
||||
id="path74"
|
||||
sodipodi:nodetypes="cc" />
|
||||
<path
|
||||
style="fill:none;fill-opacity:1;stroke:#000000;stroke-width:1;stroke-dasharray:none;stroke-dashoffset:0;marker-end:url(#RoundedArrow)"
|
||||
d="m 56.761407,173.65448 v 29.63068"
|
||||
id="path75"
|
||||
sodipodi:nodetypes="cc" />
|
||||
<path
|
||||
style="fill:none;fill-opacity:1;stroke:#000000;stroke-width:1;stroke-dasharray:none;stroke-dashoffset:0;marker-end:url(#RoundedArrow)"
|
||||
d="M 157.79469,72.285855 V 101.91653"
|
||||
id="path76"
|
||||
sodipodi:nodetypes="cc" />
|
||||
<path
|
||||
style="fill:none;fill-opacity:1;stroke:#000000;stroke-width:1;stroke-dasharray:none;stroke-dashoffset:0;marker-end:url(#RoundedArrow)"
|
||||
d="m 157.79469,173.40943 v 29.63068"
|
||||
id="path77"
|
||||
sodipodi:nodetypes="cc" />
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 38 KiB |
BIN
tools/blenvy/docs/purge_orphan1_data1.png
Normal file
After Width: | Height: | Size: 8.2 KiB |
BIN
tools/blenvy/docs/purge_orphan1_data2.png
Normal file
After Width: | Height: | Size: 3.9 KiB |
BIN
tools/blenvy/docs/purge_orphan1_data3.png
Normal file
After Width: | Height: | Size: 5.5 KiB |
BIN
tools/blenvy/docs/workflow_empties.jpg
Normal file
After Width: | Height: | Size: 35 KiB |
BIN
tools/blenvy/docs/workflow_original.jpg
Normal file
After Width: | Height: | Size: 41 KiB |
0
tools/blenvy/gltf_auto_export/__init__.py
Normal file
183
tools/blenvy/gltf_auto_export/auto_export/auto_export.py
Normal file
@ -0,0 +1,183 @@
import copy
import json
import os
from types import SimpleNamespace
import bpy
import traceback

from .preferences import AutoExportGltfAddonPreferences

from .get_blueprints_to_export import get_blueprints_to_export
from .get_levels_to_export import get_levels_to_export
from .get_standard_exporter_settings import get_standard_exporter_settings

from .export_main_scenes import export_main_scene
from .export_blueprints import export_blueprints

from ..helpers.helpers_scenes import (get_scenes, )
from ..helpers.helpers_blueprints import blueprints_scan

from ..modules.export_materials import cleanup_materials, export_materials
from ..modules.bevy_scene_components import remove_scene_components, upsert_scene_components


def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
    """This is the main 'central' function for all auto export."""
    # have the export parameters (not the auto export ones, just the gltf export ones) changed?
    # if yes (for example switching from glb to gltf, toggling compression or animations, etc.), we need to re-export everything
    print("changed_export_parameters", changed_export_parameters)
    try:
        # path to the current blend file
        file_path = bpy.data.filepath
        # get the folder it lives in
        blend_file_path = os.path.dirname(file_path)

        # get the preferences for our addon
        export_root_folder = getattr(addon_prefs, "export_root_folder")
        export_output_folder = getattr(addon_prefs, "export_output_folder")
        export_models_path = os.path.join(blend_file_path, export_output_folder)

        # should we use change detection or not
        export_change_detection = getattr(addon_prefs, "export_change_detection")
        export_scene_settings = getattr(addon_prefs, "export_scene_settings")

        do_export_blueprints = getattr(addon_prefs, "export_blueprints")
        export_materials_library = getattr(addon_prefs, "export_materials_library")
        print("export_materials_library", export_materials_library)

        # standard gltf export settings are stored differently
        standard_gltf_exporter_settings = get_standard_exporter_settings()
        gltf_extension = standard_gltf_exporter_settings.get("export_format", 'GLB')
        gltf_extension = '.glb' if gltf_extension == 'GLB' else '.gltf'

        # generate the actual complete output paths
        export_blueprints_path = os.path.join(blend_file_path, export_root_folder, getattr(addon_prefs, "export_blueprints_path"))
        export_levels_path = os.path.join(blend_file_path, export_root_folder, getattr(addon_prefs, "export_levels_path"))

        print("export_blueprints_path", export_blueprints_path)

        # here we do a bit of a workaround by creating an override # TODO: do this at the "UI" level
        print("collection_instances_combine_mode", addon_prefs.collection_instances_combine_mode)

        """if hasattr(addon_prefs, "__annotations__") :
            tmp = {}
            for k in AutoExportGltfAddonPreferences.__annotations__:
                item = AutoExportGltfAddonPreferences.__annotations__[k]
                #print("tutu",k, item.keywords.get('default', None) )
                default = item.keywords.get('default', None)
                tmp[k] = default

            for (k, v) in addon_prefs.properties.items():
                tmp[k] = v

            addon_prefs = SimpleNamespace(**tmp) #copy.deepcopy(addon_prefs)
            addon_prefs.__annotations__ = tmp"""
        addon_prefs.export_blueprints_path = export_blueprints_path
        addon_prefs.export_levels_path = export_levels_path
        addon_prefs.export_gltf_extension = gltf_extension
        addon_prefs.export_models_path = export_models_path

        [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)

        print("main scenes", main_scene_names, "library_scenes", library_scene_names)
        print("export_output_folder", export_output_folder)

        blueprints_data = blueprints_scan(level_scenes, library_scenes, addon_prefs)
        blueprints_per_scene = blueprints_data.blueprints_per_scenes
        internal_blueprints = [blueprint.name for blueprint in blueprints_data.internal_blueprints]
        external_blueprints = [blueprint.name for blueprint in blueprints_data.external_blueprints]

        if export_scene_settings:
            # inject / update scene components
            upsert_scene_components(level_scenes)
            # inject / update light shadow information
            for light in bpy.data.lights:
                enabled = 'true' if light.use_shadow else 'false'
                light['BlenderLightShadows'] = f"(enabled: {enabled}, buffer_bias: {light.shadow_buffer_bias})"

        # export
        if do_export_blueprints:
            print("EXPORTING")
            # get blueprints/collections infos
            (blueprints_to_export) = get_blueprints_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs)

            # get level/main scenes infos
            (main_scenes_to_export) = get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs)

            # since materials export adds components we need to call this before blueprints are exported
            # export materials & inject materials components into relevant objects
            if export_materials_library:
                export_materials(blueprints_data.blueprint_names, library_scenes, blend_file_path, addon_prefs)

            # update the list of tracked exports
            exports_total = len(blueprints_to_export) + len(main_scenes_to_export) + (1 if export_materials_library else 0)
            bpy.context.window_manager.auto_export_tracker.exports_total = exports_total
            bpy.context.window_manager.auto_export_tracker.exports_count = exports_total

            bpy.context.window_manager.exportedCollections.clear()
            for blueprint in blueprints_to_export:
                bla = bpy.context.window_manager.exportedCollections.add()
                bla.name = blueprint.name
            print("-------------------------------")
            #print("collections: all:", collections)
            #print("collections: not found on disk:", collections_not_on_disk)
            print("BLUEPRINTS: local/internal:", internal_blueprints)
            print("BLUEPRINTS: external:", external_blueprints)
            print("BLUEPRINTS: per_scene:", blueprints_per_scene)
            print("-------------------------------")
            print("BLUEPRINTS: to export:", [blueprint.name for blueprint in blueprints_to_export])
            print("-------------------------------")
            print("MAIN SCENES: to export:", main_scenes_to_export)
            print("-------------------------------")
            # backup current active scene
            old_current_scene = bpy.context.scene
            # backup current selections
            old_selections = bpy.context.selected_objects

            # first export any main/level/world scenes
            if len(main_scenes_to_export) > 0:
                print("export MAIN scenes")
                for scene_name in main_scenes_to_export:
                    print(" exporting scene:", scene_name)
                    export_main_scene(bpy.data.scenes[scene_name], blend_file_path, addon_prefs, blueprints_data)

            # now deal with blueprints/collections
            do_export_library_scene = not export_change_detection or changed_export_parameters or len(blueprints_to_export) > 0
            if do_export_library_scene:
                print("export LIBRARY")
                # we only want to go through the library scenes where our blueprints to export are present
                """for (scene_name, blueprints_to_export) in blueprints_per_scene.items():
                    print(" exporting blueprints from scene:", scene_name)
                    print(" blueprints to export", blueprints_to_export)"""
                export_blueprints(blueprints_to_export, blend_file_path, addon_prefs, blueprints_data)

            # reset current scene from backup
            bpy.context.window.scene = old_current_scene

            # reset selections
            for obj in old_selections:
                obj.select_set(True)
            if export_materials_library:
                cleanup_materials(blueprints_data.blueprint_names, library_scenes)

        else:
            for scene_name in main_scene_names:
                export_main_scene(bpy.data.scenes[scene_name], blend_file_path, addon_prefs, [])

    except Exception as error:
        print(traceback.format_exc())

        def error_message(self, context):
            self.layout.label(text="Failure during auto_export: Error: " + str(error))

        bpy.context.window_manager.popup_menu(error_message, title="Error", icon='ERROR')

    finally:
        # FIXME: error handling ? also redundant
        [main_scene_names, main_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)

        if export_scene_settings:
            # remove the scene components that were injected above
            remove_scene_components(main_scenes)
@ -0,0 +1,39 @@
import json
import bpy


def did_export_settings_change():
    """
    This should ONLY be run when actually doing exports (i.e. when calling the auto_export function),
    because we only care about the difference in settings between EXPORTS.
    """
    # compare both the auto export settings & the gltf settings
    previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else None
    previous_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings_previous"] if ".gltf_auto_export_gltf_settings_previous" in bpy.data.texts else None

    current_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None
    current_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else None

    # check if params have changed

    # if there were no settings before, this is new, we need to export
    changed = False
    if previous_auto_settings is None:
        print("previous settings missing, exporting")
        changed = True
    elif previous_gltf_settings is None:
        print("previous gltf settings missing, exporting")
        changed = True
    else:
        auto_settings_changed = sorted(json.loads(previous_auto_settings.as_string()).items()) != sorted(json.loads(current_auto_settings.as_string()).items()) if current_auto_settings is not None else False
        gltf_settings_changed = sorted(json.loads(previous_gltf_settings.as_string()).items()) != sorted(json.loads(current_gltf_settings.as_string()).items()) if current_gltf_settings is not None else False

        """print("auto settings previous", sorted(json.loads(previous_auto_settings.as_string()).items()))
        print("auto settings current", sorted(json.loads(current_auto_settings.as_string()).items()))
        print("auto_settings_changed", auto_settings_changed)"""

        """print("gltf settings previous", sorted(json.loads(previous_gltf_settings.as_string()).items()))
        print("gltf settings current", sorted(json.loads(current_gltf_settings.as_string()).items()))
        print("gltf_settings_changed", gltf_settings_changed)"""

        changed = auto_settings_changed or gltf_settings_changed

    return changed
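Both snapshots compared above are plain JSON stored in Blender text datablocks. Purely as an illustration (the helper name is hypothetical, and the real add-on may refresh these snapshots elsewhere, for example in its tracker), the "previous" datablocks could be rolled over right after an export so the next comparison has a fresh baseline:

import bpy

def snapshot_export_settings_after_export():
    # copy the current settings texts over the "previous" ones; assumption: called once per export
    for current_name, previous_name in [
        (".gltf_auto_export_settings", ".gltf_auto_export_settings_previous"),
        (".gltf_auto_export_gltf_settings", ".gltf_auto_export_gltf_settings_previous"),
    ]:
        current = bpy.data.texts.get(current_name)
        if current is None:
            continue
        previous = bpy.data.texts.get(previous_name)
        if previous is None:
            previous = bpy.data.texts.new(previous_name)
        previous.clear()
        previous.write(current.as_string())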
@ -0,0 +1,44 @@
import os
import bpy

from ..constants import TEMPSCENE_PREFIX
from ..helpers.generate_and_export import generate_and_export
from .export_gltf import (generate_gltf_export_preferences)
from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into


def export_blueprints(blueprints, blend_file_path, addon_prefs, blueprints_data):
    export_blueprints_path = getattr(addon_prefs, "export_blueprints_path")
    gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)

    try:
        # save the current active collection
        active_collection = bpy.context.view_layer.active_layer_collection
        export_materials_library = getattr(addon_prefs, "export_materials_library")

        for blueprint in blueprints:
            print("exporting collection", blueprint.name)
            gltf_output_path = os.path.join(export_blueprints_path, blueprint.name)
            export_settings = {**gltf_export_preferences, 'use_active_scene': True, 'use_active_collection': True, 'use_active_collection_with_nested': True}

            # if we are using the material library option, do not export materials, use placeholders instead
            if export_materials_library:
                export_settings['export_materials'] = 'PLACEHOLDER'

            collection = bpy.data.collections[blueprint.name]
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX+collection.name,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(collection, temp_collection, blueprints_data=blueprints_data, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=collection, temp_scene=temp_scene, **params)
            )

        # reset the active collection to the one we saved before
        bpy.context.view_layer.active_layer_collection = active_collection

    except Exception as error:
        print("failed to export collections to gltf: ", error)
        raise error
77
tools/blenvy/gltf_auto_export/auto_export/export_gltf.py
Normal file
@ -0,0 +1,77 @@
import json
import os
import bpy

from .get_standard_exporter_settings import get_standard_exporter_settings
from .preferences import (AutoExportGltfPreferenceNames)

def generate_gltf_export_preferences(addon_prefs):
    # default values
    gltf_export_preferences = dict(
        # export_format= 'GLB', #'GLB', 'GLTF_SEPARATE', 'GLTF_EMBEDDED'
        check_existing=False,

        use_selection=False,
        use_visible=True, # Export visible and hidden objects. See Object/Batch Export to skip.
        use_renderable=False,
        use_active_collection= False,
        use_active_collection_with_nested=False,
        use_active_scene = False,

        export_cameras=True,
        export_extras=True, # For custom exported properties.
        export_lights=True,

        #export_texcoords=True,
        #export_normals=True,
        # here add draco settings
        #export_draco_mesh_compression_enable = False,

        #export_tangents=False,
        #export_materials
        #export_colors=True,
        #export_attributes=True,
        #use_mesh_edges
        #use_mesh_vertices

        #export_yup=True,
        #export_skins=True,
        #export_morph=False,
        #export_apply=False,
        #export_animations=False,
        #export_optimize_animation_size=False
    )

    # any add-on preference that is not an auto-export-only setting overrides the defaults above
    for key in addon_prefs.__annotations__.keys():
        if str(key) not in AutoExportGltfPreferenceNames:
            #print("overriding setting", key, "value", getattr(addon_prefs,key))
            gltf_export_preferences[key] = getattr(addon_prefs, key)

    standard_gltf_exporter_settings = get_standard_exporter_settings()

    constant_keys = [
        'use_selection',
        'use_visible',
        'use_active_collection',
        'use_active_collection_with_nested',
        'use_active_scene',
        'export_cameras',
        'export_extras', # For custom exported properties.
        'export_lights',
    ]

    # a certain number of essential params should NEVER be overwritten, no matter the settings of the standard exporter
    for key in standard_gltf_exporter_settings.keys():
        if str(key) not in constant_keys:
            gltf_export_preferences[key] = standard_gltf_exporter_settings.get(key)
    return gltf_export_preferences


#https://docs.blender.org/api/current/bpy.ops.export_scene.html#bpy.ops.export_scene.gltf
def export_gltf(path, export_settings):
    settings = {**export_settings, "filepath": path}
    # print("export settings",settings)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    # the actual exporter call is currently disabled in this revision
    #bpy.ops.export_scene.gltf(**settings)
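A minimal usage sketch for the two helpers above; addon_prefs stands for the add-on preferences object used throughout this module, and the output path is only an example. Note that, as written in this revision, export_gltf only creates the output folder, since the bpy.ops.export_scene.gltf call is commented out.

# sketch: merge defaults, add-on prefs and the standard exporter settings, then export one file
settings = generate_gltf_export_preferences(addon_prefs)  # addon_prefs: assumed add-on preferences object
export_gltf("/path/to/assets/my_level", {**settings, "export_format": "GLB"})  # path and format are illustrative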
@ -0,0 +1,77 @@
import os
import bpy
from pathlib import Path

from ..constants import TEMPSCENE_PREFIX
from ..helpers.generate_and_export import generate_and_export
from .export_gltf import (generate_gltf_export_preferences, export_gltf)
from ..modules.bevy_dynamic import is_object_dynamic, is_object_static
from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into
from ..helpers.helpers_blueprints import inject_blueprints_list_into_main_scene, remove_blueprints_list_from_main_scene

def export_main_scene(scene, blend_file_path, addon_prefs, blueprints_data):
    gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
    export_root_folder = getattr(addon_prefs, "export_root_folder")
    export_output_folder = getattr(addon_prefs, "export_output_folder")
    export_levels_path = getattr(addon_prefs, "export_levels_path")

    export_blueprints = getattr(addon_prefs, "export_blueprints")
    export_separate_dynamic_and_static_objects = getattr(addon_prefs, "export_separate_dynamic_and_static_objects")

    export_settings = {**gltf_export_preferences,
                       'use_active_scene': True,
                       'use_active_collection': True,
                       'use_active_collection_with_nested': True,
                       'use_visible': False,
                       'use_renderable': False,
                       'export_apply': True
                       }

    if export_blueprints:
        gltf_output_path = os.path.join(export_levels_path, scene.name)

        inject_blueprints_list_into_main_scene(scene, blueprints_data, addon_prefs)
        return  # NOTE: early return; the generate_and_export paths below are currently unreachable in this revision
        if export_separate_dynamic_and_static_objects:
            #print("SPLIT STATIC AND DYNAMIC")
            # first export static objects
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, filter=is_object_static, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
            )

            # then export all dynamic objects
            gltf_output_path = os.path.join(export_levels_path, scene.name + "_dynamic")
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, filter=is_object_dynamic, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
            )

        else:
            #print("NO SPLIT")
            generate_and_export(
                addon_prefs,
                temp_scene_name=TEMPSCENE_PREFIX,
                export_settings=export_settings,
                gltf_output_path=gltf_output_path,
                tempScene_filler= lambda temp_collection: copy_hollowed_collection_into(scene.collection, temp_collection, blueprints_data=blueprints_data, addon_prefs=addon_prefs),
                tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
            )

    else:
        gltf_output_path = os.path.join(export_root_folder, export_output_folder, scene.name)
        print(" exporting gltf to", gltf_output_path, ".gltf/glb")
        export_gltf(gltf_output_path, export_settings)

    remove_blueprints_list_from_main_scene(scene)
@ -0,0 +1,60 @@
import bpy
import os
from ..helpers.helpers_scenes import (get_scenes, )
from ..helpers.helpers_blueprints import find_blueprints_not_on_disk

# TODO: this should also take the split/embed mode into account: if a nested collection changes AND embed is active, its container collection should also be exported
def get_blueprints_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
    export_change_detection = getattr(addon_prefs, "export_change_detection")
    export_gltf_extension = getattr(addon_prefs, "export_gltf_extension", ".glb")
    export_blueprints_path = getattr(addon_prefs, "export_blueprints_path", "")
    collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")

    [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
    internal_blueprints = blueprints_data.internal_blueprints
    blueprints_to_export = internal_blueprints # just for clarity

    # print("export_change_detection", export_change_detection, "changed_export_parameters", changed_export_parameters, "changes_per_scene", changes_per_scene)

    # if the export parameters have changed, bail out early:
    # we need to re-export everything anyway when the export parameters have been changed
    if export_change_detection and not changed_export_parameters:
        changed_blueprints = []

        # first check if all collections have already been exported before (if this is the first time the exporter is run
        # in your current Blender session for example)
        blueprints_not_on_disk = find_blueprints_not_on_disk(internal_blueprints, export_blueprints_path, export_gltf_extension)

        for scene in library_scenes:
            if scene.name in changes_per_scene:
                changed_objects = list(changes_per_scene[scene.name].keys())
                changed_blueprints = [blueprints_data.blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_data.blueprints_from_objects]
                # we only care about local blueprints/collections
                changed_local_blueprints = [blueprint for blueprint in changed_blueprints if blueprint.name in blueprints_data.blueprints_per_name.keys() and blueprint.local]
                # FIXME: double check this: why are we combining these two ?
                changed_blueprints += changed_local_blueprints

        blueprints_to_export = list(set(changed_blueprints + blueprints_not_on_disk))

    # filter out blueprints that are not marked & deal with the different combine modes
    # we check for blueprint & object specific overrides ...
    filtered_blueprints = []
    for blueprint in blueprints_to_export:
        if blueprint.marked:
            filtered_blueprints.append(blueprint)
        else:
            blueprint_instances = blueprints_data.internal_collection_instances.get(blueprint.name, [])
            # print("INSTANCES", blueprint_instances, blueprints_data.internal_collection_instances)
            # marked blueprints that have changed are always exported, regardless of whether they are in use (have instances) or not
            for blueprint_instance in blueprint_instances:
                combine_mode = blueprint_instance['_combine'] if '_combine' in blueprint_instance else collection_instances_combine_mode
                if combine_mode == "Split": # we only keep changed blueprints if the mode is set to Split for at least one instance (aka if ALL instances of a blueprint are merged, do not export ?)
                    filtered_blueprints.append(blueprint)

    blueprints_to_export = list(set(filtered_blueprints))

    # changed/all blueprints to export
    return (blueprints_to_export)
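For reference, the per-instance '_combine' override read above is an ordinary custom property on the collection-instance object. The snippet below only illustrates setting it from Python; the object name is an example:

import bpy

# force one specific collection instance to be kept as a separate (Split) blueprint,
# overriding the global collection_instances_combine_mode for that instance only
instance = bpy.data.objects["Object B1"]  # a collection instance; the name is illustrative
instance["_combine"] = "Split"            # other values handled in this module family: "Embed", "EmbedExternal"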
@ -0,0 +1,51 @@
import bpy
from ..helpers.helpers_blueprints import check_if_blueprint_on_disk
from ..helpers.helpers_scenes import (get_scenes, )

# IF collection_instances_combine_mode is not 'split', check for each scene if any object in changes_per_scene has an instance in the scene
def changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode):
    # Embed / EmbedExternal
    blueprints_from_objects = blueprints_data.blueprints_from_objects

    blueprint_instances_in_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene_name, None)
    if blueprint_instances_in_scene is not None:
        changed_objects = [object_name for change in changes_per_scene.values() for object_name in change.keys()]
        changed_blueprints = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
        changed_blueprints_with_instances_in_scene = [blueprint for blueprint in changed_blueprints if blueprint.name in blueprint_instances_in_scene.keys()]

        changed_blueprint_instances = [object for blueprint in changed_blueprints_with_instances_in_scene for object in blueprint_instances_in_scene[blueprint.name]]
        # print("changed_blueprint_instances", changed_blueprint_instances,)

        level_needs_export = False
        for blueprint_instance in changed_blueprint_instances:
            blueprint = blueprints_data.blueprint_name_from_instances[blueprint_instance]
            combine_mode = blueprint_instance['_combine'] if '_combine' in blueprint_instance else collection_instances_combine_mode
            #print("COMBINE MODE FOR OBJECT", combine_mode)
            if combine_mode == 'Embed':
                level_needs_export = True
                break
            elif combine_mode == 'EmbedExternal' and not blueprint.local:
                level_needs_export = True
                break
        # changes => list of changed objects (regardless of whether they have been changed in a main scene or in a library scene)
        # which of those objects are blueprint instances ?
        # we need a list of changed objects that are blueprint instances
        return level_needs_export
    return False


# this also takes the split/embed mode into account: if a collection instance changes AND embed is active, its container level/world should also be exported
def get_levels_to_export(changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
    export_change_detection = getattr(addon_prefs, "export_change_detection")
    export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
    export_levels_path = getattr(addon_prefs, "export_levels_path")
    collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")

    [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)

    # determine the list of main scenes to export
    # we have more relaxed rules to determine if the main scenes have changed: any change is ok (this allows easier handling of changes, render settings etc.)
    main_scenes_to_export = [scene_name for scene_name in main_scene_names if not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, collection_instances_combine_mode) or not check_if_blueprint_on_disk(scene_name, export_levels_path, export_gltf_extension)]

    return (main_scenes_to_export)
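The single list comprehension above packs several conditions together; purely as a readability aid, the same decision can be written as an explicit predicate. This is an equivalent sketch, not part of the add-on:

# equivalent, more verbose form of the main_scenes_to_export comprehension above
def level_needs_export(scene_name, changes_per_scene, changed_export_parameters, blueprints_data, addon_prefs):
    export_change_detection = getattr(addon_prefs, "export_change_detection")
    export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
    export_levels_path = getattr(addon_prefs, "export_levels_path")
    combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")

    return (
        not export_change_detection                     # change detection disabled: always export
        or changed_export_parameters                    # export settings changed: re-export everything
        or scene_name in changes_per_scene              # the level scene itself changed
        or changed_object_in_scene(scene_name, changes_per_scene, blueprints_data, combine_mode)  # an embedded instance changed
        or not check_if_blueprint_on_disk(scene_name, export_levels_path, export_gltf_extension)  # never exported yet
    )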
@ -0,0 +1,14 @@
import bpy
import json

def get_standard_exporter_settings():
    standard_gltf_exporter_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else None
    if standard_gltf_exporter_settings is not None:
        try:
            standard_gltf_exporter_settings = json.loads(standard_gltf_exporter_settings.as_string())
        except Exception:
            standard_gltf_exporter_settings = {}
    else:
        standard_gltf_exporter_settings = {}

    return standard_gltf_exporter_settings
22
tools/blenvy/gltf_auto_export/auto_export/internals.py
Normal file
@ -0,0 +1,22 @@
import bpy

class SceneLink(bpy.types.PropertyGroup):
    name: bpy.props.StringProperty(name="") # type: ignore
    scene: bpy.props.PointerProperty(type=bpy.types.Scene) # type: ignore

class SceneLinks(bpy.types.PropertyGroup):
    # NOTE: a plain assignment (unlike the annotations used elsewhere) is not registered as an RNA property by Blender 2.8+
    name = bpy.props.StringProperty(name="List of scenes to export", default="Unknown")
    items: bpy.props.CollectionProperty(type = SceneLink) # type: ignore

class CUSTOM_PG_sceneName(bpy.types.PropertyGroup):
    name: bpy.props.StringProperty() # type: ignore
    display: bpy.props.BoolProperty() # type: ignore

class CollectionToExport(bpy.types.PropertyGroup):
    name: bpy.props.StringProperty(name="") # type: ignore

class BlueprintsToExport(bpy.types.PropertyGroup):
    # NOTE: same remark as in SceneLinks regarding the plain assignment below
    name = bpy.props.StringProperty(name="List of collections to export", default="Unknown")
    items: bpy.props.CollectionProperty(type = CollectionToExport) # type: ignore
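These PropertyGroups are consumed through window-manager collections elsewhere in the add-on (auto_export.py, for example, fills bpy.context.window_manager.exportedCollections). The registration below only sketches the general wiring pattern under that assumption; it is not a copy of the add-on's actual registration code:

import bpy

# order matters: a PropertyGroup must be registered before it is used as the type of a CollectionProperty
classes = (SceneLink, SceneLinks, CUSTOM_PG_sceneName, CollectionToExport, BlueprintsToExport)

def register():
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.WindowManager.exportedCollections = bpy.props.CollectionProperty(type=CollectionToExport)

def unregister():
    del bpy.types.WindowManager.exportedCollections
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)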
468
tools/blenvy/gltf_auto_export/auto_export/operators.py
Normal file
@ -0,0 +1,468 @@
|
||||
import json
|
||||
import bpy
|
||||
from bpy.types import Operator
|
||||
from bpy_extras.io_utils import ExportHelper
|
||||
from bpy.props import (IntProperty, StringProperty, BoolProperty)
|
||||
|
||||
from ..ui.operators import OT_OpenFolderbrowser, draw_folder_browser
|
||||
|
||||
#from ..ui.main import GLTF_PT_auto_export_general, GLTF_PT_auto_export_main, GLTF_PT_auto_export_root
|
||||
|
||||
from .preferences import (AutoExportGltfAddonPreferences, AutoExportGltfPreferenceNames)
|
||||
from .auto_export import auto_export
|
||||
from ..helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_auto
|
||||
from ..helpers.serialize_scene import serialize_scene
|
||||
|
||||
def bubble_up_changes(object, changes_per_scene):
|
||||
if object.parent:
|
||||
changes_per_scene[object.parent.name] = bpy.data.objects[object.parent.name]
|
||||
bubble_up_changes(object.parent, changes_per_scene)
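# Note (added for clarity): bubble_up_changes walks up the parent chain recursively, so when a child object is detected as
# changed, all of its ancestors are also registered in changes_per_scene and therefore considered for re-export as well.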
|
||||
|
||||
|
||||
class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences):#, ExportHelper):
|
||||
"""auto export gltf"""
|
||||
#bl_idname = "object.xxx"
|
||||
bl_idname = "export_scenes.auto_gltf"
|
||||
bl_label = "Apply settings"
|
||||
bl_options = {'PRESET'} # we do not add UNDO otherwise it leads to an invisible operation that resets the state of the saved serialized scene, breaking compares for normal undo/redo operations
|
||||
# ExportHelper mixin class uses this
|
||||
#filename_ext = ''
|
||||
#filepath: bpy.props.StringProperty(subtype="FILE_PATH", default="") # type: ignore
|
||||
|
||||
#list of settings (other than purely gltf settings) whose change should trigger a re-generation of gltf files
|
||||
white_list = [
|
||||
'auto_export',
|
||||
'export_root_folder',
|
||||
'export_output_folder',
|
||||
'export_change_detection',
|
||||
'export_scene_settings',
|
||||
|
||||
'main_scene_names',
|
||||
'library_scene_names',
|
||||
|
||||
'export_blueprints',
|
||||
'export_blueprints_path',
|
||||
'export_marked_assets',
|
||||
'collection_instances_combine_mode',
|
||||
|
||||
'export_levels_path',
|
||||
'export_separate_dynamic_and_static_objects',
|
||||
|
||||
'export_materials_library',
|
||||
'export_materials_path',
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def register(cls):
|
||||
bpy.types.WindowManager.main_scene = bpy.props.PointerProperty(type=bpy.types.Scene, name="main scene", description="main_scene_picker", poll=cls.is_scene_ok)
|
||||
bpy.types.WindowManager.library_scene = bpy.props.PointerProperty(type=bpy.types.Scene, name="library scene", description="library_scene_picker", poll=cls.is_scene_ok)
|
||||
|
||||
bpy.types.WindowManager.main_scenes_list_index = IntProperty(name = "Index for main scenes list", default = 0)
|
||||
bpy.types.WindowManager.library_scenes_list_index = IntProperty(name = "Index for library scenes list", default = 0)
|
||||
|
||||
cls.main_scenes_index = 0
|
||||
cls.library_scenes_index = 0
|
||||
|
||||
@classmethod
|
||||
def unregister(cls):
|
||||
del bpy.types.WindowManager.main_scene
|
||||
del bpy.types.WindowManager.library_scene
|
||||
|
||||
del bpy.types.WindowManager.main_scenes_list_index
|
||||
del bpy.types.WindowManager.library_scenes_list_index
|
||||
|
||||
def is_scene_ok(self, scene):
|
||||
try:
|
||||
operator = bpy.context.space_data.active_operator
|
||||
return scene.name not in operator.main_scenes and scene.name not in operator.library_scenes
|
||||
except:
|
||||
return True
|
||||
|
||||
def format_settings(self):
|
||||
# find all props to save
|
||||
exceptional = [
|
||||
# options that don't start with 'export_'
|
||||
'collection_instances_combine_mode',
|
||||
]
|
||||
all_props = self.properties
|
||||
export_props = {
|
||||
x: getattr(self, x) for x in dir(all_props)
|
||||
if (x.startswith("export_") or x in exceptional) and all_props.get(x) is not None
|
||||
}
|
||||
# we inject all that we need, the above is not sufficient
|
||||
for (k, v) in self.properties.items():
|
||||
if k in self.white_list or k not in AutoExportGltfPreferenceNames:
|
||||
value = v
|
||||
# FIXME: really weird having to do this
|
||||
if k == "collection_instances_combine_mode":
|
||||
value = self.collection_instances_combine_mode
|
||||
if k == "export_materials":
|
||||
value = self.export_materials
|
||||
export_props[k] = value
|
||||
# we add main & library scene names to our preferences
|
||||
|
||||
export_props['main_scene_names'] = list(map(lambda scene_data: scene_data.name, getattr(self,"main_scenes")))
|
||||
export_props['library_scene_names'] = list(map(lambda scene_data: scene_data.name, getattr(self,"library_scenes")))
|
||||
return export_props
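# Note (added for clarity): the dict returned by format_settings is what save_settings() serializes (after being completed by
# generate_complete_preferences_dict_auto) into the hidden ".gltf_auto_export_settings" text block, which
# did_export_settings_change() later compares against its "_previous" counterpart.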
|
||||
|
||||
def save_settings(self, context):
|
||||
print("save settings")
|
||||
auto_export_settings = self.format_settings()
|
||||
self.properties['main_scene_names'] = auto_export_settings['main_scene_names']
|
||||
self.properties['library_scene_names'] = auto_export_settings['library_scene_names']
|
||||
|
||||
stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
|
||||
stored_settings.clear()
|
||||
|
||||
auto_export_settings = generate_complete_preferences_dict_auto(auto_export_settings)
|
||||
stored_settings.write(json.dumps(auto_export_settings))
|
||||
print("saved settings", auto_export_settings)
|
||||
#print("saving settings", bpy.data.texts[".gltf_auto_export_settings"].as_string(), "raw", json.dumps(export_props))
|
||||
|
||||
def load_settings(self, context):
|
||||
print("loading settings")
|
||||
settings = None
|
||||
try:
|
||||
settings = bpy.data.texts[".gltf_auto_export_settings"].as_string()
|
||||
settings = json.loads(settings)
|
||||
except: pass
|
||||
|
||||
self.will_save_settings = False
|
||||
if settings:
|
||||
#print("loading settings in invoke AutoExportGLTF", settings)
|
||||
try:
|
||||
for (k, v) in settings.items():
|
||||
#print("loading setting", k, v)
|
||||
setattr(self, k, v)
|
||||
self.will_save_settings = True
|
||||
|
||||
# Update filter if user saved settings
|
||||
if hasattr(self, 'export_format'):
|
||||
self.filter_glob = '*.glb' if self.export_format == 'GLB' else '*.gltf'
|
||||
|
||||
# inject scenes data
|
||||
if hasattr(self, 'main_scene_names'):
|
||||
main_scenes = self.main_scenes
|
||||
main_scenes.clear()
|
||||
for item_name in self.main_scene_names:
|
||||
item = main_scenes.add()
|
||||
item.name = item_name
|
||||
|
||||
if hasattr(self, 'library_scene_names'):
|
||||
library_scenes = self.library_scenes
|
||||
library_scenes.clear()
|
||||
for item_name in self.library_scene_names:
|
||||
item = library_scenes.add()
|
||||
item.name = item_name
|
||||
|
||||
except Exception as error:
|
||||
print("error", error)
|
||||
self.report({"ERROR"}, "Loading export settings failed. Removed corrupted settings")
|
||||
bpy.data.texts.remove(bpy.data.texts[".gltf_auto_export_settings"])
|
||||
else:
|
||||
self.will_save_settings = True
|
||||
|
||||
"""
|
||||
This should ONLY be run when actually doing exports/aka calling auto_export function, because we only care about the difference in settings between EXPORTS
|
||||
"""
|
||||
def did_export_settings_change(self):
|
||||
# compare both the auto export settings & the gltf settings
|
||||
previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else None
|
||||
previous_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings_previous"] if ".gltf_auto_export_gltf_settings_previous" in bpy.data.texts else None
|
||||
|
||||
current_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None
|
||||
current_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else None
|
||||
|
||||
#check if params have changed
|
||||
|
||||
# if there were no settings before, this is new, we need to export
|
||||
changed = False
|
||||
if previous_auto_settings == None:
|
||||
#print("previous settings missing, exporting")
|
||||
changed = True
|
||||
elif previous_gltf_settings == None:
|
||||
#print("previous gltf settings missing, exporting")
|
||||
previous_gltf_settings = bpy.data.texts.new(".gltf_auto_export_gltf_settings_previous")
|
||||
previous_gltf_settings.write(json.dumps({}))
|
||||
if current_gltf_settings == None:
|
||||
current_gltf_settings = bpy.data.texts.new(".gltf_auto_export_gltf_settings")
|
||||
current_gltf_settings.write(json.dumps({}))
|
||||
|
||||
changed = True
|
||||
|
||||
else:
|
||||
auto_settings_changed = sorted(json.loads(previous_auto_settings.as_string()).items()) != sorted(json.loads(current_auto_settings.as_string()).items()) if current_auto_settings != None else False
|
||||
gltf_settings_changed = sorted(json.loads(previous_gltf_settings.as_string()).items()) != sorted(json.loads(current_gltf_settings.as_string()).items()) if current_gltf_settings != None else False
|
||||
|
||||
"""print("auto settings previous", sorted(json.loads(previous_auto_settings.as_string()).items()))
|
||||
print("auto settings current", sorted(json.loads(current_auto_settings.as_string()).items()))
|
||||
print("auto_settings_changed", auto_settings_changed)
|
||||
|
||||
print("gltf settings previous", sorted(json.loads(previous_gltf_settings.as_string()).items()))
|
||||
print("gltf settings current", sorted(json.loads(current_gltf_settings.as_string()).items()))
|
||||
print("gltf_settings_changed", gltf_settings_changed)"""
|
||||
|
||||
changed = auto_settings_changed or gltf_settings_changed
|
||||
# now write the current settings to the "previous settings"
|
||||
if current_auto_settings != None:
|
||||
previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings_previous")
|
||||
previous_auto_settings.clear()
|
||||
previous_auto_settings.write(current_auto_settings.as_string()) # TODO : check if this is always valid
|
||||
|
||||
if current_gltf_settings != None:
|
||||
previous_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings_previous"] if ".gltf_auto_export_gltf_settings_previous" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings_previous")
|
||||
previous_gltf_settings.clear()
|
||||
previous_gltf_settings.write(current_gltf_settings.as_string())
|
||||
|
||||
return changed
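# Note (added for clarity): besides returning whether anything changed, the method above also rolls the current settings over
# into the "..._previous" text blocks, so it should only be called once per export cycle (see the docstring above: only when
# actually exporting).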
|
||||
|
||||
def did_objects_change(self):
|
||||
# sigh... you need to save & reset the frame otherwise it saves the values AT THE CURRENT FRAME WHICH CAN DIFFER ACROSS SCENES
|
||||
current_frames = [scene.frame_current for scene in bpy.data.scenes]
|
||||
for scene in bpy.data.scenes:
|
||||
scene.frame_set(0)
|
||||
|
||||
current_scene = bpy.context.window.scene
|
||||
bpy.context.window.scene = bpy.data.scenes[0]
|
||||
#serialize scene at frame 0
|
||||
"""with bpy.context.temp_override(scene=bpy.data.scenes[1]):
|
||||
bpy.context.scene.frame_set(0)"""
|
||||
current = serialize_scene()
|
||||
bpy.context.window.scene = current_scene
|
||||
|
||||
# reset previous frames
|
||||
for (index, scene) in enumerate(bpy.data.scenes):
|
||||
scene.frame_set(int(current_frames[index]))
|
||||
|
||||
previous_stored = bpy.data.texts[".TESTING"] if ".TESTING" in bpy.data.texts else None # bpy.data.texts.new(".TESTING")
|
||||
if previous_stored == None:
|
||||
previous_stored = bpy.data.texts.new(".TESTING")
|
||||
previous_stored.write(current)
|
||||
return {}
|
||||
previous = json.loads(previous_stored.as_string())
|
||||
current = json.loads(current)
|
||||
|
||||
changes_per_scene = {}
|
||||
# TODO : how do we deal with changed scene names ???
|
||||
for scene in current:
|
||||
# print('scene', scene)
|
||||
previous_object_names = list(previous[scene].keys())
|
||||
current_object_names = list(current[scene].keys())
|
||||
#print("previous_object_names", len(previous_object_names), previous_object_names)
|
||||
#print("current_object_names", len(current_object_names), current_object_names)
|
||||
|
||||
"""if len(previous_object_names) > len(current_object_names):
|
||||
print("removed")
|
||||
if len(current_object_names) > len(previous_object_names):
|
||||
print("added")"""
|
||||
added = list(set(current_object_names) - set(previous_object_names))
|
||||
removed = list(set(previous_object_names) - set(current_object_names))
|
||||
"""print("removed", removed)
|
||||
print("added",added)"""
|
||||
for obj in added:
|
||||
if not scene in changes_per_scene:
|
||||
changes_per_scene[scene] = {}
|
||||
changes_per_scene[scene][obj] = bpy.data.objects[obj]
|
||||
# TODO: how do we deal with this, as we obviously do not have data for removed objects ?
|
||||
for obj in removed:
|
||||
if not scene in changes_per_scene:
|
||||
changes_per_scene[scene] = {}
|
||||
changes_per_scene[scene][obj] = None # bpy.data.objects[obj]
|
||||
|
||||
for object_name in list(current[scene].keys()): # todo : exclude directly added/removed objects
|
||||
#print("ob", object_name)
|
||||
if object_name in previous[scene]:
|
||||
# print("object", object_name,"in previous scene, comparing")
|
||||
current_obj = current[scene][object_name]
|
||||
prev_obj = previous[scene][object_name]
|
||||
same = str(current_obj) == str(prev_obj)
|
||||
|
||||
if "Camera" in object_name:
|
||||
pass#print(" current", current_obj, prev_obj)
|
||||
"""if "Fox" in object_name:
|
||||
print(" current", current_obj)
|
||||
print(" previou", prev_obj)
|
||||
print(" same?", same)"""
|
||||
#print("foo", same)
|
||||
if not same:
|
||||
""" print(" current", current_obj)
|
||||
print(" previou", prev_obj)"""
|
||||
if not scene in changes_per_scene:
|
||||
changes_per_scene[scene] = {}
|
||||
|
||||
changes_per_scene[scene][object_name] = bpy.data.objects[object_name]
|
||||
bubble_up_changes(bpy.data.objects[object_name], changes_per_scene[scene])
|
||||
# now bubble up for instances & parents
|
||||
previous_stored.clear()
|
||||
previous_stored.write(json.dumps(current))
|
||||
|
||||
print("changes per scene alternative", changes_per_scene)
|
||||
return changes_per_scene
|
||||
|
||||
|
||||
def execute(self, context):
|
||||
bpy.context.window_manager.auto_export_tracker.disable_change_detection()
|
||||
if self.direct_mode:
|
||||
self.load_settings(context)
|
||||
if self.will_save_settings:
|
||||
self.save_settings(context)
|
||||
#print("self", self.auto_export)
|
||||
if self.auto_export: # only do the actual exporting if auto export is actually enabled
|
||||
#changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
|
||||
|
||||
#& do the export
|
||||
if self.direct_mode: #Do not auto export when applying settings in the menu, do it on save only
|
||||
# determine changed objects
|
||||
changes_per_scene = self.did_objects_change()
|
||||
# determine changed parameters
|
||||
params_changed = self.did_export_settings_change()
|
||||
auto_export(changes_per_scene, params_changed, self)
|
||||
# cleanup
|
||||
# reset the list of changes in the tracker
|
||||
bpy.context.window_manager.auto_export_tracker.clear_changes()
|
||||
print("AUTO EXPORT DONE")
|
||||
bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=0.1)
|
||||
else:
|
||||
print("auto export disabled, skipping")
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
#print("invoke")
|
||||
bpy.context.window_manager.auto_export_tracker.disable_change_detection()
|
||||
self.load_settings(context)
|
||||
wm = context.window_manager
|
||||
#wm.fileselect_add(self)
|
||||
return context.window_manager.invoke_props_dialog(self, title="Auto export", width=640)
|
||||
|
||||
"""def modal(self, context, event):
|
||||
|
||||
if event.type == 'SPACE':
|
||||
wm = context.window_manager
|
||||
wm.invoke_popup(self)
|
||||
#wm.invoke_props_dialog(self)
|
||||
|
||||
if event.type in {'ESC'}:
|
||||
return {'CANCELLED'}
|
||||
|
||||
return {'RUNNING_MODAL'}"""
|
||||
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
operator = self
|
||||
|
||||
controls_enabled = self.auto_export
|
||||
|
||||
layout.prop(self, "auto_export")
|
||||
layout.separator()
|
||||
|
||||
toggle_icon = "TRIA_DOWN" if self.show_general_settings else "TRIA_RIGHT"
|
||||
layout.prop(self, "show_general_settings", text="General", icon=toggle_icon)
|
||||
if self.show_general_settings:
|
||||
section = layout.box()
|
||||
section.enabled = controls_enabled
|
||||
|
||||
draw_folder_browser(section, "Export root folder", self.export_root_folder, "export_root_folder")
|
||||
row = section.row()
|
||||
draw_folder_browser(row, "Assets Folder (non blueprints mode only)", self.export_root_folder, "export_output_folder")
|
||||
row.enabled = not self.export_blueprints
|
||||
section.prop(operator, "export_blueprints")
|
||||
section.prop(operator, "export_scene_settings")
|
||||
|
||||
"""header, panel = layout.panel("my_panel_id", default_closed=False)
|
||||
header.label(text="Hello World")
|
||||
if panel:
|
||||
panel.label(text="Success")"""
|
||||
|
||||
toggle_icon = "TRIA_DOWN" if self.show_change_detection_settings else "TRIA_RIGHT"
|
||||
layout.prop(operator, "show_change_detection_settings", text="Change Detection", icon=toggle_icon)
|
||||
if self.show_change_detection_settings:
|
||||
section = layout.box()
|
||||
section.enabled = controls_enabled
|
||||
section.prop(operator, "export_change_detection", text="Use change detection")
|
||||
|
||||
# main/level scenes
|
||||
toggle_icon = "TRIA_DOWN" if self.show_scene_settings else "TRIA_RIGHT"
|
||||
layout.prop(operator, "show_scene_settings", text="Scenes", icon=toggle_icon)
|
||||
if self.show_scene_settings:
|
||||
section = layout.box()
|
||||
section.enabled = controls_enabled
|
||||
rows = 2
|
||||
row = section.row()
|
||||
row.label(text="main scenes")
|
||||
row.prop(context.window_manager, "main_scene", text='')
|
||||
|
||||
row = section.row()
|
||||
row.template_list("SCENE_UL_GLTF_auto_export", "level scenes", operator, "main_scenes", operator, "main_scenes_index", rows=rows)
|
||||
|
||||
col = row.column(align=True)
|
||||
sub_row = col.row()
|
||||
add_operator = sub_row.operator("scene_list.list_action", icon='ADD', text="")
|
||||
add_operator.action = 'ADD'
|
||||
add_operator.scene_type = 'level'
|
||||
#add_operator.operator = operator
|
||||
sub_row.enabled = context.window_manager.main_scene is not None
|
||||
|
||||
sub_row = col.row()
|
||||
remove_operator = sub_row.operator("scene_list.list_action", icon='REMOVE', text="")
|
||||
remove_operator.action = 'REMOVE'
|
||||
remove_operator.scene_type = 'level'
|
||||
col.separator()
|
||||
|
||||
# library scenes
|
||||
row = section.row()
|
||||
row.label(text="library scenes")
|
||||
row.prop(context.window_manager, "library_scene", text='')
|
||||
|
||||
row = section.row()
|
||||
row.template_list("SCENE_UL_GLTF_auto_export", "library scenes", operator, "library_scenes", operator, "library_scenes_index", rows=rows)
|
||||
|
||||
col = row.column(align=True)
|
||||
sub_row = col.row()
|
||||
add_operator = sub_row.operator("scene_list.list_action", icon='ADD', text="")
|
||||
add_operator.action = 'ADD'
|
||||
add_operator.scene_type = 'library'
|
||||
sub_row.enabled = context.window_manager.library_scene is not None
|
||||
|
||||
|
||||
sub_row = col.row()
|
||||
remove_operator = sub_row.operator("scene_list.list_action", icon='REMOVE', text="")
|
||||
remove_operator.action = 'REMOVE'
|
||||
remove_operator.scene_type = 'library'
|
||||
col.separator()
|
||||
|
||||
toggle_icon = "TRIA_DOWN" if self.show_blueprint_settings else "TRIA_RIGHT"
|
||||
layout.prop(operator, "show_blueprint_settings", text="Blueprints", icon=toggle_icon)
|
||||
if self.show_blueprint_settings:
|
||||
section = layout.box()
|
||||
section.enabled = controls_enabled
|
||||
section = section.box()
|
||||
section.enabled = controls_enabled and self.export_blueprints
|
||||
|
||||
# collections/blueprints
|
||||
draw_folder_browser(section, "Blueprints folder", self.export_root_folder, "export_blueprints_path")
|
||||
#section.prop(operator, "export_blueprints_path")
|
||||
section.prop(operator, "collection_instances_combine_mode")
|
||||
section.prop(operator, "export_marked_assets")
|
||||
section.separator()
|
||||
|
||||
draw_folder_browser(section, "Levels folder", self.export_root_folder, "export_levels_path")
|
||||
#section.prop(operator, "export_levels_path")
|
||||
|
||||
section.prop(operator, "export_separate_dynamic_and_static_objects")
|
||||
section.separator()
|
||||
|
||||
# materials
|
||||
section.prop(operator, "export_materials_library")
|
||||
section = section.box()
|
||||
section.enabled = controls_enabled and self.export_materials_library
|
||||
draw_folder_browser(section, 'Materials folder', self.export_root_folder, "export_materials_path")
|
||||
#section.prop(operator, "export_materials_path")
|
||||
|
||||
|
||||
def cancel(self, context):
|
||||
print("cancel")
|
||||
#bpy.context.window_manager.auto_export_tracker.enable_change_detection()
|
||||
bpy.app.timers.register(bpy.context.window_manager.auto_export_tracker.enable_change_detection, first_interval=1)
|
||||
|
208
tools/blenvy/gltf_auto_export/auto_export/preferences.py
Normal file
@ -0,0 +1,208 @@
|
||||
|
||||
import os
|
||||
from bpy.types import AddonPreferences
|
||||
from bpy.props import (BoolProperty,
|
||||
IntProperty,
|
||||
StringProperty,
|
||||
EnumProperty,
|
||||
CollectionProperty
|
||||
)
|
||||
|
||||
from .internals import (CUSTOM_PG_sceneName)
|
||||
|
||||
AutoExportGltfPreferenceNames = [
|
||||
'will_save_settings',
|
||||
'direct_mode', # specific to the main auto_export operator
|
||||
|
||||
'show_general_settings',
|
||||
'auto_export',
|
||||
'export_root_folder',
|
||||
'export_output_folder',
|
||||
'export_scene_settings',
|
||||
|
||||
'show_change_detection_settings',
|
||||
'export_change_detection',
|
||||
|
||||
'show_scene_settings',
|
||||
'main_scenes',
|
||||
'library_scenes',
|
||||
'main_scenes_index',
|
||||
'library_scenes_index',
|
||||
'main_scene_names',
|
||||
'library_scene_names',
|
||||
|
||||
'show_blueprint_settings',
|
||||
'export_blueprints',
|
||||
'export_blueprints_path',
|
||||
'export_marked_assets',
|
||||
'collection_instances_combine_mode',
|
||||
|
||||
'export_levels_path',
|
||||
'export_separate_dynamic_and_static_objects',
|
||||
|
||||
'export_materials_library',
|
||||
'export_materials_path',
|
||||
]
|
||||
|
||||
def on_export_output_folder_updated(self, context):
|
||||
#self.export_root_folder = os.path.relpath(self.export_root_folder)
|
||||
#self.export_output_folder = os.path.join(self.export_root_folder, self.export_output_folder)
|
||||
print("on_foo_updated", self.export_root_folder, self.export_output_folder)
|
||||
|
||||
class AutoExportGltfAddonPreferences(AddonPreferences):
|
||||
# this must match the add-on name, use '__package__'
|
||||
# when defining this in a submodule of a python package.
|
||||
bl_idname = __package__
|
||||
bl_options = {'PRESET'}
|
||||
|
||||
#### these are for the operator
|
||||
will_save_settings: BoolProperty(
|
||||
name='Remember Export Settings',
|
||||
description='Store glTF export settings in the Blender project',
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
# use when operator is called directly, works a bit differently than inside the ui
|
||||
direct_mode: BoolProperty(
|
||||
default=False
|
||||
) # type: ignore
|
||||
|
||||
|
||||
auto_export: BoolProperty(
|
||||
name='Auto export',
|
||||
description='Automatically export to gltf on save',
|
||||
default=False
|
||||
) # type: ignore
|
||||
|
||||
#### general
|
||||
# for UI only, workaround for lacking panels
|
||||
show_general_settings: BoolProperty(
|
||||
name="show_general settings",
|
||||
description="show/hide general settings (UI only: has no impact on exports)",
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
export_root_folder: StringProperty(
|
||||
name = "Project Root Path",
|
||||
description="The root folder of your (Bevy) project (not assets!)",
|
||||
# subtype='DIR_PATH',
|
||||
default='../'
|
||||
#update=on_export_output_folder_updated) # type: ignore
|
||||
)
|
||||
|
||||
export_output_folder: StringProperty(
|
||||
name='Export folder',
|
||||
description='The root folder for all exports (relative to the root folder/path). Defaults to "assets"',
|
||||
default='./assets',
|
||||
#subtype='DIR_PATH',
|
||||
options={'HIDDEN'}
|
||||
# update=on_export_output_folder_updated
|
||||
) # type: ignore
|
||||
|
||||
# for UI only, workaround for lacking panels
|
||||
show_change_detection_settings: BoolProperty(
|
||||
name="show change detection settings",
|
||||
description="show/hide change detection settings (UI only: has no impact on exports)",
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
export_change_detection: BoolProperty(
|
||||
name='Change detection',
|
||||
description='Use change detection to determine what (if anything) should be exported',
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
# scenes
|
||||
# for UI only, workaround for lacking panels
|
||||
show_scene_settings: BoolProperty(
|
||||
name="show scene settings",
|
||||
description="show/hide scene settings (UI only: has no impact on exports)",
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
# scene components
|
||||
export_scene_settings: BoolProperty(
|
||||
name='Export scene settings',
|
||||
description='Export scene settings, i.e. AmbientLighting, Bloom, AO, etc.',
|
||||
default=False
|
||||
) # type: ignore
|
||||
|
||||
# blueprint settings
|
||||
# for UI only, workaround for lacking panels
|
||||
show_blueprint_settings: BoolProperty(
|
||||
name="show blueprint settings",
|
||||
description="show/hide blueprint settings (UI only: has no impact on exports)",
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
export_blueprints: BoolProperty(
|
||||
name='Export Blueprints',
|
||||
description='Replaces collection instances with an Empty with a BlueprintName custom property, and enables a lot more features!',
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
export_blueprints_path: StringProperty(
|
||||
name='Blueprints path',
|
||||
description='path to export the blueprints to (relative to the export folder)',
|
||||
default='assets/blueprints',
|
||||
#subtype='DIR_PATH'
|
||||
) # type: ignore
|
||||
|
||||
export_levels_path: StringProperty(
|
||||
name='Levels path',
|
||||
description='path to export the levels (main scenes) to (relative to the export folder)',
|
||||
default='assets/levels',
|
||||
#subtype='DIR_PATH'
|
||||
) # type: ignore
|
||||
|
||||
export_separate_dynamic_and_static_objects: BoolProperty(
|
||||
name="Export levels' dynamic and static objects seperatly",
|
||||
description="""For MAIN scenes only (aka levels), toggle this to generate 2 files per level:
|
||||
- one with all dynamic data: collection or instances marked as dynamic/ saveable
|
||||
- one with all static data: anything else that is NOT marked as dynamic""",
|
||||
default=False
|
||||
) # type: ignore
|
||||
|
||||
export_materials_library: BoolProperty(
|
||||
name='Export materials library',
|
||||
description='remove materials from blueprints and use the material library instead',
|
||||
default=False
|
||||
) # type: ignore
|
||||
|
||||
export_materials_path: StringProperty(
|
||||
name='Materials path',
|
||||
description='path to export the materials libraries to (relative to the export folder)',
|
||||
default='assets/materials',
|
||||
#subtype='DIR_PATH'
|
||||
) # type: ignore
|
||||
|
||||
""" combine mode can be
|
||||
- 'Split' (default): replace with an empty, creating links to sub blueprints
|
||||
- 'Embed' : treat it as an embeded object and do not replace it with an empty
|
||||
- 'EmbedExternal': embed any instance of a non local collection (ie external assets)
|
||||
|
||||
- 'Inject': inject components from sub collection instances into the curent object => this is now a seperate custom property that you can apply to a collecion instance
|
||||
"""
|
||||
collection_instances_combine_mode : EnumProperty(
|
||||
name='Collection instances',
|
||||
items=(
|
||||
('Split', 'Split', 'replace collection instances with an empty + blueprint, creating links to sub blueprints (Default, Recommended)'),
('Embed', 'Embed', 'treat collection instances as embedded objects and do not replace them with an empty'),
('EmbedExternal', 'EmbedExternal', 'treat instances of external (not specified in the current blend file) collections (aka assets etc) as embedded objects and do not replace them with empties'),
#('Inject', 'Inject', 'inject components from sub collection instances into the current object')
),
|
||||
default='Split'
|
||||
) # type: ignore
|
||||
|
||||
export_marked_assets: BoolProperty(
|
||||
name='Auto export marked assets',
|
||||
description='Collections that have been marked as assets will be systematically exported, even if not in use in another scene',
|
||||
default=True
|
||||
) # type: ignore
|
||||
|
||||
main_scenes: CollectionProperty(name="main scenes", type=CUSTOM_PG_sceneName) # type: ignore
|
||||
main_scenes_index: IntProperty(name = "Index for main scenes list", default = 0) # type: ignore
|
||||
|
||||
library_scenes: CollectionProperty(name="library scenes", type=CUSTOM_PG_sceneName) # type: ignore
|
||||
library_scenes_index: IntProperty(name = "Index for library scenes list", default = 0) # type: ignore
|
195
tools/blenvy/gltf_auto_export/auto_export/tracker.py
Normal file
@ -0,0 +1,195 @@
|
||||
import json
|
||||
import bpy
|
||||
|
||||
from bpy.types import (PropertyGroup)
|
||||
from bpy.props import (PointerProperty, IntProperty, StringProperty)
|
||||
|
||||
from .get_blueprints_to_export import get_blueprints_to_export
|
||||
|
||||
from ..constants import TEMPSCENE_PREFIX
|
||||
from .internals import BlueprintsToExport
|
||||
from ..helpers.helpers_scenes import (get_scenes)
|
||||
from .preferences import AutoExportGltfAddonPreferences
|
||||
|
||||
class AutoExportTracker(PropertyGroup):
|
||||
|
||||
changed_objects_per_scene = {}
|
||||
change_detection_enabled = True
|
||||
export_params_changed = False
|
||||
|
||||
gltf_settings_backup = None
|
||||
last_operator = None
|
||||
dummy_file_path = ""
|
||||
|
||||
exports_total : IntProperty(
|
||||
name='exports_total',
|
||||
description='Number of total exports',
|
||||
default=0
|
||||
) # type: ignore
|
||||
|
||||
exports_count : IntProperty(
|
||||
name='exports_count',
|
||||
description='Number of exports in progress',
|
||||
default=0
|
||||
) # type: ignore
|
||||
|
||||
@classmethod
|
||||
def register(cls):
|
||||
bpy.types.WindowManager.auto_export_tracker = PointerProperty(type=AutoExportTracker)
|
||||
# register list of exportable collections
|
||||
bpy.types.WindowManager.exportedCollections = bpy.props.CollectionProperty(type=BlueprintsToExport)
|
||||
|
||||
# setup handlers for updates & saving
|
||||
#bpy.app.handlers.save_post.append(cls.save_handler)
|
||||
#bpy.app.handlers.depsgraph_update_post.append(cls.deps_update_handler)
|
||||
|
||||
@classmethod
|
||||
def unregister(cls):
|
||||
# remove handlers & co
|
||||
"""try:
|
||||
bpy.app.handlers.depsgraph_update_post.remove(cls.deps_update_handler)
|
||||
except:pass
|
||||
try:
|
||||
bpy.app.handlers.save_post.remove(cls.save_handler)
|
||||
except:pass"""
|
||||
del bpy.types.WindowManager.auto_export_tracker
|
||||
del bpy.types.WindowManager.exportedCollections
|
||||
|
||||
@classmethod
|
||||
def save_handler(cls, scene, depsgraph):
|
||||
print("-------------")
|
||||
print("saved", bpy.data.filepath)
|
||||
# auto_export(changes_per_scene, export_parameters_changed)
|
||||
bpy.ops.export_scenes.auto_gltf(direct_mode= True)
|
||||
|
||||
# (re)set a few things after exporting
|
||||
# reset whether the gltf export parameters were changed since the last save
|
||||
cls.export_params_changed = False
|
||||
# reset whether there have been changed objects since the last save
|
||||
cls.changed_objects_per_scene.clear()
|
||||
# all our logic is done, mark this as done
|
||||
|
||||
@classmethod
|
||||
def deps_post_update_handler(cls, scene, depsgraph):
|
||||
# print("change detection enabled", cls.change_detection_enabled)
|
||||
|
||||
"""ops = bpy.context.window_manager.operators
|
||||
print("last operators", ops)
|
||||
for op in ops:
|
||||
print("operator", op)"""
|
||||
active_operator = bpy.context.active_operator
|
||||
if active_operator:
|
||||
#print("Operator", active_operator.bl_label, active_operator.bl_idname)
|
||||
if active_operator.bl_idname == "EXPORT_SCENE_OT_gltf" and active_operator.gltf_export_id == "gltf_auto_export":
|
||||
# we backup any existing gltf export settings, if there were any
|
||||
scene = bpy.context.scene
|
||||
if "glTF2ExportSettings" in scene:
|
||||
existing_setting = scene["glTF2ExportSettings"]
|
||||
bpy.context.window_manager.gltf_settings_backup = json.dumps(dict(existing_setting))
|
||||
|
||||
# we force saving params
|
||||
active_operator.will_save_settings = True
|
||||
# we set the last operator here so we can clear the specific settings (yeah for overly complex logic)
|
||||
cls.last_operator = active_operator
|
||||
#print("active_operator", active_operator.has_active_exporter_extensions, active_operator.__annotations__.keys(), active_operator.filepath, active_operator.gltf_export_id)
|
||||
return
|
||||
|
||||
if active_operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf":
|
||||
# we force saving params
|
||||
active_operator.will_save_settings = True
|
||||
active_operator.auto_export = True
|
||||
# if we are using the operator, bail out for the rest
|
||||
print("setting stuff for auto_export")
|
||||
return
|
||||
|
||||
# only deal with changes if we are NOT in the midst of saving/exporting
|
||||
if cls.change_detection_enabled:
|
||||
# ignore anything going on with temporary scenes
|
||||
if not scene.name.startswith(TEMPSCENE_PREFIX):
|
||||
#print("depsgraph_update_post", scene.name)
|
||||
changed_scene = scene.name or ""
|
||||
#print("-------------")
|
||||
if not changed_scene in cls.changed_objects_per_scene:
|
||||
cls.changed_objects_per_scene[changed_scene] = {}
|
||||
# print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
|
||||
# depsgraph = bpy.context.evaluated_depsgraph_get()
|
||||
for obj in depsgraph.updates:
|
||||
#print("depsgraph update", obj)
|
||||
if isinstance(obj.id, bpy.types.Object):
|
||||
# get the actual object
|
||||
object = bpy.data.objects[obj.id.name]
|
||||
#print(" changed object", obj.id.name, "changes", obj, "evalutated", obj.id.is_evaluated, "transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
|
||||
if obj.is_updated_transform or obj.is_updated_geometry:
|
||||
cls.changed_objects_per_scene[scene.name][obj.id.name] = object
|
||||
|
||||
elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
|
||||
# print(" changed material", obj.id, "scene", scene.name,)
|
||||
material = bpy.data.materials[obj.id.name]
|
||||
#now find which objects are using the material
|
||||
for obj in bpy.data.objects:
|
||||
for slot in obj.material_slots:
|
||||
if slot.material == material:
|
||||
cls.changed_objects_per_scene[scene.name][obj.name] = obj
|
||||
#print("changed_objects_per_scene", cls.changed_objects_per_scene)
|
||||
"""for obj_name_original in cls.changed_objects_per_scene[scene_name]:
|
||||
if obj_name_original != ls.changed_objects_per_scene[scene_name][obj_name_original]"""
|
||||
items = 0
|
||||
for scene_name in cls.changed_objects_per_scene:
|
||||
items += len(cls.changed_objects_per_scene[scene_name].keys())
|
||||
if items == 0:
|
||||
cls.changed_objects_per_scene.clear()
|
||||
#print("changed_objects_per_scene", cls.changed_objects_per_scene)
|
||||
|
||||
# filter out invalid objects
|
||||
"""for scene_name in cls.changed_objects_per_scene.keys():
|
||||
bla = {}
|
||||
for object_name in cls.changed_objects_per_scene[scene.name]:
|
||||
object = cls.changed_objects_per_scene[scene.name][object_name]"""
|
||||
#print("sdfsd", object, object.valid)
|
||||
#if not cls.changed_objects_per_scene[scene.name][object_name].invalid:
|
||||
# bla[object_name] = cls.changed_objects_per_scene[scene.name][object_name]
|
||||
#cls.changed_objects_per_scene[scene.name]= bla
|
||||
#cls.changed_objects_per_scene[scene_name] = [o for o in cls.changed_objects_per_scene[scene_name] if not o.invalid]
|
||||
|
||||
# get a list of exportable collections for display
|
||||
# keep it simple, just use SimpleNamespace for compatibility with the rest of our code
|
||||
# TODO: debounce
|
||||
|
||||
def disable_change_detection(self):
|
||||
#print("disable change detection")
|
||||
self.change_detection_enabled = False
|
||||
self.__class__.change_detection_enabled = False
|
||||
return None
|
||||
|
||||
def enable_change_detection(self):
|
||||
#print("enable change detection")
|
||||
self.change_detection_enabled = True
|
||||
self.__class__.change_detection_enabled = True
|
||||
#print("bpy.context.window_manager.auto_export_tracker.change_detection_enabled", bpy.context.window_manager.auto_export_tracker.change_detection_enabled)
|
||||
return None
|
||||
|
||||
def clear_changes(self):
|
||||
self.changed_objects_per_scene.clear()
|
||||
self.__class__.changed_objects_per_scene.clear()
|
||||
|
||||
def export_finished(self):
|
||||
#print("export_finished")
|
||||
self.exports_count -= 1
|
||||
if self.exports_count == 0:
|
||||
print("preparing to reset change detection")
|
||||
bpy.app.timers.register(self.enable_change_detection, first_interval=0.1)
|
||||
#self.enable_change_detection()
|
||||
return None
|
||||
|
||||
|
||||
def get_auto_exporter_settings():
|
||||
auto_exporter_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None
|
||||
if auto_exporter_settings != None:
|
||||
try:
|
||||
auto_exporter_settings = json.loads(auto_exporter_settings.as_string())
|
||||
except:
|
||||
auto_exporter_settings = {}
|
||||
else:
|
||||
auto_exporter_settings = {}
|
||||
|
||||
return auto_exporter_settings
|
1
tools/blenvy/gltf_auto_export/constants.py
Normal file
@ -0,0 +1 @@
|
||||
TEMPSCENE_PREFIX = "__temp_scene"
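# Note (added for clarity): this prefix names the throw-away scenes created for exporting (see generate_and_export);
# the AutoExportTracker uses it to ignore depsgraph updates coming from those temporary scenes during change detection.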
|
0
tools/blenvy/gltf_auto_export/helpers/__init__.py
Normal file
58
tools/blenvy/gltf_auto_export/helpers/generate_and_export.py
Normal file
@ -0,0 +1,58 @@
|
||||
import bpy
|
||||
from ..auto_export.export_gltf import export_gltf
|
||||
from .helpers_collections import (set_active_collection)
|
||||
|
||||
"""
|
||||
generates a temporary scene, fills it with data, cleans up after itself
|
||||
* named using temp_scene_name
|
||||
* filled using the tempScene_filler
|
||||
* written on disk to gltf_output_path, with the gltf export parameters in export_settings
|
||||
* cleaned up using tempScene_cleaner
|
||||
|
||||
"""
|
||||
def generate_and_export(addon_prefs, export_settings, gltf_output_path, temp_scene_name="__temp_scene", tempScene_filler=None, tempScene_cleaner=None):
|
||||
|
||||
temp_scene = bpy.data.scenes.new(name=temp_scene_name)
|
||||
temp_root_collection = temp_scene.collection
|
||||
|
||||
# save active scene
|
||||
original_scene = bpy.context.window.scene
|
||||
# and selected collection
|
||||
original_collection = bpy.context.view_layer.active_layer_collection
|
||||
# and mode
|
||||
original_mode = bpy.context.active_object.mode if bpy.context.active_object != None else None
|
||||
# we change the mode to object mode, otherwise the gltf exporter is not happy
|
||||
if original_mode != None and original_mode != 'OBJECT':
|
||||
print("setting to object mode", original_mode)
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
# we set our active scene to be this one : this is needed otherwise the stand-in empties get generated in the wrong scene
|
||||
bpy.context.window.scene = temp_scene
|
||||
|
||||
area = [area for area in bpy.context.screen.areas if area.type == "VIEW_3D"][0]
|
||||
region = [region for region in area.regions if region.type == 'WINDOW'][0]
|
||||
with bpy.context.temp_override(scene=temp_scene, area=area, region=region):
|
||||
# detect scene mismatch
|
||||
scene_mismatch = bpy.context.scene.name != bpy.context.window.scene.name
|
||||
if scene_mismatch:
|
||||
raise Exception("Context scene mismatch, aborting", bpy.context.scene.name, bpy.context.window.scene.name)
|
||||
|
||||
set_active_collection(bpy.context.scene, temp_root_collection.name)
|
||||
# generate contents of temporary scene
|
||||
scene_filler_data = tempScene_filler(temp_root_collection)
|
||||
# export the temporary scene
|
||||
try:
|
||||
export_gltf(gltf_output_path, export_settings)
|
||||
except Exception as error:
|
||||
print("failed to export gltf !", error)
|
||||
raise error
|
||||
# restore everything
|
||||
tempScene_cleaner(temp_scene, scene_filler_data)
|
||||
|
||||
# reset active scene
|
||||
bpy.context.window.scene = original_scene
|
||||
# reset active collection
|
||||
bpy.context.view_layer.active_layer_collection = original_collection
|
||||
# reset mode
|
||||
if original_mode != None:
|
||||
bpy.ops.object.mode_set(mode=original_mode)
|
||||
|
@ -0,0 +1,47 @@
|
||||
|
||||
from ..auto_export.preferences import AutoExportGltfAddonPreferences
|
||||
from io_scene_gltf2 import (ExportGLTF2_Base)
|
||||
|
||||
# given the input (actual) gltf settings, filters out any invalid/useless params & params that are equal to defaults
|
||||
def generate_complete_preferences_dict_gltf(settings):
|
||||
complete_preferences = {}
|
||||
defaults = {}
|
||||
gltf_parameters_to_ignore = ["use_active_collection", "use_active_collection_with_nested", "use_active_scene", "use_selection", "will_save_settings", "gltf_export_id"]
|
||||
def filter_out(pair):
|
||||
key, value = pair
|
||||
if key in gltf_parameters_to_ignore:
|
||||
return False
|
||||
return True
|
||||
|
||||
for k in ExportGLTF2_Base.__annotations__: # we use parameters from the base class of the standard gltf exporter, that contains all relevant parameters
|
||||
item = ExportGLTF2_Base.__annotations__[k]
|
||||
#print("item", item)
|
||||
default = item.keywords.get('default', None)
|
||||
#complete_preferences[k] = default
|
||||
defaults[k] = default
|
||||
|
||||
for key in list(settings.keys()):
|
||||
if key in defaults and settings[key] != defaults[key]: # only write out values different from defaults
|
||||
complete_preferences[key] = settings[key]
|
||||
|
||||
complete_preferences = dict(filter(filter_out, dict(complete_preferences).items()))
|
||||
return complete_preferences
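# Illustrative example (assumption, not from the original code): with exporter defaults of
#   {'export_format': 'GLB', 'export_yup': True}
# an input of
#   {'export_format': 'GLTF_SEPARATE', 'export_yup': True, 'use_selection': False}
# is reduced to {'export_format': 'GLTF_SEPARATE'}: values equal to the defaults and the ignored
# selection-related parameters are filtered out before the settings are persisted.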
|
||||
|
||||
# given the input (actual) auto settings, filters out any invalid/useless params & params that are equal to defaults
|
||||
def generate_complete_preferences_dict_auto(settings):
|
||||
complete_preferences = {}
|
||||
defaults = {}
|
||||
|
||||
for k in AutoExportGltfAddonPreferences.__annotations__:
|
||||
item = AutoExportGltfAddonPreferences.__annotations__[k]
|
||||
default = item.keywords.get('default', None)
|
||||
#complete_preferences[k] = default
|
||||
defaults[k] = default
|
||||
|
||||
for key in list(settings.keys()):
|
||||
if key in defaults:
|
||||
if settings[key] != defaults[key]: # only write out values different from defaults
|
||||
complete_preferences[key] = settings[key]
|
||||
else:
|
||||
complete_preferences[key] = settings[key]
|
||||
return complete_preferences
|
400
tools/blenvy/gltf_auto_export/helpers/helpers_blueprints.py
Normal file
@ -0,0 +1,400 @@
|
||||
|
||||
import os
|
||||
from types import SimpleNamespace
|
||||
import bpy
|
||||
|
||||
class Blueprint:
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.local = True
|
||||
self.marked = False # If marked as asset or with auto_export flag, always export if changed
|
||||
self.scene = None # Not sure, could be useful for tracking
|
||||
|
||||
self.instances = []
|
||||
self.objects = []
|
||||
self.nested_blueprints = []
|
||||
|
||||
self.collection = None # should we just subclass ?
|
||||
|
||||
def __repr__(self):
|
||||
return f'Name: {self.name} Local: {self.local}, Scene: {self.scene}, Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'
|
||||
|
||||
def __str__(self):
|
||||
return f'Name: "{self.name}", Local: {self.local}, Scene: {self.scene}, Instances: {self.instances}, Objects: {self.objects}, nested_blueprints: {self.nested_blueprints}'
|
||||
|
||||
|
||||
def find_blueprints_not_on_disk(blueprints, folder_path, extension):
|
||||
not_found_blueprints = []
|
||||
for blueprint in blueprints:
|
||||
gltf_output_path = os.path.join(folder_path, blueprint.name + extension)
|
||||
# print("gltf_output_path", gltf_output_path)
|
||||
found = os.path.exists(gltf_output_path) and os.path.isfile(gltf_output_path)
|
||||
if not found:
|
||||
not_found_blueprints.append(blueprint)
|
||||
return not_found_blueprints
|
||||
|
||||
def check_if_blueprint_on_disk(scene_name, folder_path, extension):
|
||||
gltf_output_path = os.path.join(folder_path, scene_name + extension)
|
||||
found = os.path.exists(gltf_output_path) and os.path.isfile(gltf_output_path)
|
||||
print("level", scene_name, "found", found, "path", gltf_output_path)
|
||||
return found
|
||||
|
||||
# blueprints: any collection with either
|
||||
# - an instance
|
||||
# - marked as asset
|
||||
# - with the "auto_export" flag
|
||||
# https://blender.stackexchange.com/questions/167878/how-to-get-all-collections-of-the-current-scene
|
||||
def blueprints_scan(main_scenes, library_scenes, addon_prefs):
|
||||
export_marked_assets = getattr(addon_prefs,"export_marked_assets")
|
||||
|
||||
blueprints = {}
|
||||
blueprints_from_objects = {}
|
||||
blueprint_name_from_instances = {}
|
||||
collections = []
|
||||
|
||||
# main scenes
|
||||
blueprint_instances_per_main_scene = {}
|
||||
internal_collection_instances = {}
|
||||
external_collection_instances = {}
|
||||
|
||||
# meh
|
||||
def add_object_to_collection_instances(collection_name, object, internal=True):
|
||||
collection_category = internal_collection_instances if internal else external_collection_instances
|
||||
if not collection_name in collection_category.keys():
|
||||
#print("ADDING INSTANCE OF", collection_name, "object", object.name, "categ", collection_category)
|
||||
collection_category[collection_name] = [] #.append(collection_name)
|
||||
collection_category[collection_name].append(object)
|
||||
|
||||
for scene in main_scenes:# should it only be main scenes ? what about collection instances inside other scenes ?
|
||||
for object in scene.objects:
|
||||
#print("object", object.name)
|
||||
if object.instance_type == 'COLLECTION':
|
||||
collection = object.instance_collection
|
||||
collection_name = object.instance_collection.name
|
||||
#print(" from collection:", collection_name)
|
||||
|
||||
collection_from_library = False
|
||||
for library_scene in library_scenes: # should be only in library scenes
|
||||
collection_from_library = library_scene.user_of_id(collection) > 0 # TODO: also check if it is an imported asset
|
||||
if collection_from_library:
|
||||
break
|
||||
|
||||
add_object_to_collection_instances(collection_name=collection_name, object=object, internal = collection_from_library)
|
||||
|
||||
# experiment with custom properties from assets stored in other blend files
|
||||
"""if not collection_from_library:
|
||||
for property_name in object.keys():
|
||||
print("stuff", property_name)
|
||||
for property_name in collection.keys():
|
||||
print("OTHER", property_name)"""
|
||||
|
||||
# blueprints[collection_name].instances.append(object)
|
||||
|
||||
# FIXME: this only accounts for direct instances of blueprints, not for any nested blueprint inside a blueprint
|
||||
if scene.name not in blueprint_instances_per_main_scene.keys():
|
||||
blueprint_instances_per_main_scene[scene.name] = {}
|
||||
if collection_name not in blueprint_instances_per_main_scene[scene.name].keys():
|
||||
blueprint_instances_per_main_scene[scene.name][collection_name] = []
|
||||
blueprint_instances_per_main_scene[scene.name][collection_name].append(object)
|
||||
|
||||
blueprint_name_from_instances[object] = collection_name
|
||||
|
||||
"""# add any indirect ones
|
||||
# FIXME: needs to be recursive, either here or above
|
||||
for nested_blueprint in blueprints[collection_name].nested_blueprints:
|
||||
if not nested_blueprint in blueprint_instances_per_main_scene[scene.name]:
|
||||
blueprint_instances_per_main_scene[scene.name].append(nested_blueprint)"""
|
||||
|
||||
for collection in bpy.data.collections:
|
||||
#print("collection", collection, collection.name_full, "users", collection.users)
|
||||
|
||||
collection_from_library = False
|
||||
defined_in_scene = None
|
||||
for scene in library_scenes: # should be only in library scenes
|
||||
collection_from_library = scene.user_of_id(collection) > 0
|
||||
if collection_from_library:
|
||||
defined_in_scene = scene
|
||||
break
|
||||
if not collection_from_library:
|
||||
continue
|
||||
|
||||
|
||||
if (
|
||||
'AutoExport' in collection and collection['AutoExport'] == True # get marked collections
|
||||
or export_marked_assets and collection.asset_data is not None # or if you have marked collections as assets you can auto export them too
|
||||
or collection.name in list(internal_collection_instances.keys()) # or if the collection has an instance in one of the main scenes
|
||||
):
|
||||
blueprint = Blueprint(collection.name)
|
||||
blueprint.local = True
|
||||
blueprint.marked = 'AutoExport' in collection and collection['AutoExport'] == True or export_marked_assets and collection.asset_data is not None
|
||||
blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inefficient, double loop
|
||||
blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
|
||||
blueprint.collection = collection
|
||||
blueprint.instances = internal_collection_instances[collection.name] if collection.name in internal_collection_instances else []
|
||||
blueprint.scene = defined_in_scene
|
||||
blueprints[collection.name] = blueprint
|
||||
|
||||
# add nested collections to internal/external_collection instances
|
||||
# FIXME: inefficient, third loop over all_objects
|
||||
for object in collection.all_objects:
|
||||
if object.instance_type == 'COLLECTION':
|
||||
add_object_to_collection_instances(collection_name=object.instance_collection.name, object=object, internal = blueprint.local)
|
||||
|
||||
# now create reverse lookup , so you can find the collection from any of its contained objects
|
||||
for object in collection.all_objects:
|
||||
blueprints_from_objects[object.name] = blueprint#collection.name
|
||||
|
||||
#
|
||||
collections.append(collection)
|
||||
|
||||
# add any collection that has an instance in the main scenes, but is not present in any of the scenes (IE NON LOCAL/ EXTERNAL)
|
||||
for collection_name in external_collection_instances:
|
||||
collection = bpy.data.collections[collection_name]
|
||||
blueprint = Blueprint(collection.name)
|
||||
blueprint.local = False
|
||||
blueprint.marked = True #external ones are always marked, as they have to have been marked in their original file #'AutoExport' in collection and collection['AutoExport'] == True
|
||||
blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inefficient, double loop
|
||||
blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
|
||||
blueprint.collection = collection
|
||||
blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
|
||||
blueprints[collection.name] = blueprint
|
||||
#print("EXTERNAL COLLECTION", collection, dict(collection))
|
||||
|
||||
# add nested collections to internal/external_collection instances
|
||||
# FIXME: inefficient, third loop over all_objects
|
||||
"""for object in collection.all_objects:
|
||||
if object.instance_type == 'COLLECTION':
|
||||
add_object_to_collection_instances(collection_name=object.instance_collection.name, object=object, internal = blueprint.local)"""
|
||||
|
||||
# now create reverse lookup , so you can find the collection from any of its contained objects
|
||||
for object in collection.all_objects:
|
||||
blueprints_from_objects[object.name] = blueprint#collection.name
|
||||
|
||||
|
||||
# then add any nested collections at root level (so we can have a flat list, regardless of nesting)
|
||||
# TODO: do this recursively
|
||||
for blueprint_name in list(blueprints.keys()):
|
||||
parent_blueprint = blueprints[blueprint_name]
|
||||
|
||||
for nested_blueprint_name in parent_blueprint.nested_blueprints:
|
||||
if not nested_blueprint_name in blueprints.keys():
|
||||
collection = bpy.data.collections[nested_blueprint_name]
|
||||
blueprint = Blueprint(collection.name)
|
||||
blueprint.local = parent_blueprint.local
|
||||
blueprint.objects = [object.name for object in collection.all_objects if not object.instance_type == 'COLLECTION'] # inefficient, double loop
|
||||
blueprint.nested_blueprints = [object.instance_collection.name for object in collection.all_objects if object.instance_type == 'COLLECTION'] # FIXME: not precise enough, aka "what is a blueprint"
|
||||
blueprint.collection = collection
|
||||
blueprint.instances = external_collection_instances[collection.name] if collection.name in external_collection_instances else []
|
||||
blueprint.scene = parent_blueprint.scene if parent_blueprint.local else None
|
||||
blueprints[collection.name] = blueprint
|
||||
|
||||
|
||||
# now create reverse lookup , so you can find the collection from any of its contained objects
|
||||
for object in collection.all_objects:
|
||||
blueprints_from_objects[object.name] = blueprint#collection.name
|
||||
|
||||
|
||||
blueprints = dict(sorted(blueprints.items()))
|
||||
|
||||
'''print("BLUEPRINTS")
|
||||
for blueprint_name in blueprints:
|
||||
print(" ", blueprints[blueprint_name])
|
||||
|
||||
"""print("BLUEPRINTS LOOKUP")
|
||||
print(blueprints_from_objects)"""
|
||||
|
||||
print("BLUEPRINT INSTANCES PER MAIN SCENE")
|
||||
print(blueprint_instances_per_main_scene)'''
|
||||
|
||||
|
||||
"""changes_test = {'Library': {
|
||||
'Blueprint1_mesh': bpy.data.objects['Blueprint1_mesh'],
|
||||
'Fox_mesh': bpy.data.objects['Fox_mesh'],
|
||||
'External_blueprint2_Cylinder': bpy.data.objects['External_blueprint2_Cylinder']}
|
||||
}
|
||||
# which main scene has been impacted by this
|
||||
# does one of the main scenes contain an INSTANCE of an impacted blueprint
|
||||
for scene in main_scenes:
|
||||
changed_objects = list(changes_test["Library"].keys()) # just a hack for testing
|
||||
#bluprint_instances_in_scene = blueprint_instances_per_main_scene[scene.name]
|
||||
#print("instances per scene", bluprint_instances_in_scene, "changed_objects", changed_objects)
|
||||
|
||||
changed_blueprints_with_instances_in_scene = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
|
||||
print("changed_blueprints_with_instances_in_scene", changed_blueprints_with_instances_in_scene)
|
||||
level_needs_export = len(changed_blueprints_with_instances_in_scene) > 0
|
||||
if level_needs_export:
|
||||
print("level needs export", scene.name)
|
||||
|
||||
for scene in library_scenes:
|
||||
changed_objects = list(changes_test[scene.name].keys())
|
||||
changed_blueprints = [blueprints_from_objects[changed] for changed in changed_objects if changed in blueprints_from_objects]
|
||||
# we only care about local blueprints/collections
|
||||
changed_local_blueprints = [blueprint_name for blueprint_name in changed_blueprints if blueprint_name in blueprints.keys() and blueprints[blueprint_name].local]
|
||||
print("changed blueprints", changed_local_blueprints)"""
|
||||
|
||||
# additional helper data structures for lookups etc
|
||||
blueprints_per_name = blueprints
|
||||
blueprints = [] # flat list
|
||||
internal_blueprints = []
|
||||
external_blueprints = []
|
||||
blueprints_per_scenes = {}
|
||||
|
||||
blueprint_instances_per_library_scene = {}
|
||||
|
||||
for blueprint in blueprints_per_name.values():
|
||||
blueprints.append(blueprint)
|
||||
if blueprint.local:
|
||||
internal_blueprints.append(blueprint)
|
||||
if blueprint.scene:
|
||||
if not blueprint.scene.name in blueprints_per_scenes:
|
||||
blueprints_per_scenes[blueprint.scene.name] = []
|
||||
blueprints_per_scenes[blueprint.scene.name].append(blueprint.name) # meh
|
||||
|
||||
else:
|
||||
external_blueprints.append(blueprint)
|
||||
|
||||
# we also need to have blueprint instances for
|
||||
|
||||
data = {
|
||||
"blueprints": blueprints,
|
||||
"blueprints_per_name": blueprints_per_name,
|
||||
"blueprint_names": list(blueprints_per_name.keys()),
|
||||
"blueprints_from_objects": blueprints_from_objects,
|
||||
|
||||
"internal_blueprints": internal_blueprints,
|
||||
"external_blueprints": external_blueprints,
|
||||
"blueprints_per_scenes": blueprints_per_scenes,
|
||||
|
||||
"blueprint_instances_per_main_scene": blueprint_instances_per_main_scene,
|
||||
"blueprint_instances_per_library_scene": blueprint_instances_per_library_scene,
|
||||
|
||||
# not sure about these two
|
||||
"internal_collection_instances": internal_collection_instances,
|
||||
"external_collection_instances": external_collection_instances,
|
||||
|
||||
"blueprint_name_from_instances": blueprint_name_from_instances
|
||||
}
|
||||
|
||||
return SimpleNamespace(**data)
|
||||
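# A minimal usage sketch (hypothetical helper, not part of the add-on): the SimpleNamespace
# returned above exposes the assembled lookups as attributes, so callers can do things like:
def print_blueprints_summary(blueprints_data):
    # local blueprints are defined in this .blend file, external ones come from linked libraries
    print("internal blueprints", [blueprint.name for blueprint in blueprints_data.internal_blueprints])
    print("external blueprints", [blueprint.name for blueprint in blueprints_data.external_blueprints])
    # reverse lookup: object name -> the blueprint that contains it
    for object_name, blueprint in blueprints_data.blueprints_from_objects.items():
        print(object_name, "->", blueprint.name)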
|
||||
|
||||
import os
import json
import bpy
|
||||
from .object_makers import (make_empty)
|
||||
|
||||
|
||||
def add_scene_property(scene, property_name, property_data):
|
||||
root_collection = scene.collection
|
||||
scene_property = None
|
||||
for object in scene.objects:
|
||||
if object.name == property_name:
|
||||
scene_property = object
|
||||
break
|
||||
|
||||
if scene_property is None:
|
||||
scene_property = make_empty(property_name, [0,0,0], [0,0,0], [0,0,0], root_collection)
|
||||
|
||||
for key in property_data.keys():
|
||||
scene_property[key] = property_data[key]
|
||||
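# A minimal usage sketch for add_scene_property (the scene name and property values here are
# hypothetical): the holder is an Empty named after property_name, and every key of
# property_data ends up as a custom property on that Empty.
def example_add_scene_property():
    example_scene = bpy.data.scenes.get("World")
    if example_scene is not None:
        add_scene_property(example_scene, f"assets_list_{example_scene.name}_components", {"BlueprintsList": "({})"})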
|
||||
|
||||
def inject_blueprints_list_into_main_scene(scene, blueprints_data, addon_prefs):
|
||||
export_root_folder = getattr(addon_prefs, "export_root_folder")
|
||||
export_output_folder = getattr(addon_prefs,"export_output_folder")
|
||||
export_levels_path = getattr(addon_prefs,"export_levels_path")
|
||||
export_blueprints_path = getattr(addon_prefs, "export_blueprints_path")
|
||||
export_gltf_extension = getattr(addon_prefs, "export_gltf_extension")
|
||||
|
||||
# print("injecting assets/blueprints data into scene")
|
||||
assets_list_name = f"assets_list_{scene.name}_components"
|
||||
assets_list_data = {}
|
||||
|
||||
|
||||
# FIXME: temporary hack
|
||||
for blueprint in blueprints_data.blueprints:
|
||||
bpy.context.window_manager.blueprints_registry.add_blueprint(blueprint)
|
||||
|
||||
blueprint_instance_names_for_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene.name, None)
|
||||
# find all blueprints used in a scene
|
||||
blueprints_in_scene = []
|
||||
if blueprint_instance_names_for_scene: # what are the blueprints used in this scene, inject those into the assets list component
|
||||
children_per_blueprint = {}
|
||||
for blueprint_name in blueprint_instance_names_for_scene:
|
||||
blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
|
||||
if blueprint:
|
||||
children_per_blueprint[blueprint_name] = blueprint.nested_blueprints
|
||||
blueprints_in_scene += blueprint.nested_blueprints
|
||||
assets_list_data["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"
|
||||
print(blueprint_instance_names_for_scene)
|
||||
add_scene_property(scene, assets_list_name, assets_list_data)
|
||||
|
||||
|
||||
relative_blueprints_path = os.path.relpath(export_blueprints_path, export_root_folder)
|
||||
|
||||
blueprint_assets_list = []
|
||||
if blueprint_instance_names_for_scene:
|
||||
for blueprint_name in blueprint_instance_names_for_scene:
|
||||
blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
|
||||
if blueprint is not None:
|
||||
print("BLUEPRINT", blueprint)
|
||||
blueprint_exported_path = None
|
||||
if blueprint.local:
|
||||
blueprint_exported_path = os.path.join(relative_blueprints_path, f"{blueprint.name}{export_gltf_extension}")
|
||||
else:
|
||||
# get the injected path of the external blueprints
|
||||
blueprint_exported_path = blueprint.collection['Export_path'] if 'Export_path' in blueprint.collection else None
|
||||
print("foo", dict(blueprint.collection))
|
||||
if blueprint_exported_path is not None:
|
||||
blueprint_assets_list.append({"name": blueprint.name, "path": blueprint_exported_path, "type": "MODEL", "internal": True})
|
||||
|
||||
|
||||
# fetch images/textures
|
||||
# see https://blender.stackexchange.com/questions/139859/how-to-get-absolute-file-path-for-linked-texture-image
|
||||
textures = []
|
||||
for ob in bpy.data.objects:
|
||||
if ob.type == "MESH":
|
||||
for mat_slot in ob.material_slots:
|
||||
if mat_slot.material:
|
||||
if mat_slot.material.node_tree:
|
||||
textures.extend([x.image.filepath for x in mat_slot.material.node_tree.nodes if x.type == 'TEX_IMAGE' and x.image is not None]) # skip image nodes with no image assigned
|
||||
print("textures", textures)
|
||||
|
||||
assets_list_name = f"assets_{scene.name}"
|
||||
assets_list_data = {"blueprints": json.dumps(blueprint_assets_list), "sounds":[], "images":[]}
|
||||
scene["assets"] = json.dumps(blueprint_assets_list)
|
||||
|
||||
print("blueprint assets", blueprint_assets_list)
|
||||
add_scene_property(scene, assets_list_name, assets_list_data)
|
||||
for blueprint in blueprint_assets_list:
|
||||
bpy.context.window_manager.assets_registry.add_asset(**blueprint)
|
||||
|
||||
|
||||
'''root_collection = scene.collection
|
||||
|
||||
assets_list = None
|
||||
for object in scene.objects:
|
||||
if object.name == assets_list_name:
|
||||
assets_list = object
|
||||
break
|
||||
|
||||
if assets_list is None:
|
||||
assets_list = make_empty(assets_list_name, [0,0,0], [0,0,0], [0,0,0], root_collection)
|
||||
|
||||
blueprint_names_for_scene = blueprints_data.blueprint_instances_per_main_scene.get(scene.name, None)
|
||||
# find all blueprints used in a scene
|
||||
if blueprint_names_for_scene: # what are the blueprints used in this scene, inject those into the assets list component
|
||||
children_per_blueprint = {}
|
||||
for blueprint_name in blueprint_names_for_scene:
|
||||
blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
|
||||
if blueprint:
|
||||
children_per_blueprint[blueprint_name] = blueprint.nested_blueprints
|
||||
assets_list["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"'''
|
||||
|
||||
def remove_blueprints_list_from_main_scene(scene):
|
||||
assets_list = None
|
||||
assets_list_name = f"assets_list_{scene.name}_components"
|
||||
|
||||
for object in scene.objects:
|
||||
if object.name == assets_list_name:
|
||||
assets_list = object
|
||||
if assets_list is not None:
|
||||
bpy.data.objects.remove(assets_list, do_unlink=True)
|
23
tools/blenvy/gltf_auto_export/helpers/helpers_collections.py
Normal file
@ -0,0 +1,23 @@
|
||||
import bpy
|
||||
|
||||
# traverse all collections
|
||||
def traverse_tree(t):
|
||||
yield t
|
||||
for child in t.children:
|
||||
yield from traverse_tree(child)
|
||||
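# A minimal usage sketch (run inside Blender): flatten the current scene's nested collections
# into a plain list of names using the generator above.
def example_list_all_collections():
    return [collection.name for collection in traverse_tree(bpy.context.scene.collection)]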
|
||||
# Recursively traverse layer_collection looking for a particular name
|
||||
def recurLayerCollection(layerColl, collName):
|
||||
found = None
|
||||
if (layerColl.name == collName):
|
||||
return layerColl
|
||||
for layer in layerColl.children:
|
||||
found = recurLayerCollection(layer, collName)
|
||||
if found:
|
||||
return found
|
||||
|
||||
def set_active_collection(scene, collection_name):
|
||||
layer_collection = bpy.data.scenes[scene.name].view_layers['ViewLayer'].layer_collection
|
||||
layerColl = recurLayerCollection(layer_collection, collection_name)
|
||||
# set active collection to the collection
|
||||
bpy.context.view_layer.active_layer_collection = layerColl
|
222
tools/blenvy/gltf_auto_export/helpers/helpers_scenes.py
Normal file
@ -0,0 +1,222 @@
|
||||
import json
|
||||
import bpy
|
||||
from .object_makers import (make_empty)
|
||||
|
||||
|
||||
# these are mostly for when using this add-on together with the bevy_components add-on
|
||||
custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']
|
||||
|
||||
def is_component_valid(object, component_name):
|
||||
if "components_meta" in object or hasattr(object, "components_meta"):
|
||||
target_components_metadata = object.components_meta.components
|
||||
component_meta = next(filter(lambda component: component["long_name"] == component_name, target_components_metadata), None)
|
||||
if component_meta is not None:
|
||||
return component_meta.enabled and not component_meta.invalid
|
||||
return True
|
||||
|
||||
def remove_unwanted_custom_properties(object):
|
||||
to_remove = []
|
||||
component_names = list(object.keys()) # to avoid 'IDPropertyGroup changed size during iteration' issues
|
||||
for component_name in component_names:
|
||||
if not is_component_valid(object, component_name):
|
||||
to_remove.append(component_name)
|
||||
for cp in custom_properties_to_filter_out + to_remove:
|
||||
if cp in object:
|
||||
del object[cp]
|
||||
|
||||
# TODO: rename actions ?
|
||||
# reference https://github.com/KhronosGroup/glTF-Blender-IO/blob/main/addons/io_scene_gltf2/blender/exp/animation/gltf2_blender_gather_action.py#L481
|
||||
def copy_animation_data(source, target):
|
||||
if source.animation_data:
|
||||
ad = source.animation_data
|
||||
|
||||
blender_actions = []
|
||||
blender_tracks = {}
|
||||
|
||||
# TODO: this might need to be modified/ adapted to match the standard gltf exporter settings
|
||||
for track in ad.nla_tracks:
|
||||
non_muted_strips = [strip for strip in track.strips if strip.action is not None and strip.mute is False]
|
||||
for strip in non_muted_strips: #t.strips:
|
||||
# print(" ", source.name,'uses',strip.action.name, "active", strip.active, "action", strip.action)
|
||||
blender_actions.append(strip.action)
|
||||
blender_tracks[strip.action.name] = track.name
|
||||
|
||||
# Remove duplicate actions.
|
||||
blender_actions = list(set(blender_actions))
|
||||
# sort animations alphabetically (case insensitive) so they have a defined order and match Blender's Action list
|
||||
blender_actions.sort(key = lambda a: a.name.lower())
|
||||
|
||||
markers_per_animation = {}
|
||||
animations_infos = []
|
||||
|
||||
for action in blender_actions:
|
||||
animation_name = blender_tracks[action.name]
|
||||
animations_infos.append(
|
||||
f'(name: "{animation_name}", frame_start: {action.frame_range[0]}, frame_end: {action.frame_range[1]}, frames_length: {action.frame_range[1] - action.frame_range[0]}, frame_start_override: {action.frame_start}, frame_end_override: {action.frame_end})'
|
||||
)
|
||||
markers_per_animation[animation_name] = {}
|
||||
|
||||
for marker in action.pose_markers:
|
||||
if marker.frame not in markers_per_animation[animation_name]:
|
||||
markers_per_animation[animation_name][marker.frame] = []
|
||||
markers_per_animation[animation_name][marker.frame].append(marker.name)
|
||||
|
||||
# best method, using the built-in link animation operator
|
||||
with bpy.context.temp_override(active_object=source, selected_editable_objects=[target]):
|
||||
bpy.ops.object.make_links_data(type='ANIMATION')
|
||||
|
||||
"""if target.animation_data == None:
|
||||
target.animation_data_create()
|
||||
target.animation_data.action = source.animation_data.action.copy()
|
||||
|
||||
print("copying animation data for", source.name, target.animation_data)
|
||||
properties = [p.identifier for p in source.animation_data.bl_rna.properties if not p.is_readonly]
|
||||
for prop in properties:
|
||||
print("copying stuff", prop)
|
||||
setattr(target.animation_data, prop, getattr(source.animation_data, prop))"""
|
||||
|
||||
# we add an "AnimationInfos" component
|
||||
target['AnimationInfos'] = f'(animations: {animations_infos})'.replace("'","")
|
||||
|
||||
# and animation markers
|
||||
markers_formated = '{'
|
||||
for animation in markers_per_animation.keys():
|
||||
markers_formated += f'"{animation}":'
|
||||
markers_formated += "{"
|
||||
for frame in markers_per_animation[animation].keys():
|
||||
markers = markers_per_animation[animation][frame]
|
||||
markers_formated += f"{frame}:{markers}, ".replace("'", '"')
|
||||
markers_formated += '}, '
|
||||
markers_formated += '}'
|
||||
target["AnimationMarkers"] = f'( {markers_formated} )'
|
||||
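# Standalone sketch of the AnimationMarkers string produced above (animation and marker names
# are hypothetical); it mirrors the string-building loop so the format can be checked outside Blender:
def format_animation_markers(markers_per_animation):
    markers_formated = '{'
    for animation in markers_per_animation.keys():
        markers_formated += f'"{animation}":'
        markers_formated += "{"
        for frame, markers in markers_per_animation[animation].items():
            markers_formated += f"{frame}:{markers}, ".replace("'", '"')
        markers_formated += '}, '
    markers_formated += '}'
    return f'( {markers_formated} )'
# format_animation_markers({"Run": {5: ["footstep_left"], 12: ["footstep_right"]}})
# returns '( {"Run":{5:["footstep_left"], 12:["footstep_right"], }, } )'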
|
||||
|
||||
def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, nester=""):
|
||||
copy = None
|
||||
internal_blueprint_names = [blueprint.name for blueprint in blueprints_data.internal_blueprints]
|
||||
# print("COMBINE MODE", combine_mode)
|
||||
if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in internal_blueprint_names)) ):
|
||||
#print("creating empty for", object.name, object.instance_collection.name, internal_blueprint_names, combine_mode)
|
||||
collection_name = object.instance_collection.name
|
||||
original_name = object.name
|
||||
|
||||
object.name = original_name + "____bak"
|
||||
empty_obj = make_empty(original_name, object.location, object.rotation_euler, object.scale, destination_collection)
|
||||
|
||||
"""we inject the collection/blueprint name, as a component called 'BlueprintName', but we only do this in the empty, not the original object"""
|
||||
empty_obj['BlueprintName'] = '("'+collection_name+'")'
|
||||
empty_obj["BlueprintPath"] = ''
|
||||
empty_obj['SpawnHere'] = '()'
|
||||
|
||||
# we also inject a list of all sub blueprints, so that the bevy side can preload them
|
||||
blueprint_name = collection_name
|
||||
children_per_blueprint = {}
|
||||
blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
|
||||
if blueprint:
|
||||
children_per_blueprint[blueprint_name] = blueprint.nested_blueprints
|
||||
empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_blueprint))})"
|
||||
|
||||
# we copy custom properties over from our original object to our empty
|
||||
for component_name, component_value in object.items():
|
||||
if component_name not in custom_properties_to_filter_out and is_component_valid(object, component_name): #copy only valid properties
|
||||
empty_obj[component_name] = component_value
|
||||
copy = empty_obj
|
||||
else:
|
||||
# for objects which are NOT collection instances, or when embedding,
|
||||
# we create a copy of our object and its children, to leave the original one as it is
|
||||
original_name = object.name
|
||||
object.name = original_name + "____bak"
|
||||
copy = object.copy()
|
||||
copy.name = original_name
|
||||
|
||||
destination_collection.objects.link(copy)
|
||||
|
||||
"""if object.parent == None:
|
||||
if parent_empty is not None:
|
||||
copy.parent = parent_empty
|
||||
"""
|
||||
# do this both for empty replacements & normal copies
|
||||
if parent is not None:
|
||||
copy.parent = parent
|
||||
remove_unwanted_custom_properties(copy)
|
||||
copy_animation_data(object, copy)
|
||||
|
||||
for child in object.children:
|
||||
duplicate_object(child, copy, combine_mode, destination_collection, blueprints_data, nester+" ")
|
||||
|
||||
# copies the contents of a collection into another one while replacing library instances with empties
|
||||
def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, addon_prefs={}):
|
||||
collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
|
||||
|
||||
for object in source_collection.objects:
|
||||
if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
|
||||
continue
|
||||
if filter is not None and filter(object) is False:
|
||||
continue
|
||||
# check if a specific collection instance has an override for combine_mode
|
||||
combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
|
||||
parent = parent_empty
|
||||
duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data)
|
||||
|
||||
# for every child-collection of the source, copy its content into a new sub-collection of the destination
|
||||
for collection in source_collection.children:
|
||||
original_name = collection.name
|
||||
collection.name = original_name + "____bak"
|
||||
collection_placeholder = make_empty(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection)
|
||||
|
||||
if parent_empty is not None:
|
||||
collection_placeholder.parent = parent_empty
|
||||
copy_hollowed_collection_into(
|
||||
source_collection = collection,
|
||||
destination_collection = destination_collection,
|
||||
parent_empty = collection_placeholder,
|
||||
filter = filter,
|
||||
blueprints_data = blueprints_data,
|
||||
addon_prefs=addon_prefs
|
||||
)
|
||||
|
||||
|
||||
|
||||
return {}
|
||||
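# A minimal sketch of the intended workflow around copy_hollowed_collection_into and
# clear_hollow_scene (the temp scene name and the export step are placeholders; the real
# pipeline wires this up through its generate_and_export helper):
def example_hollow_copy(source_collection, blueprints_data, addon_prefs):
    temp_scene = bpy.data.scenes.new(name="__temp_scene")
    copy_hollowed_collection_into(
        source_collection=source_collection,
        destination_collection=temp_scene.collection,
        blueprints_data=blueprints_data,
        addon_prefs=addon_prefs
    )
    # ... export temp_scene to gltf here ...
    clear_hollow_scene(temp_scene, source_collection)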
|
||||
# clear & remove "hollow scene"
|
||||
def clear_hollow_scene(temp_scene, original_root_collection):
|
||||
def restore_original_names(collection):
|
||||
if collection.name.endswith("____bak"):
|
||||
collection.name = collection.name.replace("____bak", "")
|
||||
for object in collection.objects:
|
||||
if object.instance_type == 'COLLECTION':
|
||||
if object.name.endswith("____bak"):
|
||||
object.name = object.name.replace("____bak", "")
|
||||
else:
|
||||
if object.name.endswith("____bak"):
|
||||
object.name = object.name.replace("____bak", "")
|
||||
for child_collection in collection.children:
|
||||
restore_original_names(child_collection)
|
||||
|
||||
|
||||
# remove any data we created
|
||||
temp_root_collection = temp_scene.collection
|
||||
temp_scene_objects = [o for o in temp_root_collection.all_objects]
|
||||
for object in temp_scene_objects:
|
||||
#print("removing", object.name)
|
||||
bpy.data.objects.remove(object, do_unlink=True)
|
||||
|
||||
# remove the temporary scene
|
||||
bpy.data.scenes.remove(temp_scene, do_unlink=True)
|
||||
|
||||
# reset original names
|
||||
restore_original_names(original_root_collection)
|
||||
|
||||
# convenience utility to get lists of scenes
|
||||
def get_scenes(addon_prefs):
|
||||
level_scene_names = getattr(addon_prefs, "main_scene_names", []) #list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes")))
|
||||
library_scene_names = getattr(addon_prefs, "library_scene_names", []) #list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes")))
|
||||
|
||||
level_scene_names = list(filter(lambda name: name in bpy.data.scenes, level_scene_names))
|
||||
library_scene_names = list(filter(lambda name: name in bpy.data.scenes, library_scene_names))
|
||||
|
||||
level_scenes = list(map(lambda name: bpy.data.scenes[name], level_scene_names))
|
||||
library_scenes = list(map(lambda name: bpy.data.scenes[name], library_scene_names))
|
||||
|
||||
return [level_scene_names, level_scenes, library_scene_names, library_scenes]
|
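# A minimal usage sketch: get_scenes already filters out names that do not exist in
# bpy.data.scenes, so the returned lists can be iterated directly.
def example_print_scenes(addon_prefs):
    level_scene_names, level_scenes, library_scene_names, library_scenes = get_scenes(addon_prefs)
    print("level scenes", level_scene_names, "library scenes", library_scene_names)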
47
tools/blenvy/gltf_auto_export/helpers/object_makers.py
Normal file
@ -0,0 +1,47 @@
|
||||
import bmesh
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
# Makes an empty at the specified location, rotation and scale, and links it into the given collection, from https://blender.stackexchange.com/questions/51290/how-to-add-empty-object-not-using-bpy-ops
|
||||
def make_empty(name, location, rotation, scale, collection):
|
||||
object_data = None
|
||||
empty_obj = bpy.data.objects.new( name, object_data )
|
||||
|
||||
empty_obj.empty_display_size = 2
|
||||
empty_obj.empty_display_type = 'PLAIN_AXES'
|
||||
|
||||
empty_obj.name = name
|
||||
empty_obj.location = location
|
||||
empty_obj.scale = scale
|
||||
empty_obj.rotation_euler = rotation
|
||||
|
||||
collection.objects.link( empty_obj )
|
||||
#bpy.context.view_layer.update()
|
||||
return empty_obj
|
||||
|
||||
def make_cube(name, location=[0,0,0], rotation=[0,0,0], scale=[1,1,1], collection=None):
|
||||
new_mesh = bpy.data.meshes.new(name+"_Mesh") #None
|
||||
"""verts = [( 1.0, 1.0, 0.0),
|
||||
( 1.0, -1.0, 0.0),
|
||||
(-1.0, -1.0, 0.0),
|
||||
(-1.0, 1.0, 0.0),
|
||||
] # 4 verts made with XYZ coords
|
||||
edges = []
|
||||
faces = [[0, 1, 2, 3]]
|
||||
new_mesh.from_pydata(verts, edges, faces)"""
|
||||
|
||||
|
||||
bm = bmesh.new()
|
||||
bmesh.ops.create_cube(bm, size=0.1, matrix=mathutils.Matrix.Translation(location)) # FIXME: other ways to set position seems to fail ?
|
||||
bm.to_mesh(new_mesh)
|
||||
bm.free()
|
||||
|
||||
new_object = bpy.data.objects.new(name, new_mesh)
|
||||
new_object.name = name
|
||||
new_object.location = location
|
||||
new_object.scale = scale
|
||||
new_object.rotation_euler = rotation
|
||||
|
||||
if collection != None:
|
||||
collection.objects.link( new_object )
|
||||
return new_object
|
@ -0,0 +1,10 @@
|
||||
import bpy
|
||||
import rna_prop_ui
|
||||
|
||||
# fake way to make our operator's changes be visible to the change/depsgraph update handler in gltf_auto_export
|
||||
def ping_depsgraph_update(object=None):
|
||||
if object == None:
|
||||
object = bpy.data.scenes[0]
|
||||
rna_prop_ui.rna_idprop_ui_create(object, "________temp", default=0)
|
||||
rna_prop_ui.rna_idprop_ui_prop_clear(object, "________temp")
|
||||
return None
|
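# A minimal usage sketch: call this after programmatic changes (for example at the end of an
# operator) so the depsgraph update handler in gltf_auto_export notices the edit; passing no
# argument falls back to the first scene, as above.
def example_ping_after_change():
    ping_depsgraph_update()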
233
tools/blenvy/gltf_auto_export/helpers/serialize_scene.py
Normal file
@ -0,0 +1,233 @@
|
||||
import json
|
||||
from mathutils import Color
|
||||
import numpy as np
|
||||
import bpy
|
||||
from ..constants import TEMPSCENE_PREFIX
|
||||
|
||||
fields_to_ignore_generic = ["tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users",
|
||||
'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get',
|
||||
'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original',
|
||||
'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type',
|
||||
'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data']
|
||||
|
||||
# possible alternatives https://blender.stackexchange.com/questions/286010/bpy-detect-modified-mesh-data-vertices-edges-loops-or-polygons-for-cachin
|
||||
def mesh_hash(obj):
|
||||
# this is incomplete, how about edges ?
|
||||
vertex_count = len(obj.data.vertices)
|
||||
vertices_np = np.empty(vertex_count * 3, dtype=np.float32)
|
||||
obj.data.vertices.foreach_get("co", vertices_np)
|
||||
h = str(hash(vertices_np.tobytes()))
|
||||
return h
|
||||
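# A possible extension addressing the "how about edges ?" comment above (untested sketch):
# fold the edge topology into the hash as well, using the same foreach_get pattern as for
# the vertex coordinates.
def mesh_hash_with_edges(obj):
    vertex_count = len(obj.data.vertices)
    vertices_np = np.empty(vertex_count * 3, dtype=np.float32)
    obj.data.vertices.foreach_get("co", vertices_np)

    edge_count = len(obj.data.edges)
    edges_np = np.empty(edge_count * 2, dtype=np.int32)
    obj.data.edges.foreach_get("vertices", edges_np)

    return str(hash(vertices_np.tobytes() + edges_np.tobytes()))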
|
||||
# TODO: redo this one, this is essentially modified copy & pasted code, not a good fit
|
||||
def animation_hash(obj):
|
||||
animation_data = obj.animation_data
|
||||
if not animation_data:
|
||||
return None
|
||||
blender_actions = []
|
||||
blender_tracks = {}
|
||||
|
||||
# TODO: this might need to be modified/ adapted to match the standard gltf exporter settings
|
||||
for track in animation_data.nla_tracks:
|
||||
strips = [strip for strip in track.strips if strip.action is not None]
|
||||
for strip in strips:
|
||||
# print(" ", source.name,'uses',strip.action.name, "active", strip.active, "action", strip.action)
|
||||
blender_actions.append(strip.action)
|
||||
blender_tracks[strip.action.name] = track.name
|
||||
|
||||
# Remove duplicate actions.
|
||||
blender_actions = list(set(blender_actions))
|
||||
# sort animations alphabetically (case insensitive) so they have a defined order and match Blender's Action list
|
||||
blender_actions.sort(key = lambda a: a.name.lower())
|
||||
|
||||
markers_per_animation = {}
|
||||
animations_infos = []
|
||||
|
||||
for action in blender_actions:
|
||||
animation_name = blender_tracks[action.name]
|
||||
animations_infos.append(
|
||||
f'(name: "{animation_name}", frame_start: {action.frame_range[0]}, frame_end: {action.frame_range[1]}, frames_length: {action.frame_range[1] - action.frame_range[0]}, frame_start_override: {action.frame_start}, frame_end_override: {action.frame_end})'
|
||||
)
|
||||
markers_per_animation[animation_name] = {}
|
||||
|
||||
for marker in action.pose_markers:
|
||||
if marker.frame not in markers_per_animation[animation_name]:
|
||||
markers_per_animation[animation_name][marker.frame] = []
|
||||
markers_per_animation[animation_name][marker.frame].append(marker.name)
|
||||
|
||||
compact_result = hash(str((blender_actions, blender_tracks, markers_per_animation, animations_infos)))
|
||||
return compact_result
|
||||
|
||||
|
||||
def camera_hash(obj):
|
||||
camera_fields = ["angle", "angle_x", "angle_y", "animation_data", "background_images", "clip_end", "clip_start", "display_size", "dof", "fisheye_fov"]
|
||||
camera_data = obj.data
|
||||
fields_to_ignore= fields_to_ignore_generic
|
||||
|
||||
all_field_names = dir(camera_data)
|
||||
fields = [getattr(camera_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
# TODO: the above is not enough, certain fields are left as bpy.data.xx
|
||||
#print("camera", obj, fields)
|
||||
return str(fields)
|
||||
|
||||
def light_hash(obj):
|
||||
light_data = obj.data
|
||||
fields_to_ignore = fields_to_ignore_generic
|
||||
|
||||
all_field_names = dir(light_data)
|
||||
fields = [getattr(light_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
return str(fields)
|
||||
|
||||
def bones_hash(bones):
|
||||
fields_to_ignore = fields_to_ignore_generic + ['AxisRollFromMatrix', 'MatrixFromAxisRoll', 'evaluate_envelope', 'convert_local_to_pose', 'foreach_get', 'foreach_set', 'get', 'set', 'find', 'items', 'keys', 'values']
|
||||
|
||||
bones_result = []
|
||||
for bone in bones:
|
||||
all_field_names = dir(bone)
|
||||
fields = [getattr(bone, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
bones_result.append(fields)
|
||||
#print("fields of bone", bones_result)
|
||||
return str(hash(str(bones_result)))
|
||||
|
||||
# fixme: not good enough ?
|
||||
def armature_hash(obj):
|
||||
fields_to_ignore = fields_to_ignore_generic + ['display_type', 'is_editmode', 'pose_position', 'foreach_get', 'get']
|
||||
fields_to_convert = {'bones': bones_hash}#, 'collections_all': bones_hash}
|
||||
armature_data = obj.data
|
||||
all_field_names = dir(armature_data)
|
||||
|
||||
fields = [getattr(armature_data, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(armature_data, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
#print("ARMATURE", fields)
|
||||
|
||||
"""for bone in armature_data.bones:
|
||||
print("bone", bone, bone_hash(bone))"""
|
||||
return str(fields)
|
||||
|
||||
def field_value(data):
|
||||
pass
|
||||
|
||||
def color(color_data):
|
||||
# print("color", color_data, type(color_data))
|
||||
return str(peel_value(color_data))
|
||||
|
||||
def lineart(lineart_data):
|
||||
fields_to_ignore = fields_to_ignore_generic
|
||||
|
||||
all_field_names = dir(lineart_data)
|
||||
fields = [getattr(lineart_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
return str(fields)
|
||||
|
||||
def node_tree(nodetree_data):
|
||||
fields_to_ignore = fields_to_ignore_generic+ ['contains_tree','get_output_node', 'interface_update', 'override_template_create']
|
||||
all_field_names = dir(nodetree_data)
|
||||
fields = [getattr(nodetree_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
|
||||
# print("node tree", fields)
|
||||
return str(fields)
|
||||
|
||||
|
||||
def peel_value( value ):
|
||||
try:
|
||||
len( value )
|
||||
return [ peel_value( x ) for x in value ]
|
||||
except TypeError:
|
||||
return value
|
||||
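# Standalone sketch of what peel_value does: nested, sequence-like values (such as mathutils
# Color/Vector instances) become plain lists, while scalars are returned unchanged.
def example_peel_value():
    assert peel_value((1.0, (2.0, 3.0))) == [1.0, [2.0, 3.0]]
    assert peel_value(42) == 42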
|
||||
def material_hash(material):
|
||||
fields_to_ignore = fields_to_ignore_generic
|
||||
fields_to_convert = {'diffuse_color': color, 'line_color': color, 'lineart': lineart, 'node_tree': node_tree} # TODO: perhaps use types rather than names
|
||||
all_field_names = dir(material)
|
||||
fields = [getattr(material, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(material, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
|
||||
type_of = [type(getattr(material, prop, None)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
|
||||
|
||||
tutu = [t == Color for t in type_of] # bpy.types.MaterialLineArt bpy.types.ShaderNodeTree
|
||||
#print("fields", type_of)
|
||||
|
||||
"""for prop in [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]:
|
||||
bla = getattr(material, prop, None)
|
||||
if hasattr(bla, "rna_type"):
|
||||
print("YOLO", prop, bla, peel_value(bla), "type", type(bla), bla.rna_type, bla.rna_type == bpy.types.FloatProperty, type(bla) == bpy.types.bpy_prop_collection)
|
||||
print("types", type(bla) == bpy.types.bpy_prop_collection, type(bla) == bpy.types.FloatColorAttributeValue)"""
|
||||
|
||||
# print("oooooh", material, material.bl_rna.properties.items())
|
||||
|
||||
return str(fields)#str(hash(str(fields)))
|
||||
|
||||
# TODO: this is partially taken from export_materials utilities, perhaps we could avoid having to fetch things multiple times ?
|
||||
def materials_hash(obj, cache):
|
||||
# print("materials")
|
||||
materials = []
|
||||
for material_slot in obj.material_slots:
|
||||
material = material_slot.material
|
||||
cached_hash = cache['materials'].get(material.name, None)
|
||||
if cached_hash:
|
||||
# print("CACHHHHHED", cached_hash)
|
||||
materials.append(cached_hash)
|
||||
else:
|
||||
mat = material_hash(material)
|
||||
cache['materials'][material.name] = mat
|
||||
materials.append(mat)
|
||||
# print("NOT CACHHH", mat)
|
||||
|
||||
# materials = [material_hash(material_slot.material) if not material_slot.material.name in cache["materials"] else cache["materials"][material_slot.material.name] for material_slot in obj.material_slots]
|
||||
return str(hash(str(materials)))
|
||||
|
||||
def custom_properties_hash(obj):
|
||||
custom_properties = {}
|
||||
for property_name in obj.keys():
|
||||
if property_name not in '_RNA_UI' and property_name != 'components_meta':
|
||||
custom_properties[property_name] = obj[property_name]
|
||||
|
||||
return str(hash(str(custom_properties)))
|
||||
|
||||
|
||||
def serialize_scene():
|
||||
cache = {"materials":{}}
|
||||
print("serializing scene")
|
||||
data = {}
|
||||
for scene in bpy.data.scenes:
|
||||
if scene.name.startswith(TEMPSCENE_PREFIX):
|
||||
continue
|
||||
data[scene.name] = {}
|
||||
for object in scene.objects:
|
||||
object = bpy.data.objects[object.name]
|
||||
|
||||
#loc, rot, scale = bpy.context.object.matrix_world.decompose()
|
||||
|
||||
transform = str((object.location, object.rotation_euler, object.scale)) #str((object.matrix_world.to_translation(), object.matrix_world.to_euler('XYZ'), object.matrix_world.to_quaternion()))#
|
||||
visibility = object.visible_get()
|
||||
custom_properties = custom_properties_hash(object) if len(object.keys()) > 0 else None
|
||||
animations = animation_hash(object)
|
||||
mesh = mesh_hash(object) if object.type == 'MESH' else None
|
||||
camera = camera_hash(object) if object.type == 'CAMERA' else None
|
||||
light = light_hash(object) if object.type == 'LIGHT' else None
|
||||
armature = armature_hash(object) if object.type == 'ARMATURE' else None
|
||||
parent = object.parent.name if object.parent else None
|
||||
collections = [collection.name for collection in object.users_collection]
|
||||
materials = materials_hash(object, cache) if len(object.material_slots) > 0 else None
|
||||
|
||||
data[scene.name][object.name] = {
|
||||
"name": object.name,
|
||||
"transforms": transform,
|
||||
"visibility": visibility,
|
||||
"custom_properties": custom_properties,
|
||||
"animations": animations,
|
||||
"mesh": mesh,
|
||||
"camera": camera,
|
||||
"light": light,
|
||||
"armature": armature,
|
||||
"parent": parent,
|
||||
"collections": collections,
|
||||
"materials": materials
|
||||
}
|
||||
|
||||
"""print("data", data)
|
||||
print("")
|
||||
print("")
|
||||
print("data json", json.dumps(data))"""
|
||||
|
||||
return json.dumps(data)
|
||||
|
||||
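# A minimal sketch of how the serialized snapshot can be used for change detection (where the
# previous snapshot comes from is out of scope here; the real add-on stores it separately):
def example_changed_objects(previous_json, current_json):
    previous = json.loads(previous_json)
    current = json.loads(current_json)
    changed = {}
    for scene_name, objects in current.items():
        previous_objects = previous.get(scene_name, {})
        changed[scene_name] = [
            object_name for object_name, fields in objects.items()
            if previous_objects.get(object_name) != fields
        ]
    return changed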
|
402
tools/blenvy/gltf_auto_export/helpers/to_remove_later.py
Normal file
@ -0,0 +1,402 @@
|
||||
bl_info = {
|
||||
"name": "gltf_auto_export",
|
||||
"author": "kaosigh",
|
||||
"version": (0, 10, 0),
|
||||
"blender": (3, 4, 0),
|
||||
"location": "File > Import-Export",
|
||||
"description": "glTF/glb auto-export",
|
||||
"warning": "",
|
||||
"wiki_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow",
|
||||
"tracker_url": "https://github.com/kaosat-dev/Blender_bevy_components_workflow/issues/new",
|
||||
"category": "Import-Export"
|
||||
}
|
||||
|
||||
import bpy
|
||||
from bpy.props import (BoolProperty,
|
||||
IntProperty,
|
||||
StringProperty,
|
||||
EnumProperty,
|
||||
CollectionProperty
|
||||
)
|
||||
|
||||
|
||||
# glTF extensions are named following a convention with known prefixes.
|
||||
# See: https://github.com/KhronosGroup/glTF/tree/main/extensions#about-gltf-extensions
|
||||
# also: https://github.com/KhronosGroup/glTF/blob/main/extensions/Prefixes.md
|
||||
glTF_extension_name = "EXT_auto_export"
|
||||
|
||||
# Support for an extension is "required" if a typical glTF viewer cannot be expected
|
||||
# to load a given model without understanding the contents of the extension.
|
||||
# For example, a compression scheme or new image format (with no fallback included)
|
||||
# would be "required", but physics metadata or app-specific settings could be optional.
|
||||
extension_is_required = False
|
||||
from io_scene_gltf2 import (GLTF_PT_export_main, GLTF_PT_export_include)
|
||||
|
||||
class ExampleExtensionProperties(bpy.types.PropertyGroup):
|
||||
enabled: bpy.props.BoolProperty(
|
||||
name=bl_info["name"],
|
||||
description='Include this extension in the exported glTF file.',
|
||||
default=True
|
||||
)
|
||||
|
||||
auto_export_main_scene_name: StringProperty(
|
||||
name='Main scene',
|
||||
description='The name of the main scene/level/world to auto export',
|
||||
default='Scene'
|
||||
)
|
||||
auto_export_output_folder: StringProperty(
|
||||
name='Export folder (relative)',
|
||||
description='The root folder for all exports (relative to the current file). Defaults to the current folder',
|
||||
default=''
|
||||
)
|
||||
auto_export_library_scene_name: StringProperty(
|
||||
name='Library scene',
|
||||
description='The name of the library scene to auto export',
|
||||
default='Library'
|
||||
)
|
||||
# scene components
|
||||
auto_export_scene_settings: BoolProperty(
|
||||
name='Export scene settings',
|
||||
description='Export scene settings, i.e. AmbientLighting, Bloom, AO etc.',
|
||||
default=False
|
||||
)
|
||||
|
||||
# blueprint settings
|
||||
auto_export_blueprints: BoolProperty(
|
||||
name='Export Blueprints',
|
||||
description='Replaces collection instances with an Empty with a BlueprintName custom property',
|
||||
default=True
|
||||
)
|
||||
auto_export_blueprints_path: StringProperty(
|
||||
name='Blueprints path',
|
||||
description='path to export the blueprints to (relative to the Export folder)',
|
||||
default='library'
|
||||
)
|
||||
|
||||
auto_export_materials_library: BoolProperty(
|
||||
name='Export materials library',
|
||||
description='remove materials from blueprints and use the material library instead',
|
||||
default=False
|
||||
)
|
||||
auto_export_materials_path: StringProperty(
|
||||
name='Materials path',
|
||||
description='path to export the materials libraries to (relative to the root folder)',
|
||||
default='materials'
|
||||
)
|
||||
|
||||
def register():
|
||||
bpy.utils.register_class(ExampleExtensionProperties)
|
||||
bpy.types.Scene.ExampleExtensionProperties = bpy.props.PointerProperty(type=ExampleExtensionProperties)
|
||||
|
||||
def register_panel():
|
||||
# Register the panel on demand, we need to be sure to only register it once
|
||||
# This is necessary because the panel is a child of the extensions panel,
|
||||
# which may not be registered when we try to register this extension
|
||||
try:
|
||||
bpy.utils.register_class(GLTF_PT_UserExtensionPanel)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# If the glTF exporter is disabled, we need to unregister the extension panel
|
||||
# Just return a function to the exporter so it can unregister the panel
|
||||
return unregister_panel
|
||||
|
||||
|
||||
def unregister_panel():
|
||||
# Since panel is registered on demand, it is possible it is not registered
|
||||
try:
|
||||
bpy.utils.unregister_class(GLTF_PT_UserExtensionPanel)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def unregister():
|
||||
unregister_panel()
|
||||
bpy.utils.unregister_class(ExampleExtensionProperties)
|
||||
del bpy.types.Scene.ExampleExtensionProperties
|
||||
|
||||
class GLTF_PT_UserExtensionPanel(bpy.types.Panel):
|
||||
|
||||
bl_space_type = 'FILE_BROWSER'
|
||||
bl_region_type = 'TOOL_PROPS'
|
||||
bl_label = "Enabled"
|
||||
bl_parent_id = "GLTF_PT_export_user_extensions"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
sfile = context.space_data
|
||||
operator = sfile.active_operator
|
||||
return operator.bl_idname == "EXPORT_SCENE_OT_gltf"
|
||||
|
||||
def draw_header(self, context):
|
||||
props = bpy.context.scene.ExampleExtensionProperties
|
||||
self.layout.prop(props, 'enabled')
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
layout.use_property_split = True
|
||||
layout.use_property_decorate = False # No animation.
|
||||
|
||||
props = bpy.context.scene.ExampleExtensionProperties
|
||||
layout.active = props.enabled
|
||||
|
||||
props = bpy.context.scene.ExampleExtensionProperties
|
||||
for bla in props.__annotations__:
|
||||
layout.prop(props, bla)
|
||||
|
||||
|
||||
class glTF2ExportUserExtension:
|
||||
|
||||
def __init__(self):
|
||||
# We need to wait until we create the gltf2UserExtension to import the gltf2 modules
|
||||
# Otherwise, it may fail because the gltf2 may not be loaded yet
|
||||
from io_scene_gltf2.io.com.gltf2_io_extensions import Extension
|
||||
self.Extension = Extension
|
||||
self.properties = bpy.context.scene.ExampleExtensionProperties
|
||||
|
||||
def gather_node_hook(self, gltf2_object, blender_object, export_settings):
|
||||
if self.properties.enabled:
|
||||
if gltf2_object.extensions is None:
|
||||
gltf2_object.extensions = {}
|
||||
print("bla bla")
|
||||
gltf2_object.extensions[glTF_extension_name] = self.Extension(
|
||||
name=glTF_extension_name,
|
||||
extension={"auto_export_blueprints": self.properties.auto_export_blueprints},
|
||||
required=extension_is_required
|
||||
)
|
||||
|
||||
|
||||
def did_export_parameters_change(current_params, previous_params):
|
||||
set1 = set(previous_params.items())
|
||||
set2 = set(current_params.items())
|
||||
difference = dict(set1 ^ set2)
|
||||
|
||||
changed_param_names = list(set(difference.keys())- set(AutoExportGltfPreferenceNames))
|
||||
changed_parameters = len(changed_param_names) > 0
|
||||
return changed_parameters
|
||||
|
||||
# original in export_blueprints => export_collections
|
||||
# The part below is not necessary NORMALLY, but Blender crashes in the "normal" case when using bpy.context.temp_override,
|
||||
#if relevant we replace sub collections instances with placeholders too
|
||||
# this is not needed if a collection/blueprint does not have sub blueprints or sub collections
|
||||
collection_in_blueprint_hierarchy = collection_name in blueprint_hierarchy and len(blueprint_hierarchy[collection_name]) > 0
|
||||
collection_has_child_collections = len(bpy.data.collections[collection_name].children) > 0
|
||||
#if collection_in_blueprint_hierarchy or collection_has_child_collections:
|
||||
|
||||
|
||||
|
||||
"""else:
|
||||
print("standard export")
|
||||
# set active scene to be the library scene
|
||||
original_scene = bpy.context.window.scene
|
||||
bpy.context.window.scene = library_scene
|
||||
with bpy.context.temp_override(scene=library_scene):
|
||||
print("active scene", bpy.context.scene)
|
||||
export_gltf(gltf_output_path, export_settings)
|
||||
bpy.context.window.scene = original_scene"""
|
||||
|
||||
"""
|
||||
blueprint_template = object['Template'] if 'Template' in object else False
|
||||
if blueprint_template and parent_empty is None: # ONLY WORKS AT ROOT LEVEL
|
||||
print("BLUEPRINT TEMPLATE", blueprint_template, destination_collection, parent_empty)
|
||||
for object in source_collection.objects:
|
||||
if object.type == 'EMPTY' and object.name.endswith("components"):
|
||||
original_collection = bpy.data.collections[collection_name]
|
||||
components_holder = object
|
||||
print("WE CAN INJECT into", object, "data from", original_collection)
|
||||
|
||||
# now we look for components inside the collection
|
||||
components = {}
|
||||
for object in original_collection.objects:
|
||||
if object.type == 'EMPTY' and object.name.endswith("components"):
|
||||
for component_name in object.keys():
|
||||
if component_name not in '_RNA_UI':
|
||||
print( component_name , "-" , object[component_name] )
|
||||
components[component_name] = object[component_name]
|
||||
|
||||
# copy template components into target object
|
||||
for key in components:
|
||||
print("copying ", key,"to", components_holder)
|
||||
if not key in components_holder:
|
||||
components_holder[key] = components[key]
|
||||
"""
|
||||
|
||||
# potentially useful alternative
|
||||
def duplicate_object2(object, original_name):
|
||||
print("copy object", object)
|
||||
|
||||
with bpy.context.temp_override(object=object, active_object = object):
|
||||
bpy.ops.object.duplicate(linked=False)
|
||||
new_obj = bpy.context.active_object
|
||||
|
||||
print("new obj", new_obj, "bpy.context.view_layer", bpy.context.view_layer.objects)
|
||||
for obj in bpy.context.view_layer.objects:
|
||||
print("obj", obj)
|
||||
bpy.context.view_layer.update()
|
||||
new_obj.name = original_name
|
||||
|
||||
if object.animation_data:
|
||||
print("OJECT ANIMATION")
|
||||
new_obj.animation_data.action = object.animation_data.action.copy()
|
||||
|
||||
return new_obj
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if active_operator:
|
||||
# print("Operator", active_operator.bl_label, active_operator.bl_idname, "bla", bpy.context.window_manager.gltf_exporter_running)
|
||||
if active_operator.bl_idname == "EXPORT_SCENE_OT_gltf" : #and not bpy.context.window_manager.gltf_exporter_running:
|
||||
# we force saving params
|
||||
active_operator.will_save_settings = True
|
||||
if active_operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf":
|
||||
# we force saving params
|
||||
active_operator.will_save_settings = True
|
||||
|
||||
|
||||
"""
|
||||
print("matching")
|
||||
try:
|
||||
bpy.app.timers.unregister(cls.gltf_exporter_handler)
|
||||
except:pass
|
||||
bpy.app.timers.register(cls.gltf_exporter_handler, first_interval=3)
|
||||
# we backup any existing gltf export settings, if there were any
|
||||
scene = bpy.context.scene
|
||||
if "glTF2ExportSettings" in scene:
|
||||
existing_setting = scene["glTF2ExportSettings"]
|
||||
cls.existing_gltf_settings = existing_setting
|
||||
bpy.context.window_manager.gltf_exporter_running = True
|
||||
|
||||
|
||||
else:
|
||||
if bpy.context.window_manager.gltf_exporter_running:
|
||||
bpy.context.window_manager.gltf_exporter_running = False"""
|
||||
|
||||
|
||||
"""@classmethod
|
||||
def gltf_exporter_handler(cls):
|
||||
# For some reason, the active operator here is always None, so using a workaround
|
||||
# active_operator = bpy.context.active_operator
|
||||
print("here", bpy.context.window_manager.gltf_exporter_running)
|
||||
|
||||
if bpy.context.window_manager.gltf_exporter_running:
|
||||
try:
|
||||
dummy_file_path = "/home/ckaos/projects/bevy/Blender_bevy_components_worklflow/testing/bevy_example/assets/dummy.glb"
|
||||
|
||||
import os
|
||||
if os.path.exists(dummy_file_path):
|
||||
print("dummy file exists, assuming it worked")
|
||||
os.unlink(dummy_file_path)
|
||||
|
||||
# get the parameters
|
||||
scene = bpy.context.scene
|
||||
if "glTF2ExportSettings" in scene:
|
||||
settings = scene["glTF2ExportSettings"]
|
||||
formatted_settings = dict(settings)
|
||||
|
||||
export_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
|
||||
|
||||
#check if params have changed
|
||||
bpy.context.window_manager.gltf_settings_changed = sorted(json.loads(export_settings.as_string()).items()) != sorted(formatted_settings.items())
|
||||
|
||||
print("gltf NEW settings", formatted_settings, "OLD settings", export_settings, "CHANGED ?", bpy.context.window_manager.gltf_settings_changed)
|
||||
|
||||
# now write new settings
|
||||
export_settings.clear()
|
||||
export_settings.write(json.dumps(formatted_settings))
|
||||
|
||||
|
||||
# now reset the original gltf_settings
|
||||
if getattr(cls, "existing_gltf_settings", None) != None:
|
||||
print("resetting original gltf settings")
|
||||
scene["glTF2ExportSettings"] = cls.existing_gltf_settings
|
||||
else:
|
||||
print("no pre_existing settings")
|
||||
if "glTF2ExportSettings" in scene:
|
||||
del scene["glTF2ExportSettings"]
|
||||
cls.existing_gltf_settings = None
|
||||
except:pass
|
||||
bpy.context.window_manager.gltf_exporter_running = False
|
||||
return None
|
||||
|
||||
|
||||
else:
|
||||
try:
|
||||
bpy.app.timers.unregister(cls.gltf_exporter_handler)
|
||||
except:pass
|
||||
return None
|
||||
return 1"""
|
||||
|
||||
|
||||
def invoke_override(self, context, event):
|
||||
settings = context.scene.get(self.scene_key)
|
||||
self.will_save_settings = False
|
||||
if settings:
|
||||
try:
|
||||
for (k, v) in settings.items():
|
||||
setattr(self, k, v)
|
||||
self.will_save_settings = True
|
||||
|
||||
# Update filter if user saved settings
|
||||
if hasattr(self, 'export_format'):
|
||||
self.filter_glob = '*.glb' if self.export_format == 'GLB' else '*.gltf'
|
||||
|
||||
except (AttributeError, TypeError):
|
||||
self.report({"ERROR"}, "Loading export settings failed. Removed corrupted settings")
|
||||
del context.scene[self.scene_key]
|
||||
|
||||
import sys
|
||||
preferences = bpy.context.preferences
|
||||
for addon_name in preferences.addons.keys():
|
||||
try:
|
||||
if hasattr(sys.modules[addon_name], 'glTF2ExportUserExtension') or hasattr(sys.modules[addon_name], 'glTF2ExportUserExtensions'):
|
||||
pass #exporter_extension_panel_unregister_functors.append(sys.modules[addon_name].register_panel())
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# self.has_active_exporter_extensions = len(exporter_extension_panel_unregister_functors) > 0
|
||||
print("ovverride")
|
||||
wm = context.window_manager
|
||||
wm.fileselect_add(self)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
|
||||
|
||||
from io_scene_gltf2 import (ExportGLTF2, GLTF_PT_export_main, GLTF_PT_export_include)
|
||||
|
||||
|
||||
from io_scene_gltf2 import (ExportGLTF2, GLTF_PT_export_main,ExportGLTF2_Base, GLTF_PT_export_include)
|
||||
import io_scene_gltf2 as gltf_exporter_original
|
||||
#import io_scene_gltf2.GLTF_PT_export_data_scene as GLTF_PT_export_data_scene_original
|
||||
"""
|
||||
class GLTF_PT_export_data(gltf_exporter_original.GLTF_PT_export_data):
|
||||
bl_space_type = 'FILE_BROWSER'
|
||||
bl_region_type = 'TOOL_PROPS'
|
||||
bl_label = "Data"
|
||||
bl_parent_id = "GLTF_PT_auto_export_gltf"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
sfile = context.space_data
|
||||
operator = sfile.active_operator
|
||||
|
||||
return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf"
|
||||
|
||||
class GLTF_PT_export_data_scene(gltf_exporter_original.GLTF_PT_export_data_scene):
|
||||
bl_space_type = 'FILE_BROWSER'
|
||||
bl_region_type = 'TOOL_PROPS'
|
||||
bl_label = "Scene Graph"
|
||||
bl_parent_id = "GLTF_PT_export_data"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
sfile = context.space_data
|
||||
operator = sfile.active_operator
|
||||
return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf"
|
||||
|
||||
def draw(self, context):
|
||||
return super().draw(context)"""
|
0
tools/blenvy/gltf_auto_export/modules/__init__.py
Normal file
29
tools/blenvy/gltf_auto_export/modules/bevy_dynamic.py
Normal file
@ -0,0 +1,29 @@
|
||||
import bpy
|
||||
|
||||
|
||||
# checks if an object is dynamic
|
||||
# TODO: for efficiency, it might make sense to write this flag semi-automatically at the root level of the object so we can skip the inner loop
|
||||
# TODO: we need to recompute these on blueprint changes too
|
||||
# even better, keep a list of dynamic objects per scene , updated only when needed ?
|
||||
def is_object_dynamic(object):
|
||||
is_dynamic = object['Dynamic'] if 'Dynamic' in object else False
|
||||
# only look for data in the original collection if it is not already marked as dynamic at the instance level
|
||||
if not is_dynamic and object.type == 'EMPTY' and hasattr(object, 'instance_collection') and object.instance_collection is not None:
|
||||
#print("collection", object.instance_collection, "object", object.name)
|
||||
# get the name of the collection this is an instance of
|
||||
collection_name = object.instance_collection.name
|
||||
original_collection = bpy.data.collections[collection_name]
|
||||
|
||||
# scan original collection, look for a 'Dynamic' flag
|
||||
for object in original_collection.objects:
|
||||
#print(" inner", object)
|
||||
if object.type == 'EMPTY' and object.name.endswith("components"):
|
||||
for component_name in object.keys():
|
||||
#print(" compo", component_name)
|
||||
if component_name == 'Dynamic':
|
||||
is_dynamic = True
|
||||
break
|
||||
return is_dynamic
|
||||
|
||||
def is_object_static(object):
|
||||
return not is_object_dynamic(object)
|
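# A minimal usage sketch (hypothetical helper): split a level scene's objects into the dynamic
# and static groups, which the exporter can then handle separately.
def example_split_dynamic(scene):
    dynamic_objects = [obj for obj in scene.objects if is_object_dynamic(obj)]
    static_objects = [obj for obj in scene.objects if is_object_static(obj)]
    return dynamic_objects, static_objects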
@ -0,0 +1,64 @@
|
||||
|
||||
import bpy
|
||||
from ..helpers.object_makers import make_empty
|
||||
|
||||
# TODO: replace this with placing scene level custom properties once support for that has been added to bevy_gltf
|
||||
def upsert_scene_components(main_scenes):
|
||||
for scene in main_scenes:
|
||||
lighting_components_name = f"lighting_components_{scene.name}"
|
||||
lighting_components = bpy.data.objects.get(lighting_components_name, None)
|
||||
if not lighting_components:
|
||||
root_collection = scene.collection
|
||||
lighting_components = make_empty('lighting_components_'+scene.name, [0,0,0], [0,0,0], [0,0,0], root_collection)
|
||||
|
||||
if scene.world is not None:
|
||||
lighting_components['BlenderBackgroundShader'] = ambient_color_to_component(scene.world)
|
||||
lighting_components['BlenderShadowSettings'] = scene_shadows_to_component(scene)
|
||||
|
||||
if scene.eevee.use_bloom:
|
||||
lighting_components['BloomSettings'] = scene_bloom_to_component(scene)
|
||||
elif 'BloomSettings' in lighting_components:
|
||||
del lighting_components['BloomSettings']
|
||||
|
||||
if scene.eevee.use_gtao:
|
||||
lighting_components['SSAOSettings'] = scene_ao_to_component(scene)
|
||||
elif 'SSAOSettings' in lighting_components:
|
||||
del lighting_components['SSAOSettings']
|
||||
|
||||
def remove_scene_components(main_scenes):
|
||||
for scene in main_scenes:
|
||||
lighting_components_name = f"lighting_components_{scene.name}"
|
||||
lighting_components = bpy.data.objects.get(lighting_components_name, None)
|
||||
if lighting_components:
|
||||
bpy.data.objects.remove(lighting_components, do_unlink=True)
|
||||
|
||||
|
||||
def ambient_color_to_component(world):
|
||||
color = None
|
||||
strength = None
|
||||
try:
|
||||
color = world.node_tree.nodes['Background'].inputs[0].default_value
|
||||
strength = world.node_tree.nodes['Background'].inputs[1].default_value
|
||||
except Exception as ex:
|
||||
print("failed to parse ambient color: Only background is supported")
|
||||
|
||||
|
||||
if color is not None and strength is not None:
|
||||
colorRgba = f"Rgba(red: {color[0]}, green: {color[1]}, blue: {color[2]}, alpha: {color[3]})"
|
||||
component = f"( color: {colorRgba}, strength: {strength})"
|
||||
return component
|
||||
return None
|
||||
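# For reference (hypothetical values): a Background colour of (0.05, 0.05, 0.05, 1.0) with
# strength 1.0 yields the component string
# ( color: Rgba(red: 0.05, green: 0.05, blue: 0.05, alpha: 1.0), strength: 1.0)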
|
||||
def scene_shadows_to_component(scene):
|
||||
cascade_size = scene.eevee.shadow_cascade_size
|
||||
component = f"(cascade_size: {cascade_size})"
|
||||
return component
|
||||
|
||||
def scene_bloom_to_component(scene):
|
||||
component = f"BloomSettings(intensity: {scene.eevee.bloom_intensity})"
|
||||
return component
|
||||
|
||||
def scene_ao_to_component(scene):
|
||||
ssao = scene.eevee.use_gtao
|
||||
component= "SSAOSettings()"
|
||||
return component
|
127
tools/blenvy/gltf_auto_export/modules/export_materials.py
Normal file
@ -0,0 +1,127 @@
|
||||
import os
|
||||
import bpy
|
||||
from pathlib import Path
|
||||
|
||||
from ..helpers.generate_and_export import generate_and_export
|
||||
|
||||
from ..helpers.helpers_collections import (traverse_tree)
|
||||
from ..auto_export.export_gltf import (export_gltf, generate_gltf_export_preferences)
|
||||
from ..helpers.object_makers import make_cube
|
||||
|
||||
# get materials per object, and injects the materialInfo component
|
||||
def get_materials(object):
|
||||
material_slots = object.material_slots
|
||||
used_materials_names = []
|
||||
#materials_per_object = {}
|
||||
current_project_name = Path(bpy.context.blend_data.filepath).stem
|
||||
|
||||
for m in material_slots:
|
||||
material = m.material
|
||||
# print(" slot", m, "material", material)
|
||||
used_materials_names.append(material.name)
|
||||
# TODO: also respect slots & export multiple materials if applicable!
|
||||
object['MaterialInfo'] = '(name: "'+material.name+'", source: "'+current_project_name + '")'
|
||||
|
||||
return used_materials_names
|
||||
|
||||
def clear_material_info(collection_names, library_scenes):
|
||||
for scene in library_scenes:
|
||||
root_collection = scene.collection
|
||||
for cur_collection in traverse_tree(root_collection):
|
||||
if cur_collection.name in collection_names:
|
||||
for object in cur_collection.all_objects:
|
||||
if 'MaterialInfo' in dict(object): # FIXME: hasattr does not work ????
|
||||
del object["MaterialInfo"]
|
||||
|
||||
|
||||
def get_all_materials(collection_names, library_scenes):
|
||||
#print("collecton", layerColl, "otot", layerColl.all_objects) #all_objects
|
||||
used_material_names = []
|
||||
for scene in library_scenes:
|
||||
root_collection = scene.collection
|
||||
for cur_collection in traverse_tree(root_collection):
|
||||
if cur_collection.name in collection_names:
|
||||
for object in cur_collection.all_objects:
|
||||
used_material_names = used_material_names + get_materials(object)
|
||||
# we only want unique names
|
||||
used_material_names = list(set(used_material_names))
|
||||
return used_material_names
|
||||
|
||||
|
||||
# creates a new object with the applied material, for the material library
|
||||
def make_material_object(name, location=[0,0,0], rotation=[0,0,0], scale=[1,1,1], material=None, collection=None):
|
||||
#original_active_object = bpy.context.active_object
|
||||
#bpy.ops.mesh.primitive_cube_add(size=0.1, location=location)
|
||||
object = make_cube(name, location=location, rotation=rotation, scale=scale, collection=collection)
|
||||
if material:
|
||||
if object.data.materials:
|
||||
# assign to 1st material slot
|
||||
object.data.materials[0] = material
|
||||
else:
|
||||
# no slots
|
||||
object.data.materials.append(material)
|
||||
return object
|
||||
|
||||
|
||||
# generates a materials scene:
|
||||
def generate_materials_scene_content(root_collection, used_material_names):
|
||||
for index, material_name in enumerate(used_material_names):
|
||||
material = bpy.data.materials[material_name]
|
||||
make_material_object("Material_"+material_name, [index * 0.2,0,0], material=material, collection=root_collection)
|
||||
return {}
|
||||
|
||||
def clear_materials_scene(temp_scene):
|
||||
root_collection = temp_scene.collection
|
||||
scene_objects = [o for o in root_collection.objects]
|
||||
for object in scene_objects:
|
||||
#print("removing ", object)
|
||||
try:
|
||||
mesh = bpy.data.meshes[object.name+"_Mesh"]
|
||||
bpy.data.meshes.remove(mesh, do_unlink=True)
|
||||
except Exception as error:
|
||||
pass
|
||||
#print("could not remove mesh", error)
|
||||
|
||||
try:
|
||||
bpy.data.objects.remove(object, do_unlink=True)
|
||||
except:pass
|
||||
|
||||
bpy.data.scenes.remove(temp_scene)
|
||||
|
||||
# exports the materials used inside the current project:
|
||||
# the name of the output path is <materials_folder>/<name_of_your_blend_file>_materials_library.gltf/glb
|
||||
def export_materials(collections, library_scenes, folder_path, addon_prefs):
|
||||
gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
|
||||
export_materials_path = getattr(addon_prefs,"export_materials_path")
|
||||
export_root_folder = getattr(addon_prefs, "export_root_folder")
|
||||
|
||||
|
||||
used_material_names = get_all_materials(collections, library_scenes)
|
||||
current_project_name = Path(bpy.context.blend_data.filepath).stem
|
||||
|
||||
export_settings = { **gltf_export_preferences,
|
||||
'use_active_scene': True,
|
||||
'use_active_collection':True,
|
||||
'use_active_collection_with_nested':True,
|
||||
'use_visible': False,
|
||||
'use_renderable': False,
|
||||
'export_apply':True
|
||||
}
|
||||
|
||||
gltf_output_path = os.path.join(export_root_folder, export_materials_path, current_project_name + "_materials_library")
|
||||
|
||||
print(" exporting Materials to", gltf_output_path, ".gltf/glb")
|
||||
|
||||
generate_and_export(
|
||||
addon_prefs,
|
||||
temp_scene_name="__materials_scene",
|
||||
export_settings=export_settings,
|
||||
gltf_output_path=gltf_output_path,
|
||||
tempScene_filler= lambda temp_collection: generate_materials_scene_content(temp_collection, used_material_names),
|
||||
tempScene_cleaner= lambda temp_scene, params: clear_materials_scene(temp_scene=temp_scene)
|
||||
)
|
||||
|
||||
|
||||
def cleanup_materials(collections, library_scenes):
|
||||
# remove temporary components
|
||||
clear_material_info(collections, library_scenes)
|
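For reference, a hedged usage sketch of the material library export above: the exporter fills a temporary scene with one cube per used material and writes it out as a single glTF library. The scene/collection names and the add-on key used to fetch the preferences are placeholders, not values from this commit; all the function reads from the preferences are export_root_folder and export_materials_path.

import bpy

# Placeholder inputs: a "Library" scene holding two blueprint collections.
library_scenes = [bpy.data.scenes["Library"]]
collections = ["Blueprint1", "Blueprint2"]
# Assumption: the registered add-on key is "blenvy" and its preferences expose the export paths.
addon_prefs = bpy.context.preferences.addons["blenvy"].preferences

export_materials(collections, library_scenes, None, addon_prefs)  # folder_path is unused by the function
cleanup_materials(collections, library_scenes)  # strips the temporary MaterialInfo custom properties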
323
tools/blenvy/gltf_auto_export/ui/main.py
Normal file
@ -0,0 +1,323 @@
from typing import Set
import bpy

######################################################
## ui logic & co

# side panel that opens auto_export specific gltf settings & the auto export settings themselves
class GLTF_PT_auto_export_SidePanel(bpy.types.Panel):
    bl_idname = "GLTF_PT_auto_export_SidePanel"
    bl_label = "Auto export"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = "Auto Export"
    bl_context = "objectmode"
    bl_parent_id = "BLENVY_PT_SidePanel"

    @classmethod
    def poll(cls, context):
        return context.window_manager.blenvy.mode == 'SETTINGS'

    """def draw_header(self, context):
        layout = self.layout
        layout.label(text="Auto export ")"""

    def draw(self, context):
        layout = self.layout
        layout.label(text="MAKE SURE TO KEEP 'REMEMBER EXPORT SETTINGS' TOGGLED !!")
        op = layout.operator("EXPORT_SCENE_OT_gltf", text='Gltf Settings')#'glTF 2.0 (.glb/.gltf)')
        #op.export_format = 'GLTF_SEPARATE'
        op.use_selection = True
        op.will_save_settings = True
        op.use_visible = True # Export visible and hidden objects. See Object/Batch Export to skip.
        op.use_renderable = True
        op.use_active_collection = True
        op.use_active_collection_with_nested = True
        op.use_active_scene = True
        op.filepath = "____dummy____"
        op.gltf_export_id = "gltf_auto_export" # we specify that we are in a special case

        op = layout.operator("EXPORT_SCENES_OT_auto_gltf", text="Auto Export Settings")
        op.auto_export = True


class GLTF_PT_auto_export_changes_list(bpy.types.Panel):
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_label = "Changes per scene since last save"
    bl_parent_id = "GLTF_PT_auto_export_SidePanel"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False # No animation.

        #if "auto_export_tracker" in context.window_manager:
        changed_objects_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
        for scene_name in changed_objects_per_scene:
            layout.label(text=f'{scene_name}')
            for object_name in list(changed_objects_per_scene[scene_name].keys()):
                row = layout.row()
                row.label(text=f' {object_name}')


# main ui in the file => export
class GLTF_PT_auto_export_main(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = ""
    bl_parent_id = "FILE_PT_operator"
    bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        sfile = context.space_data
        operator = sfile.active_operator

        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf"

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False # No animation.


class GLTF_PT_auto_export_root(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Auto export"
    bl_parent_id = "GLTF_PT_auto_export_main"
    #bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        sfile = context.space_data
        operator = sfile.active_operator
        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf"

    def draw_header(self, context):
        sfile = context.space_data
        operator = sfile.active_operator
        self.layout.prop(operator, "auto_export", text="")

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False # No animation.

        sfile = context.space_data
        operator = sfile.active_operator

        layout.active = operator.auto_export
        layout.prop(operator, 'will_save_settings')


class GLTF_PT_auto_export_general(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "General"
    bl_parent_id = "GLTF_PT_auto_export_root"

    @classmethod
    def poll(cls, context):
        sfile = context.space_data
        operator = sfile.active_operator

        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False # No animation.

        sfile = context.space_data
        operator = sfile.active_operator

        layout.active = operator.auto_export
        layout.prop(operator, "export_output_folder")
        layout.prop(operator, "export_scene_settings")


class GLTF_PT_auto_export_change_detection(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Change detection"
    bl_parent_id = "GLTF_PT_auto_export_root"

    @classmethod
    def poll(cls, context):
        sfile = context.space_data
        operator = sfile.active_operator

        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"

    def draw_header(self, context):
        layout = self.layout
        sfile = context.space_data
        operator = sfile.active_operator
        layout.prop(operator, "export_change_detection", text="")

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False # No animation.

        sfile = context.space_data
        operator = sfile.active_operator

        layout.active = operator.auto_export
        layout.prop(operator, "export_change_detection")


class GLTF_PT_auto_export_scenes(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Scenes"
    bl_parent_id = "GLTF_PT_auto_export_root"

    @classmethod
    def poll(cls, context):
        sfile = context.space_data
        operator = sfile.active_operator

        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"

    def draw_header(self, context):
        layout = self.layout
        sfile = context.space_data
        operator = sfile.active_operator
        #layout.label(text="export scenes")#layout.prop(operator, "export_blueprints", text="")

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False # No animation.

        sfile = context.space_data
        operator = sfile.active_operator

        # scene selectors
        row = layout.row()
        col = row.column(align=True)
        col.separator()

        layout.active = operator.auto_export
        source = operator
        rows = 2

        # main/level scenes
        row = layout.row()
        row.label(text="main scenes")
        row.prop(context.window_manager, "main_scene", text='')

        row = layout.row()
        row.template_list("SCENE_UL_GLTF_auto_export", "level scenes", source, "main_scenes", source, "main_scenes_index", rows=rows)

        col = row.column(align=True)
        sub_row = col.row()
        add_operator = sub_row.operator("scene_list.list_action", icon='ADD', text="")
        add_operator.action = 'ADD'
        add_operator.scene_type = 'level'
        #add_operator.source = operator
        sub_row.enabled = context.window_manager.main_scene is not None

        sub_row = col.row()
        remove_operator = sub_row.operator("scene_list.list_action", icon='REMOVE', text="")
        remove_operator.action = 'REMOVE'
        remove_operator.scene_type = 'level'
        col.separator()

        #up_operator = col.operator("scene_list.list_action", icon='TRIA_UP', text="")
        #up_operator.action = 'UP'
        #col.operator("scene_list.list_action", icon='TRIA_DOWN', text="").action = 'DOWN'

        # library scenes
        row = layout.row()
        row.label(text="library scenes")
        row.prop(context.window_manager, "library_scene", text='')

        row = layout.row()
        row.template_list("SCENE_UL_GLTF_auto_export", "library scenes", source, "library_scenes", source, "library_scenes_index", rows=rows)

        col = row.column(align=True)
        sub_row = col.row()
        add_operator = sub_row.operator("scene_list.list_action", icon='ADD', text="")
        add_operator.action = 'ADD'
        add_operator.scene_type = 'library'
        sub_row.enabled = context.window_manager.library_scene is not None

        sub_row = col.row()
        remove_operator = sub_row.operator("scene_list.list_action", icon='REMOVE', text="")
        remove_operator.action = 'REMOVE'
        remove_operator.scene_type = 'library'
        col.separator()


class GLTF_PT_auto_export_blueprints(bpy.types.Panel):
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Blueprints"
    bl_parent_id = "GLTF_PT_auto_export_root"

    @classmethod
    def poll(cls, context):
        sfile = context.space_data
        operator = sfile.active_operator

        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"

    def draw_header(self, context):
        layout = self.layout
        sfile = context.space_data
        operator = sfile.active_operator
        layout.prop(operator, "export_blueprints", text="")

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False # No animation.

        sfile = context.space_data
        operator = sfile.active_operator

        layout.active = operator.auto_export and operator.export_blueprints

        # collections/blueprints
        layout.prop(operator, "export_blueprints_path")
        layout.prop(operator, "collection_instances_combine_mode")
        layout.prop(operator, "export_marked_assets")
        layout.prop(operator, "export_separate_dynamic_and_static_objects")
        layout.separator()
        # materials
        layout.prop(operator, "export_materials_library")
        layout.prop(operator, "export_materials_path")


class SCENE_UL_GLTF_auto_export(bpy.types.UIList):
    # The draw_item function is called for each item of the collection that is visible in the list.
    #   data is the RNA object containing the collection,
    #   item is the currently drawn item of the collection,
    #   icon is the "computed" icon for the item (as an integer, because some objects like materials or textures
    #   have custom icon IDs, which are not available as enum items).
    #   active_data is the RNA object containing the active property for the collection (i.e. integer pointing to the
    #   active item of the collection).
    #   active_propname is the name of the active property (use 'getattr(active_data, active_propname)').
    #   index is the index of the current item in the collection.
    #   flt_flag is the result of the filtering process for this item.
    #   Note: as index and flt_flag are optional arguments, you do not have to use/declare them here if you don't
    #   need them.
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
        ob = data
        # draw_item must handle the three layout types... Usually 'DEFAULT' and 'COMPACT' can share the same code.
        if self.layout_type in {'DEFAULT', 'COMPACT'}:
            # You should always start your row layout with a label (icon + text), or a non-embossed text field,
            # this will also make the row easily selectable in the list! The latter also enables ctrl-click rename.
            # We use icon_value of label, as our given icon is an integer value, not an enum ID.
            # Note "data" names should never be translated!
            #if ma:
            #    layout.prop(ma, "name", text="", emboss=False, icon_value=icon)
            #else:
            #    layout.label(text="", translate=False, icon_value=icon)
            layout.label(text=item.name, icon_value=icon)
            #layout.prop(item, "name", text="", emboss=False, icon_value=icon)
        # 'GRID' layout type should be as compact as possible (typically a single icon!).
        elif self.layout_type == 'GRID':
            layout.alignment = 'CENTER'
            layout.label(text="", icon_value=icon)
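None of these panels are visible until they are registered with Blender; in blenvy the actual registration happens elsewhere, so the following is only a minimal sketch of the standard pattern. Parent panels must be registered before their children, which dictates the ordering.

import bpy

classes = (
    GLTF_PT_auto_export_SidePanel,
    GLTF_PT_auto_export_changes_list,
    GLTF_PT_auto_export_main,
    GLTF_PT_auto_export_root,
    GLTF_PT_auto_export_general,
    GLTF_PT_auto_export_change_detection,
    GLTF_PT_auto_export_scenes,
    GLTF_PT_auto_export_blueprints,
    SCENE_UL_GLTF_auto_export,
)

def register():
    for cls in classes:
        bpy.utils.register_class(cls)

def unregister():
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)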
175
tools/blenvy/gltf_auto_export/ui/operators.py
Normal file
@ -0,0 +1,175 @@
import os

import bpy
from bpy.types import Operator
from bpy_extras.io_utils import ImportHelper


class ASSETS_LIST_OT_actions(Operator):
    """Add / remove etc assets"""
    bl_idname = "asset_list.list_action"
    bl_label = "Asset Actions"
    bl_description = "Move items up and down, add and remove"
    bl_options = {'REGISTER'}


class SCENES_LIST_OT_actions(Operator):
    """Move items up and down, add and remove"""
    bl_idname = "scene_list.list_action"
    bl_label = "List Actions"
    bl_description = "Move items up and down, add and remove"
    bl_options = {'REGISTER'}

    action: bpy.props.EnumProperty(
        items=(
            ('UP', "Up", ""),
            ('DOWN', "Down", ""),
            ('REMOVE', "Remove", ""),
            ('ADD', "Add", ""))) # type: ignore

    scene_type: bpy.props.StringProperty() # TODO: replace with enum

    def invoke(self, context, event):
        source = context.active_operator
        target_name = "library_scenes"
        target_index = "library_scenes_index"
        if self.scene_type == "level":
            target_name = "main_scenes"
            target_index = "main_scenes_index"

        target = getattr(source, target_name)
        idx = getattr(source, target_index)
        current_index = getattr(source, target_index)

        try:
            item = target[idx]
        except IndexError:
            pass
        else:
            if self.action == 'DOWN' and idx < len(target) - 1:
                target.move(idx, idx + 1)
                setattr(source, target_index, current_index + 1)
                info = 'Item "%s" moved to position %d' % (item.name, current_index + 1)
                self.report({'INFO'}, info)

            elif self.action == 'UP' and idx >= 1:
                target.move(idx, idx - 1)
                setattr(source, target_index, current_index - 1)
                info = 'Item "%s" moved to position %d' % (item.name, current_index + 1)
                self.report({'INFO'}, info)

            elif self.action == 'REMOVE':
                info = 'Item "%s" removed from list' % (target[idx].name)
                setattr(source, target_index, current_index - 1)
                target.remove(idx)
                self.report({'INFO'}, info)

        if self.action == 'ADD':
            new_scene_name = None
            if self.scene_type == "level":
                if context.window_manager.main_scene:
                    new_scene_name = context.window_manager.main_scene.name
            else:
                if context.window_manager.library_scene:
                    new_scene_name = context.window_manager.library_scene.name
            if new_scene_name:
                item = target.add()
                item.name = new_scene_name #f"Rule {idx +1}"

                if self.scene_type == "level":
                    context.window_manager.main_scene = None
                else:
                    context.window_manager.library_scene = None

                #name = f"Rule {idx +1}"
                #target.append({"name": name})
                setattr(source, target_index, len(target) - 1)
                #source[target_index] = len(target) - 1
                info = '"%s" added to list' % (item.name)
                self.report({'INFO'}, info)

        return {"FINISHED"}


class OT_OpenFolderbrowser(Operator, ImportHelper):
    """Browse for the target export folder"""
    bl_idname = "generic.open_folderbrowser"
    bl_label = "Select folder"

    # Define this to tell 'fileselect_add' that we want a directory
    directory: bpy.props.StringProperty(
        name="Outdir Path",
        description="selected folder"
        # subtype='DIR_PATH' is not needed to specify the selection mode.
        # But this will be anyway a directory path.
    ) # type: ignore

    # Filters folders
    filter_folder: bpy.props.BoolProperty(
        default=True,
        options={"HIDDEN"}
    ) # type: ignore

    target_property: bpy.props.StringProperty(
        name="target_property",
        options={'HIDDEN'}
    ) # type: ignore

    def execute(self, context):
        """Apply the selected folder to the target operator property."""
        operator = context.active_operator
        new_path = self.directory
        target_path_name = self.target_property

        # path to the current blend file
        blend_file_path = bpy.data.filepath
        # Get the folder
        blend_file_folder_path = os.path.dirname(blend_file_path)
        print("blend_file_folder_path", blend_file_folder_path)

        print("new_path", self.directory, self.target_property, operator)

        path_names = ['export_output_folder', 'export_blueprints_path', 'export_levels_path', 'export_materials_path']
        export_root_folder = operator.export_root_folder
        #export_root_path_absolute = os.path.join(blend_file_folder_path, export_root_folder)

        if target_path_name == 'export_root_folder':
            print("changing root new_path")
            # we need to change all other relative paths before setting the new absolute path
            for path_name in path_names:
                # get absolute path
                relative_path = getattr(operator, path_name, None)
                if relative_path is not None:
                    absolute_path = os.path.join(export_root_folder, relative_path)
                    print("absolute path for", path_name, absolute_path)
                    relative_path = os.path.relpath(absolute_path, new_path)
                    setattr(operator, path_name, relative_path)

            # store the root path as relative to the current blend file
            setattr(operator, target_path_name, new_path)

        else:
            relative_path = os.path.relpath(new_path, export_root_folder)
            setattr(operator, target_path_name, relative_path)

        #filename, extension = os.path.splitext(self.filepath)

        return {'FINISHED'}


def draw_folder_browser(layout, label, value, target_property):
    row = layout.row()
    row.label(text=label)

    '''box = row.box()
    box.scale_y = 0.5
    box.label(text=value)'''

    col = row.column()
    col.enabled = False
    col.prop(bpy.context.active_operator, target_property, text="")

    folder_selector = row.operator(OT_OpenFolderbrowser.bl_idname, icon="FILE_FOLDER", text="")
    folder_selector.target_property = target_property #"export_root_folder"
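A hedged example of how draw_folder_browser is meant to be used from a panel: each path setting gets a read-only field plus the folder-browser operator defined above. The panel class and labels below are made up for illustration; the property names are taken from the path_names list in OT_OpenFolderbrowser.execute.

import bpy

class EXAMPLE_PT_export_paths(bpy.types.Panel):  # hypothetical panel, illustration only
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Export paths (example)"
    bl_parent_id = "GLTF_PT_auto_export_root"

    def draw(self, context):
        layout = self.layout
        operator = context.space_data.active_operator
        draw_folder_browser(layout, "Root folder", operator.export_root_folder, "export_root_folder")
        draw_folder_browser(layout, "Blueprints path", operator.export_blueprints_path, "export_blueprints_path")
        draw_folder_browser(layout, "Materials path", operator.export_materials_path, "export_materials_path")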
0
tools/blenvy/tests/__init__.py
Normal file
217
tools/blenvy/tests/component_values_shuffler.py
Normal file
@ -0,0 +1,217 @@
import random
import string
import uuid
from bpy_types import PropertyGroup


def random_bool():
    return bool(random.getrandbits(1))

def rand_int():
    return random.randint(0, 100)

def rand_float():
    return random.random()

def random_word(length):
    letters = string.ascii_lowercase
    return ''.join(random.choice(letters) for i in range(length))

def random_vec(length, type):
    value = []
    for i in range(0, length):
        if type == 'float':
            value.append(rand_float())
        if type == 'int':
            value.append(rand_int())
    return value


type_mappings = {
    "bool": random_bool,

    "u8": rand_int,
    "u16": rand_int,
    "u32": rand_int,
    "u64": rand_int,
    "u128": rand_int,
    "usize": rand_int,

    "i8": rand_int,
    "i16": rand_int,
    "i32": rand_int,
    "i64": rand_int,
    "i128": rand_int,
    "isize": rand_int,

    'f32': rand_float,
    'f64': rand_float,

    "glam::Vec2": lambda: random_vec(2, 'float'),
    "glam::DVec2": lambda: random_vec(2, 'float'),
    "glam::UVec2": lambda: random_vec(2, 'int'),

    'glam::Vec3': lambda: random_vec(3, 'float'),
    "glam::Vec3A": lambda: random_vec(3, 'float'),
    "glam::UVec3": lambda: random_vec(3, 'int'),

    "glam::Vec4": lambda: random_vec(4, 'float'),
    "glam::DVec4": lambda: random_vec(4, 'float'),
    "glam::UVec4": lambda: random_vec(4, 'int'),

    "glam::Quat": lambda: random_vec(4, 'float'),

    'bevy_render::color::Color': lambda: random_vec(4, 'float'),
    'alloc::string::String': lambda: random_word(8),
    'alloc::borrow::Cow<str>': lambda: random_word(8),

    'bevy_ecs::entity::Entity': lambda: 0, #4294967295, #
    'bevy_utils::Uuid': lambda: '"'+str( uuid.UUID("73b3b118-7d01-4778-8bcc-4e79055f5d22") )+'"'
}


def is_def_value_type(definition, registry):
    if definition == None:
        return True
    value_types_defaults = registry.value_types_defaults
    long_name = definition["long_name"]
    is_value_type = long_name in value_types_defaults
    return is_value_type

# see https://docs.python.org/3/library/random.html
def component_values_shuffler(seed=1, property_group=None, definition=None, registry=None, parent=None):
    if parent == None:
        random.seed(seed)

    value_types_defaults = registry.value_types_defaults
    component_name = definition["short_name"]
    type_info = definition["typeInfo"] if "typeInfo" in definition else None
    type_def = definition["type"] if "type" in definition else None
    properties = definition["properties"] if "properties" in definition else {}
    prefixItems = definition["prefixItems"] if "prefixItems" in definition else []
    has_properties = len(properties.keys()) > 0
    has_prefixItems = len(prefixItems) > 0
    is_enum = type_info == "Enum"
    is_list = type_info == "List"
    long_name = definition["long_name"]

    #is_value_type = type_def in value_types_defaults or long_name in value_types_defaults
    is_value_type = long_name in value_types_defaults

    if is_value_type:
        fieldValue = type_mappings[long_name]()
        return fieldValue

    elif type_info == "Struct":
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["properties"][field_name]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = component_values_shuffler(seed, child_property_group, item_definition, registry, parent=component_name)
            else:
                value = '""'
            is_item_value_type = is_def_value_type(item_definition, registry)
            if is_item_value_type:
                #print("setting attr", field_name, "for", component_name, "to", value, "value type", is_item_value_type)
                setattr(property_group, field_name, value)

    elif type_info == "Tuple":
        #print("tup")
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = component_values_shuffler(seed, child_property_group, item_definition, registry, parent=component_name)
            else:
                value = '""'

            is_item_value_type = is_def_value_type(item_definition, registry)
            if is_item_value_type:
                #print("setting attr", field_name, "for", component_name, "to", value, "value type", is_item_value_type)
                setattr(property_group, field_name, value)

    elif type_info == "TupleStruct":
        #print("tupstruct")
        for index, field_name in enumerate(property_group.field_names):
            item_long_name = definition["prefixItems"][index]["type"]["$ref"].replace("#/$defs/", "")
            item_definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            value = getattr(property_group, field_name)
            is_property_group = isinstance(value, PropertyGroup)
            child_property_group = value if is_property_group else None
            if item_definition != None:
                value = component_values_shuffler(seed, child_property_group, item_definition, registry, parent=component_name)
            else:
                value = '""'

            is_item_value_type = is_def_value_type(item_definition, registry)
            if is_item_value_type:
                setattr(property_group, field_name, value)

    elif type_info == "Enum":
        available_variants = definition["oneOf"] if type_def != "object" else list(map(lambda x: x["long_name"], definition["oneOf"]))
        selected = random.choice(available_variants)

        # set selected variant
        setattr(property_group, "selection", selected)

        if type_def == "object":
            selection_index = property_group.field_names.index("variant_"+selected)
            variant_name = property_group.field_names[selection_index]
            variant_definition = definition["oneOf"][selection_index-1]
            if "prefixItems" in variant_definition:
                value = getattr(property_group, variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = component_values_shuffler(seed, child_property_group, variant_definition, registry, parent=component_name)
                value = selected + str(value)
            elif "properties" in variant_definition:
                value = getattr(property_group, variant_name)
                is_property_group = isinstance(value, PropertyGroup)
                child_property_group = value if is_property_group else None

                value = component_values_shuffler(seed, child_property_group, variant_definition, registry, parent=component_name)
                value = selected + str(value)
            else:
                value = selected # here the value of the enum is just the name of the variant
        else:
            value = selected

    elif type_info == "List":
        item_list = getattr(property_group, "list")
        item_list.clear()

        item_long_name = getattr(property_group, "long_name")
        number_of_list_items_to_add = random.randint(1, 2)

        for i in range(0, number_of_list_items_to_add):
            new_entry = item_list.add()
            item_long_name = getattr(new_entry, "long_name") # we get the REAL type name

            definition = registry.type_infos[item_long_name] if item_long_name in registry.type_infos else None

            if definition != None:
                component_values_shuffler(seed, new_entry, definition, registry, parent=component_name)
            else:
                pass
    else:
        print("something else")
        fieldValue = type_mappings[long_name]() if long_name in type_mappings else 'None'
        return fieldValue

    #return value
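A hedged sketch of how the shuffler is driven from a test: look up a component definition in the registry and shuffle the matching property group in place. Where the registry lives on the window manager, the component long name, and the attribute under which the property group is stored are all assumptions about the surrounding test harness, not guarantees from this commit.

import bpy

registry = bpy.context.window_manager.components_registry            # assumption: registry pointer name
long_name = "bevy_example::BasicTest"                                 # assumption: such a component is registered
definition = registry.type_infos[long_name]

# Assumption: the component's property group is stored on the object under its short name.
property_group = getattr(bpy.context.object, "BasicTest", None)

component_values_shuffler(seed=42, property_group=property_group, definition=definition, registry=registry)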
1
tools/blenvy/tests/expected_bevy_hierarchy.json
Normal file
@ -0,0 +1 @@
{"Blueprint7_hierarchy.001":["Blueprint4_nested.001","Cube.001"],"Cylinder":["Cylinder.001","Cylinder.001"],"Blueprint8_animated_no_bones":["Cylinder.002"],"Blueprint7_hierarchy":["Cube.001"],"Collection 2":["Blueprint8_animated_no_bones","Collection 2 1","Empty_in_collection","Spot"],"Fox_mesh":["fox1","fox1"],"_rootJoint":["b_Root_00","b_Root_00"],"b_Root_00":["b_Hip_01","b_Hip_01"],"Blueprint1":["Blueprint1_mesh"],"Fox":["Fox_mesh","_rootJoint","Fox_mesh","_rootJoint"],"Light":["Light","DirectionalLight Gizmo"],"b_Spine01_02":["b_Spine02_03","b_Spine02_03"],"b_RightLeg01_019":["b_RightLeg02_020","b_RightLeg02_020"],"b_LeftFoot01_017":["b_LeftFoot02_018","b_LeftFoot02_018"],"b_LeftForeArm_010":["b_LeftHand_011","b_LeftHand_011"],"Collection":["Blueprint1.001","Blueprint4_nested","Blueprint6_animated","Blueprint7_hierarchy","Camera","Cube","Empty","External_blueprint","External_blueprint2","Light","Plane"],"Cylinder.001":["Cylinder.002","Blueprint7_hierarchy.001","Empty_as_child"],"b_Hip_01":["b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019","b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019"],"world":["no_name"],"Parent_Object":["Cube.003","Blueprint1","Cylinder.001"],"Blueprint6_animated.001":["Fox"],"Blueprint4_nested":["Blueprint3"],"Blueprint6_animated":["Fox"],"Cube.001":["Cube.002","Cylinder","Cube.002","Cylinder"],"b_Spine02_03":["b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09","b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09"],"b_LeftLeg01_015":["b_LeftLeg02_016","b_LeftLeg02_016"],"Blueprint4_nested.001":["Blueprint3"],"b_Tail02_013":["b_Tail03_014","b_Tail03_014"],"b_RightForeArm_07":["b_RightHand_08","b_RightHand_08"],"External_blueprint2_Cylinder":["Cylinder"],"Blueprint3":["Blueprint3_mesh","Blueprint3_mesh"],"External_blueprint2":["External_blueprint2_Cylinder","External_blueprint3"],"b_LeftUpperArm_09":["b_LeftForeArm_010","b_LeftForeArm_010"],"Cube":["Cube"],"Plane":["Plane"],"no_name":["Parent_Object","Blueprint6_animated.001","lighting_components_World","assets_list_World_components","Collection","Collection 2"],"Collection 2 1":["Empty_in_sub_collection"],"External_blueprint_mesh":["Cube.001"],"b_LeftLeg02_016":["b_LeftFoot01_017","b_LeftFoot01_017"],"Cylinder.002":["Cylinder.003"],"b_RightLeg02_020":["b_RightFoot01_021","b_RightFoot01_021"],"b_Neck_04":["b_Head_05","b_Head_05"],"b_RightUpperArm_06":["b_RightForeArm_07","b_RightForeArm_07"],"Spot":["Spot"],"External_blueprint3_Cone":["Cone"],"External_blueprint":["External_blueprint_mesh"],"Blueprint3_mesh":["Cylinder","Cylinder"],"External_blueprint3":["External_blueprint3_Cone"],"Camera":["Camera Gizmo"],"Blueprint1_mesh":["Cube.001","Cube.001"],"Blueprint1.001":["Blueprint1_mesh"],"b_Tail01_012":["b_Tail02_013","b_Tail02_013"],"b_RightFoot01_021":["b_RightFoot02_022","b_RightFoot02_022"]}