Compare commits


5 Commits

Author SHA1 Message Date
Mark Moissette 58ac8d206e
Merge ab7a0cab65 into 9b50d77790 2024-07-27 16:58:37 +00:00
kaosat.dev ab7a0cab65 feat(Blenvy:Blender): fixed issue with duplicate objects when using parenting in blender 2024-07-27 18:57:47 +02:00
kaosat.dev 55a4deac1c feat(Blenvy:Bevy): changed panic! in component string parsing to be a simple warning
(might make further changes to this down the line)
2024-07-27 17:02:34 +02:00
kaosat.dev ce17f723b1 feat(Blenvy):changed materials system to generate one gltf file per material
* modified materials detection & export accordingly
 * modified material paths & co accordingly (all much simpler !)
 * modified assets_scan helpers to also include material assets
 * modified & massively cleaned up BlueprintAsset injection
 * further cleanups & fixes for materials handling
 * also removed a lot of obsolete code dealing with assets
 * ever more cleanups !
2024-07-27 16:59:57 +02:00
kaosat.dev 4865d432d9 feat(Blenvy): added basics of correct multi-material meshes support
* relying on the fact that the mesh-per-material generated by the gltf exporter is deterministic:
ie always uses the ordering of materials in an object
 * added new component MaterialInfos (plural) with a vec of MaterialInfo's
 * modified how materials per object are gathered on the Blender side
 * and modified the processing on the Bevy side to also use the ordered approach
 * seems to work well so far !
2024-07-27 12:48:47 +02:00
16 changed files with 143 additions and 227 deletions
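The two feature commits above rely on the same convention: the glTF exporter splits a multi-material mesh into one generated child mesh per material slot, in slot order, and every material is now exported to its own glTF file. The plain-Python sketch below only illustrates that index-based pairing; the slot names, child names and the "materials" folder are made-up stand-ins, not identifiers from the add-on or from Bevy.

import posixpath

# Hypothetical material slots of a single Blender object, in slot order.
slots = ["Metal", "Wood", "Glass"]
materials_path = "materials"   # assumed per-material export folder
extension = ".glb"             # assumed export extension

# One exported glTF file per material (commit ce17f723b1),
# listed in the same order as the slots.
material_infos = [
    {"name": name, "path": posixpath.join(materials_path, f"{name}{extension}")}
    for name in slots
]

# The exporter generates one child mesh per material, in slot order
# (commit 4865d432d9), so child index i pairs with material_infos[i].
generated_children = ["Cube_mesh_0", "Cube_mesh_1", "Cube_mesh_2"]

for index, child in enumerate(generated_children):
    info = material_infos[index]
    print(f'{child} -> material "{info["name"]}" from {info["path"]}')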

TODO.md (11 changes)
View File

@@ -226,10 +226,13 @@ Blender side:
 - [ ] materials_path custom property should be ignored both in the list of fixable component AND on export
 - [ ] if we want to add material_infos & others as normal components they should not be editable, so we need another attribute, and adapt the UI for that
-- [ ] if material library is toggled, then changes to materials should not change the blueprints that are using them => not really: as the name & co might change
+- [x] if material library is toggled, then changes to materials should not change the blueprints that are using them => not really: as the name & co might change
 - [ ] material assets seem to be added to list regardless of whether material exports are enabled or not
-- [ ] review & upgrade overall logic of material libraries, their names & output path
-- [ ] persist exported materials path in blueprints so that it can be read from library file users
+- [x] review & upgrade overall logic of material libraries, their names & output path
+- [x] change materials logic to work with multiple materials per mesh
+  - [x] the index of the generated gltf files is reliable, and can be used both in Blender & Bevy
+  - [x] change MaterialInfo to MaterialInfos & turn it into a vec/list & updated logic both on Blender & Bevy side
+- [ ] persist exported materials paths in blueprints so that it can be read from library file users
 - [ ] just like "export_path" write it into each blueprint's collection
 - [ ] scan for used materials per blueprint !
 - [ ] for scenes, scan for used materials of all non instance objects (TODO: what about overrides ?)
@@ -323,7 +326,7 @@ Bevy Side:
 - [x] fix "remove component" operator from the rename/fix/update components panel
 - [ ] replace string in BlueprintInfo path with PathBuf ?
-- [ ] update main docs
+- [x] update main docs
 - [x] rename project to Blenvy
 - [x] replace all references to the old 2 add-ons with those to Blenvy
 - [x] rename repo to "Blenvy"

View File

@@ -2,22 +2,26 @@ use bevy::prelude::*;
 use crate::BlenvyConfig;

-#[derive(Component, Reflect, Default, Debug)]
-#[reflect(Component)]
+#[derive(Reflect, Default, Debug)]
 /// struct containing the name & path of the material to apply
 pub struct MaterialInfo {
     pub name: String,
     pub path: String,
 }

+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+/// component containing the full list of MaterialInfos for a given entity/object
+pub struct MaterialInfos(Vec<MaterialInfo>);
+
 #[derive(Component, Default, Debug)]
 pub struct MaterialProcessed;

 /// system that injects / replaces materials from material library
 pub(crate) fn inject_materials(
     mut blenvy_config: ResMut<BlenvyConfig>,
-    material_infos: Query<
-        (Entity, &MaterialInfo, &Children),
+    material_infos_query: Query<
+        (Entity, &MaterialInfos, &Children),
         Without<MaterialProcessed>, // (With<BlueprintReadyForPostProcess>)
         /*(
             Added<BlueprintMaterialAssetsLoaded>,
@@ -37,57 +41,64 @@ pub(crate) fn inject_materials(
     mut commands: Commands,
 ) {
-    for (entity, material_info, children) in material_infos.iter() {
-        let material_full_path = format!("{}#{}", material_info.path, material_info.name);
-        let mut material_found: Option<&Handle<StandardMaterial>> = None;
-        if blenvy_config
-            .materials_cache
-            .contains_key(&material_full_path)
-        {
-            debug!("material is cached, retrieving");
-            let material = blenvy_config
-                .materials_cache
-                .get(&material_full_path)
-                .expect("we should have the material available");
-            material_found = Some(material);
-        } else {
-            let model_handle: Handle<Gltf> = asset_server.load(material_info.path.clone()); // FIXME: kinda weird now
-            let mat_gltf = assets_gltf.get(model_handle.id()).unwrap_or_else(|| {
-                panic!(
-                    "materials file {} should have been preloaded",
-                    material_info.path
-                )
-            });
-            if mat_gltf
-                .named_materials
-                .contains_key(&material_info.name as &str)
-            {
-                let material = mat_gltf
-                    .named_materials
-                    .get(&material_info.name as &str)
-                    .expect("this material should have been loaded at this stage, please make sure you are correctly preloading them");
-                blenvy_config
-                    .materials_cache
-                    .insert(material_full_path, material.clone());
-                material_found = Some(material);
-            }
-        }
-
-        commands.entity(entity).insert(MaterialProcessed);
-
-        if let Some(material) = material_found {
-            for child in children.iter() {
-                if with_materials_and_meshes.contains(*child) {
-                    info!(
-                        "injecting material {}, path: {:?}",
-                        material_info.name,
-                        material_info.path.clone()
-                    );
-                    commands.entity(*child).insert(material.clone());
-                }
-            }
-        }
-    }
+    for (entity, material_infos, children) in material_infos_query.iter() {
+        for (material_index, material_info) in material_infos.0.iter().enumerate() {
+            let material_full_path = format!("{}#{}", material_info.path, material_info.name);
+            let mut material_found: Option<&Handle<StandardMaterial>> = None;
+            if blenvy_config
+                .materials_cache
+                .contains_key(&material_full_path)
+            {
+                debug!("material is cached, retrieving");
+                let material = blenvy_config
+                    .materials_cache
+                    .get(&material_full_path)
+                    .expect("we should have the material available");
+                material_found = Some(material);
+            } else {
+                let model_handle: Handle<Gltf> = asset_server.load(material_info.path.clone()); // FIXME: kinda weird now
+                let mat_gltf = assets_gltf.get(model_handle.id()).unwrap_or_else(|| {
+                    panic!(
+                        "materials file {} should have been preloaded",
+                        material_info.path
+                    )
+                });
+                if mat_gltf
+                    .named_materials
+                    .contains_key(&material_info.name as &str)
+                {
+                    let material = mat_gltf
+                        .named_materials
+                        .get(&material_info.name as &str)
+                        .expect("this material should have been loaded at this stage, please make sure you are correctly preloading them");
+                    blenvy_config
+                        .materials_cache
+                        .insert(material_full_path, material.clone());
+                    material_found = Some(material);
+                }
+            }
+
+            if let Some(material) = material_found {
+                for (child_index, child) in children.iter().enumerate() {
+                    if child_index == material_index {
+                        if with_materials_and_meshes.contains(*child) {
+                            info!(
+                                "injecting material {}, path: {:?}",
+                                material_info.name,
+                                material_info.path.clone()
+                            );
+                            commands.entity(*child).insert(material.clone());
+                        }
+                    }
+                }
+            }
+        }
+        commands.entity(entity).insert(MaterialProcessed);
+    }
 }

View File

@@ -91,6 +91,8 @@ impl Plugin for BlueprintsPlugin {
             .add_event::<BlueprintEvent>()
             .register_type::<BlueprintInfo>()
             .register_type::<MaterialInfo>()
+            .register_type::<MaterialInfos>()
             .register_type::<SpawnBlueprint>()
             .register_type::<BlueprintInstanceDisabled>()
             .register_type::<HideUntilReady>()
View File

@@ -69,14 +69,21 @@ fn components_string_to_components(
         let mut deserializer = ron::Deserializer::from_str(ron_string.as_str())
             .expect("deserialzer should have been generated from string");
         let reflect_deserializer = ReflectDeserializer::new(type_registry);
-        let component = reflect_deserializer
+        /*let component = reflect_deserializer
             .deserialize(&mut deserializer)
             .unwrap_or_else(|_| {
                 panic!(
                     "failed to deserialize component {} with value: {:?}",
                     name, value
                 )
-            });
+            });*/
+        let Ok(component) = reflect_deserializer.deserialize(&mut deserializer) else {
+            warn!(
+                "failed to deserialize component {} with value: {:?}",
+                name, value
+            );
+            return;
+        };

         debug!("component {:?}", component);
         debug!("real type {:?}", component.get_represented_type_info());

View File

@@ -102,7 +102,7 @@ def auto_export(changes_per_scene, changes_per_collection, changes_per_material,
         old_selections = bpy.context.selected_objects

         # deal with materials
-        if export_materials_library:
+        if export_materials_library and len(materials_to_export) > 0:
             print("export MATERIALS")
             export_materials(materials_to_export, settings, blueprints_data)

View File

@@ -73,7 +73,7 @@ def generate_gltf_export_settings(settings):
         if str(key) not in constant_keys:
             gltf_export_settings[key] = standard_gltf_exporter_settings.get(key)

-    print("GLTF EXPORT SETTINGS", gltf_export_settings)
+    #print("GLTF EXPORT SETTINGS", gltf_export_settings)
     return gltf_export_settings

View File

@@ -81,11 +81,14 @@ def generate_temporary_scene_and_export(settings, gltf_export_settings, gltf_out
 # copies the contents of a collection into another one while replacing library instances with empties
 def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, settings={}):
     collection_instances_combine_mode = getattr(settings.auto_export, "collection_instances_combine_mode")

     for object in source_collection.objects:
         if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
             continue
         if filter is not None and filter(object) is False:
             continue
+        if object.parent is not None: # if there is a parent, this object is not a direct child of the collection, and should be ignored (it will get picked up by the recursive scan inside duplicate_object)
+            continue
+
         #check if a specific collection instance does not have an ovveride for combine_mode
         combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode

         parent = parent_empty

View File

@@ -8,4 +8,4 @@ custom_properties_to_filter_out = [
     '_combine', 'template',
     'Blenvy_scene_type', 'blenvy_scene_type',
     'materials_path', 'export_path',
-]
+]

View File

@@ -1,5 +1,4 @@
 import os
-from blenvy.blueprints.blueprint_helpers import inject_blueprints_list_into_level_scene, remove_blueprints_list_from_level_scene
 from ..constants import TEMPSCENE_PREFIX
 from ..common.generate_temporary_scene_and_export import generate_temporary_scene_and_export, copy_hollowed_collection_into, clear_hollow_scene
 from ..common.export_gltf import (generate_gltf_export_settings, export_gltf)
@@ -26,8 +25,8 @@ def export_level_scene(scene, settings, blueprints_data):
     if export_blueprints :
         gltf_output_path = os.path.join(levels_path_full, scene.name)

-        inject_blueprints_list_into_level_scene(scene, blueprints_data, settings)
         # we inject assets into the scene before it gets exported
         # TODO: this should be done in the temporary scene !
         upsert_scene_assets(scene, blueprints_data=blueprints_data, settings=settings)

         if export_separate_dynamic_and_static_objects:
@@ -67,8 +66,6 @@ def export_level_scene(scene, settings, blueprints_data):
             tempScene_cleaner= lambda temp_scene, params: clear_hollow_scene(original_root_collection=scene.collection, temp_scene=temp_scene, **params)
         )
-        remove_blueprints_list_from_level_scene(scene)
-
     else:
         gltf_output_path = os.path.join(assets_path_full, scene.name)
         print(" exporting gltf to", gltf_output_path, ".gltf/glb")

View File

@@ -46,6 +46,13 @@ def generate_materials_scene_content(root_collection, used_material_names):
         make_material_object("Material_"+material_name, [index * 0.2,0,0], material=material, collection=root_collection)
     return {}

+# generates a scene for a given material
+def generate_material_scene_content(root_collection, material_name):
+    material = bpy.data.materials[material_name]
+    make_material_object(f"Material_{material_name}", [0,0,0], material=material, collection=root_collection)
+    return {}
+
 def clear_materials_scene(temp_scene):
     root_collection = temp_scene.collection
     scene_objects = [o for o in root_collection.objects]
@@ -83,21 +90,18 @@ def export_materials(materials_to_export, settings, blueprints_data):
         'export_apply':True
     }

     current_project_name = Path(bpy.context.blend_data.filepath).stem
-    gltf_output_path = os.path.join(materials_path_full, current_project_name + "_materials")
-    print(" exporting Materials to", gltf_output_path, ".gltf/glb")
-    generate_temporary_scene_and_export(
-        settings=settings,
-        gltf_export_settings=gltf_export_settings,
-        temp_scene_name="__materials_scene",
-        gltf_output_path=gltf_output_path,
-        tempScene_filler= lambda temp_collection: generate_materials_scene_content(temp_collection, used_material_names),
-        tempScene_cleaner= lambda temp_scene, params: clear_materials_scene(temp_scene=temp_scene)
-    )
+    for material in materials_to_export:
+        print("exporting material", material.name)
+        gltf_output_path = os.path.join(materials_path_full, material.name)
+        generate_temporary_scene_and_export(
+            settings=settings,
+            gltf_export_settings=gltf_export_settings,
+            temp_scene_name="__materials_scene",
+            gltf_output_path=gltf_output_path,
+            tempScene_filler= lambda temp_collection: generate_material_scene_content(temp_collection, material.name),
+            tempScene_cleaner= lambda temp_scene, params: clear_materials_scene(temp_scene=temp_scene)
+        )

 def cleanup_materials(collections, library_scenes):

View File

@@ -8,21 +8,26 @@ def get_materials_to_export(changes_per_material, changed_export_parameters, blu
     materials_path_full = getattr(settings,"materials_path_full", "")
     change_detection = getattr(settings.auto_export, "change_detection")
     export_materials_library = getattr(settings.auto_export, "export_materials_library")
     collection_instances_combine_mode = getattr(settings.auto_export, "collection_instances_combine_mode")

     all_materials = bpy.data.materials
     local_materials = [material for material in all_materials if material.library is None]
     materials_to_export = []
-    if change_detection and not changed_export_parameters:
-        changed_materials = [bpy.data.materials[material_name] for material_name in list(changes_per_material.keys())]
+    # first check if all materials have already been exported before (if this is the first time the exporter is run
+    # in your current Blender session for example)
+    materials_not_on_disk = find_materials_not_on_disk(local_materials, materials_path_full, export_gltf_extension)
+    if export_materials_library and change_detection:
+        if changed_export_parameters:
+            materials_to_export = [bpy.data.materials[material_name] for material_name in list(changes_per_material.keys())] # TODO: should be based on the list of materials in use
+        else :
+            changed_materials = [bpy.data.materials[material_name] for material_name in list(changes_per_material.keys())]
+            # also deal with blueprints that are always marked as "always_export"
+            #materials_always_export = [material for material in internal_materials if is_material_always_export(material)]
+            materials_always_export = []
+            materials_to_export = list(set(changed_materials + materials_not_on_disk + materials_always_export))

-        # first check if all materials have already been exported before (if this is the first time the exporter is run
-        # in your current Blender session for example)
-        materials_not_on_disk = find_materials_not_on_disk(local_materials, materials_path_full, export_gltf_extension)
-        # also deal with blueprints that are always marked as "always_export"
-        #materials_always_export = [material for material in internal_materials if is_material_always_export(material)]
-        materials_always_export = []
-        materials_to_export = list(set(changed_materials + materials_not_on_disk + materials_always_export))

     print("materials_to_export", materials_to_export, local_materials)
     return materials_to_export

View File

@@ -24,63 +24,14 @@ def assets_to_fake_ron(list_like):
 # TODO : move to assets
 def upsert_scene_assets(scene, blueprints_data, settings):

-    """print("level scene", scene)
-    for asset in scene.user_assets:
-        print(" user asset", asset.name, asset.path)
-    for asset in scene.generated_assets:
-        print(" generated asset", asset)"""
-
-    """for blueprint in blueprints_data.blueprints_per_scenes[scene.name]:
-        print("BLUEPRINT", blueprint)"""
-
-    blueprint_instances_in_scene = blueprints_data.blueprint_instances_per_level_scene.get(scene.name, {}).keys()
-    blueprints_in_scene = [blueprints_data.blueprints_per_name[blueprint_name] for blueprint_name in blueprint_instances_in_scene]
-    #yala = [blueprint.collection.user_assets for blueprint in blueprints_in_scene]
-    #print("dsfsdf", yala)
-
-    level_assets = []
-    all_assets = []
-    export_gltf_extension = getattr(settings, "export_gltf_extension", ".glb")
-    blueprints_path = getattr(settings, "blueprints_path")
-
-    for blueprint in blueprints_in_scene:
-        if blueprint.local:
-            blueprint_exported_path = posixpath.join(blueprints_path, f"{blueprint.name}{export_gltf_extension}")
-        else:
-            # get the injected path of the external blueprints
-            blueprint_exported_path = blueprint.collection['export_path'] if 'export_path' in blueprint.collection else None
-            # add their material path
-            materials_exported_path = blueprint.collection['materials_path'] if 'materials_path' in blueprint.collection else None
-            level_assets.append({"name": blueprint.name+"_material", "path": materials_exported_path})#, "generated": True, "internal":blueprint.local, "parent": None})
-
-        if blueprint_exported_path is not None: # and not does_asset_exist(assets_list, blueprint_exported_path):
-            level_assets.append({"name": blueprint.name, "path": blueprint_exported_path})#, "generated": True, "internal":blueprint.local, "parent": None})
-
-        # now also add the assets of the blueprints # TODO: wait no , these should not be a part of the (scene) local assets
-        for asset in blueprint.collection.user_assets:
-            #print("adding assets of blueprint", asset.name)
-            all_assets.append({"name": asset.name, "path": asset.path})
-
-    """for asset in level_assets:
-        print(" generated asset", asset.name, asset.path)"""
-
-    materials_path = getattr(settings, "materials_path")
-    current_project_name = Path(bpy.context.blend_data.filepath).stem
-    materials_library_name = f"{current_project_name}_materials"
-    materials_exported_path = posixpath.join(materials_path, f"{materials_library_name}{export_gltf_extension}")
-    material_assets = [{"name": materials_library_name, "path": materials_exported_path}] # we also add the material library as an asset
-    print("material_assets", material_assets, "extension", export_gltf_extension)
-
     all_assets_raw = get_level_scene_assets_tree2(level_scene=scene, blueprints_data=blueprints_data, settings=settings)
     local_assets = [{"name": asset["name"], "path": asset["path"]} for asset in all_assets_raw if asset['parent'] is None and asset["path"] != "" ]
     all_assets = [{"name": asset["name"], "path": asset["path"]} for asset in all_assets_raw if asset["path"] != "" ]
     print("all_assets_raw", all_assets_raw)
     print("all_assets", all_assets)

-    print("local assets", local_assets + material_assets)
-    scene["BlueprintAssets"] = assets_to_fake_ron(local_assets + material_assets)
-    #scene["BlueprintAssets"] = assets_to_fake_ron(all_assets + [{"name": asset.name, "path": asset.path} for asset in scene.user_assets] + level_assets + material_assets)
-    #scene["BlueprintAssets"] = assets_to_fake_ron([{'name':'foo', 'path':'bar'}])
+    print("local assets", local_assets)
+    scene["BlueprintAssets"] = assets_to_fake_ron(all_assets) #local_assets

 def upsert_blueprint_assets(blueprint, blueprints_data, settings):
     all_assets_raw = get_blueprint_asset_tree(blueprint=blueprint, blueprints_data=blueprints_data, settings=settings)

View File

@@ -3,6 +3,7 @@ import json
 import posixpath
 import bpy

+from ..materials.materials_helpers import get_blueprint_materials
 from .asset_helpers import does_asset_exist, get_user_assets, get_user_assets_as_list

 def scan_assets(scene, blueprints_data, settings):
@@ -62,6 +63,7 @@ def get_userTextures():
     print("textures", textures)

 def get_blueprint_assets_tree(blueprint, blueprints_data, parent, settings):
+    print("blueprint", blueprint.name)
     blueprints_path = getattr(settings, "blueprints_path")
     export_gltf_extension = getattr(settings, "export_gltf_extension", ".glb")
     assets_list = []
@@ -88,6 +90,15 @@ def get_blueprint_assets_tree(blueprint, blueprints_data, parent, settings):
         asset["parent"] = parent
         asset["internal"] = blueprint.local
     assets_list += direct_assets

+    # now get materials used by this blueprint
+    (blueprint_materials_names, materials_per_object) = get_blueprint_materials(blueprint=blueprint)
+    print("blueprint_materials", blueprint_materials_names)
+    for material_name in blueprint_materials_names:
+        materials_path = getattr(settings, "materials_path")
+        materials_exported_path = posixpath.join(materials_path, f"{material_name}{export_gltf_extension}")
+        assets_list.append({"name": material_name, "path": materials_exported_path, "type": "MATERIAL", "generated": True,"internal":blueprint.local, "parent": blueprint.name})
+
     return assets_list

 def get_level_scene_assets_tree(level_scene, blueprints_data, settings):

View File

@@ -70,11 +70,6 @@ class BLENVY_OT_assets_add(Operator):
         context.window_manager.assets_registry.asset_type_selector = "MODEL"
         context.window_manager.assets_registry.asset_path_selector = ""

-        """if blueprint_assets:
-            bpy.data.collections[self.target_name]["assets"] = json.dumps(assets)
-        else:
-            bpy.data.scenes[self.target_name]["assets"] = json.dumps(assets)"""
-
         return {'FINISHED'}

View File

@@ -5,8 +5,6 @@ import bpy
 from pathlib import Path
 import posixpath

-from ..core.scene_helpers import add_scene_property
-
 def find_blueprints_not_on_disk(blueprints, folder_path, extension):
     not_found_blueprints = []
     for blueprint in blueprints:
@@ -23,64 +21,9 @@ def check_if_blueprint_on_disk(scene_name, folder_path, extension):
     return found

 def inject_export_path_into_internal_blueprints(internal_blueprints, blueprints_path, gltf_extension, settings):
     export_materials_library = getattr(settings.auto_export, "export_materials_library")
-    # FIXME: duplicate of materials stuff
-    export_gltf_extension = getattr(settings, "export_gltf_extension", ".glb")
-    materials_path = getattr(settings, "materials_path")
-    current_project_name = Path(bpy.context.blend_data.filepath).stem
-    materials_library_name = f"{current_project_name}_materials"
-    materials_exported_path = posixpath.join(materials_path, f"{materials_library_name}{export_gltf_extension}")

     for blueprint in internal_blueprints:
         blueprint_exported_path = posixpath.join(blueprints_path, f"{blueprint.name}{gltf_extension}")
         # print("injecting blueprint path", blueprint_exported_path, "for", blueprint.name)
         blueprint.collection["export_path"] = blueprint_exported_path

-        if export_materials_library:
-            blueprint.collection["materials_path"] = materials_exported_path
-
-def inject_blueprints_list_into_level_scene(scene, blueprints_data, settings):
-    project_root_path = getattr(settings, "project_root_path")
-    assets_path = getattr(settings,"assets_path")
-    levels_path = getattr(settings,"levels_path")
-    blueprints_path = getattr(settings, "blueprints_path")
-    export_gltf_extension = getattr(settings, "export_gltf_extension")
-
-    # print("injecting assets/blueprints data into scene")
-    assets_list_name = f"assets_list_{scene.name}_components"
-    assets_list_data = {}
-
-    blueprint_instance_names_for_scene = blueprints_data.blueprint_instances_per_level_scene.get(scene.name, None)
-    blueprint_assets_list = []
-    if blueprint_instance_names_for_scene:
-        for blueprint_name in blueprint_instance_names_for_scene:
-            blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
-            if blueprint is not None:
-                #print("BLUEPRINT", blueprint)
-                blueprint_exported_path = None
-                if blueprint.local:
-                    blueprint_exported_path = posixpath.join(blueprints_path, f"{blueprint.name}{export_gltf_extension}")
-                else:
-                    # get the injected path of the external blueprints
-                    blueprint_exported_path = blueprint.collection['Export_path'] if 'Export_path' in blueprint.collection else None
-                    #print("foo", dict(blueprint.collection))
-
-                if blueprint_exported_path is not None:
-                    blueprint_assets_list.append({"name": blueprint.name, "path": blueprint_exported_path, "type": "MODEL", "internal": True})
-
-    assets_list_name = f"assets_{scene.name}"
-    scene["assets"] = json.dumps(blueprint_assets_list)
-    #print("blueprint assets", blueprint_assets_list)
-
-def remove_blueprints_list_from_level_scene(scene):
-    assets_list = None
-    assets_list_name = f"assets_list_{scene.name}_components"
-
-    for object in scene.objects:
-        if object.name == assets_list_name:
-            assets_list = object
-    if assets_list is not None:
-        bpy.data.objects.remove(assets_list, do_unlink=True)
+        """if export_materials_library:
+            blueprint.collection["materials_path"] = materials_exported_path"""

View File

@@ -1,28 +1,15 @@
 import os
 import posixpath
 import bpy
-from pathlib import Path
 from ..core.helpers_collections import (traverse_tree)

 def find_materials_not_on_disk(materials, materials_path_full, extension):
     not_found_materials = []
-    current_project_name = Path(bpy.context.blend_data.filepath).stem
-    materials_library_name = f"{current_project_name}_materials"
-    materials_exported_path = os.path.join(materials_path_full, f"{materials_library_name}{extension}")
-    found = os.path.exists(materials_exported_path) and os.path.isfile(materials_exported_path)
     for material in materials:
-        if not found:
-            not_found_materials.append(material)
-    """for material in materials:
         gltf_output_path = os.path.join(materials_path_full, material.name + extension)
         # print("gltf_output_path", gltf_output_path)
         found = os.path.exists(gltf_output_path) and os.path.isfile(gltf_output_path)
         if not found:
-            not_found_materials.append(material)"""
+            not_found_materials.append(material)
     return not_found_materials

 def check_if_material_on_disk(scene_name, folder_path, extension):
@@ -40,7 +27,9 @@ def get_materials(object, materials_per_object):
             # print(" slot", m, "material", material)
             used_materials_names.append(material.name)
             # TODO:, also respect slots & export multiple materials if applicable !
-            materials_per_object[object] = material
+            if not object in materials_per_object:
+                materials_per_object[object] = []
+            materials_per_object[object].append(material)

     return used_materials_names
@@ -62,22 +51,17 @@ def get_all_materials(collection_names, library_scenes):
 def add_material_info_to_objects(materials_per_object, settings):
     materials_path = getattr(settings, "materials_path")
     export_gltf_extension = getattr(settings, "export_gltf_extension", ".glb")
-    current_project_name = Path(bpy.context.blend_data.filepath).stem
-    materials_library_name = f"{current_project_name}_materials"
-    materials_exported_path = posixpath.join(materials_path, f"{materials_library_name}{export_gltf_extension}")
     #print("ADDING MAERIAL INFOS")
     for object in materials_per_object.keys():
-        material = materials_per_object[object]
+        material_infos = []
+        for material in materials_per_object[object]:
+            materials_exported_path = posixpath.join(materials_path, f"{material.name}{export_gltf_extension}")
+            material_info = f'(name: "{material.name}", path: "{materials_exported_path}")'
+            material_infos.append(material_info)

         # problem with using actual components: you NEED the type registry/component infos, so if there is none , or it is not loaded yet, it does not work
         # for a few components we could hardcode this
-        component_value = f'(name: "{material.name}", path: "{materials_exported_path}")'
-        #bpy.ops.blenvy.component_add(target_item_name=object.name, target_item_type="OBJECT", component_type="blenvy::blueprints::materials::MaterialInfo", component_value=component_value)
-        materials_exported_path = posixpath.join(materials_path, f"{materials_library_name}{export_gltf_extension}")
-        object['MaterialInfo'] = component_value
-        print("adding materialInfo to object", object, "material info", component_value)
+        #bpy.ops.blenvy.component_add(target_item_name=object.name, target_item_type="OBJECT", component_type="blenvy::blueprints::materials::MaterialInfos", component_value=component_value)
+        object['MaterialInfos'] = f"({material_infos})".replace("'","")
+        print("adding materialInfos to object", object, "material infos", material_infos)

 # get all the materials of all objects in a given scene
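For reference, the 'MaterialInfos' custom property written by add_material_info_to_objects above ends up as a RON-like string rather than a real registered component value; the Bevy side later parses it into the new MaterialInfos component. A minimal standalone sketch of that formatting step, with hypothetical material names and assumed settings values:

import posixpath

materials_path = "materials"     # assumed settings.materials_path
export_gltf_extension = ".glb"   # assumed settings.export_gltf_extension

material_infos = []
for name in ["Metal", "Wood"]:   # hypothetical slot order of one object
    exported_path = posixpath.join(materials_path, f"{name}{export_gltf_extension}")
    material_infos.append(f'(name: "{name}", path: "{exported_path}")')

# Same formatting trick as in the diff above: stringify the Python list,
# then strip the single quotes to obtain a RON-style list of tuples.
material_infos_value = f"({material_infos})".replace("'", "")
print(material_infos_value)
# prints: ([(name: "Metal", path: "materials/Metal.glb"), (name: "Wood", path: "materials/Wood.glb")])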