mirror of
https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
synced 2024-11-21 19:44:18 +00:00
feat(gltf_auto_export): component filtering for exports, bug fixes and tests (#143)

* added filtering out of invalid components / custom properties:
  * includes filtering out of invalid / disabled components as defined in bevy_components, components_meta etc
* completely changed handling of "unique" (non blueprint) objects by copying them instead of moving/renaming them & manipulating their custom properties: much cleaner, much simpler!
* disabled default gltf export of optimised animations
* added tests!
* closes #139
* closes #141
* closes #142
* closes #146
This commit is contained in:
parent e83ef32b9f
commit dfc2be8c50
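The heart of the change is the component-validity check added to the export helpers (see the helpers_scenes hunks further down). Below is a condensed, standalone Python sketch of that filtering idea; the dict/list modelling of the Blender object and of the bevy_components metadata is an assumption made purely for illustration, while the names mirror the diff itself:

```python
# Condensed sketch of the filtering added in this commit (see the helpers_scenes hunks below).
# Here the object's custom properties are modelled as a plain dict and components_meta as a
# list of {"name", "enabled", "invalid"} entries, standing in for the bevy_components metadata.

custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']

def is_component_valid(components_meta, component_name):
    # a component is exported only if it is enabled and not flagged invalid
    meta = next((c for c in components_meta if c["name"] == component_name), None)
    if meta is not None:
        return meta["enabled"] and not meta["invalid"]
    return True

def filter_custom_properties(custom_properties, components_meta):
    # keep only the properties that survive both the static deny-list
    # and the per-component metadata check
    return {
        name: value
        for name, value in custom_properties.items()
        if name not in custom_properties_to_filter_out
        and is_component_valid(components_meta, name)
    }

# example: a disabled component and the bookkeeping keys are dropped
props = {"Health": "(10.0)", "Marker": "()", "_combine": "Split", "components_meta": "..."}
meta = [{"name": "Health", "enabled": True, "invalid": False},
        {"name": "Marker", "enabled": False, "invalid": False}]
print(filter_custom_properties(props, meta))  # {'Health': '(10.0)'}
```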
@@ -16,7 +16,9 @@ members = [
     "examples/bevy_gltf_save_load/basic/",
     "examples/bevy_registry_export/basic",
 
-    "testing/bevy_registry_export/basic"
+    "testing/bevy_registry_export/basic",
+    "testing/bevy_example"
+
 ]
 resolver = "2"
 
testing/auto_export_template.blend (new binary file)
Binary file not shown.
testing/bevy_example/Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "bevy_example"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"

[dependencies]
bevy="0.12"
bevy_gltf_blueprints = { path = "../../crates/bevy_gltf_blueprints" }
bevy_registry_export = { path = "../../crates/bevy_registry_export" }
bevy_gltf_worlflow_examples_common = { path = "../../examples/common" }

bevy_rapier3d = { version = "0.23.0", features = [ "serde-serialize", "debug-render-3d", "enhanced-determinism"] }
bevy_asset_loader = { version = "0.18", features = ["standard_dynamic_assets" ]}
bevy_editor_pls = { version = "0.6" }
rand = "0.8.5"
testing/bevy_example/README.md (new file, 15 lines)
@@ -0,0 +1,15 @@
# Bevy registry export example/demo

This example showcases
* the use of the bevy_registry_export crate to extract all components & types information into a JSON file.
* That file is then used by the [Blender addon](https://github.com/kaosat-dev/Blender_bevy_components_workflow/tree/main/tools/bevy_components) to create UIs for each component,
to be able to add & edit Bevy components easily in Blender!


## Running this example

```
cargo run --features bevy/dynamic_linking
```

Running the example also regenerates the registry.json file.
testing/bevy_example/assets/assets_core.assets.ron (new file, 1 line)
@@ -0,0 +1 @@
({})
testing/bevy_example/assets/assets_game.assets.ron (new file, 6 lines)
@@ -0,0 +1,6 @@
({
    "world":File (path: "models/World.glb"),
    "models": Folder (
        path: "models/library",
    ),
})
testing/bevy_example/assets/registry.json (new file, 10732 lines)
File diff suppressed because it is too large.
testing/bevy_example/assets/testing.blend (new binary file)
Binary file not shown.
testing/bevy_example/src/core/mod.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
use bevy::prelude::*;
use bevy_gltf_blueprints::*;
use bevy_registry_export::*;

pub struct CorePlugin;
impl Plugin for CorePlugin {
    fn build(&self, app: &mut App) {
        app.add_plugins((
            ExportRegistryPlugin {
                save_path: "assets/registry.json".into(),
                ..Default::default()
            },
            BlueprintsPlugin {
                legacy_mode: false,
                library_folder: "models/library".into(),
                format: GltfFormat::GLB,
                aabbs: true,
                ..Default::default()
            },
        ));
    }
}
testing/bevy_example/src/game/in_game.rs (new file, 84 lines)
@@ -0,0 +1,84 @@
use bevy::prelude::*;
use bevy_gltf_blueprints::{BluePrintBundle, BlueprintName, GameWorldTag};
use bevy_gltf_worlflow_examples_common::{assets::GameAssets, GameState, InAppRunning};

use bevy_rapier3d::prelude::Velocity;
use rand::Rng;

pub fn setup_game(
    mut commands: Commands,
    game_assets: Res<GameAssets>,
    models: Res<Assets<bevy::gltf::Gltf>>,
    mut next_game_state: ResMut<NextState<GameState>>,
) {
    println!("setting up all stuff");
    commands.insert_resource(AmbientLight {
        color: Color::WHITE,
        brightness: 0.2,
    });
    // here we actually spawn our game world/level

    commands.spawn((
        SceneBundle {
            // note: because of this issue https://github.com/bevyengine/bevy/issues/10436, "world" is now a gltf file instead of a scene
            scene: models
                .get(game_assets.world.id())
                .expect("main level should have been loaded")
                .scenes[0]
                .clone(),
            ..default()
        },
        bevy::prelude::Name::from("world"),
        GameWorldTag,
        InAppRunning,
    ));

    next_game_state.set(GameState::InGame)
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct UnregisteredComponent;

pub fn spawn_test(
    keycode: Res<Input<KeyCode>>,
    mut commands: Commands,

    mut game_world: Query<(Entity, &Children), With<GameWorldTag>>,
) {
    if keycode.just_pressed(KeyCode::T) {
        let world = game_world.single_mut();
        let world = world.1[0];

        let mut rng = rand::thread_rng();
        let range = 5.5;
        let x: f32 = rng.gen_range(-range..range);
        let y: f32 = rng.gen_range(-range..range);

        let mut rng = rand::thread_rng();
        let range = 0.8;
        let vel_x: f32 = rng.gen_range(-range..range);
        let vel_y: f32 = rng.gen_range(2.0..2.5);
        let vel_z: f32 = rng.gen_range(-range..range);

        let name_index: u64 = rng.gen();

        let new_entity = commands
            .spawn((
                BluePrintBundle {
                    blueprint: BlueprintName("Health_Pickup".to_string()),
                    ..Default::default()
                },
                bevy::prelude::Name::from(format!("test{}", name_index)),
                // BlueprintName("Health_Pickup".to_string()),
                // SpawnHere,
                TransformBundle::from_transform(Transform::from_xyz(x, 2.0, y)),
                Velocity {
                    linvel: Vec3::new(vel_x, vel_y, vel_z),
                    angvel: Vec3::new(0.0, 0.0, 0.0),
                },
            ))
            .id();
        commands.entity(world).add_child(new_entity);
    }
}
testing/bevy_example/src/game/mod.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
pub mod in_game;
use std::time::Duration;

pub use in_game::*;

use bevy::{prelude::*, time::common_conditions::on_timer};
use bevy_gltf_worlflow_examples_common::{AppState, GameState};

fn start_game(mut next_app_state: ResMut<NextState<AppState>>) {
    next_app_state.set(AppState::AppLoading);
}

fn exit_game(mut app_exit_events: ResMut<Events<bevy::app::AppExit>>) {
    app_exit_events.send(bevy::app::AppExit);
}

pub struct GamePlugin;
impl Plugin for GamePlugin {
    fn build(&self, app: &mut App) {
        app.add_systems(Update, (spawn_test).run_if(in_state(GameState::InGame)))
            .add_systems(OnEnter(AppState::MenuRunning), start_game)
            .add_systems(
                Update,
                exit_game.run_if(on_timer(Duration::from_secs_f32(0.5))),
            ) // shut down the app after this time
            .add_systems(OnEnter(AppState::AppRunning), setup_game);
    }
}
testing/bevy_example/src/main.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
use bevy::prelude::*;
use bevy_editor_pls::prelude::*;
use bevy_gltf_worlflow_examples_common::CommonPlugin;

mod core;
use crate::core::*;

mod game;
use game::*;

mod test_components;
use test_components::*;

fn main() {
    App::new()
        .add_plugins((
            DefaultPlugins.set(AssetPlugin::default()),
            // editor
            EditorPlugin::default(),
            // our custom plugins
            CommonPlugin,
            CorePlugin,           // reusable plugins
            GamePlugin,           // specific to our game
            ComponentsTestPlugin, // Showcases different type of components /structs
        ))
        .run();
}
testing/bevy_example/src/test_components.rs (new file, 190 lines)
@@ -0,0 +1,190 @@
use bevy::{
    pbr::{ExtendedMaterial, MaterialExtension},
    prelude::*,
    render::render_resource::*,
};
use std::ops::Range;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct UnitTest;

#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
struct TupleTestF32(f32);

#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
struct TupleTestU64(u64);

#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
pub struct TupleTestStr(String);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TupleTest2(f32, u64, String);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TupleTestBool(bool);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TupleVec2(Vec2);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TupleVec3(Vec3);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TupleVec(Vec<String>);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TupleVecF32F32(Vec<(f32, f32)>);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TupleTestColor(Color);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct BasicTest {
    a: f32,
    b: u64,
    c: String,
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum EnumTest {
    Metal,
    Wood,
    Rock,
    Cloth,
    Squishy,
    #[default]
    None,
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct NestingTestLevel2 {
    text: String,
    enable: bool,
    enum_inner: EnumTest,
    color: TupleTestColor,
    toggle: TupleTestBool,
    basic: BasicTest,
    pub nested: NestingTestLevel3,
    colors_list: VecOfColors,
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct NestingTestLevel3 {
    vec: TupleVec3,
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct NestedTupleStuff(f32, u64, NestingTestLevel2);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum EnumComplex {
    Float(f32),
    Wood(String),
    Vec(BasicTest),
    SomeThing,
    StructLike {
        a: f32,
        b: u32,
        c: String,
    },
    #[default]
    None,
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct VecOfVec3s2(Vec<TupleVec3>);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct VecOfColors(Vec<Color>);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct AAAAddedCOMPONENT;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct AComponentWithAnExtremlyExageratedOrMaybeNotButCouldBeNameOrWut;

/* fn toto(){
    let bla:core::ops::Range<f32> = Range { start: 0.1, end: 5.0};
} */

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct VecOfF32s(Vec<f32>);

// test for extended materials
#[derive(Asset, AsBindGroup, Reflect, Debug, Clone)]
struct MyExtension {
    // We need to ensure that the bindings of the base material and the extension do not conflict,
    // so we start from binding slot 100, leaving slots 0-99 for the base material.
    #[uniform(100)]
    quantize_steps: u32,
}

impl MaterialExtension for MyExtension {
    fn fragment_shader() -> ShaderRef {
        "shaders/extended_material.wgsl".into()
    }

    fn deferred_fragment_shader() -> ShaderRef {
        "shaders/extended_material.wgsl".into()
    }
}

pub struct ComponentsTestPlugin;
impl Plugin for ComponentsTestPlugin {
    fn build(&self, app: &mut App) {
        app.register_type::<BasicTest>()
            .register_type::<UnitTest>()
            .register_type::<TupleTestF32>()
            .register_type::<TupleTestU64>()
            .register_type::<TupleTestStr>()
            .register_type::<TupleTestBool>()
            .register_type::<TupleTest2>()
            .register_type::<TupleVec2>()
            .register_type::<TupleVec3>()
            .register_type::<EnumTest>()
            .register_type::<TupleTestColor>()
            .register_type::<TupleVec>()
            .register_type::<Vec<String>>()
            .register_type::<NestingTestLevel2>()
            .register_type::<NestingTestLevel3>()
            .register_type::<NestedTupleStuff>()
            .register_type::<EnumComplex>()
            .register_type::<VecOfVec3s2>()
            .register_type::<TupleVecF32F32>()
            .register_type::<(f32, f32)>()
            .register_type::<Vec<(f32, f32)>>()
            .register_type::<Vec<TupleVec3>>()
            .register_type::<Vec<Color>>()
            .register_type::<VecOfColors>()
            .register_type::<Range<f32>>()
            .register_type::<VecOfF32s>()
            .register_type::<Vec<f32>>()
            // .register_type::<AAAAddedCOMPONENT>()
            .register_type::<AComponentWithAnExtremlyExageratedOrMaybeNotButCouldBeNameOrWut>()
            .add_plugins(MaterialPlugin::<
                ExtendedMaterial<StandardMaterial, MyExtension>,
            >::default());
    }
}
@@ -1,7 +1,7 @@
 bl_info = {
     "name": "gltf_auto_export",
     "author": "kaosigh",
-    "version": (0, 12, 1),
+    "version": (0, 13, 0),
     "blender": (3, 4, 0),
     "location": "File > Import-Export",
     "description": "glTF/glb auto-export",
@@ -21,6 +21,9 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
     folder_path = os.path.dirname(file_path)
     # get the preferences for our addon
 
+    #should we use change detection or not
+    export_change_detection = getattr(addon_prefs, "export_change_detection")
+
     export_blueprints = getattr(addon_prefs,"export_blueprints")
     export_output_folder = getattr(addon_prefs,"export_output_folder")
 
@@ -75,7 +78,7 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
         if matching_collection is not None:
             changed_collections.append(matching_collection)
 
-    collections_to_export = list(set(changed_collections + collections_not_on_disk))
+    collections_to_export = list(set(changed_collections + collections_not_on_disk)) if export_change_detection else collections
 
     # we need to re_export everything if the export parameters have been changed
     collections_to_export = collections if changed_export_parameters else collections_to_export
@@ -110,14 +113,14 @@ def auto_export(changes_per_scene, changed_export_parameters, addon_prefs):
         print("export MAIN scenes")
         for scene_name in main_scene_names:
             # we have more relaxed rules to determine if the main scenes have changed : any change is ok, (allows easier handling of changes, render settings etc)
-            do_export_main_scene = changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_levels_path, gltf_extension)
+            do_export_main_scene = not export_change_detection or changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_levels_path, gltf_extension)
             if do_export_main_scene:
                 print(" exporting scene:", scene_name)
                 export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, library_collections)
 
 
         # now deal with blueprints/collections
-        do_export_library_scene = changed_export_parameters or len(collections_to_export) > 0 # export_library_scene_name in changes_per_scene.keys()
+        do_export_library_scene = not export_change_detection or changed_export_parameters or len(collections_to_export) > 0 # export_library_scene_name in changes_per_scene.keys()
         print("export LIBRARY")
         if do_export_library_scene:
             # we only want to go through the library scenes where our collections to export are present
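With the new export_change_detection preference, the per-scene export decision in the hunk above reduces to a small boolean expression. A standalone restatement for clarity (the function name and argument names here are illustrative, not part of the addon):

```python
def should_export_main_scene(export_change_detection, changed_export_parameters,
                             scene_changed, gltf_already_on_disk):
    # export when change detection is disabled, when the export settings changed,
    # when the scene itself changed, or when its gltf output is missing on disk
    return (not export_change_detection
            or changed_export_parameters
            or scene_changed
            or not gltf_already_on_disk)

# untouched scene, already exported, change detection on -> skipped
print(should_export_main_scene(True, False, False, True))   # False
# change detection disabled -> always exported
print(should_export_main_scene(False, False, False, True))  # True
```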
@@ -33,7 +33,8 @@ def generate_gltf_export_preferences(addon_prefs):
         export_skins=True,
         export_morph=False,
         export_apply=False,
-        export_animations=False
+        export_animations=False,
+        export_optimize_animation_size=False
     )
 
     for key in addon_prefs.__annotations__.keys():
@@ -9,7 +9,8 @@ from ..helpers.helpers_collections import (get_exportable_collections)
 from .auto_export import auto_export
 
 class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
-    """test"""
+    """auto export gltf"""
+    #bl_idname = "object.xxx"
     bl_idname = "export_scenes.auto_gltf"
     bl_label = "Apply settings"
     bl_options = {'PRESET', 'UNDO'}
@@ -21,7 +22,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
         'export_main_scene_name',
         'export_output_folder',
         'export_library_scene_name',
+        'export_change_detection',
         'export_blueprints',
         'export_blueprints_path',
 
@@ -79,7 +80,17 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
         # we inject all that we need, the above is not sufficient
         for (k, v) in self.properties.items():
             if k in self.white_list or k not in AutoExportGltfPreferenceNames:
-                export_props[k] = v
+                value = v
+                # FIXME: really weird having to do this
+                if k == "collection_instances_combine_mode":
+                    value = self.collection_instances_combine_mode
+                if k == "export_format":
+                    value = self.export_format
+                if k == "export_image_format":
+                    value = self.export_image_format
+                if k == "export_materials":
+                    value = self.export_materials
+                export_props[k] = value
         # we add main & library scene names to our preferences
 
         export_props['main_scene_names'] = list(map(lambda scene_data: scene_data.name, getattr(self,"main_scenes")))
@@ -93,7 +104,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
         #print("saving settings", bpy.data.texts[".gltf_auto_export_settings"].as_string(), "raw", json.dumps(export_props))
 
     def load_settings(self, context):
-        #print("loading settings")
+        # print("loading settings")
         settings = None
         try:
             settings = bpy.data.texts[".gltf_auto_export_settings"].as_string()
@@ -102,10 +113,10 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
 
         self.will_save_settings = False
         if settings:
-            #print("loading settings in invoke AutoExportGLTF", settings)
+            print("loading settings in invoke AutoExportGLTF", settings)
             try:
                 for (k, v) in settings.items():
-                    #print("loading setting", k, v)
+                    print("loading setting", k, v)
                     setattr(self, k, v)
                 self.will_save_settings = True
 
@@ -128,7 +139,8 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
                         item = library_scenes.add()
                         item.name = item_name
 
-            except (AttributeError, TypeError):
+            except Exception as error:
+                print("error", error)
                 self.report({"ERROR"}, "Loading export settings failed. Removed corrupted settings")
                 bpy.data.texts.remove(bpy.data.texts[".gltf_auto_export_settings"])
 
@@ -15,6 +15,7 @@ AutoExportGltfPreferenceNames = [
     'export_main_scene_name',
     'export_output_folder',
     'export_library_scene_name',
+    'export_change_detection',
 
     'export_blueprints',
     'export_blueprints_path',
@@ -39,7 +40,7 @@ AutoExportGltfPreferenceNames = [
     'library_scene_names',
     'previous_export_settings',
     'filter_glob',
-    'will_save_settings'
+    'will_save_settings',
 ]
 
 class AutoExportGltfAddonPreferences(AddonPreferences):
@@ -86,6 +87,11 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
         description='The name of the library scene to auto export',
         default='Library'
     )
+    export_change_detection: BoolProperty(
+        name='Change detection',
+        description='Use change detection to determine what/if should be exported',
+        default=True
+    )
     # scene components
     export_scene_settings: BoolProperty(
         name='Export scene settings',
@@ -2,16 +2,32 @@ import bpy
 from .helpers_collections import (set_active_collection)
 from .object_makers import (make_empty)
 
+
+# these are mostly for when using this add-on together with the bevy_components add-on
+custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']
+
+def is_component_valid(object, component_name):
+    if "components_meta" in object:
+        target_components_metadata = object.components_meta.components
+        component_meta = next(filter(lambda component: component["name"] == component_name, target_components_metadata), None)
+        if component_meta != None:
+            return component_meta.enabled and not component_meta.invalid
+    return True
+
+def remove_unwanted_custom_properties(object):
+    to_remove = []
+    for component_name in object.keys():
+        if not is_component_valid(object, component_name):
+            to_remove.append(component_name)
+
+    for cp in custom_properties_to_filter_out + to_remove:
+        if cp in object:
+            del object[cp]
+
 # copies the contents of a collection into another one while replacing library instances with empties
 def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, library_collections=[], addon_prefs={}):
     collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
     legacy_mode = getattr(addon_prefs, "export_legacy_mode")
 
-    root_objects = []
-    special_properties= { # to be able to reset any special property afterwards
-        "combine": [],
-    }
 
     collection_instances_combine_mode= collection_instances_combine_mode
 
     for object in source_collection.objects:
@@ -31,23 +47,26 @@ def copy_hollowed_collection_into(source_collection, destination_collection, par
             empty_obj['BlueprintName'] = '"'+collection_name+'"' if legacy_mode else '("'+collection_name+'")'
             empty_obj['SpawnHere'] = ''
 
-            for k, v in object.items():
-                if k != 'template' or k != '_combine': # do not copy these properties
-                    empty_obj[k] = v
+            # we copy custom properties over from our original object to our empty
+            for component_name, component_value in object.items():
+                if component_name not in custom_properties_to_filter_out and is_component_valid(object, component_name): #copy only valid properties
+                    empty_obj[component_name] = component_value
             if parent_empty is not None:
                 empty_obj.parent = parent_empty
         else:
-            # we backup special properties that we do not want to export, and remove them
-            if '_combine' in object:
-                special_properties["combine"].append((object, object['_combine']))
-                del object['_combine']
+            # we create a copy of our object, to leave the original one as it is
+            original_name = object.name
+            object.name = original_name + "____bak"
+            copy = object.copy()
+            copy.name = original_name
+            remove_unwanted_custom_properties(copy)
 
             if parent_empty is not None:
-                object.parent = parent_empty
-                destination_collection.objects.link(object)
+                copy.parent = parent_empty
+                destination_collection.objects.link(copy)
             else:
-                root_objects.append(object)
-                destination_collection.objects.link(object)
+                # root_objects.append(object)
+                destination_collection.objects.link(copy)
 
     # for every sub-collection of the source, copy its content into a new sub-collection of the destination
     for collection in source_collection.children:
@@ -58,7 +77,7 @@ def copy_hollowed_collection_into(source_collection, destination_collection, par
         if parent_empty is not None:
             collection_placeholder.parent = parent_empty
 
-        nested_results = copy_hollowed_collection_into(
+        copy_hollowed_collection_into(
             source_collection = collection,
             destination_collection = destination_collection,
             parent_empty = collection_placeholder,
@@ -66,19 +85,11 @@ def copy_hollowed_collection_into(source_collection, destination_collection, par
             library_collections = library_collections,
             addon_prefs=addon_prefs
         )
-        sub_root_objects = nested_results["root_objects"]
-        sub_special_properties = nested_results["special_properties"]
 
-        root_objects.extend(sub_root_objects)
-        for s in sub_special_properties.keys():
-            if not s in special_properties.keys():
-                special_properties[s] = []
-            special_properties[s].extend(sub_special_properties[s])
-
-    return {"root_objects": root_objects, "special_properties": special_properties}
+    return {}
 
 # clear & remove "hollow scene"
-def clear_hollow_scene(temp_scene, original_root_collection, root_objects, special_properties):
+def clear_hollow_scene(temp_scene, original_root_collection):
     def restore_original_names(collection):
         if collection.name.endswith("____bak"):
             collection.name = collection.name.replace("____bak", "")
@@ -86,6 +97,9 @@ def clear_hollow_scene(temp_scene, original_root_collection, root_objects, speci
             if object.instance_type == 'COLLECTION':
                 if object.name.endswith("____bak"):
                     object.name = object.name.replace("____bak", "")
+            else:
+                if object.name.endswith("____bak"):
+                    object.name = object.name.replace("____bak", "")
         for child_collection in collection.children:
             restore_original_names(child_collection)
 
@@ -96,37 +110,15 @@ def clear_hollow_scene(temp_scene, original_root_collection, root_objects, speci
     temp_root_collection = temp_scene.collection
     temp_scene_objects = [o for o in temp_root_collection.objects]
     for object in temp_scene_objects:
-        if object.type == 'EMPTY':
-            if hasattr(object, "SpawnHere"):
-                bpy.data.objects.remove(object, do_unlink=True)
-            else:
-                try:
-                    temp_root_collection.objects.unlink(object)
-                except:
-                    print("failed to unlink", object)
-                if object in root_objects:
-                    pass
-                else:
-                    bpy.data.objects.remove(object, do_unlink=True)
-        else:
-            temp_root_collection.objects.unlink(object)
-
-    # remove temporary collections
-    """for collection in temporary_collections:
-        bpy.data.collections.remove(collection)"""
-
-    # put back special properties
-    for (object, value) in special_properties["combine"]:
-        object['_combine'] = value
-
+        bpy.data.objects.remove(object, do_unlink=True)
 
     # remove the temporary scene
     bpy.data.scenes.remove(temp_scene)
 
 
 # convenience utility to get lists of scenes
 def get_scenes(addon_prefs):
-    level_scene_names= list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes")))
-    library_scene_names = list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes")))
+    level_scene_names= list(map(lambda scene: scene.name, getattr(addon_prefs,"main_scenes"))) # getattr(addon_prefs, "main_scene_names_compact").split(',')#
+    library_scene_names = list(map(lambda scene: scene.name, getattr(addon_prefs,"library_scenes"))) #getattr(addon_prefs, "main_scene_names_compact").split(',')#
 
     level_scene_names = list(filter(lambda name: name in bpy.data.scenes, level_scene_names))
     library_scene_names = list(filter(lambda name: name in bpy.data.scenes, library_scene_names))
tools/gltf_auto_export/pytest.ini (new file, 6 lines)
@@ -0,0 +1,6 @@
[pytest]
blender-template = ../../testing/bevy_example/assets/testing.blend
addopts = -svv
testpaths =
    tests
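All of the tests below follow the same pattern: write the desired "global" settings into the .gltf_auto_export_settings text datablock, invoke the auto-export operator in direct mode, then assert on the files it produced. A condensed sketch of that pattern (it must run inside Blender with the addon enabled; the helper itself is illustrative, only the operator name and settings key come from the tests below):

```python
import bpy
import json

def run_auto_export(export_props, **operator_args):
    # store the "global" addon settings in the text datablock the operator reads
    name = ".gltf_auto_export_settings"
    stored = bpy.data.texts[name] if name in bpy.data.texts else bpy.data.texts.new(name)
    stored.clear()
    stored.write(json.dumps(export_props))
    # invoke the operator directly (no UI), exactly as the tests below do
    return bpy.ops.export_scenes.auto_gltf(direct_mode=True, **operator_args)

# usage, mirroring test_export_do_not_export_blueprints below:
# run_auto_export({"main_scene_names": ['World'], "library_scene_names": ['Library']},
#                 export_output_folder="./models", export_blueprints=False)
```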
tools/gltf_auto_export/tests/__init__.py (new empty file, 0 lines)
tools/gltf_auto_export/tests/test_basic.py (new file, 212 lines)
@@ -0,0 +1,212 @@
import bpy
import os
import subprocess
import json
import pytest
import shutil


@pytest.fixture
def setup_data(request):
    print("\nSetting up resources...")
    root_path = "../../testing/bevy_example"
    assets_root_path = os.path.join(root_path, "assets")

    models_path = os.path.join(assets_root_path, "models")
    materials_path = os.path.join(assets_root_path, "materials")
    other_materials_path = os.path.join(assets_root_path, "other_materials")
    yield {"root_path": root_path, "assets_root_path": assets_root_path, "models_path": models_path, "materials_path": materials_path, "other_materials_path": other_materials_path}

    def finalizer():
        print("\nPerforming teardown...")
        if os.path.exists(models_path):
            shutil.rmtree(models_path)

        if os.path.exists(materials_path):
            shutil.rmtree(materials_path)

        if os.path.exists(other_materials_path):
            shutil.rmtree(other_materials_path)


    request.addfinalizer(finalizer)

    return None

def test_export_do_not_export_blueprints(setup_data):
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=False,
    )
    assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == False

def test_export_custom_blueprints_path(setup_data):
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_blueprints_path = "another_library_path"
    )
    assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "another_library_path", "Blueprint1.glb")) == True

def test_export_materials_library(setup_data):
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_materials_library = True
    )

    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
    assert os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb")) == True


def test_export_materials_library_custom_path(setup_data):
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_materials_library = True,
        export_materials_path="other_materials"
    )

    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
    assert os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb")) == False
    assert os.path.exists(os.path.join(setup_data["other_materials_path"], "testing_materials_library.glb")) == True

def test_export_collection_instances_combine_mode(setup_data): # TODO: change & check this
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))


    bpy.data.objects["Cube"]["dynamic"] = True

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_blueprints=True,
        collection_instances_combine_mode = 'Embed'
    )

    assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "World_dynamic.glb")) == False


def test_export_do_not_export_marked_assets(setup_data):
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_marked_assets = False
    )
    assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint2.glb")) == False
    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint3.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint4_nested.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint5.glb")) == False


def test_export_separate_dynamic_and_static_objects(setup_data):
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))


    bpy.data.objects["Cube"]["dynamic"] = True

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_separate_dynamic_and_static_objects = True
    )

    assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
    assert os.path.exists(os.path.join(setup_data["models_path"], "World_dynamic.glb")) == True
tools/gltf_auto_export/tests/test_bevy_integration.py (new file, 93 lines)
@@ -0,0 +1,93 @@
import bpy
import os
import subprocess
import json
import pytest
import shutil

@pytest.fixture
def setup_data(request):
    print("\nSetting up resources...")

    def finalizer():
        root_path = "../../testing/bevy_example"
        assets_root_path = os.path.join(root_path, "assets")
        models_path = os.path.join(assets_root_path, "models")
        #materials_path = os.path.join("../../testing", "materials")
        #other_materials_path = os.path.join("../../testing", "other_materials")

        print("\nPerforming teardown...")
        if os.path.exists(models_path):
            shutil.rmtree(models_path)

        """if os.path.exists(materials_path):
            shutil.rmtree(materials_path)

        if os.path.exists(other_materials_path):
            shutil.rmtree(other_materials_path)"""


    request.addfinalizer(finalizer)

    return None


"""
- removes existing gltf files if needed
- calls exporter on the testing scene
- launches bevy app & checks for output
- if all worked => test is a-ok
"""
def test_export_complex(setup_data):
    root_path = "../../testing/bevy_example"
    assets_root_path = os.path.join(root_path, "assets")
    models_path = os.path.join(assets_root_path, "models")
    auto_export_operator = bpy.ops.export_scenes.auto_gltf

    # with change detection
    # first, configure things
    # we use the global settings for that
    export_props = {
        "main_scene_names" : ['World'],
        "library_scene_names": ['Library']
    }
    stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
    stored_settings.clear()
    stored_settings.write(json.dumps(export_props))

    # move the main cube
    bpy.data.objects["Cube"].location = [1, 0, 0]
    # move the cube in the library
    bpy.data.objects["Blueprint1_mesh"].location = [1, 2, 1]

    auto_export_operator(
        direct_mode=True,
        export_output_folder="./models",
        export_scene_settings=True,
        export_blueprints=True,
        export_legacy_mode=False
    )
    # blueprint1 => has an instance, got changed, should export
    # blueprint2 => has NO instance, but marked as asset, should export
    # blueprint3 => has NO instance, not marked as asset, used inside blueprint 4: should export
    # blueprint4 => has an instance, with nested blueprint3, should export
    # blueprint5 => has NO instance, not marked as asset, should NOT export

    assert os.path.exists(os.path.join(models_path, "World.glb")) == True

    assert os.path.exists(os.path.join(models_path, "library", "Blueprint1.glb")) == True
    assert os.path.exists(os.path.join(models_path, "library", "Blueprint2.glb")) == True
    assert os.path.exists(os.path.join(models_path, "library", "Blueprint3.glb")) == True
    assert os.path.exists(os.path.join(models_path, "library", "Blueprint4_nested.glb")) == True
    assert os.path.exists(os.path.join(models_path, "library", "Blueprint5.glb")) == False

    # now run bevy
    bla = "cargo run --features bevy/dynamic_linking"
    # assert getattr(propertyGroup, 'a') == 0.5714026093482971
    FNULL = open(os.devnull, 'w') #use this if you want to suppress output to stdout from the subprocess
    filename = "my_file.dat"
    args = bla
    #subprocess.call(args, stdout=FNULL, stderr=FNULL, shell=False, cwd=bevy_run_exec_path)
    return_code = subprocess.call(["cargo", "run", "--features", "bevy/dynamic_linking"], cwd=root_path)
    print("RETURN CODE OF BEVY APP", return_code)
    assert return_code == 0
@@ -37,7 +37,6 @@ class GLTF_PT_auto_export_main(bpy.types.Panel):
 
         sfile = context.space_data
 
-
 class GLTF_PT_auto_export_root(bpy.types.Panel):
     bl_space_type = 'FILE_BROWSER'
     bl_region_type = 'TOOL_PROPS'
@@ -66,6 +65,7 @@ class GLTF_PT_auto_export_root(bpy.types.Panel):
 
         layout.active = operator.auto_export
        layout.prop(operator, 'will_save_settings')
+        layout.prop(operator, "export_change_detection")
         layout.prop(operator, "export_output_folder")
         layout.prop(operator, "export_scene_settings")
         layout.prop(operator, "export_legacy_mode")
@@ -125,8 +125,6 @@ class GLTF_PT_auto_export_root(bpy.types.Panel):
             remove_operator.scene_type = 'library'
         col.separator()
 
-
-
 class GLTF_PT_auto_export_blueprints(bpy.types.Panel):
     bl_space_type = 'FILE_BROWSER'
     bl_region_type = 'TOOL_PROPS'
|
Loading…
Reference in New Issue
Block a user