Mirror of https://github.com/kaosat-dev/Blender_bevy_components_workflow.git (synced 2024-11-26 21:37:01 +00:00)
Commit dfe6096831: Merge c44d82e7dc into 6c34ab8bd6
@@ -4,7 +4,7 @@ use bevy::utils::HashMap;
 #[derive(Component, Reflect, Default, Debug)]
 #[reflect(Component)]
 /// storage for animations for a given entity (hierarchy), essentially a clone of gltf's `named_animations`
-pub struct Animations {
+pub struct BlueprintAnimations {
     pub named_animations: HashMap<String, Handle<AnimationClip>>,
 }
 
@@ -13,4 +13,35 @@ pub struct Animations {
 /// so that the root entity knows which of its children contains an actualy `AnimationPlayer` component
 /// this is for convenience, because currently , Bevy's gltf parsing inserts `AnimationPlayers` "one level down"
 /// ie armature/root for animated models, which means more complex queries to trigger animations that we want to avoid
-pub struct AnimationPlayerLink(pub Entity);
+pub struct BlueprintAnimationPlayerLink(pub Entity);
+
+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+pub struct Animated{
+    pub animations: Vec<String>
+}
+
+
+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+/// storage for animations for a given entity (hierarchy), essentially a clone of gltf's `named_animations`
+pub struct InstanceAnimations {
+    pub named_animations: HashMap<String, Handle<AnimationClip>>,
+}
+
+#[derive(Component, Debug)]
+/// Stop gap helper component : this is inserted into a "root" entity (an entity representing a whole gltf file)
+/// so that the root entity knows which of its children contains an actualy `AnimationPlayer` component
+/// this is for convenience, because currently , Bevy's gltf parsing inserts `AnimationPlayers` "one level down"
+/// ie armature/root for animated models, which means more complex queries to trigger animations that we want to avoid
+pub struct InstanceAnimationPlayerLink(pub Entity);
+
+
+pub struct AnimationMarker{
+    pub frame:u32,
+    pub name: String,
+}
+
+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+pub struct AnimationMarkers(pub HashMap<String, HashMap<u32, Vec<String> >>);
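Note: downstream code queries these two components together on a blueprint root, resolving a clip by name through `named_animations` and driving the `AnimationPlayer` that lives on the linked child entity. A minimal sketch of that usage, modelled on the `play_animations` system further down in this commit (the system name and the "Walk" clip are illustrative, and the Bevy 0.13-era `play_with_transition` API used elsewhere in the diff is assumed):

    use std::time::Duration;
    use bevy::prelude::*;
    use bevy_gltf_blueprints::{BlueprintAnimationPlayerLink, BlueprintAnimations};

    // Sketch only: play a named blueprint animation on every linked AnimationPlayer.
    fn play_blueprint_animation(
        animated: Query<(&BlueprintAnimationPlayerLink, &BlueprintAnimations)>,
        mut animation_players: Query<&mut AnimationPlayer>,
    ) {
        for (link, animations) in animated.iter() {
            // the link stores the child entity that actually owns the AnimationPlayer
            if let Ok(mut player) = animation_players.get_mut(link.0) {
                if let Some(clip) = animations.named_animations.get("Walk") {
                    player
                        .play_with_transition(clip.clone(), Duration::from_millis(250))
                        .repeat();
                }
            }
        }
    }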
@@ -123,7 +123,15 @@ impl Plugin for BlueprintsPlugin {
             .register_type::<BlueprintName>()
             .register_type::<MaterialInfo>()
             .register_type::<SpawnHere>()
-            .register_type::<Animations>()
+            .register_type::<BlueprintAnimations>()
+            .register_type::<InstanceAnimations>()
+            .register_type::<Animated>()
+            .register_type::<AnimationMarkers>()
+            .register_type::<HashMap<u32, Vec<String>>>()
+            .register_type::<HashMap<String, HashMap<u32, Vec<String> >>>()
+
+
             .register_type::<BlueprintsList>()
             .register_type::<Vec<String>>()
             .register_type::<HashMap<String, Vec<String>>>()
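Note: the extra `HashMap` registrations are presumably there so that reflection can resolve the nested map types inside `AnimationMarkers` (animation name, then frame, then marker names). A hypothetical helper, just to illustrate the shape of data those registrations describe (the animation and marker names are taken from examples elsewhere in this commit):

    use bevy::utils::HashMap;
    use bevy_gltf_blueprints::AnimationMarkers;

    // Sketch only: build an AnimationMarkers value by hand.
    fn example_markers() -> AnimationMarkers {
        // frame -> marker names, for one animation
        let mut frames: HashMap<u32, Vec<String>> = HashMap::default();
        frames.insert(5, vec!["Marker_1".to_string()]);

        // animation name -> (frame -> marker names)
        let mut per_animation: HashMap<String, HashMap<u32, Vec<String>>> = HashMap::default();
        per_animation.insert("Blueprint1_jump".to_string(), frames);

        AnimationMarkers(per_animation)
    }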
@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
 
 use bevy::{gltf::Gltf, prelude::*, utils::HashMap};
 
-use crate::{Animations, BluePrintsConfig};
+use crate::{BlueprintAnimations, BluePrintsConfig};
 
 /// this is a flag component for our levels/game world
 #[derive(Component)]
@@ -279,11 +279,11 @@ pub(crate) fn spawn_from_blueprints(
                     transform: transforms,
                     ..Default::default()
                 },
-                Animations {
-                    named_animations: gltf.named_animations.clone(),
-                },
                 Spawned,
                 OriginalChildren(original_children),
+                BlueprintAnimations { // these are animations specific to the inside of the blueprint
+                    named_animations: gltf.named_animations.clone(),
+                }
             ));
 
             if add_to_world.is_some() {
@@ -5,7 +5,7 @@ use bevy::prelude::*;
 use bevy::scene::SceneInstance;
 // use bevy::utils::hashbrown::HashSet;
 
-use super::{AnimationPlayerLink, Animations};
+use super::{BlueprintAnimationPlayerLink, BlueprintAnimations};
 use super::{SpawnHere, Spawned};
 use crate::{
     AssetsToLoad, BlueprintAssetsLoaded, CopyComponents, InBlueprint, NoInBlueprint,
@@ -24,7 +24,7 @@ pub(crate) fn spawned_blueprint_post_process(
             Entity,
             &Children,
             &OriginalChildren,
-            &Animations,
+            &BlueprintAnimations,
             Option<&NoInBlueprint>,
             Option<&Name>,
         ),
@@ -85,7 +85,7 @@ pub(crate) fn spawned_blueprint_post_process(
                 // FIXME: stopgap solution: since we cannot use an AnimationPlayer at the root entity level
                 // and we cannot update animation clips so that the EntityPaths point to one level deeper,
                 // BUT we still want to have some marker/control at the root entity level, we add this
-                commands.entity(original).insert(AnimationPlayerLink(added));
+                commands.entity(original).insert(BlueprintAnimationPlayerLink(added));
             }
         }
     }
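Note: the stopgap described in that FIXME amounts to "find the descendant that Bevy's glTF loader gave the `AnimationPlayer`, and record it on the blueprint root". A rough sketch of that idea as a hypothetical standalone system (the crate itself does this work inside `spawned_blueprint_post_process`):

    use bevy::prelude::*;
    use bevy_gltf_blueprints::{BlueprintAnimationPlayerLink, BlueprintAnimations};

    // Sketch only: link each animated blueprint root to its AnimationPlayer child.
    fn link_animation_players(
        roots: Query<Entity, (With<BlueprintAnimations>, Without<BlueprintAnimationPlayerLink>)>,
        children: Query<&Children>,
        players: Query<(), With<AnimationPlayer>>,
        mut commands: Commands,
    ) {
        for root in roots.iter() {
            for descendant in children.iter_descendants(root) {
                if players.get(descendant).is_ok() {
                    commands
                        .entity(root)
                        .insert(BlueprintAnimationPlayerLink(descendant));
                    break;
                }
            }
        }
    }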
@@ -3373,6 +3373,17 @@
     "type": "object",
     "typeInfo": "Struct"
   },
+  "bevy_gltf_blueprints::animation::Animated": {
+    "additionalProperties": false,
+    "isComponent": true,
+    "isResource": false,
+    "properties": {},
+    "required": [],
+    "short_name": "Animated",
+    "title": "bevy_gltf_blueprints::animation::Animated",
+    "type": "object",
+    "typeInfo": "Struct"
+  },
   "bevy_gltf_blueprints::animation::Animations": {
     "additionalProperties": false,
     "isComponent": true,
@@ -3433,6 +3444,22 @@
     "type": "array",
     "typeInfo": "TupleStruct"
   },
+  "bevy_gltf_blueprints::spawn_from_blueprints::BlueprintsList": {
+    "isComponent": true,
+    "isResource": false,
+    "items": false,
+    "prefixItems": [
+      {
+        "type": {
+          "$ref": "#/$defs/bevy_utils::hashbrown::HashMap<alloc::string::String, alloc::vec::Vec<alloc::string::String>, bevy_utils::hashbrown::hash_map::DefaultHashBuilder>"
+        }
+      }
+    ],
+    "short_name": "BlueprintsList",
+    "title": "bevy_gltf_blueprints::spawn_from_blueprints::BlueprintsList",
+    "type": "array",
+    "typeInfo": "TupleStruct"
+  },
   "bevy_gltf_blueprints::spawn_from_blueprints::SpawnHere": {
     "additionalProperties": false,
     "isComponent": true,
@@ -10691,6 +10718,19 @@
     "type": "object",
     "typeInfo": "Value"
   },
+  "bevy_utils::hashbrown::HashMap<alloc::string::String, alloc::vec::Vec<alloc::string::String>, bevy_utils::hashbrown::hash_map::DefaultHashBuilder>": {
+    "additionalProperties": {
+      "type": {
+        "$ref": "#/$defs/alloc::vec::Vec<alloc::string::String>"
+      }
+    },
+    "isComponent": false,
+    "isResource": false,
+    "short_name": "HashMap<String, Vec<String>, DefaultHashBuilder>",
+    "title": "bevy_utils::hashbrown::HashMap<alloc::string::String, alloc::vec::Vec<alloc::string::String>, bevy_utils::hashbrown::hash_map::DefaultHashBuilder>",
+    "type": "object",
+    "typeInfo": "Map"
+  },
   "bevy_utils::smallvec::SmallVec<[bevy_ecs::entity::Entity; 8]>": {
     "isComponent": false,
     "isResource": false,
@@ -14,3 +14,4 @@ bevy_rapier3d = { version = "0.25.0", features = ["serde-serialize", "debug-rend
 bevy_asset_loader = { version = "0.20", features = ["standard_dynamic_assets"] }
 bevy_editor_pls = { version = "0.8" }
 rand = "0.8.5"
+json-writer ="0.3"
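Note: `json-writer` is what the updated `validate_export` test below uses to dump the spawned entity hierarchy to `bevy_hierarchy.json`. A minimal sketch of the same call pattern on a map of the same shape (the map contents here are made up):

    use std::collections::HashMap;
    use json_writer::to_json_string;

    fn main() {
        // parent name -> child names, the shape validate_export serializes
        let mut tree: HashMap<String, Vec<String>> = HashMap::new();
        tree.entry("world".to_string())
            .or_default()
            .push("Player".to_string());
        // prints a compact JSON object, e.g. {"world":["Player"]}
        println!("{}", to_json_string(&tree));
    }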
@@ -2947,6 +2947,39 @@
     "type": "object",
     "typeInfo": "Struct"
   },
+  "bevy_example::game::Marker1": {
+    "additionalProperties": false,
+    "isComponent": true,
+    "isResource": false,
+    "properties": {},
+    "required": [],
+    "short_name": "Marker1",
+    "title": "bevy_example::game::Marker1",
+    "type": "object",
+    "typeInfo": "Struct"
+  },
+  "bevy_example::game::Marker2": {
+    "additionalProperties": false,
+    "isComponent": true,
+    "isResource": false,
+    "properties": {},
+    "required": [],
+    "short_name": "Marker2",
+    "title": "bevy_example::game::Marker2",
+    "type": "object",
+    "typeInfo": "Struct"
+  },
+  "bevy_example::game::Marker3": {
+    "additionalProperties": false,
+    "isComponent": true,
+    "isResource": false,
+    "properties": {},
+    "required": [],
+    "short_name": "Marker3",
+    "title": "bevy_example::game::Marker3",
+    "type": "object",
+    "typeInfo": "Struct"
+  },
   "bevy_example::test_components::AComponentWithAnExtremlyExageratedOrMaybeNotButCouldBeNameOrWut": {
     "additionalProperties": false,
     "isComponent": true,
@@ -3516,7 +3549,26 @@
     "type": "object",
     "typeInfo": "Struct"
   },
-  "bevy_gltf_blueprints::animation::Animations": {
+  "bevy_gltf_blueprints::animation::Animated": {
+    "additionalProperties": false,
+    "isComponent": true,
+    "isResource": false,
+    "properties": {
+      "animations": {
+        "type": {
+          "$ref": "#/$defs/alloc::vec::Vec<alloc::string::String>"
+        }
+      }
+    },
+    "required": [
+      "animations"
+    ],
+    "short_name": "Animated",
+    "title": "bevy_gltf_blueprints::animation::Animated",
+    "type": "object",
+    "typeInfo": "Struct"
+  },
+  "bevy_gltf_blueprints::animation::BlueprintAnimations": {
     "additionalProperties": false,
     "isComponent": true,
     "isResource": false,
@@ -3530,8 +3582,27 @@
     "required": [
       "named_animations"
     ],
-    "short_name": "Animations",
-    "title": "bevy_gltf_blueprints::animation::Animations",
+    "short_name": "BlueprintAnimations",
+    "title": "bevy_gltf_blueprints::animation::BlueprintAnimations",
+    "type": "object",
+    "typeInfo": "Struct"
+  },
+  "bevy_gltf_blueprints::animation::InstanceAnimations": {
+    "additionalProperties": false,
+    "isComponent": true,
+    "isResource": false,
+    "properties": {
+      "named_animations": {
+        "type": {
+          "$ref": "#/$defs/bevy_utils::hashbrown::HashMap<alloc::string::String, bevy_asset::handle::Handle<bevy_animation::AnimationClip>, bevy_utils::hashbrown::hash_map::DefaultHashBuilder>"
+        }
+      }
+    },
+    "required": [
+      "named_animations"
+    ],
+    "short_name": "InstanceAnimations",
+    "title": "bevy_gltf_blueprints::animation::InstanceAnimations",
     "type": "object",
     "typeInfo": "Struct"
   },
Binary file not shown.
Binary image changed (size 626 KiB before, 644 KiB after).
@@ -1,52 +1,42 @@
 pub mod in_game;
 use std::{
-    fs::{self},
-    time::Duration,
+    collections::HashMap, fs, time::Duration
 };
 
-use bevy_gltf_blueprints::{AnimationPlayerLink, BlueprintName, BlueprintsList};
+use bevy_gltf_blueprints::{Animated, AnimationMarkers, BlueprintAnimationPlayerLink, BlueprintAnimations, BlueprintName, BlueprintsList, GltfBlueprintsSet, InstanceAnimationPlayerLink, InstanceAnimations};
 pub use in_game::*;
 
 use bevy::{
-    prelude::*, render::view::screenshot::ScreenshotManager, time::common_conditions::on_timer,
-    window::PrimaryWindow,
+    ecs::query, gltf::Gltf, prelude::*, render::view::screenshot::ScreenshotManager, time::common_conditions::on_timer, window::PrimaryWindow
 };
 use bevy_gltf_worlflow_examples_common_rapier::{AppState, GameState};
 
 use crate::{TupleTestF32, UnitTest};
+use json_writer::to_json_string;
 
 fn start_game(mut next_app_state: ResMut<NextState<AppState>>) {
     next_app_state.set(AppState::AppLoading);
 }
 
 
 // if the export from Blender worked correctly, we should have animations (simplified here by using AnimationPlayerLink)
-// if the export from Blender worked correctly, we should have an Entity called "Cylinder" that has two components: UnitTest, TupleTestF32
 // if the export from Blender worked correctly, we should have an Entity called "Blueprint4_nested" that has a child called "Blueprint3" that has a "BlueprintName" component with value Blueprint3
 // if the export from Blender worked correctly, we should have a blueprints_list
+// if the export from Blender worked correctly, we should have the correct tree of entities
 #[allow(clippy::too_many_arguments)]
 fn validate_export(
     parents: Query<&Parent>,
     children: Query<&Children>,
     names: Query<&Name>,
     blueprints: Query<(Entity, &Name, &BlueprintName)>,
-    animation_player_links: Query<(Entity, &AnimationPlayerLink)>,
-    exported_cylinder: Query<(Entity, &Name, &UnitTest, &TupleTestF32)>,
+    animation_player_links: Query<(Entity, &BlueprintAnimationPlayerLink)>,
     empties_candidates: Query<(Entity, &Name, &GlobalTransform)>,
 
     blueprints_list: Query<(Entity, &BlueprintsList)>,
+    root: Query<(Entity, &Name, &Children), (Without<Parent>, With<Children>)>
 ) {
     let animations_found = !animation_player_links.is_empty();
 
-    let mut cylinder_found = false;
-    if let Ok(nested_cylinder) = exported_cylinder.get_single() {
-        let parent_name = names
-            .get(parents.get(nested_cylinder.0).unwrap().get())
-            .unwrap();
-        cylinder_found = parent_name.to_string() == *"Cube.001"
-            && nested_cylinder.1.to_string() == *"Cylinder"
-            && nested_cylinder.3 .0 == 75.1;
-    }
-
     let mut nested_blueprint_found = false;
     for (entity, name, blueprint_name) in blueprints.iter() {
         if name.to_string() == *"Blueprint4_nested" && blueprint_name.0 == *"Blueprint4_nested" {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// check if there are blueprints_list components
|
||||||
let blueprints_list_found = !blueprints_list.is_empty();
|
let blueprints_list_found = !blueprints_list.is_empty();
|
||||||
|
|
||||||
|
// there should be no entity named xxx____bak as it means an error in the Blender side export process
|
||||||
|
let mut exported_names_correct = true;
|
||||||
|
for name in names.iter() {
|
||||||
|
if name.to_string().ends_with("___bak") {
|
||||||
|
exported_names_correct = false;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// generate parent/child "tree"
|
||||||
|
if !root.is_empty() {
|
||||||
|
let root = root.single();
|
||||||
|
let mut tree: HashMap<String, Vec<String>> = HashMap::new();
|
||||||
|
|
||||||
|
for child in children.iter_descendants(root.0) {
|
||||||
|
let child_name:String = names.get(child).map_or(String::from("no_name"), |e| e.to_string() ); //|e| e.to_string(), || "no_name".to_string());
|
||||||
|
//println!(" child {}", child_name);
|
||||||
|
let parent = parents.get(child).unwrap();
|
||||||
|
let parent_name:String = names.get(parent.get()).map_or(String::from("no_name"), |e| e.to_string() ); //|e| e.to_string(), || "no_name".to_string());
|
||||||
|
tree.entry(parent_name).or_default().push(child_name.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let hierarchy = to_json_string(&tree);
|
||||||
|
fs::write(
|
||||||
|
"bevy_hierarchy.json",
|
||||||
|
hierarchy
|
||||||
|
)
|
||||||
|
.expect("unable to write hierarchy file")
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
fs::write(
|
fs::write(
|
||||||
"bevy_diagnostics.json",
|
"bevy_diagnostics.json",
|
||||||
format!(
|
format!(
|
||||||
"{{ \"animations\": {}, \"cylinder_found\": {} , \"nested_blueprint_found\": {}, \"empty_found\": {}, \"blueprints_list_found\": {} }}",
|
"{{ \"animations\": {}, \"nested_blueprint_found\": {}, \"empty_found\": {}, \"blueprints_list_found\": {}, \"exported_names_correct\": {} }}",
|
||||||
animations_found, cylinder_found, nested_blueprint_found, empty_found, blueprints_list_found
|
animations_found, nested_blueprint_found, empty_found, blueprints_list_found, exported_names_correct
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.expect("Unable to write file");
|
.expect("Unable to write file");
|
||||||
@@ -97,18 +118,267 @@ fn exit_game(mut app_exit_events: ResMut<Events<bevy::app::AppExit>>) {
     app_exit_events.send(bevy::app::AppExit);
 }
 
+
+#[derive(Resource)]
+struct MainAnimations(Vec<Handle<AnimationClip>>);
+
+#[derive(Resource)]
+struct AnimTest(Handle<Gltf>);
+fn setup_main_scene_animations(
+    asset_server: Res<AssetServer>,
+    mut commands: Commands,
+) {
+    /*commands.insert_resource(MainAnimations(vec![
+        asset_server.load("models/World.glb#Blueprint1_jump"),
+        asset_server.load("models/World.glb#Blueprint1_move"),
+
+        // asset_server.load("models/library/Blueprint6_animated.glb#Run"),
+
+    ]));*/
+
+    commands.insert_resource(AnimTest(asset_server.load("models/World.glb")));
+}
+
+fn animations(
+    added_animation_players:Query<(Entity, &Name, &AnimationPlayer)>,
+    addded_animateds:Query<(Entity, &Name, &Animated),(Added<Animated>)>,
+
+    animtest: Res<AnimTest>,
+
+    mut commands: Commands,
+    assets_gltf: Res<Assets<Gltf>>,
+
+    parents: Query<&Parent>,
+    names: Query<&Name>,
+
+) {
+    for (entity, name, animated) in addded_animateds.iter() {
+        // println!("animated stuf {:?} on entity {}", animated, name);
+        let gltf = assets_gltf.get(&animtest.0).unwrap();
+
+        let animations_list = animated;
+        let mut matching_data = true;
+        for animation_name in &animations_list.animations {
+            if !gltf.named_animations.contains_key(animation_name){
+                matching_data = false;
+                break;
+            }
+        }
+        if matching_data {
+            println!("inserting Animations components into {} ({:?})", name, entity);
+            println!("Found match {:?}", gltf.named_animations);
+            // commands.entity(entity).remove::<Animations>();
+            // FIXME: for some reason this does NOT overwrite the component ??
+
+            commands.entity(entity).insert(
+                InstanceAnimations {
+                    named_animations: gltf.named_animations.clone(),
+                },
+            );
+
+
+            //animations.named_animations = gltf.named_animations.clone();
+
+
+            for ancestor in parents.iter_ancestors(entity) {
+                if added_animation_players.contains(ancestor) {
+                    println!("found match with animationPlayer !! {:?}",names.get(ancestor));
+                    commands.entity(entity).insert(InstanceAnimationPlayerLink(ancestor));
+                }
+                // info!("{:?} is an ancestor of {:?}", ancestor, player);
+            }
+        }
+    }
+}
+
+fn play_animations(
+    animated_marker1: Query<(&InstanceAnimationPlayerLink, &InstanceAnimations), (With<Animated>, With<Marker1>)>,
+    animated_marker2: Query<(&InstanceAnimationPlayerLink, &InstanceAnimations), (With<Animated>, With<Marker2>)>,
+    animated_marker3: Query<(&InstanceAnimationPlayerLink, &InstanceAnimations, &BlueprintAnimationPlayerLink, &BlueprintAnimations), (With<Animated>, With<Marker3>)>,
+
+    mut animation_players: Query<&mut AnimationPlayer>,
+    keycode: Res<ButtonInput<KeyCode>>,
+
+) {
+    if keycode.just_pressed(KeyCode::KeyM) {
+        for (link, animations) in animated_marker1.iter() {
+            println!("animations {:?}", animations.named_animations);
+            let mut animation_player = animation_players.get_mut(link.0).unwrap();
+            let anim_name = "Blueprint1_move";
+            animation_player
+                .play_with_transition(
+                    animations
+                        .named_animations
+                        .get(anim_name)
+                        .expect("animation name should be in the list")
+                        .clone(),
+                    Duration::from_secs(5),
+                )
+                .repeat();
+        }
+    }
+    if keycode.just_pressed(KeyCode::KeyJ) {
+        for (link, animations) in animated_marker1.iter() {
+            println!("animations {:?}", animations.named_animations);
+            let mut animation_player = animation_players.get_mut(link.0).unwrap();
+            let anim_name = "Blueprint1_jump";
+            animation_player
+                .play_with_transition(
+                    animations
+                        .named_animations
+                        .get(anim_name)
+                        .expect("animation name should be in the list")
+                        .clone(),
+                    Duration::from_secs(5),
+                )
+                .repeat();
+        }
+    }
+
+    if keycode.just_pressed(KeyCode::KeyA) {
+        for (link, animations) in animated_marker2.iter() {
+            println!("animations {:?}", animations.named_animations);
+            let mut animation_player = animation_players.get_mut(link.0).unwrap();
+            let anim_name = "Blueprint1_move";
+            animation_player
+                .play_with_transition(
+                    animations
+                        .named_animations
+                        .get(anim_name)
+                        .expect("animation name should be in the list")
+                        .clone(),
+                    Duration::from_secs(5),
+                )
+                .repeat();
+        }
+    }
+    if keycode.just_pressed(KeyCode::KeyB) {
+        for (link, animations) in animated_marker2.iter() {
+            println!("animations {:?}", animations.named_animations);
+            let mut animation_player = animation_players.get_mut(link.0).unwrap();
+            let anim_name = "Blueprint1_jump";
+            animation_player
+                .play_with_transition(
+                    animations
+                        .named_animations
+                        .get(anim_name)
+                        .expect("animation name should be in the list")
+                        .clone(),
+                    Duration::from_secs(5),
+                )
+                .repeat();
+        }
+    }
+
+    // play instance animation
+    if keycode.just_pressed(KeyCode::KeyW) {
+        for (link, animations, _, _) in animated_marker3.iter() {
+            println!("animations {:?}", animations.named_animations);
+            let mut animation_player = animation_players.get_mut(link.0).unwrap();
+            let anim_name = "Blueprint8_move";
+            animation_player
+                .play_with_transition(
+                    animations
+                        .named_animations
+                        .get(anim_name)
+                        .expect("animation name should be in the list")
+                        .clone(),
+                    Duration::from_secs(5),
+                )
+                .repeat();
+        }
+    }
+    // play blueprint animation
+    if keycode.just_pressed(KeyCode::KeyX) {
+        for (_, _, link, animations) in animated_marker3.iter() {
+            println!("animations {:?}", animations.named_animations);
+            let mut animation_player = animation_players.get_mut(link.0).unwrap();
+            let anim_name = "Walk";
+            animation_player
+                .play_with_transition(
+                    animations
+                        .named_animations
+                        .get(anim_name)
+                        .expect("animation name should be in the list")
+                        .clone(),
+                    Duration::from_secs(5),
+                )
+                .repeat();
+        }
+    }
+}
+
+fn trigger_event_based_on_animation_marker(
+    bla: Query<(Entity, &AnimationMarkers, &InstanceAnimationPlayerLink, &InstanceAnimations)>,
+    animation_players: Query<&AnimationPlayer>,
+    animation_clips: Res<Assets<AnimationClip>>
+) {
+    for (entity, markers, link, animations) in bla.iter() {
+        let animation_player = animation_players.get(link.0).unwrap();
+
+        let animation_clip = animation_clips.get(animation_player.animation_clip());
+
+        if animation_clip.is_some(){
+            // println!("Entity {:?} markers {:?}", entity, markers);
+            // println!("Player {:?} {}", animation_player.elapsed(), animation_player.completions());
+
+            let animation_total_length = animation_clip.unwrap().duration();
+            let animation_total_frames = 80; // FIXME just for testing
+            // TODO: we also need to take playback speed into account
+            let time_in_animation = animation_player.elapsed() - (animation_player.completions() as f32) * animation_total_length;//(animation_player.elapsed() / (animation_player.completions() as f32 + 1.0)) ;// / animation_total_length;
+            let time_bla = (animation_total_frames as f32 / animation_total_length) * time_in_animation ;
+            let frame = time_bla as u32;
+            // println!("time_in_animation {} out of {}, completions {}, // frame {}",time_in_animation, animation_total_length, animation_player.completions(), frame);
+            //animation_player.animation_clip()
+
+            let matching_animation_marker = &markers.0[&"Blueprint1_jump".to_string()];
+            if matching_animation_marker.contains_key(&frame) {
+                let matching_markers_per_frame = matching_animation_marker.get(&frame).unwrap();
+                println!("FOUND A MARKER {:?} at frame {}", matching_markers_per_frame, frame);
+            }
+        }
+
+    }
+}
+
+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+/// flag component for testing
+pub struct Marker1;
+
+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+/// flag component for testing
+pub struct Marker2;
+
+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+/// flag component for testing
+pub struct Marker3;
+
 pub struct GamePlugin;
 impl Plugin for GamePlugin {
     fn build(&self, app: &mut App) {
-        app.add_systems(Update, (spawn_test).run_if(in_state(GameState::InGame)))
+        app.register_type::<Marker1>()
+            .register_type::<Marker2>()
+            .register_type::<Marker3>()
+
+            .add_systems(Update, (spawn_test).run_if(in_state(GameState::InGame)))
             .add_systems(Update, validate_export)
             .add_systems(OnEnter(AppState::MenuRunning), start_game)
             .add_systems(OnEnter(AppState::AppRunning), setup_game)
-            .add_systems(Update, generate_screenshot.run_if(on_timer(Duration::from_secs_f32(0.2)))) // TODO: run once
+            .add_systems(OnEnter(AppState::MenuRunning), setup_main_scene_animations)
+            .add_systems(Update, (animations, trigger_event_based_on_animation_marker)
+                .run_if(in_state(AppState::AppRunning))
+                .after(GltfBlueprintsSet::AfterSpawn)
+            )
+            .add_systems(Update, play_animations)
+            /* .add_systems(Update, generate_screenshot.run_if(on_timer(Duration::from_secs_f32(0.2)))) // TODO: run once
             .add_systems(
                 Update,
                 exit_game.run_if(on_timer(Duration::from_secs_f32(0.5))),
-            ) // shut down the app after this time
+            ) // shut down the app after this time*/
         ;
     }
 }
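Note: `trigger_event_based_on_animation_marker` maps the player's elapsed time back to a frame index before looking it up in `AnimationMarkers`. A worked sketch of that arithmetic under the same assumptions the code makes (a hard-coded frame count and no playback-speed handling; all numbers below are illustrative):

    // Sketch of the frame computation used in the system above.
    fn frame_for(elapsed: f32, completions: u32, clip_length_secs: f32, total_frames: u32) -> u32 {
        // time spent inside the current loop of the clip
        let time_in_animation = elapsed - completions as f32 * clip_length_secs;
        // frames per second = total_frames / clip_length_secs
        ((total_frames as f32 / clip_length_secs) * time_in_animation) as u32
    }

    // Example: elapsed = 5.5s after 2 full loops of a 2.5s, 80-frame clip:
    // time_in_animation = 5.5 - 2 * 2.5 = 0.5s, frame = (80 / 2.5) * 0.5 = 16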
@@ -1,5 +1,7 @@
 import os
 import bpy
 
+from .get_standard_exporter_settings import get_standard_exporter_settings
 from .preferences import (AutoExportGltfPreferenceNames)
 
 def generate_gltf_export_preferences(addon_prefs):
@@ -37,10 +39,31 @@ def generate_gltf_export_preferences(addon_prefs):
         export_optimize_animation_size=False
     )
 
+
+
+
     for key in addon_prefs.__annotations__.keys():
         if str(key) not in AutoExportGltfPreferenceNames:
             #print("overriding setting", key, "value", getattr(addon_prefs,key))
-            gltf_export_preferences[key] = getattr(addon_prefs,key)
+            gltf_export_preferences[key] = getattr(addon_prefs, key)
+
+
+    """standard_gltf_exporter_settings = get_standard_exporter_settings()
+    print("standard settings", standard_gltf_exporter_settings)
+
+    constant_keys = [
+        'export_cameras',
+        'export_extras', # For custom exported properties.
+        'export_lights',
+    ]
+
+    # a certain number of essential params should NEVER be overwritten , no matter the settings of the standard exporter
+    for key in standard_gltf_exporter_settings.keys():
+        if str(key) not in constant_keys:
+            gltf_export_preferences[key] = standard_gltf_exporter_settings.get(key)
+
+    print("final export preferences", gltf_export_preferences)"""
+
+
     return gltf_export_preferences
 
@@ -0,0 +1,9 @@
+import bpy
+
+def get_standard_exporter_settings():
+    settings_key = 'glTF2ExportSettings'
+    for scene in bpy.data.scenes:
+        if settings_key in scene:
+            settings = scene[settings_key]
+            #print("standard exporter settings", settings, dict(settings))
+            return dict(settings)
@@ -20,87 +20,165 @@ def remove_unwanted_custom_properties(object):
     for component_name in object.keys():
         if not is_component_valid(object, component_name):
             to_remove.append(component_name)
 
     for cp in custom_properties_to_filter_out + to_remove:
         if cp in object:
             del object[cp]
 
-def duplicate_object(object):
-    obj_copy = object.copy()
-    if object.data:
-        data = object.data.copy()
-        obj_copy.data = data
-    if object.animation_data and object.animation_data.action:
-        obj_copy.animation_data.action = object.animation_data.action.copy()
-    return obj_copy
-
-#also removes unwanted custom_properties for all objects in hiearchy
-def duplicate_object_recursive(object, parent, collection):
-    original_name = object.name
-    object.name = original_name + "____bak"
-    copy = duplicate_object(object)
-    copy.name = original_name
-    collection.objects.link(copy)
-
-    remove_unwanted_custom_properties(copy)
-
-    if parent:
+# TODO: rename actions ?
+# reference https://github.com/KhronosGroup/glTF-Blender-IO/blob/main/addons/io_scene_gltf2/blender/exp/animation/gltf2_blender_gather_action.py#L481
+def copy_animation_data(source, target):
+    """if source.data:
+        data = source.data.copy()
+        target.data = data"""
+    if source.animation_data and source.animation_data:
+        #print("copying animation data from", source.name, "to", target.name)
+        print("I have animation data")
+        ad = source.animation_data
+        """if ad.action:
+            print(source.name,'uses',ad.action.name)"""
+
+        animations = []
+        blender_actions = []
+        blender_tracks = {}
+
+        # TODO: this might need to be modified/ adapted to match the standard gltf exporter settings
+        for track in ad.nla_tracks:
+            #print("track", track.name, track.active)
+            non_muted_strips = [strip for strip in track.strips if strip.action is not None and strip.mute is False]
+
+            for strip in non_muted_strips: #t.strips:
+                print("  ", source.name,'uses',strip.action.name, "active", strip.active, "action", strip.action)
+                blender_actions.append(strip.action)
+                blender_tracks[strip.action.name] = track.name
+
+        # Remove duplicate actions.
+        blender_actions = list(set(blender_actions))
+        # sort animations alphabetically (case insensitive) so they have a defined order and match Blender's Action list
+        blender_actions.sort(key = lambda a: a.name.lower())
+
+        markers_per_animation = {}
+        for action in blender_actions:
+            animation_name = blender_tracks[action.name]
+            animations.append(animation_name)
+
+            markers_per_animation[animation_name] = {}
+
+            print("markers", action.pose_markers, "for", action.name)
+            for marker in action.pose_markers:
+
+                if marker.frame not in markers_per_animation[animation_name]:
+                    markers_per_animation[animation_name][marker.frame] = []
+                print(" marker", marker.name, marker.frame)
+
+                markers_per_animation[animation_name][marker.frame].append(marker.name)
+
+        print("animations", animations)
+
+        """if target.animation_data == None:
+            target.animation_data_create()
+        target.animation_data.action = source.animation_data.action.copy()"""
+        # alternative method, using the build in link animation operator
+        with bpy.context.temp_override(active_object=source, selected_editable_objects=[target]):
+            bpy.ops.object.make_links_data(type='ANIMATION')
+        # we add an "animated" flag component
+        target['Animated'] = f'(animations: {animations})'.replace("'", '"')
+
+        markers_formated = '{'
+        for animation in markers_per_animation.keys():
+            markers_formated += f'"{animation}":'
+            markers_formated += "{"
+            for frame in markers_per_animation[animation].keys():
+                markers = markers_per_animation[animation][frame]
+                markers_formated += f"{frame}:{markers}, ".replace("'", '"')
+            markers_formated += '}, '
+        markers_formated += '}'
+        print("markers_formated", markers_formated)
+        target["AnimationMarkers"] = f'( {markers_formated} )'
+        #'({"animation_name": {5: ["Marker_1"]} })'
+        #f'({json.dumps(markers_per_animation)})'
+
+        """print("copying animation data for", source.name, target.animation_data)
+        properties = [p.identifier for p in source.animation_data.bl_rna.properties if not p.is_readonly]
+        for prop in properties:
+            print("copying stuff", prop)
+            setattr(target.animation_data, prop, getattr(source.animation_data, prop))"""
+
+
+
+def duplicate_object(object, parent, combine_mode, destination_collection, library_collections, legacy_mode, nester=""):
+    copy = None
+    if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in library_collections)) ):
+        #print("creating empty for", object.name, object.instance_collection.name, library_collections, combine_mode)
+        collection_name = object.instance_collection.name
+        original_name = object.name
+
+        object.name = original_name + "____bak"
+        empty_obj = make_empty(original_name, object.location, object.rotation_euler, object.scale, destination_collection)
+        """we inject the collection/blueprint name, as a component called 'BlueprintName', but we only do this in the empty, not the original object"""
+        empty_obj['BlueprintName'] = '"'+collection_name+'"' if legacy_mode else '("'+collection_name+'")'
+        empty_obj['SpawnHere'] = '()'
+
+        # we also inject a list of all sub blueprints, so that the bevy side can preload them
+        if not legacy_mode:
+            root_node = CollectionNode()
+            root_node.name = "root"
+            children_per_collection = {}
+            get_sub_collections([object.instance_collection], root_node, children_per_collection)
+            empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_collection))})"
+
+        # empty_obj["AnimationMarkers"] = '({"animation_name": {5: "Marker_1"} })'
+
+        #'({5: "sdf"})'#.replace('"',"'") #f"({json.dumps(dict(animation_foo))})"
+        #empty_obj["Assets"] = {"Animations": [], "Materials": [], "Models":[], "Textures":[], "Audio":[], "Other":[]}
+
+        # we copy custom properties over from our original object to our empty
+        for component_name, component_value in object.items():
+            if component_name not in custom_properties_to_filter_out and is_component_valid(object, component_name): #copy only valid properties
+                empty_obj[component_name] = component_value
+        copy = empty_obj
+    else:
+        # for objects which are NOT collection instances
+        # we create a copy of our object and its children, to leave the original one as it is
+        original_name = object.name
+        object.name = original_name + "____bak"
+        copy = object.copy()
+        copy.name = original_name
+
+
+        destination_collection.objects.link(copy)
+
+        """if object.parent == None:
+            if parent_empty is not None:
+                copy.parent = parent_empty
+        """
+
+    print(nester, "copy", copy)
+    # do this both for empty replacements & normal copies
+    if parent is not None:
         copy.parent = parent
+    remove_unwanted_custom_properties(copy)
+    copy_animation_data(object, copy)
+
     for child in object.children:
-        duplicate_object_recursive(child, copy, collection)
-    return copy
+        duplicate_object(child, copy, combine_mode, destination_collection, library_collections, legacy_mode, nester+" ")
 
 
 # copies the contents of a collection into another one while replacing library instances with empties
 def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, library_collections=[], addon_prefs={}):
     collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
     legacy_mode = getattr(addon_prefs, "export_legacy_mode")
     collection_instances_combine_mode= collection_instances_combine_mode
 
     for object in source_collection.objects:
+        if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
+            continue
         if filter is not None and filter(object) is False:
            continue
         #check if a specific collection instance does not have an ovveride for combine_mode
         combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
-        if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in library_collections)) ):
-            #print("creating empty for", object.name, object.instance_collection.name, library_collections, combine_mode)
-            collection_name = object.instance_collection.name
-            original_name = object.name
-
-            object.name = original_name + "____bak"
-            empty_obj = make_empty(original_name, object.location, object.rotation_euler, object.scale, destination_collection)
-            """we inject the collection/blueprint name, as a component called 'BlueprintName', but we only do this in the empty, not the original object"""
-            empty_obj['BlueprintName'] = '"'+collection_name+'"' if legacy_mode else '("'+collection_name+'")'
-            empty_obj['SpawnHere'] = '()'
-
-            # we also inject a list of all sub blueprints, so that the bevy side can preload them
-            if not legacy_mode:
-                root_node = CollectionNode()
-                root_node.name = "root"
-                children_per_collection = {}
-                print("collection stuff", original_name)
-                get_sub_collections([object.instance_collection], root_node, children_per_collection)
-                empty_obj["BlueprintsList"] = f"({json.dumps(dict(children_per_collection))})"
-            #empty_obj["Assets"] = {"Animations": [], "Materials": [], "Models":[], "Textures":[], "Audio":[], "Other":[]}
-
-
-            # we copy custom properties over from our original object to our empty
-            for component_name, component_value in object.items():
-                if component_name not in custom_properties_to_filter_out and is_component_valid(object, component_name): #copy only valid properties
-                    empty_obj[component_name] = component_value
-            if parent_empty is not None:
-                empty_obj.parent = parent_empty
-        else:
-
-            # we create a copy of our object and its children, to leave the original one as it is
-            if object.parent == None:
-                copy = duplicate_object_recursive(object, None, destination_collection)
-
-                if parent_empty is not None:
-                    copy.parent = parent_empty
-
-        # for every sub-collection of the source, copy its content into a new sub-collection of the destination
+        parent = parent_empty
+        duplicate_object(object, parent, combine_mode, destination_collection, library_collections, legacy_mode)
+
+    # for every child-collection of the source, copy its content into a new sub-collection of the destination
     for collection in source_collection.children:
         original_name = collection.name
         collection.name = original_name + "____bak"
@@ -108,7 +186,6 @@ def copy_hollowed_collection_into(source_collection, destination_collection, par
 
         if parent_empty is not None:
             collection_placeholder.parent = parent_empty
 
         copy_hollowed_collection_into(
             source_collection = collection,
             destination_collection = destination_collection,
@@ -118,6 +195,8 @@ def copy_hollowed_collection_into(source_collection, destination_collection, par
             addon_prefs=addon_prefs
         )
 
+
+
     return {}
 
 # clear & remove "hollow scene"
@@ -138,14 +217,14 @@ def clear_hollow_scene(temp_scene, original_root_collection):
     # reset original names
     restore_original_names(original_root_collection)
 
-    # remove empties (only needed when we go via ops ????)
+    # remove any data we created
     temp_root_collection = temp_scene.collection
-    temp_scene_objects = [o for o in temp_root_collection.objects]
+    temp_scene_objects = [o for o in temp_root_collection.all_objects]
     for object in temp_scene_objects:
+        print("removing", object.name)
         bpy.data.objects.remove(object, do_unlink=True)
     # remove the temporary scene
-    bpy.data.scenes.remove(temp_scene)
+    bpy.data.scenes.remove(temp_scene, do_unlink=True)
 
 
 # convenience utility to get lists of scenes
 def get_scenes(addon_prefs):
@@ -160,9 +239,6 @@ def get_scenes(addon_prefs):
 
     return [level_scene_names, level_scenes, library_scene_names, library_scenes]
 
-
-
-
 def inject_blueprints_list_into_main_scene(scene):
     print("injecting assets/blueprints data into scene")
     root_collection = scene.collection
@@ -0,0 +1 @@
{"b_Tail02_013":["b_Tail03_014"],"Blueprint4_nested.001":["Blueprint3"],"Collection 2 1":["Empty_in_sub_collection"],"b_Root_00":["b_Hip_01"],"b_LeftForeArm_010":["b_LeftHand_011"],"b_Spine01_02":["b_Spine02_03"],"Blueprint7_hierarchy.001":["Blueprint4_nested.001","Cube.001"],"b_RightLeg01_019":["b_RightLeg02_020"],"b_LeftUpperArm_09":["b_LeftForeArm_010"],"no_name":["Parent_Object","lighting_components_World","assets_list_World_components","Collection","Collection 2"],"Blueprint3":["Blueprint3_mesh","Blueprint3_mesh"],"world":["no_name"],"Parent_Object":["Cube.003","Blueprint1","Cylinder.001"],"Light":["Light","DirectionalLight Gizmo"],"Blueprint1.001":["Blueprint1_mesh"],"Blueprint7_hierarchy":["Cube.001"],"Spot":["Spot"],"b_Hip_01":["b_Spine01_02","b_Tail01_012","b_LeftLeg01_015","b_RightLeg01_019"],"Cylinder":["Cylinder.001","Cylinder.001"],"Collection 2":["Collection 2 1","Empty_in_collection","Spot"],"b_RightForeArm_07":["b_RightHand_08"],"Blueprint3_mesh":["Cylinder","Cylinder"],"Blueprint4_nested":["Blueprint3"],"Fox_mesh":["fox1"],"b_LeftLeg01_015":["b_LeftLeg02_016"],"b_Neck_04":["b_Head_05"],"b_RightFoot01_021":["b_RightFoot02_022"],"Blueprint1_mesh":["Cube.001","Cube.001"],"b_Tail01_012":["b_Tail02_013"],"Fox":["Fox_mesh","_rootJoint"],"Collection":["Blueprint1.001","Blueprint4_nested","Blueprint6_animated","Blueprint7_hierarchy","Camera","Cube","Empty","Light","Plane"],"Cube":["Cube"],"_rootJoint":["b_Root_00"],"b_RightLeg02_020":["b_RightFoot01_021"],"b_RightUpperArm_06":["b_RightForeArm_07"],"Plane":["Plane"],"Camera":["Camera Gizmo"],"Blueprint6_animated":["Fox"],"b_Spine02_03":["b_Neck_04","b_RightUpperArm_06","b_LeftUpperArm_09"],"b_LeftLeg02_016":["b_LeftFoot01_017"],"b_LeftFoot01_017":["b_LeftFoot02_018"],"Cube.001":["Cube.002","Cylinder","Cube.002","Cylinder"],"Cylinder.001":["Cylinder.002","Blueprint7_hierarchy.001","Empty_as_child"],"Blueprint1":["Blueprint1_mesh"]}
|
@ -19,7 +19,6 @@ def setup_data(request):
|
|||||||
|
|
||||||
def finalizer():
|
def finalizer():
|
||||||
print("\nPerforming teardown...")
|
print("\nPerforming teardown...")
|
||||||
get_orphan_data()
|
|
||||||
|
|
||||||
if os.path.exists(models_path):
|
if os.path.exists(models_path):
|
||||||
shutil.rmtree(models_path)
|
shutil.rmtree(models_path)
|
||||||
@ -38,7 +37,10 @@ def setup_data(request):
|
|||||||
|
|
||||||
def get_orphan_data():
|
def get_orphan_data():
|
||||||
orphan_meshes = [m.name for m in bpy.data.meshes if m.users == 0]
|
orphan_meshes = [m.name for m in bpy.data.meshes if m.users == 0]
|
||||||
# print("orphan meshes before", orphan_meshes)
|
orphan_objects = [m.name for m in bpy.data.objects if m.users == 0]
|
||||||
|
|
||||||
|
#print("orphan meshes before", orphan_meshes)
|
||||||
|
return orphan_meshes + orphan_objects
|
||||||
|
|
||||||
def test_export_do_not_export_blueprints(setup_data):
|
def test_export_do_not_export_blueprints(setup_data):
|
||||||
auto_export_operator = bpy.ops.export_scenes.auto_gltf
|
auto_export_operator = bpy.ops.export_scenes.auto_gltf
|
||||||
@ -61,6 +63,9 @@ def test_export_do_not_export_blueprints(setup_data):
|
|||||||
)
|
)
|
||||||
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
|
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
|
||||||
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == False
|
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == False
|
||||||
|
orphan_data = get_orphan_data()
|
||||||
|
assert len(orphan_data) == 0
|
||||||
|
|
||||||
|
|
||||||
def test_export_custom_blueprints_path(setup_data):
|
def test_export_custom_blueprints_path(setup_data):
|
||||||
auto_export_operator = bpy.ops.export_scenes.auto_gltf
|
auto_export_operator = bpy.ops.export_scenes.auto_gltf
|
||||||
@ -83,6 +88,7 @@ def test_export_custom_blueprints_path(setup_data):
|
|||||||
)
|
)
|
||||||
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
|
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
|
||||||
assert os.path.exists(os.path.join(setup_data["models_path"], "another_library_path", "Blueprint1.glb")) == True
|
assert os.path.exists(os.path.join(setup_data["models_path"], "another_library_path", "Blueprint1.glb")) == True
|
||||||
|
assert len(get_orphan_data()) == 0
|
||||||
|
|
||||||
def test_export_materials_library(setup_data):
|
def test_export_materials_library(setup_data):
|
||||||
auto_export_operator = bpy.ops.export_scenes.auto_gltf
|
auto_export_operator = bpy.ops.export_scenes.auto_gltf
|
||||||
@ -107,7 +113,7 @@ def test_export_materials_library(setup_data):
 
     assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
     assert os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb")) == True
-
+    assert len(get_orphan_data()) == 0
 
 def test_export_materials_library_custom_path(setup_data):
     auto_export_operator = bpy.ops.export_scenes.auto_gltf
@ -134,6 +140,7 @@ def test_export_materials_library_custom_path(setup_data):
     assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
     assert os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb")) == False
     assert os.path.exists(os.path.join(setup_data["other_materials_path"], "testing_materials_library.glb")) == True
+    assert len(get_orphan_data()) == 0
 
 def test_export_collection_instances_combine_mode(setup_data): # TODO: change & check this
     auto_export_operator = bpy.ops.export_scenes.auto_gltf
@ -160,6 +167,7 @@ def test_export_collection_instances_combine_mode(setup_data): # TODO: change &
 
     assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
     assert os.path.exists(os.path.join(setup_data["models_path"], "World_dynamic.glb")) == False
+    assert len(get_orphan_data()) == 0
 
 
 def test_export_do_not_export_marked_assets(setup_data):
@ -188,6 +196,7 @@ def test_export_do_not_export_marked_assets(setup_data):
     assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint3.glb")) == True
     assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint4_nested.glb")) == True
     assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint5.glb")) == False
+    assert len(get_orphan_data()) == 0
 
 
 def test_export_separate_dynamic_and_static_objects(setup_data):
@ -216,6 +225,7 @@ def test_export_separate_dynamic_and_static_objects(setup_data):
 
     assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
     assert os.path.exists(os.path.join(setup_data["models_path"], "World_dynamic.glb")) == True
+    assert len(get_orphan_data()) == 0
 
 
 def test_export_should_not_generate_orphan_data(setup_data):
@ -239,4 +249,5 @@ def test_export_should_not_generate_orphan_data(setup_data):
     )
     assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
     assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == False
+    assert len(get_orphan_data()) == 0
 
@ -5,6 +5,7 @@ import json
 import pytest
 import shutil
 
+import filecmp
 from PIL import Image
 from pixelmatch.contrib.PIL import pixelmatch
 
@ -30,6 +31,10 @@ def setup_data(request):
     if os.path.exists(diagnostics_file_path):
         os.remove(diagnostics_file_path)
 
+    hierarchy_file_path = os.path.join(root_path, "bevy_hierarchy.json")
+    if os.path.exists(hierarchy_file_path):
+        os.remove(hierarchy_file_path)
+
     screenshot_observed_path = os.path.join(root_path, "screenshot.png")
     if os.path.exists(screenshot_observed_path):
         os.remove(screenshot_observed_path)
@ -56,7 +61,8 @@ def test_export_complex(setup_data):
     # we use the global settings for that
     export_props = {
         "main_scene_names" : ['World'],
-        "library_scene_names": ['Library']
+        "library_scene_names": ['Library'],
+        # "export_format":'GLTF_SEPARATE'
     }
     stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
     stored_settings.clear()
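The stored_settings lines above reuse a Blender text data-block as a small persistent store for the exporter's settings. A minimal sketch of that pattern, assuming it runs inside Blender; the text-block name comes from the diff, while the write and read-back steps are illustrative rather than lines from this commit:

import bpy
import json

SETTINGS_NAME = ".gltf_auto_export_settings"

export_props = {
    "main_scene_names": ["World"],
    "library_scene_names": ["Library"],
}

# Fetch the text data-block if it exists, otherwise create it, then overwrite its contents.
stored_settings = (
    bpy.data.texts[SETTINGS_NAME]
    if SETTINGS_NAME in bpy.data.texts
    else bpy.data.texts.new(SETTINGS_NAME)
)
stored_settings.clear()
stored_settings.write(json.dumps(export_props))

# Reading the settings back later:
settings = json.loads(bpy.data.texts[SETTINGS_NAME].as_string())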
@ -107,9 +113,15 @@ def test_export_complex(setup_data):
         diagnostics = json.load(diagnostics_file)
         print("diagnostics", diagnostics)
         assert diagnostics["animations"] == True
-        assert diagnostics["cylinder_found"] == True
         assert diagnostics["empty_found"] == True
         assert diagnostics["blueprints_list_found"] == True
+        assert diagnostics["exported_names_correct"] == True
+
+    with open(os.path.join(root_path, "bevy_hierarchy.json")) as hierarchy_file:
+        with open(os.path.join(os.path.dirname(__file__), "expected_bevy_hierarchy.json")) as expexted_hierarchy_file:
+            hierarchy = json.load(hierarchy_file)
+            expected = json.load(expexted_hierarchy_file)
+            assert sorted(hierarchy.items()) == sorted(expected.items())
 
     # last but not least, do a visual compare
     screenshot_expected_path = os.path.join(root_path, "expected_screenshot.png")
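The pixelmatch import and the screenshot paths above set up a pixel-level comparison of the rendered screenshot against a reference image. A minimal sketch of that kind of check using PIL and the pixelmatch package; the threshold and mismatch budget are placeholder values, not figures from this commit:

from PIL import Image
from pixelmatch.contrib.PIL import pixelmatch

expected = Image.open("expected_screenshot.png")
observed = Image.open("screenshot.png")
diff = Image.new("RGBA", expected.size)

# pixelmatch returns the number of pixels that differ beyond the threshold
# and optionally writes a visual diff into the third image.
num_mismatched = pixelmatch(expected, observed, diff, threshold=0.1)
diff.save("screenshot_diff.png")

assert num_mismatched < 100  # placeholder budget; pick one appropriate for the scene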