Mirror of https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
Synced 2024-12-21 23:24:10 +00:00

feat(nested_blueprints): added support for nested blueprints both in bevy_gltf_blueprints & in gltf_auto_export (#65)

* feat(bevy_gltf_blueprints):
  * added support for nested blueprints
  * added an example/test for nested blueprints with multiple levels of nesting
* feat(tools/gltf_auto_export):
  * implemented functionality to export nested blueprints/collection instances
  * added preferences to control nested blueprints export
  * experiments with nested blueprints merge modes
  * when export of nested blueprints is disabled, nested blueprints are not scanned for and thus not exported
* docs: updated docs & added images for nested blueprints support
This commit is contained in:
parent 2c4720b951
commit 6047959b3f
14 Cargo.lock (generated)
@@ -779,7 +779,7 @@ dependencies = [
 [[package]]
 name = "bevy_gltf_blueprints"
-version = "0.4.0"
+version = "0.5.0"
 dependencies = [
  "bevy",
  "bevy_gltf_components 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -857,6 +857,18 @@ dependencies = [
  "rand",
 ]
 
+[[package]]
+name = "bevy_gltf_blueprints_nested_blueprints_example"
+version = "0.3.0"
+dependencies = [
+ "bevy",
+ "bevy_asset_loader",
+ "bevy_editor_pls",
+ "bevy_gltf_blueprints",
+ "bevy_rapier3d",
+ "rand",
+]
+
 [[package]]
 name = "bevy_gltf_components"
 version = "0.2.0"
@@ -6,6 +6,7 @@ members = [
  "examples/bevy_gltf_blueprints/basic/",
  "examples/bevy_gltf_blueprints/basic_scene_components/",
  "examples/bevy_gltf_blueprints/basic_xpbd_physics/",
+ "examples/bevy_gltf_blueprints/nested_blueprints/",
  "examples/bevy_gltf_blueprints/animation/",
  "examples/bevy_gltf_blueprints/multiple_levels/",
  "examples/bevy_gltf_blueprints/materials/"
@@ -1,6 +1,6 @@
 [package]
 name = "bevy_gltf_blueprints"
-version = "0.4.0"
+version = "0.5.0"
 authors = ["Mark 'kaosat-dev' Moissette"]
 description = "Adds the ability to define Blueprints/Prefabs for [Bevy](https://bevyengine.org/) inside gltf files and spawn them in Bevy."
 homepage = "https://github.com/kaosat-dev/Blender_bevy_components_workflow"
@@ -26,7 +26,7 @@ Here's a minimal usage example:
 # Cargo.toml
 [dependencies]
 bevy="0.12"
-bevy_gltf_blueprints = { version = "0.4"}
+bevy_gltf_blueprints = { version = "0.5"}
 
 ```
 
@@ -64,7 +64,7 @@ fn spawn_blueprint(
 Add the following to your `[dependencies]` section in `Cargo.toml`:
 
 ```toml
-bevy_gltf_blueprints = "0.4"
+bevy_gltf_blueprints = "0.5"
 ```
 
 Or use `cargo add`:
@@ -281,6 +281,8 @@ https://github.com/kaosat-dev/Blender_bevy_components_workflow/tree/main/example
 
 https://github.com/kaosat-dev/Blender_bevy_components_workflow/tree/main/examples/materials
 
+https://github.com/kaosat-dev/Blender_bevy_components_workflow/tree/main/examples/nested_blueprints
+
 
 ## Compatible Bevy versions
 
@@ -289,7 +291,7 @@ The main branch is compatible with the latest Bevy release, while the branch `be
 Compatibility of `bevy_gltf_blueprints` versions:
 | `bevy_gltf_blueprints` | `bevy` |
 | :-- | :-- |
-| `0.3 - 0.4` | `0.12` |
+| `0.3 - 0.5` | `0.12` |
 | `0.1 - 0.2` | `0.11` |
 | branch `main` | `0.12` |
 | branch `bevy_main` | `main` |
@@ -34,7 +34,7 @@ pub struct SpawnedRoot;
 /// * also takes into account the already exisiting "override" components, ie "override components" > components from blueprint
 pub(crate) fn spawn_from_blueprints(
     spawn_placeholders: Query<
-        (Entity, &Name, &BlueprintName, &Transform),
+        (Entity, &Name, &BlueprintName, &Transform, Option<&Parent>),
         (
             Added<BlueprintName>,
             Added<SpawnHere>,
@@ -49,9 +49,12 @@ pub(crate) fn spawn_from_blueprints(
     assets_gltf: Res<Assets<Gltf>>,
     asset_server: Res<AssetServer>,
     blueprints_config: Res<BluePrintsConfig>,
+
 ) {
-    for (entity, name, blupeprint_name, transform) in spawn_placeholders.iter() {
-        debug!("need to spawn {:?}", blupeprint_name.0);
+    for (entity, name, blupeprint_name, transform, original_parent) in spawn_placeholders.iter() {
+        debug!("need to spawn {:?}, id: {:?}", blupeprint_name.0, entity);
+
+
         let what = &blupeprint_name.0;
         let model_file_name = format!("{}.{}", &what, &blueprints_config.format);
         let model_path =
@@ -60,8 +63,7 @@ pub(crate) fn spawn_from_blueprints(
         debug!("attempting to spawn {:?}", model_path);
         let model_handle: Handle<Gltf> = asset_server.load(model_path);
 
-        let world = game_world.single_mut();
-        let world = world.1[0]; // FIXME: dangerous hack because our gltf data have a single child like this, but might not always be the case
+
 
         let gltf = assets_gltf
             .get(&model_handle)
@@ -92,6 +94,15 @@ pub(crate) fn spawn_from_blueprints(
             },
         ))
         .id();
-        commands.entity(world).add_child(child_scene);
+
+        let world = game_world.single_mut();
+        let mut parent = world.1[0]; // FIXME: dangerous hack because our gltf data have a single child like this, but might not always be the case
+
+        // ideally, insert the newly created entity as a child of the original parent, if any, the world otherwise
+        if let Some(original_parent) = original_parent {
+            parent = original_parent.get();
+        }
+
+        commands.entity(parent).add_child(child_scene);
     }
 }
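The net effect of the change above: a spawned blueprint scene is now attached to the placeholder's original parent when one exists, instead of always being attached to the first child of the game world, which is what lets blueprint instances nested inside other blueprints land in the right place in the hierarchy. Spawning a blueprint from user code is unchanged; here is a minimal sketch of the calling side (the bundle fields mirror the example's `spawn_test` system further down in this commit; the entity name and position are made up for illustration):

```rust
use bevy::prelude::*;
use bevy_gltf_blueprints::{BluePrintBundle, BlueprintName};

// Illustrative only: request a blueprint spawn from code. spawn_from_blueprints picks this
// placeholder up (Added<BlueprintName> + Added<SpawnHere>) and, with this commit, attaches
// the spawned scene to the placeholder's parent if it has one, or to the world root otherwise.
fn spawn_a_blueprint(mut commands: Commands) {
    commands.spawn((
        BluePrintBundle {
            blueprint: BlueprintName("Health_Pickup".to_string()),
            transform: TransformBundle::from_transform(Transform::from_xyz(0.0, 2.0, 0.0)),
            ..Default::default()
        },
        Name::from("spawned_from_code"),
    ));
}
```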
4629 examples/bevy_gltf_blueprints/nested_blueprints/Cargo.lock (generated, new file)
File diff suppressed because it is too large.
13 examples/bevy_gltf_blueprints/nested_blueprints/Cargo.toml (new file)
@@ -0,0 +1,13 @@
+[package]
+name = "bevy_gltf_blueprints_nested_blueprints_example"
+version = "0.3.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+
+[dependencies]
+bevy="0.12"
+bevy_gltf_blueprints = { path = "../../../crates/bevy_gltf_blueprints" }
+bevy_rapier3d = { version = "0.23.0", features = [ "serde-serialize", "debug-render-3d", "enhanced-determinism"] }
+bevy_asset_loader = { version = "0.18", features = ["standard_dynamic_assets" ]}
+bevy_editor_pls = { version = "0.6" }
+rand = "0.8.5"
14 examples/bevy_gltf_blueprints/nested_blueprints/README.md (new file)
@@ -0,0 +1,14 @@
+
+# Nested blueprints example/demo
+
+Example of nesting & reusing blueprints (and thus gltfs) to avoid the redundant data in blueprint gltfs that leads to asset & memory bloat
+- ideally, to be used together with ```gltf_auto_export``` version >0.8 with the "export nested blueprints" option enabled, as that will generate whole
+gltf blueprint hierarchies and minimise their size for you
+- it shows you how you can configure ```bevy_gltf_blueprints``` to spawn nested blueprints
+
+
+## Running this example
+
+```
+cargo run --features bevy/dynamic_linking
+```
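For reference, the plugin configuration this example uses to resolve nested blueprints is wired up in its `CorePlugin` (see `src/core/mod.rs` further below in this commit). A minimal sketch of that wiring; the folder name, format and `aabbs` flag come from the commit itself, the rest is generic app boilerplate:

```rust
use bevy::prelude::*;
use bevy_gltf_blueprints::{BlueprintsPlugin, GltfFormat};

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        // blueprints (including nested ones) are looked up as individual glb files
        // inside assets/models/library, matching the gltf_auto_export output layout
        .add_plugins(BlueprintsPlugin {
            library_folder: "models/library".into(),
            format: GltfFormat::GLB,
            aabbs: true,
            ..Default::default()
        })
        .run();
}
```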
@@ -0,0 +1 @@
+({})
@@ -0,0 +1,6 @@
+({
+    "world": File (path: "models/World.glb"),
+    "models": Folder (
+        path: "models/library",
+    ),
+})
Binary files not shown (12 added binary assets).
@@ -0,0 +1,5 @@
+use bevy::prelude::*;
+use bevy_asset_loader::prelude::*;
+
+#[derive(AssetCollection, Resource)]
+pub struct CoreAssets {}
@@ -0,0 +1,13 @@
+use bevy::gltf::Gltf;
+use bevy::prelude::*;
+use bevy::utils::HashMap;
+use bevy_asset_loader::prelude::*;
+
+#[derive(AssetCollection, Resource)]
+pub struct GameAssets {
+    #[asset(key = "world")]
+    pub world: Handle<Gltf>,
+
+    #[asset(key = "models", collection(typed, mapped))]
+    pub models: HashMap<String, Handle<Gltf>>,
+}
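The `world` and `models` keys above map onto the entries in `assets_game.assets.ron` shown earlier; once the loading state completes, the collection is available as a resource. A rough sketch of consuming it, mirroring what `setup_game` does further below in `in_game.rs` (treat it as illustrative, not the exact example code):

```rust
use bevy::gltf::Gltf;
use bevy::prelude::*;

use crate::assets::GameAssets; // the collection defined just above

// Rough sketch: once the AppState::AppLoading state has finished, grab the preloaded
// world gltf from the collection and spawn its first scene.
fn spawn_world(
    mut commands: Commands,
    game_assets: Res<GameAssets>,
    gltfs: Res<Assets<Gltf>>,
) {
    let world = gltfs
        .get(game_assets.world.id())
        .expect("world gltf should be loaded at this point");
    commands.spawn(SceneBundle {
        scene: world.scenes[0].clone(),
        ..default()
    });
}
```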
@@ -0,0 +1,35 @@
+pub mod assets_core;
+pub use assets_core::*;
+
+pub mod assets_game;
+pub use assets_game::*;
+
+use bevy::prelude::*;
+use bevy_asset_loader::prelude::*;
+
+use crate::state::AppState;
+
+pub struct AssetsPlugin;
+impl Plugin for AssetsPlugin {
+    fn build(&self, app: &mut App) {
+        app
+            // load core assets (ie assets needed in the main menu, and everywhere else before loading more assets in game)
+            .add_loading_state(
+                LoadingState::new(AppState::CoreLoading).continue_to_state(AppState::MenuRunning),
+            )
+            .add_dynamic_collection_to_loading_state::<_, StandardDynamicAssetCollection>(
+                AppState::CoreLoading,
+                "assets_core.assets.ron",
+            )
+            .add_collection_to_loading_state::<_, CoreAssets>(AppState::CoreLoading)
+            // load game assets
+            .add_loading_state(
+                LoadingState::new(AppState::AppLoading).continue_to_state(AppState::AppRunning),
+            )
+            .add_dynamic_collection_to_loading_state::<_, StandardDynamicAssetCollection>(
+                AppState::AppLoading,
+                "assets_game.assets.ron",
+            )
+            .add_collection_to_loading_state::<_, GameAssets>(AppState::AppLoading);
+    }
+}
@@ -0,0 +1,24 @@
+use bevy::core_pipeline::bloom::{BloomCompositeMode, BloomSettings};
+use bevy::core_pipeline::tonemapping::{DebandDither, Tonemapping};
+use bevy::prelude::*;
+
+use super::CameraTrackingOffset;
+
+pub fn camera_replace_proxies(
+    mut commands: Commands,
+    mut added_cameras: Query<(Entity, &mut Camera), (Added<Camera>, With<CameraTrackingOffset>)>,
+) {
+    for (entity, mut camera) in added_cameras.iter_mut() {
+        info!("detected added camera, updating proxy");
+        camera.hdr = true;
+        commands
+            .entity(entity)
+            .insert(DebandDither::Enabled)
+            .insert(Tonemapping::BlenderFilmic)
+            .insert(BloomSettings {
+                intensity: 0.01,
+                composite_mode: BloomCompositeMode::Additive,
+                ..default()
+            });
+    }
+}
@@ -0,0 +1,58 @@
+use bevy::prelude::*;
+
+#[derive(Component, Reflect, Debug)]
+#[reflect(Component)]
+/// Component for cameras, with an offset from the Trackable target
+///
+pub struct CameraTracking {
+    pub offset: Vec3,
+}
+impl Default for CameraTracking {
+    fn default() -> Self {
+        CameraTracking {
+            offset: Vec3::new(0.0, 6.0, 8.0),
+        }
+    }
+}
+
+#[derive(Component, Reflect, Debug, Deref, DerefMut)]
+#[reflect(Component)]
+/// Component for cameras, with an offset from the Trackable target
+pub struct CameraTrackingOffset(Vec3);
+impl Default for CameraTrackingOffset {
+    fn default() -> Self {
+        CameraTrackingOffset(Vec3::new(0.0, 6.0, 8.0))
+    }
+}
+
+impl CameraTrackingOffset {
+    fn new(input: Vec3) -> Self {
+        CameraTrackingOffset(input)
+    }
+}
+
+#[derive(Component, Reflect, Default, Debug)]
+#[reflect(Component)]
+/// Add this component to an entity if you want it to be tracked by a Camera
+pub struct CameraTrackable;
+
+pub fn camera_track(
+    mut tracking_cameras: Query<
+        (&mut Transform, &CameraTrackingOffset),
+        (
+            With<Camera>,
+            With<CameraTrackingOffset>,
+            Without<CameraTrackable>,
+        ),
+    >,
+    camera_tracked: Query<&Transform, With<CameraTrackable>>,
+) {
+    for (mut camera_transform, tracking_offset) in tracking_cameras.iter_mut() {
+        for tracked_transform in camera_tracked.iter() {
+            let target_position = tracked_transform.translation + tracking_offset.0;
+            let eased_position = camera_transform.translation.lerp(target_position, 0.1);
+            camera_transform.translation = eased_position; // + tracking.offset;// tracked_transform.translation + tracking.offset;
+            *camera_transform = camera_transform.looking_at(tracked_transform.translation, Vec3::Y);
+        }
+    }
+}
@@ -0,0 +1,24 @@
+pub mod camera_tracking;
+pub use camera_tracking::*;
+
+pub mod camera_replace_proxies;
+pub use camera_replace_proxies::*;
+
+use bevy::prelude::*;
+use bevy_gltf_blueprints::GltfBlueprintsSet;
+
+pub struct CameraPlugin;
+impl Plugin for CameraPlugin {
+    fn build(&self, app: &mut App) {
+        app.register_type::<CameraTrackable>()
+            .register_type::<CameraTracking>()
+            .register_type::<CameraTrackingOffset>()
+            .add_systems(
+                Update,
+                (
+                    camera_replace_proxies.after(GltfBlueprintsSet::AfterSpawn),
+                    camera_track,
+                ),
+            );
+    }
+}
@@ -0,0 +1,25 @@
+use bevy::prelude::*;
+
+use bevy::pbr::{CascadeShadowConfig, CascadeShadowConfigBuilder};
+
+// fixme might be too specific to might needs, should it be moved out ? also these are all for lights, not models
+pub fn lighting_replace_proxies(
+    mut added_dirights: Query<(Entity, &mut DirectionalLight), Added<DirectionalLight>>,
+    mut added_spotlights: Query<&mut SpotLight, Added<SpotLight>>,
+    mut commands: Commands,
+) {
+    for (entity, mut light) in added_dirights.iter_mut() {
+        light.illuminance *= 5.0;
+        light.shadows_enabled = true;
+        let shadow_config: CascadeShadowConfig = CascadeShadowConfigBuilder {
+            first_cascade_far_bound: 15.0,
+            maximum_distance: 135.0,
+            ..default()
+        }
+        .into();
+        commands.entity(entity).insert(shadow_config);
+    }
+    for mut light in added_spotlights.iter_mut() {
+        light.shadows_enabled = true;
+    }
+}
@@ -0,0 +1,18 @@
+mod lighting_replace_proxies;
+use lighting_replace_proxies::*;
+
+use bevy::pbr::{DirectionalLightShadowMap, NotShadowCaster};
+use bevy::prelude::*;
+
+pub struct LightingPlugin;
+impl Plugin for LightingPlugin {
+    fn build(&self, app: &mut App) {
+        app
+            .insert_resource(DirectionalLightShadowMap { size: 4096 })
+            // FIXME: adding these since they are missing
+            .register_type::<NotShadowCaster>()
+
+            .add_systems(PreUpdate, lighting_replace_proxies) // FIXME: you should actually run this in a specific state most likely
+        ;
+    }
+}
@@ -0,0 +1,35 @@
+pub mod camera;
+pub use camera::*;
+
+pub mod lighting;
+pub use lighting::*;
+
+pub mod relationships;
+pub use relationships::*;
+
+pub mod physics;
+pub use physics::*;
+
+// pub mod save_load;
+// pub use save_load::*;
+
+use bevy::prelude::*;
+use bevy_gltf_blueprints::*;
+
+pub struct CorePlugin;
+impl Plugin for CorePlugin {
+    fn build(&self, app: &mut App) {
+        app.add_plugins((
+            LightingPlugin,
+            CameraPlugin,
+            PhysicsPlugin,
+            // SaveLoadPlugin,
+            BlueprintsPlugin {
+                library_folder: "models/library".into(),
+                format: GltfFormat::GLB,
+                aabbs: true,
+                ..Default::default()
+            },
+        ));
+    }
+}
@ -0,0 +1,12 @@
|
||||
use bevy::prelude::{info, ResMut};
|
||||
use bevy_rapier3d::prelude::RapierConfiguration;
|
||||
|
||||
pub fn pause_physics(mut physics_config: ResMut<RapierConfiguration>) {
|
||||
info!("pausing physics");
|
||||
physics_config.physics_pipeline_active = false;
|
||||
}
|
||||
|
||||
pub fn resume_physics(mut physics_config: ResMut<RapierConfiguration>) {
|
||||
info!("unpausing physics");
|
||||
physics_config.physics_pipeline_active = true;
|
||||
}
|
@ -0,0 +1,37 @@
|
||||
pub mod physics_replace_proxies;
|
||||
use bevy_rapier3d::{
|
||||
prelude::{NoUserData, RapierPhysicsPlugin},
|
||||
render::RapierDebugRenderPlugin,
|
||||
};
|
||||
pub use physics_replace_proxies::*;
|
||||
|
||||
pub mod utils;
|
||||
|
||||
pub mod controls;
|
||||
pub use controls::*;
|
||||
|
||||
use crate::state::GameState;
|
||||
use bevy::prelude::*;
|
||||
// use super::blueprints::GltfBlueprintsSet;
|
||||
use bevy_gltf_blueprints::GltfBlueprintsSet;
|
||||
// use crate::Collider;
|
||||
pub struct PhysicsPlugin;
|
||||
impl Plugin for PhysicsPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
app.add_plugins((
|
||||
RapierPhysicsPlugin::<NoUserData>::default(),
|
||||
RapierDebugRenderPlugin::default(),
|
||||
))
|
||||
.register_type::<AutoAABBCollider>()
|
||||
.register_type::<physics_replace_proxies::Collider>()
|
||||
// find a way to make serde's stuff serializable
|
||||
// .register_type::<bevy_rapier3d::dynamics::CoefficientCombineRule>()
|
||||
//bevy_rapier3d::dynamics::CoefficientCombineRule
|
||||
.add_systems(
|
||||
Update,
|
||||
physics_replace_proxies.after(GltfBlueprintsSet::AfterSpawn),
|
||||
)
|
||||
.add_systems(OnEnter(GameState::InGame), resume_physics)
|
||||
.add_systems(OnExit(GameState::InGame), pause_physics);
|
||||
}
|
||||
}
|
@ -0,0 +1,101 @@
|
||||
use bevy::prelude::*;
|
||||
// use bevy::render::primitives::Aabb;
|
||||
use bevy_rapier3d::geometry::Collider as RapierCollider;
|
||||
use bevy_rapier3d::prelude::{ActiveCollisionTypes, ActiveEvents, ComputedColliderShape};
|
||||
|
||||
use super::utils::*;
|
||||
|
||||
#[derive(Component, Reflect, Default, Debug)]
|
||||
#[reflect(Component)]
|
||||
pub enum Collider {
|
||||
Ball(f32),
|
||||
Cuboid(Vec3),
|
||||
Capsule(Vec3, Vec3, f32),
|
||||
#[default]
|
||||
Mesh,
|
||||
}
|
||||
|
||||
#[derive(Component, Reflect, Default, Debug)]
|
||||
#[reflect(Component)]
|
||||
pub enum AutoAABBCollider {
|
||||
#[default]
|
||||
Cuboid,
|
||||
Ball,
|
||||
Capsule,
|
||||
}
|
||||
|
||||
// replaces all physics stand-ins with the actual rapier types
|
||||
pub fn physics_replace_proxies(
|
||||
meshes: Res<Assets<Mesh>>,
|
||||
mesh_handles: Query<&Handle<Mesh>>,
|
||||
mut proxy_colliders: Query<
|
||||
(Entity, &Collider, &Name, &mut Visibility),
|
||||
(Without<RapierCollider>, Added<Collider>),
|
||||
>,
|
||||
// needed for tri meshes
|
||||
children: Query<&Children>,
|
||||
|
||||
mut commands: Commands,
|
||||
) {
|
||||
for proxy_colider in proxy_colliders.iter_mut() {
|
||||
let (entity, collider_proxy, name, mut visibility) = proxy_colider;
|
||||
// we hide the collider meshes: perhaps they should be removed altogether once processed ?
|
||||
if name.ends_with("_collider") || name.ends_with("_sensor") {
|
||||
*visibility = Visibility::Hidden;
|
||||
}
|
||||
|
||||
let mut rapier_collider: RapierCollider;
|
||||
match collider_proxy {
|
||||
Collider::Ball(radius) => {
|
||||
info!("generating collider from proxy: ball");
|
||||
rapier_collider = RapierCollider::ball(*radius);
|
||||
commands.entity(entity)
|
||||
.insert(rapier_collider)
|
||||
.insert(ActiveEvents::COLLISION_EVENTS) // FIXME: this is just for demo purposes !!!
|
||||
;
|
||||
}
|
||||
Collider::Cuboid(size) => {
|
||||
info!("generating collider from proxy: cuboid");
|
||||
rapier_collider = RapierCollider::cuboid(size.x, size.y, size.z);
|
||||
commands.entity(entity)
|
||||
.insert(rapier_collider)
|
||||
.insert(ActiveEvents::COLLISION_EVENTS) // FIXME: this is just for demo purposes !!!
|
||||
;
|
||||
}
|
||||
Collider::Capsule(a, b, radius) => {
|
||||
info!("generating collider from proxy: capsule");
|
||||
rapier_collider = RapierCollider::capsule(*a, *b, *radius);
|
||||
commands.entity(entity)
|
||||
.insert(rapier_collider)
|
||||
.insert(ActiveEvents::COLLISION_EVENTS) // FIXME: this is just for demo purposes !!!
|
||||
;
|
||||
}
|
||||
Collider::Mesh => {
|
||||
info!("generating collider from proxy: mesh");
|
||||
for (_, collider_mesh) in
|
||||
Mesh::search_in_children(entity, &children, &meshes, &mesh_handles)
|
||||
{
|
||||
rapier_collider = RapierCollider::from_bevy_mesh(
|
||||
collider_mesh,
|
||||
&ComputedColliderShape::TriMesh,
|
||||
)
|
||||
.unwrap();
|
||||
commands
|
||||
.entity(entity)
|
||||
.insert(rapier_collider)
|
||||
// FIXME: this is just for demo purposes !!!
|
||||
.insert(
|
||||
ActiveCollisionTypes::default()
|
||||
| ActiveCollisionTypes::KINEMATIC_STATIC
|
||||
| ActiveCollisionTypes::STATIC_STATIC
|
||||
| ActiveCollisionTypes::DYNAMIC_STATIC,
|
||||
)
|
||||
.insert(ActiveEvents::COLLISION_EVENTS);
|
||||
// .insert(ActiveEvents::COLLISION_EVENTS)
|
||||
// break;
|
||||
// RapierCollider::convex_hull(points)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,175 @@
|
||||
use bevy::prelude::*;
|
||||
use bevy::render::mesh::{MeshVertexAttributeId, PrimitiveTopology, VertexAttributeValues};
|
||||
// TAKEN VERBATIB FROM https://github.com/janhohenheim/foxtrot/blob/src/util/trait_extension.rs
|
||||
|
||||
pub(crate) trait Vec3Ext: Copy {
|
||||
fn is_approx_zero(self) -> bool;
|
||||
fn split(self, up: Vec3) -> SplitVec3;
|
||||
}
|
||||
impl Vec3Ext for Vec3 {
|
||||
#[inline]
|
||||
fn is_approx_zero(self) -> bool {
|
||||
self.length_squared() < 1e-5
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn split(self, up: Vec3) -> SplitVec3 {
|
||||
let vertical = up * self.dot(up);
|
||||
let horizontal = self - vertical;
|
||||
SplitVec3 {
|
||||
vertical,
|
||||
horizontal,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub(crate) struct SplitVec3 {
|
||||
pub(crate) vertical: Vec3,
|
||||
pub(crate) horizontal: Vec3,
|
||||
}
|
||||
|
||||
pub(crate) trait Vec2Ext: Copy {
|
||||
fn is_approx_zero(self) -> bool;
|
||||
fn x0y(self) -> Vec3;
|
||||
}
|
||||
impl Vec2Ext for Vec2 {
|
||||
#[inline]
|
||||
fn is_approx_zero(self) -> bool {
|
||||
self.length_squared() < 1e-5
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn x0y(self) -> Vec3 {
|
||||
Vec3::new(self.x, 0., self.y)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) trait MeshExt {
|
||||
fn transform(&mut self, transform: Transform);
|
||||
fn transformed(&self, transform: Transform) -> Mesh;
|
||||
fn read_coords_mut(&mut self, id: impl Into<MeshVertexAttributeId>) -> &mut Vec<[f32; 3]>;
|
||||
fn search_in_children<'a>(
|
||||
parent: Entity,
|
||||
children: &'a Query<&Children>,
|
||||
meshes: &'a Assets<Mesh>,
|
||||
mesh_handles: &'a Query<&Handle<Mesh>>,
|
||||
) -> Vec<(Entity, &'a Mesh)>;
|
||||
}
|
||||
|
||||
impl MeshExt for Mesh {
|
||||
fn transform(&mut self, transform: Transform) {
|
||||
for coords in self.read_coords_mut(Mesh::ATTRIBUTE_POSITION.clone()) {
|
||||
let vec3 = (*coords).into();
|
||||
let transformed = transform.transform_point(vec3);
|
||||
*coords = transformed.into();
|
||||
}
|
||||
for normal in self.read_coords_mut(Mesh::ATTRIBUTE_NORMAL.clone()) {
|
||||
let vec3 = (*normal).into();
|
||||
let transformed = transform.rotation.mul_vec3(vec3);
|
||||
*normal = transformed.into();
|
||||
}
|
||||
}
|
||||
|
||||
fn transformed(&self, transform: Transform) -> Mesh {
|
||||
let mut mesh = self.clone();
|
||||
mesh.transform(transform);
|
||||
mesh
|
||||
}
|
||||
|
||||
fn read_coords_mut(&mut self, id: impl Into<MeshVertexAttributeId>) -> &mut Vec<[f32; 3]> {
|
||||
// Guaranteed by Bevy for the current usage
|
||||
match self
|
||||
.attribute_mut(id)
|
||||
.expect("Failed to read unknown mesh attribute")
|
||||
{
|
||||
VertexAttributeValues::Float32x3(values) => values,
|
||||
// Guaranteed by Bevy for the current usage
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn search_in_children<'a>(
|
||||
parent: Entity,
|
||||
children_query: &'a Query<&Children>,
|
||||
meshes: &'a Assets<Mesh>,
|
||||
mesh_handles: &'a Query<&Handle<Mesh>>,
|
||||
) -> Vec<(Entity, &'a Mesh)> {
|
||||
if let Ok(children) = children_query.get(parent) {
|
||||
let mut result: Vec<_> = children
|
||||
.iter()
|
||||
.filter_map(|entity| mesh_handles.get(*entity).ok().map(|mesh| (*entity, mesh)))
|
||||
.map(|(entity, mesh_handle)| {
|
||||
(
|
||||
entity,
|
||||
meshes
|
||||
.get(mesh_handle)
|
||||
.expect("Failed to get mesh from handle"),
|
||||
)
|
||||
})
|
||||
.map(|(entity, mesh)| {
|
||||
assert_eq!(mesh.primitive_topology(), PrimitiveTopology::TriangleList);
|
||||
(entity, mesh)
|
||||
})
|
||||
.collect();
|
||||
let mut inner_result = children
|
||||
.iter()
|
||||
.flat_map(|entity| {
|
||||
Self::search_in_children(*entity, children_query, meshes, mesh_handles)
|
||||
})
|
||||
.collect();
|
||||
result.append(&mut inner_result);
|
||||
result
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) trait F32Ext: Copy {
|
||||
fn is_approx_zero(self) -> bool;
|
||||
fn squared(self) -> f32;
|
||||
fn lerp(self, other: f32, ratio: f32) -> f32;
|
||||
}
|
||||
|
||||
impl F32Ext for f32 {
|
||||
#[inline]
|
||||
fn is_approx_zero(self) -> bool {
|
||||
self.abs() < 1e-5
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn squared(self) -> f32 {
|
||||
self * self
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn lerp(self, other: f32, ratio: f32) -> f32 {
|
||||
self.mul_add(1. - ratio, other * ratio)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) trait TransformExt: Copy {
|
||||
fn horizontally_looking_at(self, target: Vec3, up: Vec3) -> Transform;
|
||||
fn lerp(self, other: Transform, ratio: f32) -> Transform;
|
||||
}
|
||||
|
||||
impl TransformExt for Transform {
|
||||
fn horizontally_looking_at(self, target: Vec3, up: Vec3) -> Transform {
|
||||
let direction = target - self.translation;
|
||||
let horizontal_direction = direction - up * direction.dot(up);
|
||||
let look_target = self.translation + horizontal_direction;
|
||||
self.looking_at(look_target, up)
|
||||
}
|
||||
|
||||
fn lerp(self, other: Transform, ratio: f32) -> Transform {
|
||||
let translation = self.translation.lerp(other.translation, ratio);
|
||||
let rotation = self.rotation.slerp(other.rotation, ratio);
|
||||
let scale = self.scale.lerp(other.scale, ratio);
|
||||
Transform {
|
||||
translation,
|
||||
rotation,
|
||||
scale,
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,75 @@
|
||||
use bevy::prelude::*;
|
||||
use bevy::render::mesh::{MeshVertexAttributeId, PrimitiveTopology, VertexAttributeValues};
|
||||
// TAKEN VERBATIB FROM https://github.com/janhohenheim/foxtrot/blob/6e31fc02652fc9d085a4adde0a73ab007dbbb0dc/src/util/trait_extension.rs
|
||||
|
||||
pub trait Vec3Ext {
|
||||
#[allow(clippy::wrong_self_convention)] // Because [`Vec3`] is [`Copy`]
|
||||
fn is_approx_zero(self) -> bool;
|
||||
fn x0z(self) -> Vec3;
|
||||
}
|
||||
impl Vec3Ext for Vec3 {
|
||||
fn is_approx_zero(self) -> bool {
|
||||
[self.x, self.y, self.z].iter().all(|&x| x.abs() < 1e-5)
|
||||
}
|
||||
fn x0z(self) -> Vec3 {
|
||||
Vec3::new(self.x, 0., self.z)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MeshExt {
|
||||
fn transform(&mut self, transform: Transform);
|
||||
fn transformed(&self, transform: Transform) -> Mesh;
|
||||
fn read_coords_mut(&mut self, id: impl Into<MeshVertexAttributeId>) -> &mut Vec<[f32; 3]>;
|
||||
fn search_in_children<'a>(
|
||||
children: &'a Children,
|
||||
meshes: &'a Assets<Mesh>,
|
||||
mesh_handles: &'a Query<&Handle<Mesh>>,
|
||||
) -> (Entity, &'a Mesh);
|
||||
}
|
||||
|
||||
impl MeshExt for Mesh {
|
||||
fn transform(&mut self, transform: Transform) {
|
||||
for attribute in [Mesh::ATTRIBUTE_POSITION, Mesh::ATTRIBUTE_NORMAL] {
|
||||
for coords in self.read_coords_mut(attribute.clone()) {
|
||||
let vec3 = (*coords).into();
|
||||
let transformed = transform.transform_point(vec3);
|
||||
*coords = transformed.into();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn transformed(&self, transform: Transform) -> Mesh {
|
||||
let mut mesh = self.clone();
|
||||
mesh.transform(transform);
|
||||
mesh
|
||||
}
|
||||
|
||||
fn read_coords_mut(&mut self, id: impl Into<MeshVertexAttributeId>) -> &mut Vec<[f32; 3]> {
|
||||
match self.attribute_mut(id).unwrap() {
|
||||
VertexAttributeValues::Float32x3(values) => values,
|
||||
// Guaranteed by Bevy
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn search_in_children<'a>(
|
||||
children: &'a Children,
|
||||
meshes: &'a Assets<Mesh>,
|
||||
mesh_handles: &'a Query<&Handle<Mesh>>,
|
||||
) -> (Entity, &'a Mesh) {
|
||||
let entity_handles: Vec<_> = children
|
||||
.iter()
|
||||
.filter_map(|entity| mesh_handles.get(*entity).ok().map(|mesh| (*entity, mesh)))
|
||||
.collect();
|
||||
assert_eq!(
|
||||
entity_handles.len(),
|
||||
1,
|
||||
"Collider must contain exactly one mesh, but found {}",
|
||||
entity_handles.len()
|
||||
);
|
||||
let (entity, mesh_handle) = entity_handles.first().unwrap();
|
||||
let mesh = meshes.get(mesh_handle).unwrap();
|
||||
assert_eq!(mesh.primitive_topology(), PrimitiveTopology::TriangleList);
|
||||
(*entity, mesh)
|
||||
}
|
||||
}
|
@ -0,0 +1,11 @@
|
||||
pub mod relationships_insert_dependant_components;
|
||||
pub use relationships_insert_dependant_components::*;
|
||||
|
||||
use bevy::prelude::*;
|
||||
|
||||
pub struct EcsRelationshipsPlugin;
|
||||
impl Plugin for EcsRelationshipsPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
app;
|
||||
}
|
||||
}
|
@ -0,0 +1,15 @@
|
||||
use bevy::prelude::*;
|
||||
|
||||
pub fn insert_dependant_component<
|
||||
Dependant: Component,
|
||||
Dependency: Component + std::default::Default,
|
||||
>(
|
||||
mut commands: Commands,
|
||||
entities_without_depency: Query<(Entity, &Name), (With<Dependant>, Without<Dependency>)>,
|
||||
) {
|
||||
for (entity, name) in entities_without_depency.iter() {
|
||||
let name = name.clone().to_string();
|
||||
commands.entity(entity).insert(Dependency::default());
|
||||
warn!("found an entity called {} with a {} component but without an {}, please check your assets", name.clone(), std::any::type_name::<Dependant>(), std::any::type_name::<Dependency>());
|
||||
}
|
||||
}
|
@ -0,0 +1,218 @@
|
||||
use bevy::prelude::*;
|
||||
use bevy_gltf_blueprints::{clone_entity::CloneEntity, GameWorldTag, SpawnHere};
|
||||
|
||||
use crate::{
|
||||
assets::GameAssets,
|
||||
state::{AppState, GameState, InAppRunning},
|
||||
};
|
||||
|
||||
use super::Saveable;
|
||||
|
||||
const SCENE_FILE_PATH: &str = "scenes/save.scn.ron";
|
||||
|
||||
#[derive(Component, Debug)]
|
||||
pub struct TempLoadedSceneMarker;
|
||||
|
||||
#[derive(Component, Debug)]
|
||||
pub struct SaveablesToRemove(Vec<(Entity, Name)>);
|
||||
|
||||
#[derive(Component, Event)]
|
||||
pub struct LoadRequest {
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
pub fn should_load(save_requested_events: EventReader<LoadRequest>) -> bool {
|
||||
return save_requested_events.len() > 0;
|
||||
}
|
||||
|
||||
pub fn load_prepare(
|
||||
mut next_app_state: ResMut<NextState<AppState>>,
|
||||
mut next_game_state: ResMut<NextState<GameState>>,
|
||||
) {
|
||||
next_app_state.set(AppState::LoadingGame);
|
||||
next_game_state.set(GameState::None);
|
||||
info!("--loading: prepare")
|
||||
}
|
||||
|
||||
/// unload the level recursively
|
||||
pub fn _unload_world_old(world: &mut World) {
|
||||
let entities: Vec<Entity> = world
|
||||
// .query_filtered::<Entity, Or<(With<Save>, With<Unload>)>>()
|
||||
.query_filtered::<Entity, With<GameWorldTag>>() // our level/world contains this component
|
||||
.iter(world)
|
||||
.collect();
|
||||
for entity in entities {
|
||||
// Check the entity again in case it was despawned recursively
|
||||
if world.get_entity(entity).is_some() {
|
||||
world.entity_mut(entity).despawn_recursive();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unload_world(mut commands: Commands, gameworlds: Query<Entity, With<GameWorldTag>>) {
|
||||
for e in gameworlds.iter() {
|
||||
info!("--loading: despawn old world/level");
|
||||
commands.entity(e).despawn_recursive();
|
||||
}
|
||||
}
|
||||
|
||||
// almost identical to setup_game, !!??
|
||||
pub fn load_world(
|
||||
mut commands: Commands,
|
||||
game_assets: Res<GameAssets>,
|
||||
// scenes: ResMut<Scene>,
|
||||
) {
|
||||
info!("--loading: loading world/level");
|
||||
|
||||
commands.spawn((
|
||||
SceneBundle {
|
||||
scene: game_assets.world.clone(),
|
||||
..default()
|
||||
},
|
||||
bevy::prelude::Name::from("world"),
|
||||
GameWorldTag,
|
||||
InAppRunning,
|
||||
));
|
||||
}
|
||||
|
||||
pub fn load_saved_scene(mut commands: Commands, asset_server: Res<AssetServer>) {
|
||||
commands.spawn((
|
||||
DynamicSceneBundle {
|
||||
// Scenes are loaded just like any other asset.
|
||||
scene: asset_server.load(SCENE_FILE_PATH),
|
||||
..default()
|
||||
},
|
||||
TempLoadedSceneMarker,
|
||||
));
|
||||
// commands.entity(world).add_child(child_scene);
|
||||
info!("--loading: loaded saved scene");
|
||||
}
|
||||
|
||||
pub fn process_loaded_scene(
|
||||
loaded_scene: Query<(Entity, &Children), With<TempLoadedSceneMarker>>,
|
||||
named_entities: Query<(Entity, &Name, &Parent)>, // FIXME: very inneficient
|
||||
mut commands: Commands,
|
||||
|
||||
mut game_world: Query<(Entity, &Children), With<GameWorldTag>>,
|
||||
saveables: Query<(Entity, &Name), With<Saveable>>,
|
||||
asset_server: Res<AssetServer>,
|
||||
) {
|
||||
for (loaded_scene, children) in loaded_scene.iter() {
|
||||
info!("--loading: post processing loaded scene");
|
||||
|
||||
let mut entities_to_load: Vec<(Entity, Name)> = vec![];
|
||||
|
||||
for loaded_entity in children.iter() {
|
||||
if let Ok((source, name, _)) = named_entities.get(*loaded_entity) {
|
||||
entities_to_load.push((source, name.clone()));
|
||||
|
||||
let mut found = false;
|
||||
for (e, n, p) in named_entities.iter() {
|
||||
// if we have an entity with the same name as in same file, overwrite
|
||||
if e != source && name.as_str() == n.as_str() {
|
||||
// println!("found entity with same name {} {} {:?} {:?}", name, n, source, e);
|
||||
// source is entity within the newly loaded scene (source), e is within the existing world (destination)
|
||||
info!("copying data from {:?} to {:?}", source, e);
|
||||
commands.add(CloneEntity {
|
||||
source: source,
|
||||
destination: e,
|
||||
});
|
||||
// FIXME: issue with hierarchy & parenting, would be nice to be able to filter out components from CloneEntity
|
||||
commands.entity(p.get()).add_child(e);
|
||||
commands.entity(source).despawn_recursive();
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// entity not found in the list of existing entities (ie entities that came as part of the level)
|
||||
// so we spawn a new one
|
||||
if !found {
|
||||
info!("generating new entity");
|
||||
let world = game_world.single_mut();
|
||||
let world = world.1[0];
|
||||
|
||||
let new_entity = commands
|
||||
.spawn((bevy::prelude::Name::from(name.clone()), SpawnHere))
|
||||
.id();
|
||||
|
||||
commands.add(CloneEntity {
|
||||
source: source,
|
||||
destination: new_entity,
|
||||
});
|
||||
|
||||
commands.entity(world).add_child(new_entity);
|
||||
info!("copying data from {:?} to {:?}", source, new_entity);
|
||||
}
|
||||
}
|
||||
}
|
||||
commands.spawn(SaveablesToRemove(entities_to_load.clone()));
|
||||
|
||||
// if an entity is present in the world but NOT in the saved entities , it should be removed from the world
|
||||
// ideally this should be run between spawning of the world/level AND spawn_placeholders
|
||||
|
||||
// remove the dynamic scene
|
||||
info!("--loading: DESPAWNING LOADED SCENE");
|
||||
commands.entity(loaded_scene).despawn_recursive();
|
||||
|
||||
asset_server.mark_unused_assets();
|
||||
asset_server.free_unused_assets();
|
||||
}
|
||||
//for saveable in saveables.iter(){
|
||||
// println!("SAVEABLE BEFORE {:?}", saveable)
|
||||
//}
|
||||
}
|
||||
|
||||
pub fn final_cleanup(
|
||||
saveables_to_remove: Query<(Entity, &SaveablesToRemove)>,
|
||||
mut commands: Commands,
|
||||
saveables: Query<(Entity, &Name), With<Saveable>>,
|
||||
mut next_app_state: ResMut<NextState<AppState>>,
|
||||
mut next_game_state: ResMut<NextState<GameState>>,
|
||||
) {
|
||||
if let Ok((e, entities_to_load)) = saveables_to_remove.get_single() {
|
||||
info!("saveables to remove {:?}", entities_to_load);
|
||||
for (e, n) in saveables.iter() {
|
||||
let mut found = false;
|
||||
println!("SAVEABLE {}", n);
|
||||
|
||||
//let entities_to_load = saveables_to_remove.single();
|
||||
for (en, na) in entities_to_load.0.iter() {
|
||||
found = na.as_str() == n.as_str();
|
||||
if found {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
println!("REMOVING THIS ONE {}", n);
|
||||
commands.entity(e).despawn_recursive();
|
||||
}
|
||||
}
|
||||
// if there is a saveable that is NOT in the list of entities to load, despawn it
|
||||
|
||||
// despawn list
|
||||
commands.entity(e).despawn_recursive();
|
||||
|
||||
info!("--loading: done, move to InGame state");
|
||||
// next_app_state.set(AppState::AppRunning);
|
||||
next_game_state.set(GameState::InGame);
|
||||
}
|
||||
}
|
||||
|
||||
fn process_loaded_scene_load_alt(
|
||||
entities: Query<(Entity, &Children), With<TempLoadedSceneMarker>>,
|
||||
named_entities: Query<(Entity, &Name, &Parent)>, // FIXME: very inneficient
|
||||
mut commands: Commands,
|
||||
) {
|
||||
for (entity, children) in entities.iter() {
|
||||
let mut entities_to_load: Vec<(Entity, Name)> = vec![];
|
||||
for saved_source in children.iter() {
|
||||
if let Ok((source, name, _)) = named_entities.get(*saved_source) {
|
||||
println!("AAAAAAA {}", name);
|
||||
entities_to_load.push((source, name.clone()));
|
||||
}
|
||||
}
|
||||
println!("entities to load {:?}", entities_to_load);
|
||||
|
||||
commands.entity(entity).despawn_recursive();
|
||||
}
|
||||
}
|
@ -0,0 +1,70 @@
|
||||
pub mod saveable;
|
||||
use bevy::asset::free_unused_assets_system;
|
||||
use bevy_gltf_components::GltfComponentsSet;
|
||||
pub use saveable::*;
|
||||
|
||||
pub mod saving;
|
||||
pub use saving::*;
|
||||
|
||||
pub mod loading;
|
||||
pub use loading::*;
|
||||
|
||||
use bevy::prelude::*;
|
||||
use bevy::prelude::{App, IntoSystemConfigs, Plugin};
|
||||
use bevy::utils::Uuid;
|
||||
|
||||
use bevy_gltf_blueprints::GltfBlueprintsSet;
|
||||
|
||||
#[derive(SystemSet, Debug, Hash, PartialEq, Eq, Clone)]
|
||||
pub enum LoadingSet {
|
||||
Load,
|
||||
PostLoad,
|
||||
}
|
||||
|
||||
pub struct SaveLoadPlugin;
|
||||
impl Plugin for SaveLoadPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
app
|
||||
.register_type::<Uuid>()
|
||||
.register_type::<Saveable>()
|
||||
.add_event::<SaveRequest>()
|
||||
.add_event::<LoadRequest>()
|
||||
|
||||
.configure_sets(
|
||||
Update,
|
||||
(LoadingSet::Load, LoadingSet::PostLoad)
|
||||
.chain()
|
||||
.before(GltfBlueprintsSet::Spawn)
|
||||
.before(GltfComponentsSet::Injection)
|
||||
)
|
||||
|
||||
.add_systems(PreUpdate, save_game.run_if(should_save))
|
||||
|
||||
.add_systems(Update,
|
||||
(
|
||||
load_prepare,
|
||||
unload_world,
|
||||
load_world,
|
||||
load_saved_scene,
|
||||
// process_loaded_scene
|
||||
)
|
||||
.chain()
|
||||
.run_if(should_load) // .run_if(in_state(AppState::AppRunning))
|
||||
.in_set(LoadingSet::Load)
|
||||
)
|
||||
.add_systems(Update,
|
||||
(
|
||||
process_loaded_scene,
|
||||
apply_deferred,
|
||||
final_cleanup,
|
||||
apply_deferred,
|
||||
free_unused_assets_system
|
||||
)
|
||||
.chain()
|
||||
.in_set(LoadingSet::PostLoad)
|
||||
)
|
||||
|
||||
// .add_systems(Update, bla)
|
||||
;
|
||||
}
|
||||
}
|
@ -0,0 +1,137 @@
|
||||
const NEW_SCENE_FILE_PATH:&str="save.scn.ron";
|
||||
|
||||
|
||||
|
||||
|
||||
use bevy::ecs::component::Components;
|
||||
use bevy::ecs::entity::EntityMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
|
||||
use std::io::Read;
|
||||
use bevy::scene::serde::SceneDeserializer;
|
||||
use ron::Deserializer;
|
||||
use serde::de::DeserializeSeed;
|
||||
|
||||
|
||||
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Components2;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Fake {
|
||||
resources: HashMap<u32, String>,
|
||||
entities: HashMap<u32, Components2>
|
||||
}
|
||||
|
||||
fn ron_test(){
|
||||
let full_path = "/home/ckaos/projects/grappling-boom-bot/assets/save.ron";
|
||||
match File::open(full_path) {
|
||||
Ok(mut file) => {
|
||||
let mut serialized_scene = Vec::new();
|
||||
if let Err(why) = file.read_to_end(&mut serialized_scene) {
|
||||
error!("file read failed: {why:?}");
|
||||
}
|
||||
match Deserializer::from_bytes(&serialized_scene) {
|
||||
Ok(mut deserializer) => {
|
||||
// deserializer.
|
||||
let bla:Fake = ron::from_str("(
|
||||
resources: {},
|
||||
entities: {}
|
||||
)").unwrap();
|
||||
info!("testing {:?}", bla);
|
||||
info!("YOYO DONE YO !")
|
||||
}
|
||||
Err(why) => {
|
||||
error!("deserializer creation failed: {why:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(why) => {
|
||||
error!("load failed: {why:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn inject_component_data(world: &mut World, scene: DynamicScene){
|
||||
let mut entity_map = EntityMap::default();
|
||||
if let Err(why) = scene.write_to_world(world, &mut entity_map) {
|
||||
panic!("world write failed: {why:?}");
|
||||
}
|
||||
println!("entity map {:?}", entity_map);
|
||||
// TODO: EntityMap doesn't implement `iter()`
|
||||
for old_entity in entity_map.keys() {
|
||||
let entity = entity_map.get(old_entity).unwrap();
|
||||
info!("entity update required: {old_entity:?} -> {entity:?}");
|
||||
let e_mut = world
|
||||
.entity_mut(entity);
|
||||
}
|
||||
|
||||
info!("done loading scene");
|
||||
}
|
||||
|
||||
fn post_load(world: &mut World){
|
||||
let full_path = "/home/ckaos/projects/grappling-boom-bot/assets/save.ron";
|
||||
match File::open(full_path) {
|
||||
Ok(mut file) => {
|
||||
let mut serialized_scene = Vec::new();
|
||||
if let Err(why) = file.read_to_end(&mut serialized_scene) {
|
||||
error!("file read failed: {why:?}");
|
||||
}
|
||||
match Deserializer::from_bytes(&serialized_scene) {
|
||||
Ok(mut deserializer) => {
|
||||
let result = SceneDeserializer {
|
||||
type_registry: &world.resource::<AppTypeRegistry>().read(),
|
||||
}
|
||||
.deserialize(&mut deserializer);
|
||||
info!("deserialize done");
|
||||
match result {
|
||||
Ok(scene) => {
|
||||
info!("scene loaded");
|
||||
// scene.write_to_world(world, entity_map)
|
||||
// println!("{:?}", scene.entities);
|
||||
inject_component_data(world, scene);
|
||||
/*for dyn_ent in scene.entities.iter(){
|
||||
// let mut query = scene.world.query::<(Entity, &Name, &GltfExtras, &Parent)>();
|
||||
}*/
|
||||
}
|
||||
Err(why) => {
|
||||
error!("deserialization failed: {why:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(why) => {
|
||||
error!("deserializer creation failed: {why:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(why) => {
|
||||
error!("load failed: {why:?}");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
#[derive(Component, Reflect, Debug, Default )]
|
||||
#[reflect(Component)]
|
||||
pub struct Hackish;
|
||||
|
||||
|
||||
|
||||
/// unload saveables
|
||||
fn unload_saveables(world: &mut World) {
|
||||
let entities: Vec<Entity> = world
|
||||
.query_filtered::<Entity, With<Saveable>>()// our level/world contains this component
|
||||
.iter(world)
|
||||
.collect();
|
||||
for entity in entities {
|
||||
// Check the entity again in case it was despawned recursively
|
||||
if world.get_entity(entity).is_some() {
|
||||
info!("despawning");
|
||||
world.entity_mut(entity).despawn_recursive();
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,14 @@
|
||||
use bevy::prelude::*;
|
||||
use bevy::utils::Uuid;
|
||||
|
||||
#[derive(Component, Reflect, Debug)]
|
||||
#[reflect(Component)]
|
||||
pub struct Saveable {
|
||||
id: Uuid,
|
||||
}
|
||||
|
||||
impl Default for Saveable {
|
||||
fn default() -> Self {
|
||||
Saveable { id: Uuid::new_v4() }
|
||||
}
|
||||
}
|
@ -0,0 +1,87 @@
|
||||
use bevy::pbr::{Clusters, VisiblePointLights};
|
||||
use bevy::render::camera::CameraRenderGraph;
|
||||
use bevy::render::view::VisibleEntities;
|
||||
use bevy::tasks::IoTaskPool;
|
||||
use bevy::{gltf::GltfExtras, prelude::*};
|
||||
use bevy_rapier3d::prelude::RigidBody;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
|
||||
use crate::core::physics::Collider;
|
||||
use crate::game::{Pickable, Player};
|
||||
|
||||
use super::Saveable;
|
||||
|
||||
const NEW_SCENE_FILE_PATH: &str = "save.scn.ron";
|
||||
|
||||
#[derive(Component, Event)]
|
||||
pub struct SaveRequest {
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
pub fn should_save(
|
||||
// keycode: Res<Input<KeyCode>>,
|
||||
save_requested_events: EventReader<SaveRequest>,
|
||||
) -> bool {
|
||||
return save_requested_events.len() > 0;
|
||||
|
||||
// return keycode.just_pressed(KeyCode::S)
|
||||
}
|
||||
|
||||
pub fn save_game(
|
||||
world: &mut World,
|
||||
// save_requested_events: EventReader<SaveRequest>,
|
||||
) {
|
||||
info!("saving");
|
||||
// world.
|
||||
/*for bli in save_requested_events.iter(){
|
||||
println!("SAAAAVE TO THISSSSS {:?}", bli.path)
|
||||
}*/
|
||||
|
||||
let saveable_entities: Vec<Entity> = world
|
||||
.query_filtered::<Entity, With<Saveable>>()
|
||||
.iter(world)
|
||||
.collect();
|
||||
|
||||
/*let static_entities: Vec<Entity> = world
|
||||
.query_filtered::<Entity, Without<Saveable>>()
|
||||
.iter(world)
|
||||
.collect();*/
|
||||
println!("saveable entities {}", saveable_entities.len());
|
||||
|
||||
let mut scene_builder = DynamicSceneBuilder::from_world(world);
|
||||
scene_builder
|
||||
.deny::<Children>()
|
||||
.deny::<Parent>()
|
||||
.deny::<ComputedVisibility>()
|
||||
.deny::<Visibility>()
|
||||
.deny::<GltfExtras>()
|
||||
.deny::<GlobalTransform>()
|
||||
.deny::<Collider>()
|
||||
.deny::<RigidBody>()
|
||||
.deny::<Saveable>()
|
||||
// camera stuff
|
||||
.deny::<Camera>()
|
||||
.deny::<CameraRenderGraph>()
|
||||
.deny::<Camera3d>()
|
||||
.deny::<Clusters>()
|
||||
.deny::<VisibleEntities>()
|
||||
.deny::<VisiblePointLights>()
|
||||
//.deny::<HasGizmoMarker>()
|
||||
.extract_entities(saveable_entities.into_iter());
|
||||
|
||||
let dyn_scene = scene_builder.build();
|
||||
let serialized_scene = dyn_scene
|
||||
.serialize_ron(world.resource::<AppTypeRegistry>())
|
||||
.unwrap();
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
IoTaskPool::get()
|
||||
.spawn(async move {
|
||||
// Write the scene RON data to file
|
||||
File::create(format!("assets/scenes/{NEW_SCENE_FILE_PATH}"))
|
||||
.and_then(|mut file| file.write(serialized_scene.as_bytes()))
|
||||
.expect("Error while writing scene to file");
|
||||
})
|
||||
.detach();
|
||||
}
|
@ -0,0 +1,85 @@
|
||||
use bevy::prelude::*;
|
||||
|
||||
use crate::{
|
||||
assets::GameAssets,
|
||||
state::{GameState, InAppRunning},
|
||||
};
|
||||
use bevy_gltf_blueprints::{BluePrintBundle, BlueprintName, GameWorldTag};
|
||||
|
||||
use bevy_rapier3d::prelude::Velocity;
|
||||
use rand::Rng;
|
||||
|
||||
pub fn setup_game(
|
||||
mut commands: Commands,
|
||||
game_assets: Res<GameAssets>,
|
||||
models: Res<Assets<bevy::gltf::Gltf>>,
|
||||
mut next_game_state: ResMut<NextState<GameState>>,
|
||||
) {
|
||||
println!("setting up all stuff");
|
||||
commands.insert_resource(AmbientLight {
|
||||
color: Color::WHITE,
|
||||
brightness: 0.2,
|
||||
});
|
||||
// here we actually spawn our game world/level
|
||||
|
||||
commands.spawn((
|
||||
SceneBundle {
|
||||
// note: because of this issue https://github.com/bevyengine/bevy/issues/10436, "world" is now a gltf file instead of a scene
|
||||
scene: models
|
||||
.get(game_assets.world.id())
|
||||
.expect("main level should have been loaded")
|
||||
.scenes[0]
|
||||
.clone(),
|
||||
..default()
|
||||
},
|
||||
bevy::prelude::Name::from("world"),
|
||||
GameWorldTag,
|
||||
InAppRunning,
|
||||
));
|
||||
|
||||
next_game_state.set(GameState::InGame)
|
||||
}
|
||||
|
||||
pub fn spawn_test(
|
||||
keycode: Res<Input<KeyCode>>,
|
||||
mut commands: Commands,
|
||||
|
||||
mut game_world: Query<(Entity, &Children), With<GameWorldTag>>,
|
||||
) {
|
||||
if keycode.just_pressed(KeyCode::T) {
|
||||
let world = game_world.single_mut();
|
||||
let world = world.1[0];
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
let range = 5.5;
|
||||
let x: f32 = rng.gen_range(-range..range);
|
||||
let y: f32 = rng.gen_range(-range..range);
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
let range = 0.8;
|
||||
let vel_x: f32 = rng.gen_range(-range..range);
|
||||
let vel_y: f32 = rng.gen_range(2.0..2.5);
|
||||
let vel_z: f32 = rng.gen_range(-range..range);
|
||||
|
||||
let name_index: u64 = rng.gen();
|
||||
|
||||
let new_entity = commands
|
||||
.spawn((
|
||||
BluePrintBundle {
|
||||
blueprint: BlueprintName("Health_Pickup".to_string()),
|
||||
transform: TransformBundle::from_transform(Transform::from_xyz(x, 2.0, y)),
|
||||
..Default::default()
|
||||
},
|
||||
bevy::prelude::Name::from(format!("test{}", name_index)),
|
||||
// BlueprintName("Health_Pickup".to_string()),
|
||||
// SpawnHere,
|
||||
// TransformBundle::from_transform(Transform::from_xyz(x, 2.0, y)),
|
||||
Velocity {
|
||||
linvel: Vec3::new(vel_x, vel_y, vel_z),
|
||||
angvel: Vec3::new(0.0, 0.0, 0.0),
|
||||
},
|
||||
))
|
||||
.id();
|
||||
commands.entity(world).add_child(new_entity);
|
||||
}
|
||||
}
|
@ -0,0 +1,113 @@
|
||||
use bevy::prelude::*;
|
||||
|
||||
use crate::state::{AppState, GameState, InMainMenu};
|
||||
|
||||
pub fn setup_main_menu(mut commands: Commands) {
|
||||
commands.spawn((Camera2dBundle::default(), InMainMenu));
|
||||
|
||||
commands.spawn((
|
||||
TextBundle::from_section(
|
||||
"SOME GAME TITLE !!",
|
||||
TextStyle {
|
||||
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
|
||||
font_size: 18.0,
|
||||
color: Color::WHITE,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.with_style(Style {
|
||||
position_type: PositionType::Absolute,
|
||||
top: Val::Px(100.0),
|
||||
left: Val::Px(200.0),
|
||||
..default()
|
||||
}),
|
||||
InMainMenu,
|
||||
));
|
||||
|
||||
commands.spawn((
|
||||
TextBundle::from_section(
|
||||
"New Game (press Enter to start, press T once the game is started for demo spawning)",
|
||||
TextStyle {
|
||||
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
|
||||
font_size: 18.0,
|
||||
color: Color::WHITE,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.with_style(Style {
|
||||
position_type: PositionType::Absolute,
|
||||
top: Val::Px(200.0),
|
||||
left: Val::Px(200.0),
|
||||
..default()
|
||||
}),
|
||||
InMainMenu,
|
||||
));
|
||||
|
||||
/*
|
||||
commands.spawn((
|
||||
TextBundle::from_section(
|
||||
"Load Game",
|
||||
TextStyle {
|
||||
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
|
||||
font_size: 18.0,
|
||||
color: Color::WHITE,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.with_style(Style {
|
||||
position_type: PositionType::Absolute,
|
||||
top: Val::Px(250.0),
|
||||
left: Val::Px(200.0),
|
||||
..default()
|
||||
}),
|
||||
InMainMenu
|
||||
));
|
||||
|
||||
commands.spawn((
|
||||
TextBundle::from_section(
|
||||
"Exit Game",
|
||||
TextStyle {
|
||||
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
|
||||
font_size: 18.0,
|
||||
color: Color::WHITE,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.with_style(Style {
|
||||
position_type: PositionType::Absolute,
|
||||
top: Val::Px(300.0),
|
||||
left: Val::Px(200.0),
|
||||
..default()
|
||||
}),
|
||||
InMainMenu
|
||||
));*/
|
||||
}
|
||||
|
||||
pub fn teardown_main_menu(bla: Query<Entity, With<InMainMenu>>, mut commands: Commands) {
|
||||
for bli in bla.iter() {
|
||||
commands.entity(bli).despawn_recursive();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn main_menu(
|
||||
keycode: Res<Input<KeyCode>>,
|
||||
|
||||
mut next_app_state: ResMut<NextState<AppState>>,
|
||||
// mut next_game_state: ResMut<NextState<GameState>>,
|
||||
// mut save_requested_events: EventWriter<SaveRequest>,
|
||||
// mut load_requested_events: EventWriter<LoadRequest>,
|
||||
) {
|
||||
if keycode.just_pressed(KeyCode::Return) {
|
||||
next_app_state.set(AppState::AppLoading);
|
||||
// next_game_state.set(GameState::None);
|
||||
}
|
||||
|
||||
if keycode.just_pressed(KeyCode::L) {
|
||||
next_app_state.set(AppState::AppLoading);
|
||||
// load_requested_events.send(LoadRequest { path: "toto".into() })
|
||||
}
|
||||
|
||||
if keycode.just_pressed(KeyCode::S) {
|
||||
// save_requested_events.send(SaveRequest { path: "toto".into() })
|
||||
}
|
||||
}
|
115
examples/bevy_gltf_blueprints/nested_blueprints/src/game/mod.rs
Normal file
115
examples/bevy_gltf_blueprints/nested_blueprints/src/game/mod.rs
Normal file
@ -0,0 +1,115 @@
|
||||
pub mod in_game;
|
||||
pub use in_game::*;
|
||||
|
||||
pub mod in_main_menu;
pub use in_main_menu::*;

pub mod picking;
pub use picking::*;

use crate::{
    insert_dependant_component,
    state::{AppState, GameState},
};
use bevy::prelude::*;
use bevy_rapier3d::prelude::*;

// this file is just for demo purposes, contains various types of components, systems etc

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum SoundMaterial {
    Metal,
    Wood,
    Rock,
    Cloth,
    Squishy,
    #[default]
    None,
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
/// Demo marker component
pub struct Player;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
/// Demo component showing auto injection of components
pub struct ShouldBeWithPlayer;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
/// Demo marker component
pub struct Interactible;

fn player_move_demo(
    keycode: Res<Input<KeyCode>>,
    mut players: Query<&mut Transform, With<Player>>,
) {
    let speed = 0.2;
    if let Ok(mut player) = players.get_single_mut() {
        if keycode.pressed(KeyCode::Left) {
            player.translation.x += speed;
        }
        if keycode.pressed(KeyCode::Right) {
            player.translation.x -= speed;
        }

        if keycode.pressed(KeyCode::Up) {
            player.translation.z += speed;
        }
        if keycode.pressed(KeyCode::Down) {
            player.translation.z -= speed;
        }
    }
}

// collision tests/debug
pub fn test_collision_events(
    mut collision_events: EventReader<CollisionEvent>,
    mut contact_force_events: EventReader<ContactForceEvent>,
) {
    for collision_event in collision_events.read() {
        println!("collision");
        match collision_event {
            CollisionEvent::Started(_entity1, _entity2, _) => {
                println!("collision started")
            }
            CollisionEvent::Stopped(_entity1, _entity2, _) => {
                println!("collision ended")
            }
        }
    }

    for contact_force_event in contact_force_events.read() {
        println!("Received contact force event: {:?}", contact_force_event);
    }
}

pub struct GamePlugin;
impl Plugin for GamePlugin {
    fn build(&self, app: &mut App) {
        app.add_plugins(PickingPlugin)
            .register_type::<Interactible>()
            .register_type::<SoundMaterial>()
            .register_type::<Player>()
            // little helper utility, to automatically inject components that are dependent on another component
            // ie, here an Entity with a Player component should also always have a ShouldBeWithPlayer component
            // you get a warning if you use this, as I consider it a stop-gap solution (usually you should have either a bundle, or directly define all needed components)
            .add_systems(
                Update,
                (
                    // insert_dependant_component::<Player, ShouldBeWithPlayer>,
                    player_move_demo, //.run_if(in_state(AppState::Running)),
                    // test_collision_events
                    spawn_test,
                )
                    .run_if(in_state(GameState::InGame)),
            )
            .add_systems(OnEnter(AppState::MenuRunning), setup_main_menu)
            .add_systems(OnExit(AppState::MenuRunning), teardown_main_menu)
            .add_systems(Update, (main_menu))
            .add_systems(OnEnter(AppState::AppRunning), setup_game);
    }
}
@ -0,0 +1,34 @@
use super::Player;
use bevy::prelude::*;
use bevy_gltf_blueprints::GltfBlueprintsSet;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct Pickable;

// very simple, crude picking (as in picking up objects) implementation

pub fn picking(
    players: Query<&GlobalTransform, With<Player>>,
    pickables: Query<(Entity, &GlobalTransform), With<Pickable>>,
    mut commands: Commands,
) {
    for player_transforms in players.iter() {
        for (pickable, pickable_transforms) in pickables.iter() {
            let distance = player_transforms
                .translation()
                .distance(pickable_transforms.translation());
            if distance < 2.5 {
                commands.entity(pickable).despawn_recursive();
            }
        }
    }
}

pub struct PickingPlugin;
impl Plugin for PickingPlugin {
    fn build(&self, app: &mut App) {
        app.register_type::<Pickable>()
            .add_systems(Update, (picking.after(GltfBlueprintsSet::AfterSpawn),));
    }
}
33
examples/bevy_gltf_blueprints/nested_blueprints/src/main.rs
Normal file
@ -0,0 +1,33 @@
use bevy::prelude::*;
use bevy_editor_pls::prelude::*;

mod core;
use crate::core::*;

pub mod assets;
use assets::*;

pub mod state;
use state::*;

mod game;
use game::*;

mod test_components;
use test_components::*;

fn main() {
    App::new()
        .add_plugins((
            DefaultPlugins.set(AssetPlugin::default()),
            // editor
            EditorPlugin::default(),
            // our custom plugins
            StatePlugin,
            AssetsPlugin,
            CorePlugin,           // reusable plugins
            GamePlugin,           // specific to our game
            ComponentsTestPlugin, // showcases different types of components/structs
        ))
        .run();
}
54
examples/bevy_gltf_blueprints/nested_blueprints/src/state.rs
Normal file
@ -0,0 +1,54 @@
use bevy::app::AppExit;
use bevy::prelude::*;

#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default, States)]
pub enum AppState {
    #[default]
    CoreLoading,
    MenuRunning,
    AppLoading,
    AppRunning,
    AppEnding,

    // FIXME: not sure
    LoadingGame,
}

#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default, States)]
pub enum GameState {
    #[default]
    None,

    InMenu,
    InGame,

    InGameOver,

    InSaving,
    InLoading,
}

// tag components for all entities within a certain state (for despawning them if needed), FIXME: seems kinda hack-ish
#[derive(Component)]
pub struct InCoreLoading;
#[derive(Component, Default)]
pub struct InMenuRunning;
#[derive(Component)]
pub struct InAppLoading;
#[derive(Component)]
pub struct InAppRunning;

// components for tagging in game vs in game menu stuff
#[derive(Component, Default)]
pub struct InMainMenu;
#[derive(Component, Default)]
pub struct InMenu;
#[derive(Component, Default)]
pub struct InGame;

pub struct StatePlugin;
impl Plugin for StatePlugin {
    fn build(&self, app: &mut App) {
        app.add_state::<AppState>().add_state::<GameState>();
    }
}
@ -0,0 +1,93 @@
use bevy::prelude::*;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct Marker;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct Enemy;

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct NestingTest;

#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
struct TuppleTestF32(f32);

#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
struct TuppleTestU64(u64);

#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
pub struct TuppleTestStr(String);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleTest2(f32, u64, String);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleTestBool(bool);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleVec2(Vec2);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleVec3(Vec3);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleVec(Vec<String>);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleTestColor(Color);

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct BasicTest {
    a: f32,
    b: u64,
    c: String,
}

#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum EnumTest {
    Metal,
    Wood,
    Rock,
    Cloth,
    Squishy,
    #[default]
    None,
}

pub struct ComponentsTestPlugin;
impl Plugin for ComponentsTestPlugin {
    fn build(&self, app: &mut App) {
        app.register_type::<BasicTest>()
            .register_type::<Marker>()
            .register_type::<Enemy>()
            .register_type::<NestingTest>()
            .register_type::<TuppleTestF32>()
            .register_type::<TuppleTestU64>()
            .register_type::<TuppleTestStr>()
            .register_type::<TuppleTestBool>()
            .register_type::<TuppleTest2>()
            .register_type::<TuppleVec2>()
            .register_type::<TuppleVec3>()
            .register_type::<EnumTest>()
            .register_type::<TuppleTestColor>()
            .register_type::<TuppleVec>()
            .register_type::<Vec<String>>();
    }
}
@ -62,6 +62,9 @@ This [Blender addon](./)

- export blueprints: check this if you want to automatically export blueprints (default: True)
- blueprints path: the path to export blueprints to, relative to the main **export folder** (default: library)
- export nested blueprints: check this if you want to automatically export nested blueprints (collection instances inside blueprint collections)
  as separate blueprints (default: True);
  please read the dedicated section below for more information

- export materials library: check this if you want to automatically export material libraries (default: False);
  please read the dedicated section below for more information
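A minimal sketch of how the add-on reads these settings back at export time (this mirrors the `getattr` calls in `auto_export.py` further down in this commit; run from within Blender with the add-on enabled):

```python
import bpy

# read the add-on preferences the same way the exporter does;
# the property names match the settings listed above
addon_prefs = bpy.context.preferences.addons["gltf_auto_export"].preferences
export_blueprints = getattr(addon_prefs, "export_blueprints")
export_blueprints_path = getattr(addon_prefs, "export_blueprints_path")
export_nested_blueprints = getattr(addon_prefs, "export_nested_blueprints")
export_materials_library = getattr(addon_prefs, "export_materials_library")
```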
@ -109,6 +112,40 @@ You can enable this option to automatically replace all the **collection instanc

![exported collections](./docs/exported_collections.png)


#### Nested blueprints

To maximise reuse of meshes/components etc, you can also nest ***collection instances*** inside collections (as you normally would in Blender), and also export each nested blueprint as a separate blueprint.

> Don't forget to toggle the option in the exporter settings

- To make things clearer:

![nested-blueprints](./docs/nested_blueprints.png)

- **Player2** & **Enemy** both use the **Humanoid_cactus** nested collection/blueprint, so **Humanoid_cactus** gets exported as a blueprint for re-use ...but
- **Humanoid_cactus** is also made up of a main mesh & two instances of **Hand**, so **Hand** gets exported as a blueprint for re-use ...but
- **Hand** is also made up of a main mesh & three instances of **Finger**, so **Finger** gets exported as a blueprint for re-use

- The exported models in this case end up being:

![nested_blueprints2](./docs/nested_blueprints2.png)

- Note how **Player2.glb** is tiny, because most of its data is actually stored in **Humanoid_cactus.glb**
- **Enemy.glb** is slightly bigger because that blueprint contains additional meshes
- All the intermediary blueprints get exported automatically, and all instances are replaced with "empties" (see the explanation in the **Process section**) to minimize file size

- Compare this to the output **WITHOUT** the nested export option:

![nested_blueprints3](./docs/nested_blueprints3.png)

- fewer blueprints, as the sub-collections that are not directly in use somewhere are not exported
- **Player2.glb** & **Enemy.glb** are significantly larger

TLDR: smaller, more reusable blueprints which can share sub-parts with other entities!
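To make the "empties" mentioned above concrete, here is a simplified sketch of what the exporter does for each nested collection instance (see `blueprints.py` in this commit); `replace_instance_with_empty` is an illustrative name, not an actual function of the add-on:

```python
import bpy

def replace_instance_with_empty(instance_obj, destination_collection):
    # create an empty (an object with no data) at the instance's transform
    empty = bpy.data.objects.new(instance_obj.name, None)
    empty.location = instance_obj.location
    empty.rotation_euler = instance_obj.rotation_euler
    empty.scale = instance_obj.scale
    # tag it so the Bevy side knows which blueprint to spawn here
    empty['BlueprintName'] = '"' + instance_obj.instance_collection.name + '"'
    empty['SpawnHere'] = ''
    destination_collection.objects.link(empty)
    return empty
```
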
### Materials

You can enable this option to automatically generate a **material library** file that combines all the materials in use in your blueprints.
@ -1,7 +1,7 @@
bl_info = {
    "name": "gltf_auto_export",
    "author": "kaosigh",
    "version": (0, 7, 1),
    "version": (0, 8, 0),
    "blender": (3, 4, 0),
    "location": "File > Import-Export",
    "description": "glTF/glb auto-export",
@ -43,6 +43,7 @@ bpy.context.window_manager['changed_objects_per_scene'] = {}
bpy.context.window_manager['previous_params'] = {}
bpy.context.window_manager['__gltf_auto_export_initialized'] = False
bpy.context.window_manager['__gltf_auto_export_gltf_params_changed'] = False
bpy.context.window_manager['__gltf_auto_export_saving'] = False

######################################################
""" there are two places where we load settings for auto_export from:
@ -60,6 +61,9 @@ def deps_update_handler(scene, depsgraph):
    print("-------------")
    changed = scene.name or ""

    # only deal with changes if we are not in the midst of saving/exporting
    #if not bpy.context.window_manager['__gltf_auto_export_saving']:

    # depsgraph = bpy.context.evaluated_depsgraph_get()
    if not 'changed_objects_per_scene' in bpy.context.window_manager:
        bpy.context.window_manager['changed_objects_per_scene'] = {}
@ -71,6 +75,7 @@ def deps_update_handler(scene, depsgraph):
        if isinstance(obj.id, bpy.types.Object):
            # get the actual object
            object = bpy.data.objects[obj.id.name]
            print("changed object", obj.id.name)
            bpy.context.window_manager['changed_objects_per_scene'][scene.name][obj.id.name] = object

    bpy.context.window_manager.changedScene = changed
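For clarity, the change-tracking dictionary built above is keyed by scene name, then by object name; the scene and object names below are made-up examples:

```python
import bpy

# shape: {scene name: {object name: the changed bpy object}}
changed_objects_per_scene = {
    "World": {"Player2": bpy.data.objects.get("Player2")},
    "Library": {"Hand": bpy.data.objects.get("Hand")},
}
```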
@ -79,10 +84,16 @@
def save_handler(dummy):
    print("-------------")
    print("saved", bpy.data.filepath)
    # mark saving as in progress, this is needed to ignore any changes from the depsgraph done during saving
    # bpy.context.window_manager['__gltf_auto_export_saving'] = True

    if not 'changed_objects_per_scene' in bpy.context.window_manager:
        bpy.context.window_manager['changed_objects_per_scene'] = {}
    changes_per_scene = bpy.context.window_manager['changed_objects_per_scene']

    if not 'previous_params' in bpy.context.window_manager:
        bpy.context.window_manager['previous_params'] = {}

    # determine changed parameters
    addon_prefs = bpy.context.preferences.addons["gltf_auto_export"].preferences

@ -113,6 +124,11 @@ def save_handler(dummy):
    # reset whether there have been changed objects since the last save
    bpy.context.window_manager['changed_objects_per_scene'] = {}

    # all our logic is done, mark this as done
    #bpy.context.window_manager['__gltf_auto_export_saving'] = False
    print("EXPORT DONE")


def get_changedScene(self):
    return self["changedScene"]
@ -153,6 +169,7 @@ def register():
    bpy.app.handlers.depsgraph_update_post.append(deps_update_handler)
    bpy.app.handlers.save_post.append(save_handler)


    bpy.types.WindowManager.changedScene = bpy.props.StringProperty(get=get_changedScene, set=set_changedScene)
    bpy.types.WindowManager.exportedCollections = bpy.props.CollectionProperty(type=CollectionsToExport)
@ -48,8 +48,6 @@ def auto_export(changes_per_scene, changed_export_parameters):
            print("error setting preferences from saved settings", error)
        bpy.context.window_manager['__gltf_auto_export_initialized'] = True



    # have the export parameters (not auto export, just gltf export) changed? if yes (for example a switch from glb to gltf, compression or not, animations or not, etc), we need to re-export everything
    print("changed_export_parameters", changed_export_parameters)
    try:
@ -63,6 +61,7 @@ def auto_export(changes_per_scene, changed_export_parameters):

        export_materials_library = getattr(addon_prefs,"export_materials_library")
        export_scene_settings = getattr(addon_prefs,"export_scene_settings")
        export_nested_blueprints = getattr(addon_prefs,"export_nested_blueprints")


        [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
@ -74,11 +73,12 @@ def auto_export(changes_per_scene, changed_export_parameters):
        # inject / update scene components
        upsert_scene_components(bpy.context.scene, world = bpy.context.scene.world)

        # export everything everytime
        # export
        if export_blueprints:
            print("EXPORTING")
            # get a list of all collections actually in use
            collections = get_exportable_collections(level_scenes, library_scenes)
            scan_nested_collections = export_nested_blueprints
            (collections, blueprint_hierarchy) = get_exportable_collections(level_scenes, library_scenes, scan_nested_collections)
            # first check if all collections have already been exported before (if this is the first time the exporter is run
            # in your current Blender session for example)
            export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
@ -96,10 +96,10 @@ def auto_export(changes_per_scene, changed_export_parameters):
                object_collections = list(obj.users_collection)
                object_collection_names = list(map(lambda collection: collection.name, object_collections))
                if len(object_collection_names) > 1:
                    print("ERROR: objects in multiple collections not supported")
                    print("ERROR for", obj_name, ": objects in multiple collections not supported")
                else:
                    object_collection_name = object_collection_names[0] if len(object_collection_names) > 0 else None
                    print(" object ", obj, object_collection_name)
                    #print(" object ", obj, object_collection_name)
                    if object_collection_name in collections:
                        changed_collections.append(object_collection_name)

@ -111,6 +111,7 @@ def auto_export(changes_per_scene, changed_export_parameters):


            # collections that do not come from a library should not be exported as separate blueprints
            # FIXME: logic is erroneous, needs to be changed
            library_collections = [name for sublist in collections_per_scene.values() for name in sublist]
            collections_to_export = list(set(collections_to_export).intersection(set(library_collections)))

@ -151,7 +152,7 @@ def auto_export(changes_per_scene, changed_export_parameters):
                print("  exporting collections from scene:", scene_name)
                print("  collections to export", collections_to_export)
                library_scene = bpy.data.scenes[scene_name]
                export_blueprints_from_collections(collections_to_export, library_scene, folder_path, addon_prefs)
                export_blueprints_from_collections(collections_to_export, library_scene, folder_path, addon_prefs, blueprint_hierarchy, collections)

    # reset current scene from backup
    bpy.context.window.scene = old_current_scene
113
tools/gltf_auto_export/blueprints.py
Normal file
@ -0,0 +1,113 @@
import bpy
from .helpers_collections import (find_layer_collection_recursive)
from .helpers import (make_empty3, traverse_tree)


def generate_blueprint_hollow_scene(blueprint_collection, library_collections):
    temp_scene = bpy.data.scenes.new(name="temp_scene_"+blueprint_collection.name)
    temp_scene_root_collection = temp_scene.collection

    # we set our active scene to be this one: this is needed, otherwise the stand-in empties get generated in the wrong scene
    bpy.context.window.scene = temp_scene
    found = find_layer_collection_recursive(temp_scene_root_collection, bpy.context.view_layer.layer_collection)
    if found:
        # once it's found, set the active layer collection to the one we found
        bpy.context.view_layer.active_layer_collection = found

    original_names = []

    # TODO: also add the handling for "template" flags, so that instead of creating empties we link the data from the sub collection INTO the parent collection
    # copies the contents of a collection into another one while replacing blueprint instances with empties
    def copy_hollowed_collection_into(source_collection, destination_collection):
        for object in source_collection.objects:
            # FIXME: an enum would be better
            """ combine mode can be
                - 'Split' (default): replace with an empty, creating links to sub blueprints
                - 'Embed': treat it as an embedded object and do not replace it with an empty
                - 'Inject': inject components from sub collection instances into the current object
            """
            combineMode = 'Split' if not 'Combine' in object else object['Combine']
            # TODO: implement
            # print("COMBINE MODE", combineMode)
            # embed = 'Embed' in object and object['Embed'] == True # if the object has the "embed" flag set to true, treat it as an embedded object and do not replace it with an empty
            # merge = 'Merge' in object and object['Merge'] == True
            if object.instance_type == 'COLLECTION' and (object.instance_collection.name in library_collections):
                # if we have combine_mode set to "merge", we take all the custom attributes of the nested (1 level only! unless we use 'deepMerge') collection instance and copy them to this level
                """TODO: implement later
                if merge:
                    foo = get_nested_components(object)
                    print("nested components", foo)
                    pass
                else:
                """
                collection_name = object.instance_collection.name

                original_name = object.name
                original_names.append(original_name)

                object.name = original_name + "____bak"
                empty_obj = make_empty3(original_name, object.location, object.rotation_euler, object.scale, destination_collection)
                """we inject the collection/blueprint name, as a component called 'BlueprintName', but we only do this in the empty, not the original object"""
                empty_obj['BlueprintName'] = '"'+collection_name+'"'
                empty_obj['SpawnHere'] = ''

                for k, v in object.items():
                    empty_obj[k] = v
            else:
                destination_collection.objects.link(object)

        # for every sub-collection of the source, copy its content into a new sub-collection of the destination
        for collection in source_collection.children:
            copy_collection = bpy.data.collections.new(collection.name + "____collection_export")
            copy_hollowed_collection_into(collection, copy_collection)
            destination_collection.children.link(copy_collection)

    copy_hollowed_collection_into(blueprint_collection, temp_scene_root_collection)

    return (temp_scene, original_names)


# clear & remove "hollow scene"
def clear_blueprint_hollow_scene(temp_scene, original_collection, original_names):

    def restore_original_names(collection):
        for object in collection.objects:
            if object.instance_type == 'COLLECTION':
                if object.name.endswith("____bak"):
                    object.name = object.name.replace("____bak", "")
        for child_collection in collection.children:
            restore_original_names(child_collection)

    restore_original_names(original_collection)

    # remove empties (only needed when we go via ops ????)
    root_collection = temp_scene.collection
    scene_objects = [o for o in root_collection.objects]
    for object in scene_objects:
        if object.type == 'EMPTY':
            if hasattr(object, "SpawnHere"):
                bpy.data.objects.remove(object, do_unlink=True)
            else:
                bpy.context.scene.collection.objects.unlink(object)
                #bpy.data.objects.remove(object, do_unlink=True)

    bpy.data.scenes.remove(temp_scene)

# TODO: add a flag to also search for deeply nested components
def get_nested_components(object):
    if object.instance_type == 'COLLECTION':
        collection_name = object.instance_collection.name
        collection = bpy.data.collections[collection_name]
        all_objects = collection.all_objects
        result = []
        for object in all_objects:
            components = dict(object)
            if len(components.keys()) > 0:
                result += [(object, components)]
        return result
    return []
    #for collection in traverse_tree(collection):
    #    for object in collection.all_objects
Binary file not shown.
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
BIN
tools/gltf_auto_export/docs/nested_blueprints.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 85 KiB |
BIN
tools/gltf_auto_export/docs/nested_blueprints2.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 29 KiB |
BIN
tools/gltf_auto_export/docs/nested_blueprints3.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 23 KiB |
@ -37,7 +37,7 @@ def make_empty3(name, location, rotation, scale, collection):
    bpy.context.view_layer.objects.active = original_active_object
    return empty_obj


# traverse all collections
def traverse_tree(t):
    yield t
    for child in t.children:
@ -48,7 +48,7 @@ def check_if_blueprints_exist(collections, folder_path, extension):
    not_found_blueprints = []
    for collection_name in collections:
        gltf_output_path = os.path.join(folder_path, collection_name + extension)
        print("gltf_output_path", gltf_output_path)
        # print("gltf_output_path", gltf_output_path)
        found = os.path.exists(gltf_output_path) and os.path.isfile(gltf_output_path)
        if not found:
            not_found_blueprints.append(collection_name)
@ -2,6 +2,7 @@ import bpy
from .helpers import traverse_tree

# returns the list of the collections in use for a given scene
# FIXME: this should also look into sub collections
def get_used_collections(scene):
    root_collection = scene.collection

@ -11,13 +12,7 @@ def get_used_collections(scene):
    for object in scene_objects:
        #print("object ", object)
        if object.instance_type == 'COLLECTION':
            #print("THIS OBJECT IS A COLLECTION")
            # print("instance_type" ,object.instance_type)
            collection_name = object.instance_collection.name
            #print("instance collection", object.instance_collection.name)
            #object.instance_collection.users_scene
            # del object['blueprint']
            # object['BlueprintName'] = '"'+collection_name+'"'
            if not collection_name in collection_names:
                collection_names.add(collection_name)
                used_collections.append(object.instance_collection)
@ -37,16 +32,87 @@ def get_marked_collections(scene):
            collection_names.append(collection.name)
    return (collection_names, marked_collections)

# gets all collections within collections that might also be relevant
def get_sub_collections(collections, parent, children_per_collection):
    collection_names = set()
    used_collections = []


    for root_collection in collections:
        node = Node(name=root_collection.name, parent=parent)
        parent.children.append(node)


        #print("root collection", root_collection.name)
        for collection in traverse_tree(root_collection): # TODO: filter out COLLECTIONS that have the flatten flag (unlike the flatten flag on collection instances themselves)
            node_name = collection.name
            children_per_collection[node_name] = []
            #print("  scanning", collection.name)
            for object in collection.objects:
                #print("FLATTEN", object.name, 'Flatten' in object)
                if object.instance_type == 'COLLECTION' : # and not 'Flatten' in object:
                    collection_name = object.instance_collection.name
                    (sub_names, sub_collections) = get_sub_collections([object.instance_collection], node, children_per_collection)
                    if len(list(sub_names)) > 0:
                        children_per_collection[node_name] += (list(sub_names))
                    #print("   found sub collection in use", object.name, object.instance_collection)


                    if not collection_name in collection_names:
                        collection_names.add(collection_name)
                        used_collections.append(object.instance_collection)
                        collection_names.update(sub_names)

    #for sub in traverse_tree(root_collection):
    return (collection_names, used_collections)

# FIXME: get rid of this, ugh
def flatten_collection_tree(node, children_per_collection):
    children_per_collection[node.name] = []
    for child in node.children:
        if not node.name in children_per_collection[node.name]:
            children_per_collection[node.name].append(child.name)
        flatten_collection_tree(child, children_per_collection)
    children_per_collection[node.name] = list(set( children_per_collection[node.name]))


class Node:
    def __init__(self, name="", parent=None):
        self.name = name
        self.children = []
        self.changed = False
        self.parent = parent
        return
    def __str__(self):
        children = list(map(lambda child: str(child), self.children))
        return "name: " + self.name + ", children:" + str(children)

# get exportable collections from lists of main scenes and lists of library scenes
def get_exportable_collections(main_scenes, library_scenes):
def get_exportable_collections(main_scenes, library_scenes, scan_nested_collections):
    all_collections = []
    all_collection_names = []
    root_node = Node()
    root_node.name = "root"
    children_per_collection = {}


    for main_scene in main_scenes:
        (collection_names, _) = get_used_collections(main_scene)
        all_collections = all_collections + list(collection_names)
        (collection_names, collections) = get_used_collections(main_scene)
        all_collection_names = all_collection_names + list(collection_names)
        all_collections = all_collections + collections
    for library_scene in library_scenes:
        marked_collections = get_marked_collections(library_scene)
        all_collections = all_collections + marked_collections[0]
    return all_collections
        all_collection_names = all_collection_names + marked_collections[0]
        all_collections = all_collections + marked_collections[1]

    if scan_nested_collections:
        (collection_names, collections) = get_sub_collections(all_collections, root_node, children_per_collection)
        all_collection_names = all_collection_names + list(collection_names)
        children_per_collection = {}
        flatten_collection_tree(root_node, children_per_collection)
        #print("ROOT NODE", children_per_collection) #

    return (all_collection_names, children_per_collection)
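For illustration, the `children_per_collection` mapping returned above would look roughly like this for the Player2 / Humanoid_cactus / Hand / Finger hierarchy used in the add-on docs (collection names are taken from that example; the exact values are an assumption):

```python
children_per_collection = {
    "Player2": ["Humanoid_cactus"],
    "Enemy": ["Humanoid_cactus"],
    "Humanoid_cactus": ["Hand"],
    "Hand": ["Finger"],
    "Finger": [],
}
```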
def get_collections_per_scene(collection_names, library_scenes):
    collections_per_scene = {}
@ -84,7 +150,6 @@ def find_layer_collection_recursive(find, col):
            return c
    return None


# Recursively traverse layer_collection for a particular name
def recurLayerCollection(layerColl, collName):
    found = None
@ -1,9 +1,12 @@
import os
import bpy

from .preferences import (AutoExportGltfPreferenceNames)
from .helpers_scenes import (generate_hollow_scene, clear_hollow_scene)
from .helpers_collections import (recurLayerCollection)
from .blueprints import clear_blueprint_hollow_scene, generate_blueprint_hollow_scene
from .helpers import (traverse_tree)

######################################################
#### Export logic #####

@ -66,11 +69,13 @@ def get_source_scene(collection_name, library_scenes):
    return match

# export collections: all the collections that have an instance in the main scene AND any marked collections, even if they do not have instances
def export_collections(collections, folder_path, library_scene, addon_prefs, gltf_export_preferences):
def export_collections(collections, folder_path, library_scene, addon_prefs, gltf_export_preferences, blueprint_hierarchy, library_collections):
    # set the active scene to be the library scene (hack for now)
    bpy.context.window.scene = library_scene
    # save the current active collection
    active_collection = bpy.context.view_layer.active_layer_collection
    export_nested_blueprints = getattr(addon_prefs,"export_nested_blueprints")
    export_materials_library = getattr(addon_prefs,"export_materials_library")

    for collection_name in collections:
        print("exporting collection", collection_name)
@ -83,17 +88,32 @@ def export_collections(collections, folder_path, library_scene, addon_prefs, glt
        export_settings = { **gltf_export_preferences, 'use_active_scene': True, 'use_active_collection': True, 'use_active_collection_with_nested':True}

        # if we are using the material library option, do not export materials, use placeholders instead
        export_materials_library = getattr(addon_prefs,"export_materials_library")
        if export_materials_library:
            export_settings['export_materials'] = 'PLACEHOLDER'

        export_gltf(gltf_output_path, export_settings)

        # if relevant, we replace sub collection instances with placeholders too
        # this is not needed if a collection/blueprint does not have sub blueprints
        if collection_name in blueprint_hierarchy and len(blueprint_hierarchy[collection_name]) > 0 and export_nested_blueprints :
            print("generate hollow scene for nested blueprints", library_collections)
            backup = bpy.context.window.scene
            collection = bpy.data.collections[collection_name]
            (hollow_scene, object_names) = generate_blueprint_hollow_scene(collection, library_collections)

            export_gltf(gltf_output_path, export_settings)

            clear_blueprint_hollow_scene(hollow_scene, collection, object_names)
            bpy.context.window.scene = backup
        else:
            print("NORMAL")
            export_gltf(gltf_output_path, export_settings)


    # reset the active collection to the one we saved before
    bpy.context.view_layer.active_layer_collection = active_collection


def export_blueprints_from_collections(collections, library_scene, folder_path, addon_prefs):
def export_blueprints_from_collections(collections, library_scene, folder_path, addon_prefs, blueprint_hierarchy, library_collections):
    export_output_folder = getattr(addon_prefs,"export_output_folder")
    gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
    export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
@ -102,7 +122,7 @@ def export_blueprints_from_collections(collections, library_scene, folder_path,
    #print("LIBRARY EXPORT", export_blueprints_path )

    try:
        export_collections(collections, export_blueprints_path, library_scene, addon_prefs, gltf_export_preferences)
        export_collections(collections, export_blueprints_path, library_scene, addon_prefs, gltf_export_preferences, blueprint_hierarchy, library_collections)
    except Exception as error:
        print("failed to export collections to gltf: ", error)
        # TODO: rethrow
@ -15,14 +15,15 @@ AutoExportGltfPreferenceNames = [
    'export_main_scene_name',
    'export_output_folder',
    'export_library_scene_name',

    'export_blueprints',
    'export_blueprints_path',

    'export_scene_settings',
    'export_nested_blueprints',

    'export_materials_library',
    'export_materials_path',

    'export_scene_settings',

    'main_scenes',
    'library_scenes',
@ -75,6 +76,12 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
        default='library'
    )

    export_nested_blueprints: BoolProperty(
        name='Export nested Blueprints',
        description='Collection instances within Collections are turned into blueprint instances',
        default=True
    )

    export_materials_library: BoolProperty(
        name='Export materials library',
        description='remove materials from blueprints and use the material library instead',
@ -130,7 +130,8 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):


        [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
        collections = get_exportable_collections(level_scenes, library_scenes)
        scan_nested_collections = bpy.context.preferences.addons["gltf_auto_export"].preferences.export_nested_blueprints
        (collections, _) = get_exportable_collections(level_scenes, library_scenes, scan_nested_collections)

        try:
            # we save this list of collections in the context
@ -275,6 +276,15 @@ class GLTF_PT_auto_export_blueprints(bpy.types.Panel):

        return operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf" #"EXPORT_SCENE_OT_gltf"


    def draw_header(self, context):
        layout = self.layout
        sfile = context.space_data
        operator = sfile.active_operator
        layout.prop(operator, "export_blueprints", text="")

        #self.layout.prop(operator, "auto_export", text="")

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
@ -283,8 +293,10 @@ class GLTF_PT_auto_export_blueprints(bpy.types.Panel):
        sfile = context.space_data
        operator = sfile.active_operator

        layout.prop(operator, "export_blueprints")
        layout.active = operator.export_blueprints

        layout.prop(operator, "export_blueprints_path")
        layout.prop(operator, "export_nested_blueprints")

        # materials
        layout.prop(operator, "export_materials_library")