feat(Gltf auto export): multiple blend files workflow, combine modes, bugfixes and more! (#83)

* feat(multiple blend files): added example of multi blend file workflow
* feat(tools/auto_export): 
 * added collection_instances_combine_mode, which supersedes & replaces "export nested blueprints", with a lot more flexibility
 * added preferences & ui settings
 * added (optional) use of marked assets as replacement for "autoExport" collection flag
 * added correct cleaning up of temporary sub_collections
* feat(tools/auto_export): change detection added for material changes
* feat(tools/auto_export): blueprints now also use the "combine" settings for nested collections
* feat(tools/auto_export): added correct cleanup of data when removing hollow scenes, so that there is no orphan data left behind
* feat(tools/auto_export): changes to nested collections now trigger change detection of the root (exportable) collections
* feat(tools/auto_export): actually useful tracebacks, using traceback.format_exc()
* various minor fixes, cleanups & UI description improvements
* docs(README): updated & fleshed out docs

* closes #87 
* closes #86
* closes #85 
* closes #79 
* closes #61 
* closes #68 
* closes #41
Mark Moissette 2024-01-01 22:35:21 +01:00 committed by GitHub
parent 20b34dde20
commit af94e49976
77 changed files with 8003 additions and 602 deletions

Cargo.lock (generated)

@ -857,6 +857,18 @@ dependencies = [
"rand",
]
[[package]]
name = "bevy_gltf_blueprints_multiple_levels_multiple_blendfiles"
version = "0.3.0"
dependencies = [
"bevy",
"bevy_asset_loader",
"bevy_editor_pls",
"bevy_gltf_blueprints",
"bevy_rapier3d",
"rand",
]
[[package]]
name = "bevy_gltf_blueprints_nested_blueprints_example"
version = "0.3.0"


@ -9,8 +9,8 @@ members = [
"examples/bevy_gltf_blueprints/nested_blueprints/",
"examples/bevy_gltf_blueprints/animation/",
"examples/bevy_gltf_blueprints/multiple_levels/",
"examples/bevy_gltf_blueprints/multiple_levels_multiple_blendfiles",
"examples/bevy_gltf_blueprints/materials/"
]
resolver = "2"


@ -283,6 +283,8 @@ https://github.com/kaosat-dev/Blender_bevy_components_workflow/tree/main/example
https://github.com/kaosat-dev/Blender_bevy_components_workflow/tree/main/examples/nested_blueprints
https://github.com/kaosat-dev/Blender_bevy_components_workflow/tree/main/examples/multiple_levels_multiple_blendfiles
## Compatible Bevy versions


@ -1,485 +0,0 @@
(
resources: {},
entities: {
20: (
components: {
"bevy_render::camera::projection::Projection": Perspective((
fov: 0.3995965,
aspect_ratio: 1.7777778,
near: 0.1,
far: 100.0,
)),
"bevy_render::primitives::Frustum": (),
"bevy_transform::components::transform::Transform": (
translation: (
x: 34.821884,
y: 49.024857,
z: -36.79615,
),
rotation: (-0.1694689, 0.82838506, 0.40884802, 0.3433684),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core_pipeline::tonemapping::Tonemapping": BlenderFilmic,
"bevy_core_pipeline::tonemapping::DebandDither": Enabled,
"bevy_render::view::ColorGrading": (
exposure: 0.0,
gamma: 1.0,
pre_saturation: 1.0,
post_saturation: 1.0,
),
"bevy_core::name::Name": (
hash: 17702508670109176045,
name: "Camera",
),
"advanced::core::camera::camera_tracking::CameraTrackingOffset": ((
x: 26.0,
y: 48.0,
z: -26.0,
)),
"bevy_pbr::light::ClusterConfig": FixedZ(
total: 4096,
z_slices: 24,
z_config: (
first_slice_depth: 5.0,
far_z_mode: MaxLightRange,
),
dynamic_resizing: true,
),
"bevy_core_pipeline::bloom::settings::BloomSettings": (
intensity: 0.01,
low_frequency_boost: 0.7,
low_frequency_boost_curvature: 0.95,
high_pass_frequency: 1.0,
prefilter_settings: (
threshold: 0.0,
threshold_softness: 0.0,
),
composite_mode: Additive,
),
},
),
34: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 4.697565,
y: 1.5983224,
z: 8.962274,
),
rotation: (0.000000000000000031724054, -0.00000000000000000000647681, -0.000013119204, 1.0),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 9837288155836662016,
name: "Health_Pickup.001",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
},
),
54: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 8.799996,
y: 1.02484,
z: -10.799994,
),
rotation: (0.0, 0.0, 0.0, 1.0),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 17978181434632022651,
name: "Player",
),
"advanced::core::camera::camera_tracking::CameraTrackable": (),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Player"),
"advanced::game::Player": (),
"advanced::game::SoundMaterial": Wood,
},
),
60: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 3.6351967,
y: 1.7298106,
z: -7.313273,
),
rotation: (0.0, 0.0, 0.0, 1.0),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 7225506896223411979,
name: "MagicTeapot.001",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("MagicTeapot"),
},
),
64: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: -4.6068983,
y: 1.5983224,
z: -10.579347,
),
rotation: (0.000000000000000031724054, 0.00000000000000000000647681, 0.000013119204, 1.0),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 3089896164553476909,
name: "Health_Pickup.002",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
},
),
72: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: -11.560788,
y: 0.0,
z: 7.6554174,
),
rotation: (0.0, 0.0, 0.0, 1.0),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 16961132108296874979,
name: "Container.001",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Container"),
"advanced::game::picking::Pickable": (),
},
),
80: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: -21.397858,
y: 0.3833189,
z: -0.32418346,
),
rotation: (0.0, 0.0, 0.0, 1.0),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 5104740624378885265,
name: "Container.002",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Container"),
"advanced::game::picking::Pickable": (),
},
),
82: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 2.9156065,
y: 1.4984571,
z: 2.1909573,
),
rotation: (0.058853183, 0.0726243, 0.2048649, 0.97431636),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 107557640935939866,
name: "test5159735758431545549",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: -1.2580805,
y: -0.39687577,
z: 0.4816798,
),
angvel: (
x: 0.2979751,
y: 0.07926611,
z: 0.8434645,
),
),
},
),
86: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 0.26087752,
y: 1.5525806,
z: 1.5980839,
),
rotation: (0.059497803, -0.0000018232388, 0.13145457, 0.9895351),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 3398656236303073559,
name: "test7470642598731063943",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: -0.9268077,
y: -0.19806683,
z: 0.41948256,
),
angvel: (
x: 0.26946256,
y: -0.000006710977,
z: 0.5953494,
),
),
},
),
90: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 2.6515265,
y: 1.5944021,
z: -4.391837,
),
rotation: (-0.030030435, -0.0000006527225, 0.029748484, 0.9991062),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 12541900054595385134,
name: "test3938024405863834719",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: -0.28430828,
y: -0.022357654,
z: -0.2870027,
),
angvel: (
x: -0.17986917,
y: -0.0000035613396,
z: 0.17818078,
),
),
},
),
94: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: -4.2356462,
y: 1.596993,
z: 0.7254991,
),
rotation: (-0.0221751, -0.0000000001891749, 0.011065631, 0.99969286),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 6757906322211730861,
name: "test11007490954016878479",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: -0.21747473,
y: -0.014912919,
z: -0.43581253,
),
angvel: (
x: -0.2727097,
y: -0.0000000034594905,
z: 0.13608481,
),
),
},
),
98: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 3.1525247,
y: 1.5518407,
z: -2.9611976,
),
rotation: (-0.09219627, 0.1602262, -0.11205085, 0.9763565),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 12588565107899185946,
name: "test5980867849331267699",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: 0.8323179,
y: -0.20597076,
z: -0.68975484,
),
angvel: (
x: -0.37971017,
y: 0.49603412,
z: -0.6079359,
),
),
},
),
4294967310: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 4.826278,
y: 1.2710563,
z: -3.1997645,
),
rotation: (-0.303028, 0.00000087800436, -0.23889118, 0.9225535),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 15533546218717453536,
name: "test12380979123759326444",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: 1.2146912,
y: -1.1640646,
z: -1.5408095,
),
angvel: (
x: -1.1932359,
y: 0.000002945365,
z: -0.94068503,
),
),
},
),
4294967314: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 3.9906094,
y: 1.4824095,
z: 2.4394412,
),
rotation: (0.06015042, 0.085218765, 0.2215642, 0.9695509),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 2466794778849297109,
name: "test12475628281920299197",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: -1.0818624,
y: -0.37798148,
z: 0.45334253,
),
angvel: (
x: 0.25961447,
y: 0.14854014,
z: 0.7426717,
),
),
},
),
4294967321: (
components: {
"bevy_transform::components::transform::Transform": (
translation: (
x: 2.2306876,
y: 0.989814,
z: -1.3596333,
),
rotation: (0.30614096, 0.002587511, -0.42789298, 0.8503991),
scale: (
x: 1.0,
y: 1.0,
z: 1.0,
),
),
"bevy_core::name::Name": (
hash: 1545925632270385398,
name: "test15780367212768138828",
),
"bevy_gltf_blueprints::spawn_from_blueprints::BlueprintName": ("Health_Pickup"),
"advanced::game::picking::Pickable": (),
"bevy_rapier3d::dynamics::rigid_body::Velocity": (
linvel: (
x: 1.3027526,
y: -1.8947054,
z: 1.6179247,
),
angvel: (
x: 1.4565696,
y: -0.16299045,
z: -1.3631926,
),
),
},
),
},
)

File diff suppressed because it is too large


@ -0,0 +1,13 @@
[package]
name = "bevy_gltf_blueprints_multiple_levels_multiple_blendfiles"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
[dependencies]
bevy="0.12"
bevy_gltf_blueprints = { path = "../../../crates/bevy_gltf_blueprints" }
bevy_rapier3d = { version = "0.23.0", features = [ "serde-serialize", "debug-render-3d", "enhanced-determinism"] }
bevy_asset_loader = { version = "0.18", features = ["standard_dynamic_assets" ]}
bevy_editor_pls = { version = "0.6" }
rand = "0.8.5"


@ -0,0 +1,13 @@
# Multiple levels from multiple blend files example/demo
This example showcases multiple levels, each created from a separate Blend file, with all assets stored in the [common file](./art/common.blend).
It uses triggers to transition to another level.
It currently has NO state persistence between levels.
You can press "D" to toggle the physics debug view.
## Running this example
```
cargo run --features bevy/dynamic_linking
```
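The level-transition triggers mentioned above boil down to despawning the currently loaded level (tagged with `GameWorldTag`) and spawning the first scene of the target level's glTF. Below is a condensed, hypothetical sketch of that swap; the full `trigger_level_transition` system is part of this example's source, and `LevelAssets` / `switch_to_level1` are illustrative names only:

```rust
use bevy::prelude::*;
use bevy_gltf_blueprints::GameWorldTag;

// Illustrative resource holding handles to the level glTF files
// (the real example keeps these in its GameAssets collection).
#[derive(Resource)]
struct LevelAssets {
    level1: Handle<bevy::gltf::Gltf>,
}

// Minimal sketch: swap the currently loaded level for "Level1".
fn switch_to_level1(
    mut commands: Commands,
    current_world: Query<Entity, With<GameWorldTag>>,
    levels: Res<LevelAssets>,
    gltfs: Res<Assets<bevy::gltf::Gltf>>,
) {
    // despawn the old level/world ...
    for entity in current_world.iter() {
        commands.entity(entity).despawn_recursive();
    }
    // ... then spawn the first scene of the target level's glTF
    if let Some(gltf) = gltfs.get(levels.level1.id()) {
        commands.spawn((
            SceneBundle {
                scene: gltf.scenes[0].clone(),
                ..default()
            },
            Name::from("world"),
            GameWorldTag,
        ));
    }
}
```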


@ -0,0 +1,12 @@
({
"world":File (path: "models/StartLevel.glb"),
"level1":File (path: "models/Level1.glb"),
"level2":File (path: "models/Level2.glb"),
"models": Folder (
path: "models/library",
),
"materials": Folder (
path: "materials",
),
})


@ -0,0 +1,5 @@
use bevy::prelude::*;
use bevy_asset_loader::prelude::*;
#[derive(AssetCollection, Resource)]
pub struct CoreAssets {}


@ -0,0 +1,19 @@
use bevy::gltf::Gltf;
use bevy::prelude::*;
use bevy::utils::HashMap;
use bevy_asset_loader::prelude::*;
#[derive(AssetCollection, Resource)]
pub struct GameAssets {
#[asset(key = "world")]
pub world: Handle<Gltf>,
#[asset(key = "level1")]
pub level1: Handle<Gltf>,
#[asset(key = "level2")]
pub level2: Handle<Gltf>,
#[asset(key = "models", collection(typed, mapped))]
pub models: HashMap<String, Handle<Gltf>>,
#[asset(key = "materials", collection(typed, mapped))]
pub materials: HashMap<String, Handle<Gltf>>,
}


@ -0,0 +1,35 @@
pub mod assets_core;
pub use assets_core::*;
pub mod assets_game;
pub use assets_game::*;
use bevy::prelude::*;
use bevy_asset_loader::prelude::*;
use crate::state::AppState;
pub struct AssetsPlugin;
impl Plugin for AssetsPlugin {
fn build(&self, app: &mut App) {
app
// load core assets (ie assets needed in the main menu, and everywhere else before loading more assets in game)
.add_loading_state(
LoadingState::new(AppState::CoreLoading).continue_to_state(AppState::MenuRunning),
)
.add_dynamic_collection_to_loading_state::<_, StandardDynamicAssetCollection>(
AppState::CoreLoading,
"assets_core.assets.ron",
)
.add_collection_to_loading_state::<_, CoreAssets>(AppState::CoreLoading)
// load game assets
.add_loading_state(
LoadingState::new(AppState::AppLoading).continue_to_state(AppState::AppRunning),
)
.add_dynamic_collection_to_loading_state::<_, StandardDynamicAssetCollection>(
AppState::AppLoading,
"assets_game.assets.ron",
)
.add_collection_to_loading_state::<_, GameAssets>(AppState::AppLoading);
}
}


@ -0,0 +1,24 @@
use bevy::core_pipeline::bloom::{BloomCompositeMode, BloomSettings};
use bevy::core_pipeline::tonemapping::{DebandDither, Tonemapping};
use bevy::prelude::*;
use super::CameraTrackingOffset;
pub fn camera_replace_proxies(
mut commands: Commands,
mut added_cameras: Query<(Entity, &mut Camera), (Added<Camera>, With<CameraTrackingOffset>)>,
) {
for (entity, mut camera) in added_cameras.iter_mut() {
info!("detected added camera, updating proxy");
camera.hdr = true;
commands
.entity(entity)
.insert(DebandDither::Enabled)
.insert(Tonemapping::BlenderFilmic)
.insert(BloomSettings {
intensity: 0.01,
composite_mode: BloomCompositeMode::Additive,
..default()
});
}
}


@ -0,0 +1,81 @@
use bevy::prelude::*;
#[derive(Component, Reflect, Debug)]
#[reflect(Component)]
/// Component for cameras, with an offset from the Trackable target
///
pub struct CameraTracking {
pub offset: Vec3,
}
impl Default for CameraTracking {
fn default() -> Self {
CameraTracking {
offset: Vec3::new(0.0, 6.0, 8.0),
}
}
}
#[derive(Component, Reflect, Debug, Deref, DerefMut)]
#[reflect(Component)]
/// Component for cameras, with an offset from the Trackable target
pub struct CameraTrackingOffset(Vec3);
impl Default for CameraTrackingOffset {
fn default() -> Self {
CameraTrackingOffset(Vec3::new(0.0, 6.0, 8.0))
}
}
impl CameraTrackingOffset {
fn new(input: Vec3) -> Self {
CameraTrackingOffset(input)
}
}
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
/// Add this component to an entity if you want it to be tracked by a Camera
pub struct CameraTrackable;
// this system ensures that the camera is at the correct position immediately after spawning
pub fn init_camera_track(
mut tracking_cameras: Query<
(&mut Transform, &CameraTrackingOffset),
(
With<Camera>,
With<CameraTrackingOffset>,
Without<CameraTrackable>,
),
>,
camera_tracked: Query<&Transform, (With<CameraTrackable>, Added<CameraTrackable>)>,
) {
for (mut camera_transform, tracking_offset) in tracking_cameras.iter_mut() {
for tracked_transform in camera_tracked.iter() {
println!("ADDED tracking camera");
let target_position = tracked_transform.translation + tracking_offset.0;
camera_transform.translation = target_position;
*camera_transform = camera_transform.looking_at(tracked_transform.translation, Vec3::Y);
}
}
}
pub fn camera_track(
mut tracking_cameras: Query<
(&mut Transform, &CameraTrackingOffset),
(
With<Camera>,
With<CameraTrackingOffset>,
Without<CameraTrackable>,
),
>,
camera_tracked: Query<&Transform, With<CameraTrackable>>,
) {
for (mut camera_transform, tracking_offset) in tracking_cameras.iter_mut() {
for tracked_transform in camera_tracked.iter() {
let target_position = tracked_transform.translation + tracking_offset.0;
let eased_position = camera_transform.translation.lerp(target_position, 0.1);
camera_transform.translation = eased_position; // + tracking.offset;// tracked_transform.translation + tracking.offset;
*camera_transform = camera_transform.looking_at(tracked_transform.translation, Vec3::Y);
}
}
}


@ -0,0 +1,25 @@
pub mod camera_tracking;
pub use camera_tracking::*;
pub mod camera_replace_proxies;
pub use camera_replace_proxies::*;
use bevy::prelude::*;
use bevy_gltf_blueprints::GltfBlueprintsSet;
pub struct CameraPlugin;
impl Plugin for CameraPlugin {
fn build(&self, app: &mut App) {
app.register_type::<CameraTrackable>()
.register_type::<CameraTracking>()
.register_type::<CameraTrackingOffset>()
.add_systems(
Update,
(
camera_replace_proxies.after(GltfBlueprintsSet::AfterSpawn),
init_camera_track,
camera_track,
),
);
}
}


@ -0,0 +1,31 @@
use bevy::prelude::*;
use bevy::pbr::{CascadeShadowConfig, CascadeShadowConfigBuilder};
pub fn lighting_replace_proxies(
mut added_dirights: Query<(Entity, &mut DirectionalLight), Added<DirectionalLight>>,
mut added_spotlights: Query<&mut SpotLight, Added<SpotLight>>,
mut added_pointlights: Query<&mut PointLight, Added<PointLight>>,
mut commands: Commands,
) {
for (entity, mut light) in added_dirights.iter_mut() {
light.illuminance *= 5.0;
light.shadows_enabled = true;
let shadow_config: CascadeShadowConfig = CascadeShadowConfigBuilder {
first_cascade_far_bound: 15.0,
maximum_distance: 135.0,
..default()
}
.into();
commands.entity(entity).insert(shadow_config);
}
for mut light in added_spotlights.iter_mut() {
light.shadows_enabled = true;
}
for mut light in added_pointlights.iter_mut() {
light.intensity *= 0.001; // arbitrary/ eyeballed to match the levels of Blender
light.shadows_enabled = true;
}
}


@ -0,0 +1,18 @@
mod lighting_replace_proxies;
use lighting_replace_proxies::*;
use bevy::pbr::{DirectionalLightShadowMap, NotShadowCaster};
use bevy::prelude::*;
pub struct LightingPlugin;
impl Plugin for LightingPlugin {
fn build(&self, app: &mut App) {
app
.insert_resource(DirectionalLightShadowMap { size: 4096 })
// FIXME: adding these since they are missing
.register_type::<NotShadowCaster>()
.add_systems(PreUpdate, lighting_replace_proxies) // FIXME: you should actually run this in a specific state most likely
;
}
}


@ -0,0 +1,34 @@
pub mod camera;
pub use camera::*;
pub mod lighting;
pub use lighting::*;
pub mod relationships;
pub use relationships::*;
pub mod physics;
pub use physics::*;
// pub mod save_load;
// pub use save_load::*;
use bevy::prelude::*;
use bevy_gltf_blueprints::*;
pub struct CorePlugin;
impl Plugin for CorePlugin {
fn build(&self, app: &mut App) {
app.add_plugins((
LightingPlugin,
CameraPlugin,
PhysicsPlugin,
// SaveLoadPlugin,
BlueprintsPlugin {
library_folder: "models/library".into(),
material_library: true,
..Default::default()
},
));
}
}


@ -0,0 +1,21 @@
use bevy::prelude::{info, Input, KeyCode, Res, ResMut};
use bevy_rapier3d::{prelude::RapierConfiguration, render::DebugRenderContext};
pub fn pause_physics(mut physics_config: ResMut<RapierConfiguration>) {
info!("pausing physics");
physics_config.physics_pipeline_active = false;
}
pub fn resume_physics(mut physics_config: ResMut<RapierConfiguration>) {
info!("unpausing physics");
physics_config.physics_pipeline_active = true;
}
pub fn toggle_physics_debug(
mut debug_config: ResMut<DebugRenderContext>,
keycode: Res<Input<KeyCode>>,
) {
if keycode.just_pressed(KeyCode::D) {
debug_config.enabled = !debug_config.enabled;
}
}


@ -0,0 +1,36 @@
pub mod physics_replace_proxies;
use bevy_rapier3d::{
prelude::{NoUserData, RapierPhysicsPlugin},
render::RapierDebugRenderPlugin,
};
pub use physics_replace_proxies::*;
pub mod utils;
pub mod controls;
pub use controls::*;
use crate::state::GameState;
use bevy::prelude::*;
// use super::blueprints::GltfBlueprintsSet;
use bevy_gltf_blueprints::GltfBlueprintsSet;
// use crate::Collider;
pub struct PhysicsPlugin;
impl Plugin for PhysicsPlugin {
fn build(&self, app: &mut App) {
app.add_plugins((
RapierPhysicsPlugin::<NoUserData>::default(),
RapierDebugRenderPlugin::default(),
))
.register_type::<AutoAABBCollider>()
.register_type::<physics_replace_proxies::Collider>()
.add_systems(
Update,
physics_replace_proxies.after(GltfBlueprintsSet::AfterSpawn),
)
// physics controls
.add_systems(OnEnter(GameState::InGame), resume_physics)
.add_systems(OnExit(GameState::InGame), pause_physics)
.add_systems(Update, toggle_physics_debug);
}
}


@ -0,0 +1,102 @@
use bevy::prelude::*;
// use bevy::render::primitives::Aabb;
use bevy_rapier3d::geometry::Collider as RapierCollider;
use bevy_rapier3d::prelude::{ActiveCollisionTypes, ActiveEvents, ComputedColliderShape};
use super::utils::*;
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum Collider {
Ball(f32),
Cuboid(Vec3),
Capsule(Vec3, Vec3, f32),
#[default]
Mesh,
}
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum AutoAABBCollider {
#[default]
Cuboid,
Ball,
Capsule,
}
// replaces all physics stand-ins with the actual rapier types
pub fn physics_replace_proxies(
meshes: Res<Assets<Mesh>>,
mesh_handles: Query<&Handle<Mesh>>,
mut proxy_colliders: Query<
(Entity, &Collider, &Name, &mut Visibility),
(Without<RapierCollider>, Added<Collider>),
>,
// needed for tri meshes
children: Query<&Children>,
mut commands: Commands,
) {
for proxy_collider in proxy_colliders.iter_mut() {
let (entity, collider_proxy, name, mut visibility) = proxy_collider;
// we hide the collider meshes: perhaps they should be removed altogether once processed ?
if name.ends_with("_collider") || name.ends_with("_sensor") {
*visibility = Visibility::Hidden;
}
// note: entities marked with these collider names are actually child colliders
let mut rapier_collider: RapierCollider;
match collider_proxy {
Collider::Ball(radius) => {
info!("generating collider from proxy: ball");
rapier_collider = RapierCollider::ball(*radius);
commands.entity(entity)
.insert(rapier_collider)
.insert(ActiveEvents::COLLISION_EVENTS) // FIXME: this is just for demo purposes !!!
;
}
Collider::Cuboid(size) => {
info!("generating collider from proxy: cuboid");
rapier_collider = RapierCollider::cuboid(size.x, size.y, size.z);
commands.entity(entity)
.insert(rapier_collider)
.insert(ActiveEvents::COLLISION_EVENTS) // FIXME: this is just for demo purposes !!!
;
}
Collider::Capsule(a, b, radius) => {
info!("generating collider from proxy: capsule");
rapier_collider = RapierCollider::capsule(*a, *b, *radius);
commands.entity(entity)
.insert(rapier_collider)
.insert(ActiveEvents::COLLISION_EVENTS) // FIXME: this is just for demo purposes !!!
;
}
Collider::Mesh => {
info!("generating collider from proxy: mesh");
for (_, collider_mesh) in
Mesh::search_in_children(entity, &children, &meshes, &mesh_handles)
{
rapier_collider = RapierCollider::from_bevy_mesh(
collider_mesh,
&ComputedColliderShape::TriMesh,
)
.unwrap();
commands
.entity(entity)
.insert(rapier_collider)
// FIXME: this is just for demo purposes !!!
.insert(
ActiveCollisionTypes::default()
| ActiveCollisionTypes::KINEMATIC_STATIC
| ActiveCollisionTypes::STATIC_STATIC
| ActiveCollisionTypes::DYNAMIC_STATIC,
)
.insert(ActiveEvents::COLLISION_EVENTS);
// .insert(ActiveEvents::COLLISION_EVENTS)
// break;
// RapierCollider::convex_hull(points)
}
}
}
}
}


@ -0,0 +1,175 @@
use bevy::prelude::*;
use bevy::render::mesh::{MeshVertexAttributeId, PrimitiveTopology, VertexAttributeValues};
// TAKEN VERBATIM FROM https://github.com/janhohenheim/foxtrot/blob/src/util/trait_extension.rs
pub(crate) trait Vec3Ext: Copy {
fn is_approx_zero(self) -> bool;
fn split(self, up: Vec3) -> SplitVec3;
}
impl Vec3Ext for Vec3 {
#[inline]
fn is_approx_zero(self) -> bool {
self.length_squared() < 1e-5
}
#[inline]
fn split(self, up: Vec3) -> SplitVec3 {
let vertical = up * self.dot(up);
let horizontal = self - vertical;
SplitVec3 {
vertical,
horizontal,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub(crate) struct SplitVec3 {
pub(crate) vertical: Vec3,
pub(crate) horizontal: Vec3,
}
pub(crate) trait Vec2Ext: Copy {
fn is_approx_zero(self) -> bool;
fn x0y(self) -> Vec3;
}
impl Vec2Ext for Vec2 {
#[inline]
fn is_approx_zero(self) -> bool {
self.length_squared() < 1e-5
}
#[inline]
fn x0y(self) -> Vec3 {
Vec3::new(self.x, 0., self.y)
}
}
pub(crate) trait MeshExt {
fn transform(&mut self, transform: Transform);
fn transformed(&self, transform: Transform) -> Mesh;
fn read_coords_mut(&mut self, id: impl Into<MeshVertexAttributeId>) -> &mut Vec<[f32; 3]>;
fn search_in_children<'a>(
parent: Entity,
children: &'a Query<&Children>,
meshes: &'a Assets<Mesh>,
mesh_handles: &'a Query<&Handle<Mesh>>,
) -> Vec<(Entity, &'a Mesh)>;
}
impl MeshExt for Mesh {
fn transform(&mut self, transform: Transform) {
for coords in self.read_coords_mut(Mesh::ATTRIBUTE_POSITION.clone()) {
let vec3 = (*coords).into();
let transformed = transform.transform_point(vec3);
*coords = transformed.into();
}
for normal in self.read_coords_mut(Mesh::ATTRIBUTE_NORMAL.clone()) {
let vec3 = (*normal).into();
let transformed = transform.rotation.mul_vec3(vec3);
*normal = transformed.into();
}
}
fn transformed(&self, transform: Transform) -> Mesh {
let mut mesh = self.clone();
mesh.transform(transform);
mesh
}
fn read_coords_mut(&mut self, id: impl Into<MeshVertexAttributeId>) -> &mut Vec<[f32; 3]> {
// Guaranteed by Bevy for the current usage
match self
.attribute_mut(id)
.expect("Failed to read unknown mesh attribute")
{
VertexAttributeValues::Float32x3(values) => values,
// Guaranteed by Bevy for the current usage
_ => unreachable!(),
}
}
fn search_in_children<'a>(
parent: Entity,
children_query: &'a Query<&Children>,
meshes: &'a Assets<Mesh>,
mesh_handles: &'a Query<&Handle<Mesh>>,
) -> Vec<(Entity, &'a Mesh)> {
if let Ok(children) = children_query.get(parent) {
let mut result: Vec<_> = children
.iter()
.filter_map(|entity| mesh_handles.get(*entity).ok().map(|mesh| (*entity, mesh)))
.map(|(entity, mesh_handle)| {
(
entity,
meshes
.get(mesh_handle)
.expect("Failed to get mesh from handle"),
)
})
.map(|(entity, mesh)| {
assert_eq!(mesh.primitive_topology(), PrimitiveTopology::TriangleList);
(entity, mesh)
})
.collect();
let mut inner_result = children
.iter()
.flat_map(|entity| {
Self::search_in_children(*entity, children_query, meshes, mesh_handles)
})
.collect();
result.append(&mut inner_result);
result
} else {
Vec::new()
}
}
}
pub(crate) trait F32Ext: Copy {
fn is_approx_zero(self) -> bool;
fn squared(self) -> f32;
fn lerp(self, other: f32, ratio: f32) -> f32;
}
impl F32Ext for f32 {
#[inline]
fn is_approx_zero(self) -> bool {
self.abs() < 1e-5
}
#[inline]
fn squared(self) -> f32 {
self * self
}
#[inline]
fn lerp(self, other: f32, ratio: f32) -> f32 {
self.mul_add(1. - ratio, other * ratio)
}
}
pub(crate) trait TransformExt: Copy {
fn horizontally_looking_at(self, target: Vec3, up: Vec3) -> Transform;
fn lerp(self, other: Transform, ratio: f32) -> Transform;
}
impl TransformExt for Transform {
fn horizontally_looking_at(self, target: Vec3, up: Vec3) -> Transform {
let direction = target - self.translation;
let horizontal_direction = direction - up * direction.dot(up);
let look_target = self.translation + horizontal_direction;
self.looking_at(look_target, up)
}
fn lerp(self, other: Transform, ratio: f32) -> Transform {
let translation = self.translation.lerp(other.translation, ratio);
let rotation = self.rotation.slerp(other.rotation, ratio);
let scale = self.scale.lerp(other.scale, ratio);
Transform {
translation,
rotation,
scale,
}
}
}


@ -0,0 +1,11 @@
pub mod relationships_insert_dependant_components;
pub use relationships_insert_dependant_components::*;
use bevy::prelude::*;
pub struct EcsRelationshipsPlugin;
impl Plugin for EcsRelationshipsPlugin {
fn build(&self, app: &mut App) {
app;
}
}


@ -0,0 +1,15 @@
use bevy::prelude::*;
pub fn insert_dependant_component<
Dependant: Component,
Dependency: Component + std::default::Default,
>(
mut commands: Commands,
entities_without_dependency: Query<(Entity, &Name), (With<Dependant>, Without<Dependency>)>,
) {
for (entity, name) in entities_without_dependency.iter() {
let name = name.clone().to_string();
commands.entity(entity).insert(Dependency::default());
warn!("found an entity called {} with a {} component but without an {}, please check your assets", name.clone(), std::any::type_name::<Dependant>(), std::any::type_name::<Dependency>());
}
}
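A hypothetical wiring sketch for the helper above, mirroring the registration that is commented out in this example's game plugin: running it every frame means any named `Player` entity missing its `ShouldBeWithPlayer` marker gets a default one inserted, along with a warning. `DependencyDemoPlugin` is an illustrative name, and `Player` / `ShouldBeWithPlayer` are the demo components defined further down in this example.

```rust
use bevy::prelude::*;

// Sketch only: Player and ShouldBeWithPlayer are the demo components defined
// in this example's game module; DependencyDemoPlugin is an illustrative name.
pub struct DependencyDemoPlugin;

impl Plugin for DependencyDemoPlugin {
    fn build(&self, app: &mut App) {
        // every frame, any named Player entity lacking ShouldBeWithPlayer
        // gets a default one inserted (and a warning is logged)
        app.add_systems(
            Update,
            insert_dependant_component::<Player, ShouldBeWithPlayer>,
        );
    }
}
```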


@ -0,0 +1,218 @@
use bevy::prelude::*;
use bevy_gltf_blueprints::{clone_entity::CloneEntity, GameWorldTag, SpawnHere};
use crate::{
assets::GameAssets,
state::{AppState, GameState, InAppRunning},
};
use super::Saveable;
const SCENE_FILE_PATH: &str = "scenes/save.scn.ron";
#[derive(Component, Debug)]
pub struct TempLoadedSceneMarker;
#[derive(Component, Debug)]
pub struct SaveablesToRemove(Vec<(Entity, Name)>);
#[derive(Component, Event)]
pub struct LoadRequest {
pub path: String,
}
pub fn should_load(save_requested_events: EventReader<LoadRequest>) -> bool {
return save_requested_events.len() > 0;
}
pub fn load_prepare(
mut next_app_state: ResMut<NextState<AppState>>,
mut next_game_state: ResMut<NextState<GameState>>,
) {
next_app_state.set(AppState::LoadingGame);
next_game_state.set(GameState::None);
info!("--loading: prepare")
}
/// unload the level recursively
pub fn _unload_world_old(world: &mut World) {
let entities: Vec<Entity> = world
// .query_filtered::<Entity, Or<(With<Save>, With<Unload>)>>()
.query_filtered::<Entity, With<GameWorldTag>>() // our level/world contains this component
.iter(world)
.collect();
for entity in entities {
// Check the entity again in case it was despawned recursively
if world.get_entity(entity).is_some() {
world.entity_mut(entity).despawn_recursive();
}
}
}
pub fn unload_world(mut commands: Commands, gameworlds: Query<Entity, With<GameWorldTag>>) {
for e in gameworlds.iter() {
info!("--loading: despawn old world/level");
commands.entity(e).despawn_recursive();
}
}
// almost identical to setup_game, !!??
pub fn load_world(
mut commands: Commands,
game_assets: Res<GameAssets>,
// scenes: ResMut<Scene>,
) {
info!("--loading: loading world/level");
commands.spawn((
SceneBundle {
scene: game_assets.world.clone(),
..default()
},
bevy::prelude::Name::from("world"),
GameWorldTag,
InAppRunning,
));
}
pub fn load_saved_scene(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn((
DynamicSceneBundle {
// Scenes are loaded just like any other asset.
scene: asset_server.load(SCENE_FILE_PATH),
..default()
},
TempLoadedSceneMarker,
));
// commands.entity(world).add_child(child_scene);
info!("--loading: loaded saved scene");
}
pub fn process_loaded_scene(
loaded_scene: Query<(Entity, &Children), With<TempLoadedSceneMarker>>,
named_entities: Query<(Entity, &Name, &Parent)>, // FIXME: very inefficient
mut commands: Commands,
mut game_world: Query<(Entity, &Children), With<GameWorldTag>>,
saveables: Query<(Entity, &Name), With<Saveable>>,
asset_server: Res<AssetServer>,
) {
for (loaded_scene, children) in loaded_scene.iter() {
info!("--loading: post processing loaded scene");
let mut entities_to_load: Vec<(Entity, Name)> = vec![];
for loaded_entity in children.iter() {
if let Ok((source, name, _)) = named_entities.get(*loaded_entity) {
entities_to_load.push((source, name.clone()));
let mut found = false;
for (e, n, p) in named_entities.iter() {
// if we have an entity with the same name as in same file, overwrite
if e != source && name.as_str() == n.as_str() {
// println!("found entity with same name {} {} {:?} {:?}", name, n, source, e);
// source is entity within the newly loaded scene (source), e is within the existing world (destination)
info!("copying data from {:?} to {:?}", source, e);
commands.add(CloneEntity {
source: source,
destination: e,
});
// FIXME: issue with hierarchy & parenting, would be nice to be able to filter out components from CloneEntity
commands.entity(p.get()).add_child(e);
commands.entity(source).despawn_recursive();
found = true;
break;
}
}
// entity not found in the list of existing entities (ie entities that came as part of the level)
// so we spawn a new one
if !found {
info!("generating new entity");
let world = game_world.single_mut();
let world = world.1[0];
let new_entity = commands
.spawn((bevy::prelude::Name::from(name.clone()), SpawnHere))
.id();
commands.add(CloneEntity {
source: source,
destination: new_entity,
});
commands.entity(world).add_child(new_entity);
info!("copying data from {:?} to {:?}", source, new_entity);
}
}
}
commands.spawn(SaveablesToRemove(entities_to_load.clone()));
// if an entity is present in the world but NOT in the saved entities , it should be removed from the world
// ideally this should be run between spawning of the world/level AND spawn_placeholders
// remove the dynamic scene
info!("--loading: DESPAWNING LOADED SCENE");
commands.entity(loaded_scene).despawn_recursive();
asset_server.mark_unused_assets();
asset_server.free_unused_assets();
}
//for saveable in saveables.iter(){
// println!("SAVEABLE BEFORE {:?}", saveable)
//}
}
pub fn final_cleanup(
saveables_to_remove: Query<(Entity, &SaveablesToRemove)>,
mut commands: Commands,
saveables: Query<(Entity, &Name), With<Saveable>>,
mut next_app_state: ResMut<NextState<AppState>>,
mut next_game_state: ResMut<NextState<GameState>>,
) {
if let Ok((e, entities_to_load)) = saveables_to_remove.get_single() {
info!("saveables to remove {:?}", entities_to_load);
for (e, n) in saveables.iter() {
let mut found = false;
println!("SAVEABLE {}", n);
//let entities_to_load = saveables_to_remove.single();
for (en, na) in entities_to_load.0.iter() {
found = na.as_str() == n.as_str();
if found {
break;
}
}
if !found {
println!("REMOVING THIS ONE {}", n);
commands.entity(e).despawn_recursive();
}
}
// if there is a saveable that is NOT in the list of entities to load, despawn it
// despawn list
commands.entity(e).despawn_recursive();
info!("--loading: done, move to InGame state");
// next_app_state.set(AppState::AppRunning);
next_game_state.set(GameState::InGame);
}
}
fn process_loaded_scene_load_alt(
entities: Query<(Entity, &Children), With<TempLoadedSceneMarker>>,
named_entities: Query<(Entity, &Name, &Parent)>, // FIXME: very inefficient
mut commands: Commands,
) {
for (entity, children) in entities.iter() {
let mut entities_to_load: Vec<(Entity, Name)> = vec![];
for saved_source in children.iter() {
if let Ok((source, name, _)) = named_entities.get(*saved_source) {
println!("AAAAAAA {}", name);
entities_to_load.push((source, name.clone()));
}
}
println!("entities to load {:?}", entities_to_load);
commands.entity(entity).despawn_recursive();
}
}


@ -0,0 +1,70 @@
pub mod saveable;
use bevy::asset::free_unused_assets_system;
use bevy_gltf_components::GltfComponentsSet;
pub use saveable::*;
pub mod saving;
pub use saving::*;
pub mod loading;
pub use loading::*;
use bevy::prelude::*;
use bevy::prelude::{App, IntoSystemConfigs, Plugin};
use bevy::utils::Uuid;
use bevy_gltf_blueprints::GltfBlueprintsSet;
#[derive(SystemSet, Debug, Hash, PartialEq, Eq, Clone)]
pub enum LoadingSet {
Load,
PostLoad,
}
pub struct SaveLoadPlugin;
impl Plugin for SaveLoadPlugin {
fn build(&self, app: &mut App) {
app
.register_type::<Uuid>()
.register_type::<Saveable>()
.add_event::<SaveRequest>()
.add_event::<LoadRequest>()
.configure_sets(
Update,
(LoadingSet::Load, LoadingSet::PostLoad)
.chain()
.before(GltfBlueprintsSet::Spawn)
.before(GltfComponentsSet::Injection)
)
.add_systems(PreUpdate, save_game.run_if(should_save))
.add_systems(Update,
(
load_prepare,
unload_world,
load_world,
load_saved_scene,
// process_loaded_scene
)
.chain()
.run_if(should_load) // .run_if(in_state(AppState::AppRunning))
.in_set(LoadingSet::Load)
)
.add_systems(Update,
(
process_loaded_scene,
apply_deferred,
final_cleanup,
apply_deferred,
free_unused_assets_system
)
.chain()
.in_set(LoadingSet::PostLoad)
)
// .add_systems(Update, bla)
;
}
}


@ -0,0 +1,137 @@
const NEW_SCENE_FILE_PATH: &str = "save.scn.ron";
use bevy::ecs::component::Components;
use bevy::ecs::entity::EntityMap;
use bevy::prelude::*;
use bevy::scene::serde::SceneDeserializer;
use ron::Deserializer;
use serde::de::DeserializeSeed;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use super::Saveable;
#[derive(Debug, Deserialize)]
struct Components2;
#[derive(Debug, Deserialize)]
struct Fake {
resources: HashMap<u32, String>,
entities: HashMap<u32, Components2>
}
fn ron_test(){
let full_path = "/home/ckaos/projects/grappling-boom-bot/assets/save.ron";
match File::open(full_path) {
Ok(mut file) => {
let mut serialized_scene = Vec::new();
if let Err(why) = file.read_to_end(&mut serialized_scene) {
error!("file read failed: {why:?}");
}
match Deserializer::from_bytes(&serialized_scene) {
Ok(mut deserializer) => {
// deserializer.
let bla:Fake = ron::from_str("(
resources: {},
entities: {}
)").unwrap();
info!("testing {:?}", bla);
info!("YOYO DONE YO !")
}
Err(why) => {
error!("deserializer creation failed: {why:?}");
}
}
}
Err(why) => {
error!("load failed: {why:?}");
}
}
}
fn inject_component_data(world: &mut World, scene: DynamicScene){
let mut entity_map = EntityMap::default();
if let Err(why) = scene.write_to_world(world, &mut entity_map) {
panic!("world write failed: {why:?}");
}
println!("entity map {:?}", entity_map);
// TODO: EntityMap doesn't implement `iter()`
for old_entity in entity_map.keys() {
let entity = entity_map.get(old_entity).unwrap();
info!("entity update required: {old_entity:?} -> {entity:?}");
let e_mut = world
.entity_mut(entity);
}
info!("done loading scene");
}
fn post_load(world: &mut World){
let full_path = "/home/ckaos/projects/grappling-boom-bot/assets/save.ron";
match File::open(full_path) {
Ok(mut file) => {
let mut serialized_scene = Vec::new();
if let Err(why) = file.read_to_end(&mut serialized_scene) {
error!("file read failed: {why:?}");
}
match Deserializer::from_bytes(&serialized_scene) {
Ok(mut deserializer) => {
let result = SceneDeserializer {
type_registry: &world.resource::<AppTypeRegistry>().read(),
}
.deserialize(&mut deserializer);
info!("deserialize done");
match result {
Ok(scene) => {
info!("scene loaded");
// scene.write_to_world(world, entity_map)
// println!("{:?}", scene.entities);
inject_component_data(world, scene);
/*for dyn_ent in scene.entities.iter(){
// let mut query = scene.world.query::<(Entity, &Name, &GltfExtras, &Parent)>();
}*/
}
Err(why) => {
error!("deserialization failed: {why:?}");
}
}
}
Err(why) => {
error!("deserializer creation failed: {why:?}");
}
}
}
Err(why) => {
error!("load failed: {why:?}");
}
}
}
#[derive(Component, Reflect, Debug, Default )]
#[reflect(Component)]
pub struct Hackish;
/// unload saveables
fn unload_saveables(world: &mut World) {
let entities: Vec<Entity> = world
.query_filtered::<Entity, With<Saveable>>()// our level/world contains this component
.iter(world)
.collect();
for entity in entities {
// Check the entity again in case it was despawned recursively
if world.get_entity(entity).is_some() {
info!("despawning");
world.entity_mut(entity).despawn_recursive();
}
}
}


@ -0,0 +1,14 @@
use bevy::prelude::*;
use bevy::utils::Uuid;
#[derive(Component, Reflect, Debug)]
#[reflect(Component)]
pub struct Saveable {
id: Uuid,
}
impl Default for Saveable {
fn default() -> Self {
Saveable { id: Uuid::new_v4() }
}
}


@ -0,0 +1,87 @@
use bevy::pbr::{Clusters, VisiblePointLights};
use bevy::render::camera::CameraRenderGraph;
use bevy::render::view::VisibleEntities;
use bevy::tasks::IoTaskPool;
use bevy::{gltf::GltfExtras, prelude::*};
use bevy_rapier3d::prelude::RigidBody;
use std::fs::File;
use std::io::Write;
use crate::core::physics::Collider;
use crate::game::{Pickable, Player};
use super::Saveable;
const NEW_SCENE_FILE_PATH: &str = "save.scn.ron";
#[derive(Component, Event)]
pub struct SaveRequest {
pub path: String,
}
pub fn should_save(
// keycode: Res<Input<KeyCode>>,
save_requested_events: EventReader<SaveRequest>,
) -> bool {
return save_requested_events.len() > 0;
// return keycode.just_pressed(KeyCode::S)
}
pub fn save_game(
world: &mut World,
// save_requested_events: EventReader<SaveRequest>,
) {
info!("saving");
// world.
/*for bli in save_requested_events.iter(){
println!("SAAAAVE TO THISSSSS {:?}", bli.path)
}*/
let saveable_entities: Vec<Entity> = world
.query_filtered::<Entity, With<Saveable>>()
.iter(world)
.collect();
/*let static_entities: Vec<Entity> = world
.query_filtered::<Entity, Without<Saveable>>()
.iter(world)
.collect();*/
println!("saveable entities {}", saveable_entities.len());
let mut scene_builder = DynamicSceneBuilder::from_world(world);
scene_builder
.deny::<Children>()
.deny::<Parent>()
.deny::<ComputedVisibility>()
.deny::<Visibility>()
.deny::<GltfExtras>()
.deny::<GlobalTransform>()
.deny::<Collider>()
.deny::<RigidBody>()
.deny::<Saveable>()
// camera stuff
.deny::<Camera>()
.deny::<CameraRenderGraph>()
.deny::<Camera3d>()
.deny::<Clusters>()
.deny::<VisibleEntities>()
.deny::<VisiblePointLights>()
//.deny::<HasGizmoMarker>()
.extract_entities(saveable_entities.into_iter());
let dyn_scene = scene_builder.build();
let serialized_scene = dyn_scene
.serialize_ron(world.resource::<AppTypeRegistry>())
.unwrap();
#[cfg(not(target_arch = "wasm32"))]
IoTaskPool::get()
.spawn(async move {
// Write the scene RON data to file
File::create(format!("assets/scenes/{NEW_SCENE_FILE_PATH}"))
.and_then(|mut file| file.write(serialized_scene.as_bytes()))
.expect("Error while writing scene to file");
})
.detach();
}


@ -0,0 +1,85 @@
use bevy::prelude::*;
use crate::{
assets::GameAssets,
state::{GameState, InAppRunning},
};
use bevy_gltf_blueprints::{BluePrintBundle, BlueprintName, GameWorldTag};
use bevy_rapier3d::prelude::Velocity;
use rand::Rng;
pub fn setup_game(
mut commands: Commands,
game_assets: Res<GameAssets>,
models: Res<Assets<bevy::gltf::Gltf>>,
mut next_game_state: ResMut<NextState<GameState>>,
) {
println!("setting up all stuff");
/*commands.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 0.2,
});*/
// here we actually spawn our game world/level
commands.spawn((
SceneBundle {
// note: because of this issue https://github.com/bevyengine/bevy/issues/10436, "world" is now a gltf file instead of a scene
scene: models
.get(game_assets.world.id())
.expect("main level should have been loaded")
.scenes[0]
.clone(),
..default()
},
bevy::prelude::Name::from("world"),
GameWorldTag,
InAppRunning,
));
next_game_state.set(GameState::InGame)
}
pub fn spawn_test(
keycode: Res<Input<KeyCode>>,
mut commands: Commands,
mut game_world: Query<(Entity, &Children), With<GameWorldTag>>,
) {
if keycode.just_pressed(KeyCode::T) {
let world = game_world.single_mut();
let world = world.1[0];
let mut rng = rand::thread_rng();
let range = 5.5;
let x: f32 = rng.gen_range(-range..range);
let y: f32 = rng.gen_range(-range..range);
let mut rng = rand::thread_rng();
let range = 0.8;
let vel_x: f32 = rng.gen_range(-range..range);
let vel_y: f32 = rng.gen_range(2.0..2.5);
let vel_z: f32 = rng.gen_range(-range..range);
let name_index: u64 = rng.gen();
let new_entity = commands
.spawn((
BluePrintBundle {
blueprint: BlueprintName("Health_Pickup".to_string()),
transform: TransformBundle::from_transform(Transform::from_xyz(x, 2.0, y)),
..Default::default()
},
bevy::prelude::Name::from(format!("test{}", name_index)),
// BlueprintName("Health_Pickup".to_string()),
// SpawnHere,
// TransformBundle::from_transform(Transform::from_xyz(x, 2.0, y)),
Velocity {
linvel: Vec3::new(vel_x, vel_y, vel_z),
angvel: Vec3::new(0.0, 0.0, 0.0),
},
))
.id();
commands.entity(world).add_child(new_entity);
}
}


@ -0,0 +1,113 @@
use bevy::prelude::*;
use crate::state::{AppState, GameState, InMainMenu};
pub fn setup_main_menu(mut commands: Commands) {
commands.spawn((Camera2dBundle::default(), InMainMenu));
commands.spawn((
TextBundle::from_section(
"SOME GAME TITLE !!",
TextStyle {
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
font_size: 18.0,
color: Color::WHITE,
..Default::default()
},
)
.with_style(Style {
position_type: PositionType::Absolute,
top: Val::Px(100.0),
left: Val::Px(200.0),
..default()
}),
InMainMenu,
));
commands.spawn((
TextBundle::from_section(
"New Game (press Enter to start, press T once the game is started for demo spawning)",
TextStyle {
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
font_size: 18.0,
color: Color::WHITE,
..Default::default()
},
)
.with_style(Style {
position_type: PositionType::Absolute,
top: Val::Px(200.0),
left: Val::Px(200.0),
..default()
}),
InMainMenu,
));
/*
commands.spawn((
TextBundle::from_section(
"Load Game",
TextStyle {
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
font_size: 18.0,
color: Color::WHITE,
..Default::default()
},
)
.with_style(Style {
position_type: PositionType::Absolute,
top: Val::Px(250.0),
left: Val::Px(200.0),
..default()
}),
InMainMenu
));
commands.spawn((
TextBundle::from_section(
"Exit Game",
TextStyle {
//font: asset_server.load("fonts/FiraMono-Medium.ttf"),
font_size: 18.0,
color: Color::WHITE,
..Default::default()
},
)
.with_style(Style {
position_type: PositionType::Absolute,
top: Val::Px(300.0),
left: Val::Px(200.0),
..default()
}),
InMainMenu
));*/
}
pub fn teardown_main_menu(bla: Query<Entity, With<InMainMenu>>, mut commands: Commands) {
for bli in bla.iter() {
commands.entity(bli).despawn_recursive();
}
}
pub fn main_menu(
keycode: Res<Input<KeyCode>>,
mut next_app_state: ResMut<NextState<AppState>>,
// mut next_game_state: ResMut<NextState<GameState>>,
// mut save_requested_events: EventWriter<SaveRequest>,
// mut load_requested_events: EventWriter<LoadRequest>,
) {
if keycode.just_pressed(KeyCode::Return) {
next_app_state.set(AppState::AppLoading);
// next_game_state.set(GameState::None);
}
if keycode.just_pressed(KeyCode::L) {
next_app_state.set(AppState::AppLoading);
// load_requested_events.send(LoadRequest { path: "toto".into() })
}
if keycode.just_pressed(KeyCode::S) {
// save_requested_events.send(SaveRequest { path: "toto".into() })
}
}


@ -0,0 +1,100 @@
use crate::{assets::GameAssets, state::InAppRunning};
use bevy::prelude::*;
use bevy_gltf_blueprints::GameWorldTag;
use bevy_rapier3d::prelude::*;
use super::Player;
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct LevelTransition {
pub target: String,
}
// very barebones example of triggering level transitions
pub fn trigger_level_transition(
mut collision_events: EventReader<CollisionEvent>,
level_transition_triggers: Query<&LevelTransition>,
parents: Query<&Parent>,
players: Query<&Player>,
mut commands: Commands,
game_assets: Res<GameAssets>,
models: Res<Assets<bevy::gltf::Gltf>>,
game_world: Query<(Entity, &GameWorldTag)>,
) {
for collision_event in collision_events.read() {
match collision_event {
CollisionEvent::Started(entity1, entity2, _) => {
// we need to account for the fact that the collider may be a child of the level transition (FIXME: is this a misunderstanding on my part about rapier child colliders ?)
let entity1_parent = parents.get(*entity1).unwrap();
let entity2_parent = parents.get(*entity2).unwrap();
if level_transition_triggers.get(*entity1).is_ok()
|| level_transition_triggers.get(*entity2).is_ok()
|| level_transition_triggers.get(entity1_parent.get()).is_ok()
|| level_transition_triggers.get(entity2_parent.get()).is_ok()
{
println!("collision started, we can transition to level");
let transition_trigger;
if level_transition_triggers.get(*entity1).is_ok() {
transition_trigger = level_transition_triggers.get(*entity1).unwrap();
} else if level_transition_triggers.get(*entity2).is_ok() {
transition_trigger = level_transition_triggers.get(*entity2).unwrap();
} else if level_transition_triggers.get(entity1_parent.get()).is_ok() {
transition_trigger =
level_transition_triggers.get(entity1_parent.get()).unwrap();
} else {
transition_trigger =
level_transition_triggers.get(entity2_parent.get()).unwrap();
}
if players.get(*entity1).is_ok() || players.get(entity1_parent.get()).is_ok() || players.get(*entity2).is_ok() || players.get(entity2_parent.get()).is_ok() {
println!("one entity is the player, we can enter")
}
else {
// if none of our entities is a player, bail out, as only entities with player components should trigger a transition
return;
}
let current_game_world = game_world.single();
// remove current level/world
info!("despawning current level");
commands.entity(current_game_world.0).despawn_recursive();
let target_level = &transition_trigger.target;
let level;
println!("target level {}", target_level);
if target_level == "Level1" {
level = &game_assets.level1;
} else if (target_level == "Level2") {
level = &game_assets.level2;
} else {
level = &game_assets.world;
}
info!("spawning new level");
commands.spawn((
SceneBundle {
// note: because of this issue https://github.com/bevyengine/bevy/issues/10436, "world" is now a gltf file instead of a scene
scene: models
.get(level.id())
.expect("main level should have been loaded")
.scenes[0]
.clone(),
..default()
},
bevy::prelude::Name::from("world"),
GameWorldTag,
InAppRunning,
));
}
}
CollisionEvent::Stopped(_entity1, _entity2, _) => {
// println!("collision ended")
}
}
}
}


@ -0,0 +1,121 @@
pub mod in_game;
pub use in_game::*;
pub mod in_main_menu;
pub use in_main_menu::*;
pub mod picking;
pub use picking::*;
pub mod level_transitions;
pub use level_transitions::*;
use crate::{
assets::GameAssets,
insert_dependant_component,
state::{AppState, GameState, InAppRunning},
};
use bevy::prelude::*;
use bevy_rapier3d::prelude::*;
// this file is just for demo purposes, contains various types of components, systems etc
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum SoundMaterial {
Metal,
Wood,
Rock,
Cloth,
Squishy,
#[default]
None,
}
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
/// Demo marker component
pub struct Player;
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
/// Demo component showing auto injection of components
pub struct ShouldBeWithPlayer;
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
/// Demo marker component
pub struct Interactible;
fn player_move_demo(
keycode: Res<Input<KeyCode>>,
mut players: Query<&mut Transform, With<Player>>,
) {
let speed = 0.2;
if let Ok(mut player) = players.get_single_mut() {
if keycode.pressed(KeyCode::Left) {
player.translation.x += speed;
}
if keycode.pressed(KeyCode::Right) {
player.translation.x -= speed;
}
if keycode.pressed(KeyCode::Up) {
player.translation.z += speed;
}
if keycode.pressed(KeyCode::Down) {
player.translation.z -= speed;
}
}
}
// collision tests/debug
pub fn test_collision_events(
mut collision_events: EventReader<CollisionEvent>,
mut contact_force_events: EventReader<ContactForceEvent>,
) {
for collision_event in collision_events.read() {
println!("collision");
match collision_event {
CollisionEvent::Started(_entity1, _entity2, _) => {
println!("collision started")
}
CollisionEvent::Stopped(_entity1, _entity2, _) => {
println!("collision ended")
}
}
}
for contact_force_event in contact_force_events.read() {
println!("Received contact force event: {:?}", contact_force_event);
}
}
pub struct GamePlugin;
impl Plugin for GamePlugin {
fn build(&self, app: &mut App) {
app.add_plugins(PickingPlugin)
.register_type::<Interactible>()
.register_type::<SoundMaterial>()
.register_type::<Player>()
.register_type::<LevelTransition>()
// little helper utility, to automatically inject components that are dependent on another component
// ie, here an Entity with a Player component should also always have a ShouldBeWithPlayer component
// you get a warning if you use this, as I consider this to be a stop-gap solution (usually you should have either a bundle, or directly define all needed components)
.add_systems(
Update,
(
// insert_dependant_component::<Player, ShouldBeWithPlayer>,
player_move_demo, //.run_if(in_state(AppState::Running)),
// test_collision_events,
spawn_test,
trigger_level_transition,
)
.run_if(in_state(GameState::InGame)),
)
.add_systems(OnEnter(AppState::MenuRunning), setup_main_menu)
.add_systems(OnExit(AppState::MenuRunning), teardown_main_menu)
.add_systems(Update, main_menu.run_if(in_state(AppState::MenuRunning)))
.add_systems(OnEnter(AppState::AppRunning), setup_game);
}
}


@ -0,0 +1,34 @@
use super::Player;
use bevy::prelude::*;
use bevy_gltf_blueprints::GltfBlueprintsSet;
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub struct Pickable;
// very simple, crude picking (as in picking up objects) implementation
pub fn picking(
players: Query<&GlobalTransform, With<Player>>,
pickables: Query<(Entity, &GlobalTransform), With<Pickable>>,
mut commands: Commands,
) {
for player_transforms in players.iter() {
for (pickable, pickable_transforms) in pickables.iter() {
let distance = player_transforms
.translation()
.distance(pickable_transforms.translation());
if distance < 2.5 {
commands.entity(pickable).despawn_recursive();
}
}
}
}
pub struct PickingPlugin;
impl Plugin for PickingPlugin {
fn build(&self, app: &mut App) {
app.register_type::<Pickable>()
.add_systems(Update, (picking.after(GltfBlueprintsSet::AfterSpawn),));
}
}


@ -0,0 +1,33 @@
use bevy::prelude::*;
use bevy_editor_pls::prelude::*;
mod core;
use crate::core::*;
pub mod assets;
use assets::*;
pub mod state;
use state::*;
mod game;
use game::*;
mod test_components;
use test_components::*;
fn main() {
App::new()
.add_plugins((
DefaultPlugins.set(AssetPlugin::default()),
// editor
EditorPlugin::default(),
// our custom plugins
StatePlugin,
AssetsPlugin,
CorePlugin, // reusable plugins
GamePlugin, // specific to our game
ComponentsTestPlugin, // Showcases different type of components /structs
))
.run();
}


@ -0,0 +1,54 @@
use bevy::app::AppExit;
use bevy::prelude::*;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default, States)]
pub enum AppState {
#[default]
CoreLoading,
MenuRunning,
AppLoading,
AppRunning,
AppEnding,
// FIXME: not sure
LoadingGame,
}
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default, States)]
pub enum GameState {
#[default]
None,
InMenu,
InGame,
InGameOver,
InSaving,
InLoading,
}
// tag components for all entities within a certain state (for despawning them if needed) , FIXME: seems kinda hack-ish
#[derive(Component)]
pub struct InCoreLoading;
#[derive(Component, Default)]
pub struct InMenuRunning;
#[derive(Component)]
pub struct InAppLoading;
#[derive(Component)]
pub struct InAppRunning;
// components for tagging in game vs in game menu stuff
#[derive(Component, Default)]
pub struct InMainMenu;
#[derive(Component, Default)]
pub struct InMenu;
#[derive(Component, Default)]
pub struct InGame;
pub struct StatePlugin;
impl Plugin for StatePlugin {
fn build(&self, app: &mut App) {
app.add_state::<AppState>().add_state::<GameState>();
}
}


@ -0,0 +1,80 @@
use bevy::prelude::*;
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct UnitTest;
#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
struct TuppleTestF32(f32);
#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
struct TuppleTestU64(u64);
#[derive(Component, Reflect, Default, Debug, Deref, DerefMut)]
#[reflect(Component)]
pub struct TuppleTestStr(String);
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleTest2(f32, u64, String);
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleTestBool(bool);
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleVec2(Vec2);
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleVec3(Vec3);
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleVec(Vec<String>);
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct TuppleTestColor(Color);
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
struct BasicTest {
a: f32,
b: u64,
c: String,
}
#[derive(Component, Reflect, Default, Debug)]
#[reflect(Component)]
pub enum EnumTest {
Metal,
Wood,
Rock,
Cloth,
Squishy,
#[default]
None,
}
pub struct ComponentsTestPlugin;
impl Plugin for ComponentsTestPlugin {
fn build(&self, app: &mut App) {
app.register_type::<BasicTest>()
.register_type::<UnitTest>()
.register_type::<TuppleTestF32>()
.register_type::<TuppleTestU64>()
.register_type::<TuppleTestStr>()
.register_type::<TuppleTestBool>()
.register_type::<TuppleTest2>()
.register_type::<TuppleVec2>()
.register_type::<TuppleVec3>()
.register_type::<EnumTest>()
.register_type::<TuppleTestColor>()
.register_type::<TuppleVec>()
.register_type::<Vec<String>>();
}
}

View File

@ -28,8 +28,32 @@ This [Blender addon](./)
![blender addon install](./docs/blender_addon_install2.png)
## Usage:
> ***IMPORTANT***
if you have used a version of this add-on prior to v0.9, there was an issue that kept generating orphan (junk) data on every save!
You can easily clean up that data via the steps below (or from the Python console, see the sketch after these steps)
- go to orphan data:
![purge orphan data](./docs/purge_orphan1_data1.png)
- click on purge
![purge orphan data](./docs/purge_orphan1_data2.png)
- validate
![purge orphan data](./docs/purge_orphan1_data3.png)
This issue has been resolved in v0.9.
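If you prefer scripting the cleanup, the same purge can be run from Blender's Python console; this is a minimal sketch assuming a recent Blender (3.x) where the recursive purge option is available:

```python
import bpy

# Equivalent of Outliner > Orphan Data > Purge (recursive), run from the Python console.
# Depending on the Blender version, this operator may need to be invoked from an Outliner context.
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
```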
### Basics
@ -47,7 +71,8 @@ This [Blender addon](./)
- export scene settings: exports "global"/scene settings like ambient color, bloom, ao, etc - export scene settings: exports "global"/scene settings like ambient color, bloom, ao, etc
This automatically generates additional components at the scene level This automatically generates additional components at the scene level
- pick your main (level) scenes and library scenes (see the chapter about Blueprints below)
- pick your main (level) scenes and/or library scenes (see the chapter about [Blueprints](#blueprints) and [multiple Blend files workflow](#multiple-blend-file-workflow) below)
- click in the scene picker & select your scene - click in the scene picker & select your scene
![select scene](./docs/blender_addon_add_scene.png) ![select scene](./docs/blender_addon_add_scene.png)
@ -62,12 +87,23 @@ This [Blender addon](./)
- export blueprints: check this if you want to automatically export blueprints (default: True) - export blueprints: check this if you want to automatically export blueprints (default: True)
- blueprints path: the path to export blueprints to , relative to the main **export folder** (default: library) - blueprints path: the path to export blueprints to , relative to the main **export folder** (default: library)
- export nested blueprints: check this if you want to automatically export nested blueprints (collection instances inside blueprint collections) - collection instances: select which option you want to use to deal with collection instances (aka combine mode) (both inside blueprint collections & main collections)
as seperate blueprints (default: True)
please read dedicate section below for more information * split (default, highly recommended): the addon will 'split out' any nested collections/blueprints & export them
* embed: choose this option if you want to keep everything inside a gltf file (less efficient, not recommended)
* embedExternal: this will embed ONLY collection instances whose collections have not been found inside the current blend file
These options can also be **overridden** on a per collection instance basis: (if you want to split out most collection instances, but keep a few specific ones embedded
inside your gltf file)
![combine override](./docs/combine_override.png)
- simply add a custom property called **_combine** to the collection instance, and set it to one of the options above (see the sketch below)
please read the dedicated [section](#collection-instances--nested-blueprints) below for more information
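As a quick illustration, the same per-instance override can be set from Blender's Python console; `"MyBlueprintInstance"` below is a hypothetical name for a collection instance in your scene:

```python
import bpy

# Set the per-instance combine mode override instead of using the custom-properties panel.
instance = bpy.data.objects["MyBlueprintInstance"]  # a collection instance (empty)
instance["_combine"] = "Embed"  # one of: "Split", "Embed", "EmbedExternal"
```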
- export materials library: check this if you want to automatically export material libraries (default: False) - export materials library: check this if you want to automatically export material libraries (default: False)
please read the dedicated section below for more information please read the dedicated [section](#materials) below for more information
> This only works together with blueprints ! > This only works together with blueprints !
@ -75,7 +111,7 @@ This [Blender addon](./)
* and your standard gltf export parameters in the **gltf** panel * and your standard gltf export parameters in the **gltf** panel
![blender addon use2](./docs/blender_addon_use2.png) ![blender addon use2](./docs/blender_addon_use2.png)
* click on "apply settings" * click on "apply settings"
@ -112,12 +148,19 @@ You can enable this option to automatically replace all the **collection instanc
![exported collections](./docs/exported_collections.png) ![exported collections](./docs/exported_collections.png)
- there are some workflow specificities for multi blend file [workflows](#multiple-blend-file-workflow)
#### Nested blueprints #### Collection instances & Nested blueprints
To maximise reuse of meshes/components etc, you can also nest ***collection instances*** inside collections (as you normally would in Blender), but also export each nested Blueprint as a separate blueprint.
> Don't forget to toggle the option in the exporter settings > Don't forget to choose the relevant option in the exporter settings (aka **"split"**)
> This replaces the previous "export nested blueprints" checkbox/ option
![instance combine mode](./docs/blender_addon_use4.png)
- To make things clearer: - To make things clearer:
@ -145,7 +188,6 @@ To maximise reuse of meshes/components etc, you can also nest ***collections ins
TLDR: smaller, more reusable blueprints which can share sub-parts with other entities!
### Materials ### Materials
You can enable this option to automatically generate a **material library** file that combines all the materials in use in your blueprints. You can enable this option to automatically generate a **material library** file that combines all the materials in use in your blueprints.
@ -166,9 +208,34 @@ options in **bevy_gltf_blueprints** for more information on that)
TLDR: Use this option to make sure that each blueprint file does not contain a copy of the same materials TLDR: Use this option to make sure that each blueprint file does not contain a copy of the same materials
#### Process ### Multiple blend file workflow
This is the internal logic of the export process with blueprints If you want to use multiple blend files, use Blender's asset library etc, we got you covered too!
There are only a few things to keep in mind
#### Assets/library/blueprints files
- mark your library scenes as specified above, but **do NOT** specify a **main** scene
- mark any collection in your scenes as "assets" (more convenient) or add the "AutoExport" custom property to the collection (see the sketch after this list)
- choose "split" for the combine mode (as you want your gltf blueprints to be saved for external use)
- do your Blender things as normal
- anytime you save your file, it will automatically export any relevant collections/blueprints
- (optional) activate the **material library** option, so you only have one set of materials per asset library (recommended)
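Both ways of flagging a library collection can also be done from the Python console; a minimal sketch, where `"Props"` is a hypothetical collection name and marking collections as assets assumes a Blender version with collection asset support (3.2+):

```python
import bpy

col = bpy.data.collections["Props"]  # hypothetical library collection

# Option 1: mark the collection as an asset (picked up when "Auto export marked assets" is enabled)
col.asset_mark()

# Option 2: add the "AutoExport" custom property that the addon also checks for
col["AutoExport"] = True
```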
#### Level/world files
- mark your main scenes as specified above, but **do NOT** specify a **library** scene
- configure your asset libraries as you usually would; I recommend using the "link" mode so that any changes to asset files are reflected correctly
- drag & drop any assets from the blueprints library (as you would normally do in Blender as well)
- choose "split" for the combine mode (as you want your gltf blueprints to be external usually & use the gltf files generated from your assets library)
- do your Blender things as normal
- anytime you save your file, it will automatically export your level(s)
Take a look at the [relevant](../../examples/bevy_gltf_blueprints/multiple_levels_multiple_blendfiles/) example for more [details](../../examples/bevy_gltf_blueprints/multiple_levels_multiple_blendfiles/art/)
### Process
This is the internal logic of the export process with blueprints (simplified)
![process](./docs/process.svg)

View File

@ -1,7 +1,7 @@
bl_info = { bl_info = {
"name": "gltf_auto_export", "name": "gltf_auto_export",
"author": "kaosigh", "author": "kaosigh",
"version": (0, 8, 0), "version": (0, 9, 0),
"blender": (3, 4, 0), "blender": (3, 4, 0),
"location": "File > Import-Export", "location": "File > Import-Export",
"description": "glTF/glb auto-export", "description": "glTF/glb auto-export",
@ -77,6 +77,14 @@ def deps_update_handler(scene, depsgraph):
object = bpy.data.objects[obj.id.name] object = bpy.data.objects[obj.id.name]
print("changed object", obj.id.name) print("changed object", obj.id.name)
bpy.context.window_manager['changed_objects_per_scene'][scene.name][obj.id.name] = object bpy.context.window_manager['changed_objects_per_scene'][scene.name][obj.id.name] = object
elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
print("changed material", obj.id, "scene", scene.name,)
material = bpy.data.materials[obj.id.name]
#now find which objects are using the material
for obj in bpy.data.objects:
for slot in obj.material_slots:
if slot.material == material:
bpy.context.window_manager['changed_objects_per_scene'][scene.name][obj.name] = obj
bpy.context.window_manager.changedScene = changed bpy.context.window_manager.changedScene = changed

View File

@ -3,7 +3,7 @@ import bpy
import traceback import traceback
from .helpers_scenes import (get_scenes, ) from .helpers_scenes import (get_scenes, )
from .helpers_collections import (get_exportable_collections, get_collections_per_scene) from .helpers_collections import (get_collections_in_library, get_exportable_collections, get_collections_per_scene, find_collection_ascendant_target_collection)
from .helpers_export import (export_main_scene, export_blueprints_from_collections) from .helpers_export import (export_main_scene, export_blueprints_from_collections)
from .helpers import (check_if_blueprints_exist, check_if_blueprint_on_disk) from .helpers import (check_if_blueprints_exist, check_if_blueprint_on_disk)
from .materials import cleanup_materials, clear_material_info, clear_materials_scene, export_materials, generate_materials_scenes, get_all_materials from .materials import cleanup_materials, clear_material_info, clear_materials_scene, export_materials, generate_materials_scenes, get_all_materials
@ -61,8 +61,6 @@ def auto_export(changes_per_scene, changed_export_parameters):
export_materials_library = getattr(addon_prefs,"export_materials_library") export_materials_library = getattr(addon_prefs,"export_materials_library")
export_scene_settings = getattr(addon_prefs,"export_scene_settings") export_scene_settings = getattr(addon_prefs,"export_scene_settings")
export_nested_blueprints = getattr(addon_prefs,"export_nested_blueprints")
[main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs) [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_scenes(addon_prefs)
@ -76,9 +74,17 @@ def auto_export(changes_per_scene, changed_export_parameters):
# export # export
if export_blueprints: if export_blueprints:
print("EXPORTING") print("EXPORTING")
# create parent relations for all collections
collection_parents = dict()
for collection in bpy.data.collections:
collection_parents[collection.name] = None
for collection in bpy.data.collections:
for ch in collection.children:
collection_parents[ch.name] = collection.name
# get a list of all collections actually in use # get a list of all collections actually in use
scan_nested_collections = export_nested_blueprints (collections, blueprint_hierarchy) = get_exportable_collections(level_scenes, library_scenes, addon_prefs)
(collections, blueprint_hierarchy) = get_exportable_collections(level_scenes, library_scenes, scan_nested_collections)
# first check if all collections have already been exported before (if this is the first time the exporter is run # first check if all collections have already been exported before (if this is the first time the exporter is run
# in your current Blender session for example) # in your current Blender session for example)
export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path export_blueprints_path = os.path.join(folder_path, export_output_folder, getattr(addon_prefs,"export_blueprints_path")) if getattr(addon_prefs,"export_blueprints_path") != '' else folder_path
@ -89,19 +95,20 @@ def auto_export(changes_per_scene, changed_export_parameters):
collections_not_on_disk = check_if_blueprints_exist(collections, export_blueprints_path, gltf_extension) collections_not_on_disk = check_if_blueprints_exist(collections, export_blueprints_path, gltf_extension)
changed_collections = [] changed_collections = []
print('changes_per_scene', changes_per_scene.items(), changes_per_scene.keys()) for scene, objects in changes_per_scene.items():
for scene, bla in changes_per_scene.items():
print(" changed scene", scene) print(" changed scene", scene)
for obj_name, obj in bla.items(): for obj_name, obj in objects.items():
object_collections = list(obj.users_collection) object_collections = list(obj.users_collection) if hasattr(obj, 'users_collection') else []
object_collection_names = list(map(lambda collection: collection.name, object_collections)) object_collection_names = list(map(lambda collection: collection.name, object_collections))
if len(object_collection_names) > 1: if len(object_collection_names) > 1:
print("ERRROR for",obj_name,"objects in multiple collections not supported") print("ERRROR for",obj_name,"objects in multiple collections not supported")
else: else:
object_collection_name = object_collection_names[0] if len(object_collection_names) > 0 else None object_collection_name = object_collection_names[0] if len(object_collection_names) > 0 else None
#print(" object ", obj, object_collection_name) #recurse updwards until we find one of our collections (or not)
if object_collection_name in collections: matching_collection = find_collection_ascendant_target_collection(collection_parents, collections, object_collection_name)
changed_collections.append(object_collection_name) if matching_collection is not None:
changed_collections.append(matching_collection)
collections_to_export = list(set(changed_collections + collections_not_on_disk)) collections_to_export = list(set(changed_collections + collections_not_on_disk))
@ -112,7 +119,7 @@ def auto_export(changes_per_scene, changed_export_parameters):
# collections that do not come from a library should not be exported as seperate blueprints # collections that do not come from a library should not be exported as seperate blueprints
# FIMXE: logic is erroneous, needs to be changed # FIMXE: logic is erroneous, needs to be changed
library_collections = [name for sublist in collections_per_scene.values() for name in sublist] library_collections = get_collections_in_library(library_scenes)
collections_to_export = list(set(collections_to_export).intersection(set(library_collections))) collections_to_export = list(set(collections_to_export).intersection(set(library_collections)))
# since materials export adds components we need to call this before blueprints are exported # since materials export adds components we need to call this before blueprints are exported
@ -125,6 +132,7 @@ def auto_export(changes_per_scene, changed_export_parameters):
print("collections: all:", collections) print("collections: all:", collections)
print("collections: changed:", changed_collections) print("collections: changed:", changed_collections)
print("collections: not found on disk:", collections_not_on_disk) print("collections: not found on disk:", collections_not_on_disk)
print("collections: in library:", library_collections)
print("collections: to export:", collections_to_export) print("collections: to export:", collections_to_export)
print("collections: per_scene:", collections_per_scene) print("collections: per_scene:", collections_per_scene)
@ -140,7 +148,7 @@ def auto_export(changes_per_scene, changed_export_parameters):
do_export_main_scene = changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_levels_path, gltf_extension) do_export_main_scene = changed_export_parameters or scene_name in changes_per_scene.keys() or not check_if_blueprint_on_disk(scene_name, export_levels_path, gltf_extension)
if do_export_main_scene: if do_export_main_scene:
print(" exporting scene:", scene_name) print(" exporting scene:", scene_name)
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, collections) export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs, library_collections)
# now deal with blueprints/collections # now deal with blueprints/collections
@ -169,7 +177,8 @@ def auto_export(changes_per_scene, changed_export_parameters):
export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs) export_main_scene(bpy.data.scenes[scene_name], folder_path, addon_prefs)
except Exception as error: except Exception as error:
traceback.print_stack() print(traceback.format_exc())
def error_message(self, context): def error_message(self, context):
self.layout.label(text="Failure during auto_export: Error: "+ str(error)) self.layout.label(text="Failure during auto_export: Error: "+ str(error))

View File

@ -3,7 +3,9 @@ from .helpers_collections import (find_layer_collection_recursive)
from .helpers import (make_empty3, traverse_tree) from .helpers import (make_empty3, traverse_tree)
def generate_blueprint_hollow_scene(blueprint_collection, library_collections): def generate_blueprint_hollow_scene(blueprint_collection, library_collections, addon_prefs):
collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
temp_scene = bpy.data.scenes.new(name="temp_scene_"+blueprint_collection.name) temp_scene = bpy.data.scenes.new(name="temp_scene_"+blueprint_collection.name)
temp_scene_root_collection = temp_scene.collection temp_scene_root_collection = temp_scene.collection
@ -15,32 +17,48 @@ def generate_blueprint_hollow_scene(blueprint_collection, library_collections):
bpy.context.view_layer.active_layer_collection = found bpy.context.view_layer.active_layer_collection = found
original_names = [] original_names = []
temporary_collections = []
root_objects = []
special_properties= { # to be able to reset any special property afterwards
"combine": [],
}
# TODO also add the handling for "template" flags, so that instead of creating empties we link the data from the sub collection INTO the parent collection # TODO also add the handling for "template" flags, so that instead of creating empties we link the data from the sub collection INTO the parent collection
# copies the contents of a collection into another one while replacing blueprint instances with empties # copies the contents of a collection into another one while replacing blueprint instances with empties
def copy_hollowed_collection_into(source_collection, destination_collection): # if we have combine_mode set to "Inject", we take all the custom attributes of the nested collection instances (1 level only, unless we use 'deepMerge') and copy them to this level
def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None):
for object in source_collection.objects: for object in source_collection.objects:
#FIXME: enum would be better combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
""" combine mode can be
- 'Split' (default): replace with an empty, creating links to sub blueprints if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in library_collections)) ):
- 'Embed' : treat it as an embeded object and do not replace it with an empty
- 'Inject': inject components from sub collection instances into the curent object # get the name of the collection this is an instance of
"""
combineMode = 'Split' if not 'Combine' in object else object['Combine']
# TODO: implement
# print("COMBINE MODE", combineMode)
# embed = 'Embed' in object and object['Embed'] == True # if the object has the "embed" flag set to true, treat it as an embeded object and do not replace it with an empty
# merge = 'Merge' in object and object['Merge'] == True
if object.instance_type == 'COLLECTION' and (object.instance_collection.name in library_collections):
# if we have combine_mode set to "merge", we take all the custom attributed of the nested (1 level only ! unless we use 'deepMerge') custom attributes and copy them to this level
"""TODO: implement later
if merge:
foo = get_nested_components(object)
print("nested components", foo)
pass
else:
"""
collection_name = object.instance_collection.name collection_name = object.instance_collection.name
"""
blueprint_template = object['Template'] if 'Template' in object else False
if blueprint_template and parent_empty is None: # ONLY WORKS AT ROOT LEVEL
print("BLUEPRINT TEMPLATE", blueprint_template, destination_collection, parent_empty)
for object in source_collection.objects:
if object.type == 'EMPTY' and object.name.endswith("components"):
original_collection = bpy.data.collections[collection_name]
components_holder = object
print("WE CAN INJECT into", object, "data from", original_collection)
# now we look for components inside the collection
components = {}
for object in original_collection.objects:
if object.type == 'EMPTY' and object.name.endswith("components"):
for component_name in object.keys():
if component_name not in '_RNA_UI':
print( component_name , "-" , object[component_name] )
components[component_name] = object[component_name]
# copy template components into target object
for key in components:
print("copying ", key,"to", components_holder)
if not key in components_holder:
components_holder[key] = components[key]
"""
original_name = object.name original_name = object.name
original_names.append(original_name) original_names.append(original_name)
@ -51,29 +69,59 @@ def generate_blueprint_hollow_scene(blueprint_collection, library_collections):
empty_obj['BlueprintName'] = '"'+collection_name+'"' empty_obj['BlueprintName'] = '"'+collection_name+'"'
empty_obj['SpawnHere'] = '' empty_obj['SpawnHere'] = ''
for k, v in object.items(): for k, v in object.items():
if k != 'template' and k != '_combine': # do not copy these properties
empty_obj[k] = v empty_obj[k] = v
if parent_empty is not None:
empty_obj.parent = parent_empty
else: else:
# we backup special properties that we do not want to export, and remove them
if '_combine' in object:
special_properties["combine"].append((object, object['_combine']))
del object['_combine']
if parent_empty is not None:
object.parent = parent_empty
destination_collection.objects.link(object)
else:
root_objects.append(object)
destination_collection.objects.link(object) destination_collection.objects.link(object)
# for every sub-collection of the source, copy its content into a new sub-collection of the destination # for every sub-collection of the source, copy its content into a new sub-collection of the destination
for collection in source_collection.children: for collection in source_collection.children:
original_name = collection.name
collection.name = original_name + "____bak"
collection_placeholder = make_empty3(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection)
if parent_empty is not None:
collection_placeholder.parent = parent_empty
copy_hollowed_collection_into(collection, destination_collection, collection_placeholder)
"""
copy_collection = bpy.data.collections.new(collection.name + "____collection_export") copy_collection = bpy.data.collections.new(collection.name + "____collection_export")
# save the newly created collection for later reuse
temporary_collections.append(copy_collection)
# copy & link objects
copy_hollowed_collection_into(collection, copy_collection) copy_hollowed_collection_into(collection, copy_collection)
destination_collection.children.link(copy_collection) destination_collection.children.link(copy_collection)"""
copy_hollowed_collection_into(blueprint_collection, temp_scene_root_collection) copy_hollowed_collection_into(blueprint_collection, temp_scene_root_collection)
return (temp_scene, temporary_collections, root_objects, special_properties)
return (temp_scene, original_names)
# clear & remove "hollow scene" # clear & remove "hollow scene"
def clear_blueprint_hollow_scene(temp_scene, original_collection, original_names): def clear_blueprint_hollow_scene(temp_scene, original_collection, temporary_collections, root_objects, special_properties):
def restore_original_names(collection): def restore_original_names(collection):
if collection.name.endswith("____bak"):
collection.name = collection.name.replace("____bak", "")
for object in collection.objects: for object in collection.objects:
if object.instance_type == 'COLLECTION': if object.instance_type == 'COLLECTION':
if object.name.endswith("____bak"): if object.name.endswith("____bak"):
@ -84,18 +132,32 @@ def clear_blueprint_hollow_scene(temp_scene, original_collection, original_names
restore_original_names(original_collection) restore_original_names(original_collection)
# remove empties (only needed when we go via ops ????) # remove empties (only needed when we go via ops ????)
root_collection = temp_scene.collection temp_root_collection = temp_scene.collection
scene_objects = [o for o in root_collection.objects] temp_scene_objects = [o for o in temp_root_collection.objects]
for object in scene_objects: for object in temp_scene_objects:
if object.type == 'EMPTY': if object.type == 'EMPTY':
if hasattr(object, "SpawnHere"): if hasattr(object, "SpawnHere"):
bpy.data.objects.remove(object, do_unlink=True) bpy.data.objects.remove(object, do_unlink=True)
else: else:
bpy.context.scene.collection.objects.unlink(object) bpy.context.scene.collection.objects.unlink(object)
#bpy.data.objects.remove(object, do_unlink=True) if object in root_objects:
pass
else:
bpy.data.objects.remove(object, do_unlink=True)
else:
bpy.context.scene.collection.objects.unlink(object)
# put back special properties
for (object, value) in special_properties["combine"]:
object['_combine'] = value
# remove temporary collections
for collection in temporary_collections:
bpy.data.collections.remove(collection)
bpy.data.scenes.remove(temp_scene) bpy.data.scenes.remove(temp_scene)
# TODO : add a flag to also search of deeply nested components # TODO : add a flag to also search of deeply nested components
def get_nested_components(object): def get_nested_components(object):
if object.instance_type == 'COLLECTION': if object.instance_type == 'COLLECTION':

(Binary documentation image files changed; previews not shown. One large file diff suppressed.)

View File

@ -21,7 +21,9 @@ def get_used_collections(scene):
return (collection_names, used_collections) return (collection_names, used_collections)
# gets all collections that should ALWAYS be exported to their respective gltf files, even if they are not used in the main scene/level # gets all collections that should ALWAYS be exported to their respective gltf files, even if they are not used in the main scene/level
def get_marked_collections(scene): def get_marked_collections(scene, addon_prefs):
export_marked_assets = getattr(addon_prefs,"export_marked_assets")
# print("checking library for marked collections") # print("checking library for marked collections")
root_collection = scene.collection root_collection = scene.collection
marked_collections = [] marked_collections = []
@ -30,6 +32,10 @@ def get_marked_collections(scene):
if 'AutoExport' in collection and collection['AutoExport'] == True: if 'AutoExport' in collection and collection['AutoExport'] == True:
marked_collections.append(collection) marked_collections.append(collection)
collection_names.append(collection.name) collection_names.append(collection.name)
# if you have marked collections as assets you can auto export them too
if export_marked_assets and collection.asset_data is not None:
marked_collections.append(collection)
collection_names.append(collection.name)
return (collection_names, marked_collections) return (collection_names, marked_collections)
# gets all collections within collections that might also be relevant # gets all collections within collections that might also be relevant
@ -88,7 +94,8 @@ class Node :
return "name: " +self.name + ", children:" + str(children) return "name: " +self.name + ", children:" + str(children)
# get exportable collections from lists of mains scenes and lists of library scenes # get exportable collections from lists of mains scenes and lists of library scenes
def get_exportable_collections(main_scenes, library_scenes, scan_nested_collections): def get_exportable_collections(main_scenes, library_scenes, addon_prefs):
all_collections = [] all_collections = []
all_collection_names = [] all_collection_names = []
root_node = Node() root_node = Node()
@ -101,11 +108,10 @@ def get_exportable_collections(main_scenes, library_scenes, scan_nested_collecti
all_collection_names = all_collection_names + list(collection_names) all_collection_names = all_collection_names + list(collection_names)
all_collections = all_collections + collections all_collections = all_collections + collections
for library_scene in library_scenes: for library_scene in library_scenes:
marked_collections = get_marked_collections(library_scene) marked_collections = get_marked_collections(library_scene, addon_prefs)
all_collection_names = all_collection_names + marked_collections[0] all_collection_names = all_collection_names + marked_collections[0]
all_collections = all_collections + marked_collections[1] all_collections = all_collections + marked_collections[1]
if scan_nested_collections:
(collection_names, collections) = get_sub_collections(all_collections, root_node, children_per_collection) (collection_names, collections) = get_sub_collections(all_collections, root_node, children_per_collection)
all_collection_names = all_collection_names + list(collection_names) all_collection_names = all_collection_names + list(collection_names)
children_per_collection = {} children_per_collection = {}
@ -126,6 +132,25 @@ def get_collections_per_scene(collection_names, library_scenes):
return collections_per_scene return collections_per_scene
def get_collections_in_library(library_scenes):
"""all_collections = []
all_collection_names = []
for main_scene in main_scenes:
(collection_names, collections) = get_used_collections(main_scene)
all_collection_names = all_collection_names + list(collection_names)
all_collections = all_collections + collections"""
# now that we have the collections that are in use by collection instances, check if those collections are actually present in the library scenes
collections = []
collection_names = []
for library_scene in library_scenes:
root_collection = library_scene.collection
for collection in traverse_tree(root_collection):
collections.append(collection)
collection_names.append(collection.name)
return collection_names
def get_collection_hierarchy(root_col, levels=1): def get_collection_hierarchy(root_col, levels=1):
"""Read hierarchy of the collections in the scene""" """Read hierarchy of the collections in the scene"""
@ -159,4 +184,15 @@ def recurLayerCollection(layerColl, collName):
found = recurLayerCollection(layer, collName) found = recurLayerCollection(layer, collName)
if found: if found:
return found return found
# traverse the collection hierarchy updward until you find one collection inside target_collections
def find_collection_ascendant_target_collection(collection_parents, target_collections, collection):
if collection == None:
return None
if collection in target_collections:
return collection
if collection in collection_parents:
parent = collection_parents[collection]
else:
return None
return find_collection_ascendant_target_collection(collection_parents, target_collections, parent)
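For reference, a tiny standalone illustration (plain dicts, no bpy, hypothetical collection names) of how the parent map built in auto_export is consumed by this helper; the function is re-stated so the snippet runs on its own:

```python
# Hypothetical hierarchy: World > Props > Crate, where only "Props" is an exportable blueprint.
collection_parents = {"World": None, "Props": "World", "Crate": "Props"}
target_collections = ["Props"]

def find_collection_ascendant_target_collection(collection_parents, target_collections, collection):
    # walk upwards through the parent map until a target collection (or the root) is reached
    if collection is None:
        return None
    if collection in target_collections:
        return collection
    parent = collection_parents.get(collection)
    return find_collection_ascendant_target_collection(collection_parents, target_collections, parent)

# A change on an object inside "Crate" bubbles up and marks the "Props" blueprint as changed.
print(find_collection_ascendant_target_collection(collection_parents, target_collections, "Crate"))  # -> "Props"
```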

View File

@ -74,7 +74,6 @@ def export_collections(collections, folder_path, library_scene, addon_prefs, glt
bpy.context.window.scene = library_scene bpy.context.window.scene = library_scene
# save current active collection # save current active collection
active_collection = bpy.context.view_layer.active_layer_collection active_collection = bpy.context.view_layer.active_layer_collection
export_nested_blueprints = getattr(addon_prefs,"export_nested_blueprints")
export_materials_library = getattr(addon_prefs,"export_materials_library") export_materials_library = getattr(addon_prefs,"export_materials_library")
for collection_name in collections: for collection_name in collections:
@ -93,19 +92,21 @@ def export_collections(collections, folder_path, library_scene, addon_prefs, glt
#if relevant we replace sub collections instances with placeholders too #if relevant we replace sub collections instances with placeholders too
# this is not needed if a collection/blueprint does not have sub blueprints # this is not needed if a collection/blueprint does not have sub blueprints or sub collections
if collection_name in blueprint_hierarchy and len(blueprint_hierarchy[collection_name]) > 0 and export_nested_blueprints : collection_in_blueprint_hierarchy = collection_name in blueprint_hierarchy and len(blueprint_hierarchy[collection_name]) > 0
print("generate hollow scene for nested blueprints", library_collections) collection_has_child_collections = len(bpy.data.collections[collection_name].children) > 0
if collection_in_blueprint_hierarchy or collection_has_child_collections:
#print("generate hollow scene for nested blueprints", library_collections)
backup = bpy.context.window.scene backup = bpy.context.window.scene
collection = bpy.data.collections[collection_name] collection = bpy.data.collections[collection_name]
(hollow_scene, object_names) = generate_blueprint_hollow_scene(collection, library_collections) (hollow_scene, temporary_collections, root_objects, special_properties) = generate_blueprint_hollow_scene(collection, library_collections, addon_prefs)
export_gltf(gltf_output_path, export_settings) export_gltf(gltf_output_path, export_settings)
clear_blueprint_hollow_scene(hollow_scene, collection, object_names) clear_blueprint_hollow_scene(hollow_scene, collection, temporary_collections, root_objects, special_properties)
bpy.context.window.scene = backup bpy.context.window.scene = backup
else: else:
print("NORMAL") #print("standard export")
export_gltf(gltf_output_path, export_settings) export_gltf(gltf_output_path, export_settings)
@ -134,13 +135,12 @@ def export_main_scenes(scenes, folder_path, addon_prefs):
export_main_scene(scene, folder_path, addon_prefs) export_main_scene(scene, folder_path, addon_prefs)
def export_main_scene(scene, folder_path, addon_prefs, library_collections): def export_main_scene(scene, folder_path, addon_prefs, library_collections):
export_output_folder = getattr(addon_prefs,"export_output_folder")
gltf_export_preferences = generate_gltf_export_preferences(addon_prefs) gltf_export_preferences = generate_gltf_export_preferences(addon_prefs)
export_output_folder = getattr(addon_prefs,"export_output_folder")
export_blueprints = getattr(addon_prefs,"export_blueprints") export_blueprints = getattr(addon_prefs,"export_blueprints")
if export_blueprints : if export_blueprints :
(hollow_scene, object_names) = generate_hollow_scene(scene, library_collections) (hollow_scene, temporary_collections, root_objects, special_properties) = generate_hollow_scene(scene, library_collections, addon_prefs)
#except Exception: #except Exception:
# print("failed to create hollow scene") # print("failed to create hollow scene")
@ -162,7 +162,7 @@ def export_main_scene(scene, folder_path, addon_prefs, library_collections):
export_gltf(gltf_output_path, export_settings) export_gltf(gltf_output_path, export_settings)
if export_blueprints : if export_blueprints :
clear_hollow_scene(hollow_scene, scene, object_names) clear_hollow_scene(hollow_scene, scene, temporary_collections, root_objects, special_properties)
#https://docs.blender.org/api/current/bpy.ops.export_scene.html#bpy.ops.export_scene.gltf #https://docs.blender.org/api/current/bpy.ops.export_scene.html#bpy.ops.export_scene.gltf

View File

@ -3,9 +3,10 @@ from .helpers_collections import (find_layer_collection_recursive)
from .helpers import (make_empty3) from .helpers import (make_empty3)
# generate a copy of a scene that replaces collection instances with empties # generate a copy of a scene that replaces collection instances with empties
# alternative: copy original names before creating a new scene, & reset them # copy original names before creating a new scene, & reset them
# or create empties, hide original ones, and do the same renaming trick def generate_hollow_scene(scene, library_collections, addon_prefs):
def generate_hollow_scene(scene, library_collections): collection_instances_combine_mode = getattr(addon_prefs, "collection_instances_combine_mode")
root_collection = scene.collection root_collection = scene.collection
temp_scene = bpy.data.scenes.new(name="temp_scene") temp_scene = bpy.data.scenes.new(name="temp_scene")
copy_root_collection = temp_scene.collection copy_root_collection = temp_scene.collection
@ -18,13 +19,22 @@ def generate_hollow_scene(scene, library_collections):
# once it's found, set the active layer collection to the one we found # once it's found, set the active layer collection to the one we found
bpy.context.view_layer.active_layer_collection = found bpy.context.view_layer.active_layer_collection = found
#original_names = {}
original_names = [] original_names = []
temporary_collections = []
root_objects = []
special_properties= { # to be able to reset any special property afterwards
"combine": [],
}
# copies the contents of a collection into another one while replacing library instances with empties # copies the contents of a collection into another one while replacing library instances with empties
def copy_hollowed_collection_into(source_collection, destination_collection): def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None):
for object in source_collection.objects: for object in source_collection.objects:
if object.instance_type == 'COLLECTION' and (object.instance_collection.name in library_collections): #check if a specific collection instance does not have an override for combine_mode
combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in library_collections)) ):
#print("creating empty for", object.name, object.instance_collection.name, library_collections, combine_mode)
collection_name = object.instance_collection.name collection_name = object.instance_collection.name
original_name = object.name original_name = object.name
@ -37,28 +47,55 @@ def generate_hollow_scene(scene, library_collections):
empty_obj['SpawnHere'] = '' empty_obj['SpawnHere'] = ''
for k, v in object.items(): for k, v in object.items():
if k != 'template' and k != '_combine': # do not copy these properties
empty_obj[k] = v empty_obj[k] = v
if parent_empty is not None:
empty_obj.parent = parent_empty
else: else:
# we backup special properties that we do not want to export, and remove them
if '_combine' in object:
special_properties["combine"].append((object, object['_combine']))
del object['_combine']
if parent_empty is not None:
object.parent = parent_empty
destination_collection.objects.link(object)
else:
root_objects.append(object)
destination_collection.objects.link(object) destination_collection.objects.link(object)
# for every sub-collection of the source, copy its content into a new sub-collection of the destination # for every sub-collection of the source, copy its content into a new sub-collection of the destination
for collection in source_collection.children: for collection in source_collection.children:
original_name = collection.name
collection.name = original_name + "____bak"
collection_placeholder = make_empty3(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection)
if parent_empty is not None:
collection_placeholder.parent = parent_empty
copy_hollowed_collection_into(collection, destination_collection, collection_placeholder)
"""
copy_collection = bpy.data.collections.new(collection.name + "____collection_export") copy_collection = bpy.data.collections.new(collection.name + "____collection_export")
# save the newly created collection for later reuse
temporary_collections.append(copy_collection)
# copy & link objects
copy_hollowed_collection_into(collection, copy_collection) copy_hollowed_collection_into(collection, copy_collection)
destination_collection.children.link(copy_collection) destination_collection.children.link(copy_collection)
"""
copy_hollowed_collection_into(root_collection, copy_root_collection) copy_hollowed_collection_into(root_collection, copy_root_collection)
# objs = bpy.data.objects return (temp_scene, temporary_collections, root_objects, special_properties)
#objs.remove(objs["Cube"], do_unlink=True)
return (temp_scene, original_names)
# clear & remove "hollow scene" # clear & remove "hollow scene"
def clear_hollow_scene(temp_scene, original_scene, original_names): def clear_hollow_scene(temp_scene, original_scene, temporary_collections, root_objects, special_properties):
# reset original names
root_collection = original_scene.collection
def restore_original_names(collection): def restore_original_names(collection):
if collection.name.endswith("____bak"):
collection.name = collection.name.replace("____bak", "")
for object in collection.objects: for object in collection.objects:
if object.instance_type == 'COLLECTION': if object.instance_type == 'COLLECTION':
if object.name.endswith("____bak"): if object.name.endswith("____bak"):
@ -66,19 +103,35 @@ def clear_hollow_scene(temp_scene, original_scene, original_names):
for child_collection in collection.children: for child_collection in collection.children:
restore_original_names(child_collection) restore_original_names(child_collection)
# reset original names
root_collection = original_scene.collection
restore_original_names(root_collection) restore_original_names(root_collection)
# remove empties (only needed when we go via ops ????) # remove empties (only needed when we go via ops ????)
root_collection = temp_scene.collection temp_root_collection = temp_scene.collection
scene_objects = [o for o in root_collection.objects] temp_scene_objects = [o for o in temp_root_collection.objects]
for object in scene_objects: for object in temp_scene_objects:
if object.type == 'EMPTY': if object.type == 'EMPTY':
if hasattr(object, "SpawnHere"): if hasattr(object, "SpawnHere"):
bpy.data.objects.remove(object, do_unlink=True) bpy.data.objects.remove(object, do_unlink=True)
else: else:
bpy.context.scene.collection.objects.unlink(object) bpy.context.scene.collection.objects.unlink(object)
#bpy.data.objects.remove(object, do_unlink=True) if object in root_objects:
pass
else:
bpy.data.objects.remove(object, do_unlink=True)
else:
bpy.context.scene.collection.objects.unlink(object)
# remove temporary collections
for collection in temporary_collections:
bpy.data.collections.remove(collection)
# put back special properties
for (object, value) in special_properties["combine"]:
object['_combine'] = value
# remove the temporary scene
bpy.data.scenes.remove(temp_scene) bpy.data.scenes.remove(temp_scene)

View File

@ -18,7 +18,9 @@ AutoExportGltfPreferenceNames = [
'export_blueprints', 'export_blueprints',
'export_blueprints_path', 'export_blueprints_path',
'export_nested_blueprints',
'export_marked_assets',
'collection_instances_combine_mode',
'export_materials_library', 'export_materials_library',
'export_materials_path', 'export_materials_path',
@ -61,7 +63,7 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
export_scene_settings: BoolProperty( export_scene_settings: BoolProperty(
name='Export scene settings', name='Export scene settings',
description='Export scene settings ie AmbientLighting, Bloom, AO etc', description='Export scene settings ie AmbientLighting, Bloom, AO etc',
default=True default=False
) )
# blueprint settings # blueprint settings
@ -76,12 +78,6 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
default='library' default='library'
) )
export_nested_blueprints: BoolProperty(
name='Export nested Blueprints',
description='Collection instances within Collections are turned into blueprint instances',
default=True
)
export_materials_library: BoolProperty( export_materials_library: BoolProperty(
name='Export materials library', name='Export materials library',
description='remove materials from blueprints and use the material library instead', description='remove materials from blueprints and use the material library instead',
@ -93,6 +89,32 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
default='materials' default='materials'
) )
""" combine mode can be
- 'Split' (default): replace with an empty, creating links to sub blueprints
- 'Embed': treat it as an embedded object and do not replace it with an empty
- 'EmbedExternal': embed any instance of a non local collection (ie external assets)
- 'Inject': inject components from sub collection instances into the current object => this is now a separate custom property that you can apply to a collection instance
"""
collection_instances_combine_mode : EnumProperty(
name='Collection instances',
items=(
('Split', 'Split', 'replace collection instances with an empty + blueprint, creating links to sub blueprints (Default, Recommended)'),
('Embed', 'Embed', 'treat collection instances as embedded objects and do not replace them with an empty'),
('EmbedExternal', 'EmbedExternal', 'treat instances of external (not specified in the current blend file) collections (aka assets etc) as embedded objects and do not replace them with empties'),
#('Inject', 'Inject', 'inject components from sub collection instances into the current object')
),
default='Split'
)
export_marked_assets: BoolProperty(
name='Auto export marked assets',
description='Collections that have been marked as assets will be systematically exported, even if not in use in another scene',
default=True
)
main_scenes: CollectionProperty(name="main scenes", type=CUSTOM_PG_sceneName) main_scenes: CollectionProperty(name="main scenes", type=CUSTOM_PG_sceneName)
main_scenes_index: IntProperty(name = "Index for main scenes list", default = 0) main_scenes_index: IntProperty(name = "Index for main scenes list", default = 0)
@ -102,7 +124,8 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
##### #####
export_format: EnumProperty( export_format: EnumProperty(
name='Format', name='Format',
items=(('GLB', 'glTF Binary (.glb)', items=(
('GLB', 'glTF Binary (.glb)',
'Exports a single file, with all data packed in binary form. ' 'Exports a single file, with all data packed in binary form. '
'Most efficient and portable, but more difficult to edit later'), 'Most efficient and portable, but more difficult to edit later'),
('GLTF_EMBEDDED', 'glTF Embedded (.gltf)', ('GLTF_EMBEDDED', 'glTF Embedded (.gltf)',
@ -110,7 +133,8 @@ class AutoExportGltfAddonPreferences(AddonPreferences):
'Less efficient than binary, but easier to edit later'), 'Less efficient than binary, but easier to edit later'),
('GLTF_SEPARATE', 'glTF Separate (.gltf + .bin + textures)', ('GLTF_SEPARATE', 'glTF Separate (.gltf + .bin + textures)',
'Exports multiple files, with separate JSON, binary and texture data. ' 'Exports multiple files, with separate JSON, binary and texture data. '
'Easiest to edit later')), 'Easiest to edit later')
),
description=( description=(
'Output format and embedding options. Binary is most efficient, ' 'Output format and embedding options. Binary is most efficient, '
'but JSON (embedded or separate) may be easier to edit later' 'but JSON (embedded or separate) may be easier to edit later'

View File

@ -42,7 +42,8 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
exceptional = [ exceptional = [
# options that don't start with 'export_' # options that don't start with 'export_'
'main_scenes', 'main_scenes',
'library_scenes' 'library_scenes',
'collection_instances_combine_mode',
] ]
all_props = self.properties all_props = self.properties
export_props = { export_props = {
@ -65,7 +66,8 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
exceptional = [ exceptional = [
# options that don't start with 'export_' # options that don't start with 'export_'
'main_scenes', 'main_scenes',
'library_scenes' 'library_scenes',
'collection_instances_combine_mode',
] ]
all_props = self.properties all_props = self.properties
export_props = { export_props = {
@ -117,6 +119,10 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
item = library_scenes.add() item = library_scenes.add()
item.name = item_name item.name = item_name
if hasattr(self, 'collection_instances_combine_mode'):
bpy.context.preferences.addons["gltf_auto_export"].preferences.collection_instances_combine_mode = self.collection_instances_combine_mode
except (AttributeError, TypeError): except (AttributeError, TypeError):
self.report({"ERROR"}, "Loading export settings failed. Removed corrupted settings") self.report({"ERROR"}, "Loading export settings failed. Removed corrupted settings")
del context.scene[self.scene_key] del context.scene[self.scene_key]
@ -127,11 +133,8 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
addon_prefs = bpy.context.preferences.addons["gltf_auto_export"].preferences addon_prefs = bpy.context.preferences.addons["gltf_auto_export"].preferences
[main_scene_names, level_scenes, library_scene_names, library_scenes]=get_scenes(addon_prefs) [main_scene_names, level_scenes, library_scene_names, library_scenes]=get_scenes(addon_prefs)
scan_nested_collections = bpy.context.preferences.addons["gltf_auto_export"].preferences.export_nested_blueprints (collections, _) = get_exportable_collections(level_scenes, library_scenes, addon_prefs)
(collections, _) = get_exportable_collections(level_scenes, library_scenes, scan_nested_collections)
try: try:
# we save this list of collections in the context # we save this list of collections in the context
@ -295,14 +298,19 @@ class GLTF_PT_auto_export_blueprints(bpy.types.Panel):
layout.active = operator.export_blueprints layout.active = operator.export_blueprints
# collections/blueprints
layout.prop(operator, "export_blueprints_path") layout.prop(operator, "export_blueprints_path")
layout.prop(operator, "export_nested_blueprints") layout.prop(operator, "collection_instances_combine_mode")
layout.prop(operator, "export_marked_assets")
layout.separator()
# materials # materials
layout.prop(operator, "export_materials_library") layout.prop(operator, "export_materials_library")
layout.prop(operator, "export_materials_path") layout.prop(operator, "export_materials_path")
class GLTF_PT_auto_export_collections_list(bpy.types.Panel): class GLTF_PT_auto_export_collections_list(bpy.types.Panel):
bl_space_type = 'FILE_BROWSER' bl_space_type = 'FILE_BROWSER'
bl_region_type = 'TOOL_PROPS' bl_region_type = 'TOOL_PROPS'