feat(blenvy):

* restructured & cleaned up auto export code
* added very basic serialization of modifiers (wip)

commit cdab4c50f7 (parent a0b1959a43)
@@ -102,3 +102,4 @@ General issues:
 - [ ] add tests for disabled components
 - [x] fix auto export workflow
 - [ ] should we write the previous _xxx data only AFTER a successful export?
+- [ ] add hashing of modifiers / geometry nodes in serialize scene
@@ -30,7 +30,7 @@ from .add_ons.bevy_components.settings import ComponentsSettings

 # auto export
 from .add_ons.auto_export import gltf_post_export_callback
-from .add_ons.auto_export.export.tracker import AutoExportTracker
+from .add_ons.auto_export.common.tracker import AutoExportTracker
 from .add_ons.auto_export.settings import AutoExportSettings

 # asset management
@@ -1,9 +1,8 @@
 import os
 import bpy
 from ..constants import TEMPSCENE_PREFIX
-from ..helpers.generate_and_export import generate_and_export
-from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into
-from .export_gltf import generate_gltf_export_settings
+from ..common.generate_temporary_scene_and_export import generate_temporary_scene_and_export, copy_hollowed_collection_into, clear_hollow_scene
+from ..common.export_gltf import generate_gltf_export_settings

 def export_blueprints(blueprints, settings, blueprints_data):
     blueprints_path_full = getattr(settings, "blueprints_path_full")
@@ -25,7 +24,7 @@ def export_blueprints(blueprints, settings, blueprints_data):

             collection = bpy.data.collections[blueprint.name]
             # do the actual export
-            generate_and_export(
+            generate_temporary_scene_and_export(
                 settings,
                 temp_scene_name=TEMPSCENE_PREFIX+collection.name,
                 gltf_export_settings=gltf_export_settings,
@@ -7,14 +7,14 @@ from blenvy.core.scene_helpers import get_main_and_library_scenes
 from blenvy.blueprints.blueprints_scan import blueprints_scan
 from blenvy.blueprints.blueprint_helpers import inject_export_path_into_internal_blueprints

-from .get_blueprints_to_export import get_blueprints_to_export
-from .get_levels_to_export import get_levels_to_export
+from ..blueprints.get_blueprints_to_export import get_blueprints_to_export
+from ..levels.get_levels_to_export import get_levels_to_export
 from .export_gltf import get_standard_exporter_settings

-from .export_main_scenes import export_main_scene
-from .export_blueprints import export_blueprints
+from ..levels.export_main_scenes import export_main_scene
+from ..blueprints.export_blueprints import export_blueprints
 from .export_materials import cleanup_materials, export_materials
-from ..modules.bevy_scene_components import remove_scene_components, upsert_scene_components
+from ..levels.bevy_scene_components import remove_scene_components, upsert_scene_components


 """this is the main 'central' function for all auto export """
@@ -1,10 +1,7 @@
 import json
 import bpy
 from blenvy.core.object_makers import (make_empty)
-
-
-# these are mostly for when using this add-on together with the bevy_components add-on
-custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']
+from ..constants import custom_properties_to_filter_out

 def is_component_valid_and_enabled(object, component_name):
     if "components_meta" in object or hasattr(object, "components_meta"):
@@ -89,7 +86,6 @@ def copy_animation_data(source, target):
             markers_formated += '}, '
         markers_formated += '}'
         target["AnimationMarkers"] = f'( {markers_formated} )'
-

 def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, nester=""):
     copy = None
@@ -97,19 +93,21 @@ def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, nester=""):
     # print("COMBINE MODE", combine_mode)
     if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in internal_blueprint_names)) ):
         #print("creating empty for", object.name, object.instance_collection.name, internal_blueprint_names, combine_mode)
-        collection_name = object.instance_collection.name
+        original_collection = object.instance_collection
         original_name = object.name
+        blueprint_name = original_collection.name
+        blueprint_path = original_collection['export_path'] if 'export_path' in original_collection else f'./{blueprint_name}' # TODO: the default requires the currently used extension !!
+

         object.name = original_name + "____bak"
         empty_obj = make_empty(original_name, object.location, object.rotation_euler, object.scale, destination_collection)

         """we inject the collection/blueprint name, as a component called 'BlueprintName', but we only do this in the empty, not the original object"""
-        empty_obj['BlueprintName'] = '("'+collection_name+'")'
-        empty_obj["BlueprintPath"] = ''
+        empty_obj['BlueprintName'] = f'("{blueprint_name}")'
+        empty_obj["BlueprintPath"] = f'("{blueprint_path}")'
         empty_obj['SpawnHere'] = '()'

         # we also inject a list of all sub blueprints, so that the bevy side can preload them
-        blueprint_name = collection_name
         children_per_blueprint = {}
         blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
         if blueprint:
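For context on the change above: the stand-in empty now carries both the blueprint's name and its export path as RON-like string properties that the Bevy side reads as components. A small illustrative sketch of the resulting custom properties (the blueprint name and path are made-up examples, not values from this commit):

```python
# Hypothetical blueprint: a collection named "Enemy" whose 'export_path' is "./blueprints/Enemy.glb"
blueprint_name = "Enemy"
blueprint_path = "./blueprints/Enemy.glb"

empty_obj = {}  # stands in for the Blender empty's custom properties
empty_obj['BlueprintName'] = f'("{blueprint_name}")'   # -> '("Enemy")'
empty_obj['BlueprintPath'] = f'("{blueprint_path}")'   # -> '("./blueprints/Enemy.glb")'
empty_obj['SpawnHere'] = '()'
```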
@@ -131,10 +129,6 @@ def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, nester=""):

         destination_collection.objects.link(copy)

-        """if object.parent == None:
-            if parent_empty is not None:
-                copy.parent = parent_empty
-        """
         # do this both for empty replacements & normal copies
         if parent is not None:
             copy.parent = parent
@@ -143,64 +137,3 @@ def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, nester=""):

     for child in object.children:
         duplicate_object(child, copy, combine_mode, destination_collection, blueprints_data, nester+" ")
-
-# copies the contents of a collection into another one while replacing library instances with empties
-def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, settings={}):
-    collection_instances_combine_mode = getattr(settings.auto_export, "collection_instances_combine_mode")
-
-    for object in source_collection.objects:
-        if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
-            continue
-        if filter is not None and filter(object) is False:
-            continue
-        #check if a specific collection instance does not have an ovveride for combine_mode
-        combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
-        parent = parent_empty
-        duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data)
-
-    # for every child-collection of the source, copy its content into a new sub-collection of the destination
-    for collection in source_collection.children:
-        original_name = collection.name
-        collection.name = original_name + "____bak"
-        collection_placeholder = make_empty(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection)
-
-        if parent_empty is not None:
-            collection_placeholder.parent = parent_empty
-        copy_hollowed_collection_into(
-            source_collection = collection,
-            destination_collection = destination_collection,
-            parent_empty = collection_placeholder,
-            filter = filter,
-            blueprints_data = blueprints_data,
-            settings=settings
-        )
-    return {}
-
-# clear & remove "hollow scene"
-def clear_hollow_scene(temp_scene, original_root_collection):
-    def restore_original_names(collection):
-        if collection.name.endswith("____bak"):
-            collection.name = collection.name.replace("____bak", "")
-        for object in collection.objects:
-            if object.instance_type == 'COLLECTION':
-                if object.name.endswith("____bak"):
-                    object.name = object.name.replace("____bak", "")
-            else:
-                if object.name.endswith("____bak"):
-                    object.name = object.name.replace("____bak", "")
-        for child_collection in collection.children:
-            restore_original_names(child_collection)
-
-
-    # remove any data we created
-    temp_root_collection = temp_scene.collection
-    temp_scene_objects = [o for o in temp_root_collection.all_objects]
-    for object in temp_scene_objects:
-        #print("removing", object.name)
-        bpy.data.objects.remove(object, do_unlink=True)
-
-    # remove the temporary scene
-    bpy.data.scenes.remove(temp_scene, do_unlink=True)
-
-    # reset original names
-    restore_original_names(original_root_collection)
@@ -5,7 +5,7 @@ from pathlib import Path
 from blenvy.core.helpers_collections import (traverse_tree)
 from blenvy.core.object_makers import make_cube
 from blenvy.materials.materials_helpers import get_all_materials
-from ..helpers.generate_and_export import generate_and_export
+from .generate_temporary_scene_and_export import generate_temporary_scene_and_export
 from .export_gltf import (generate_gltf_export_settings)

 def clear_material_info(collection_names, library_scenes):
@@ -79,7 +79,7 @@ def export_materials(collections, library_scenes, settings):

     print(" exporting Materials to", gltf_output_path, ".gltf/glb")

-    generate_and_export(
+    generate_temporary_scene_and_export(
         settings=settings,
         gltf_export_settings=gltf_export_settings,
         temp_scene_name="__materials_scene",
@@ -0,0 +1,124 @@
+import bpy
+from blenvy.core.helpers_collections import (set_active_collection)
+from blenvy.core.object_makers import (make_empty)
+from .duplicate_object import duplicate_object
+from .export_gltf import export_gltf
+
+"""
+generates a temporary scene, fills it with data, cleans up after itself
+    * named using temp_scene_name
+    * filled using the tempScene_filler
+    * written on disk to gltf_output_path, with the gltf export parameters in gltf_export_settings
+    * cleaned up using tempScene_cleaner
+
+"""
+def generate_temporary_scene_and_export(settings, gltf_export_settings, gltf_output_path, temp_scene_name="__temp_scene", tempScene_filler=None, tempScene_cleaner=None):
+
+    temp_scene = bpy.data.scenes.new(name=temp_scene_name)
+    temp_root_collection = temp_scene.collection
+
+    # save active scene
+    original_scene = bpy.context.window.scene
+    # and selected collection
+    original_collection = bpy.context.view_layer.active_layer_collection
+    # and mode
+    original_mode = bpy.context.active_object.mode if bpy.context.active_object != None else None
+    # we change the mode to object mode, otherwise the gltf exporter is not happy
+    if original_mode != None and original_mode != 'OBJECT':
+        print("setting to object mode", original_mode)
+        bpy.ops.object.mode_set(mode='OBJECT')
+    # we set our active scene to be this one : this is needed otherwise the stand-in empties get generated in the wrong scene
+    bpy.context.window.scene = temp_scene
+
+    area = [area for area in bpy.context.screen.areas if area.type == "VIEW_3D"][0]
+    region = [region for region in area.regions if region.type == 'WINDOW'][0]
+    with bpy.context.temp_override(scene=temp_scene, area=area, region=region):
+        # detect scene mistmatch
+        scene_mismatch = bpy.context.scene.name != bpy.context.window.scene.name
+        if scene_mismatch:
+            raise Exception("Context scene mismatch, aborting", bpy.context.scene.name, bpy.context.window.scene.name)
+
+        set_active_collection(bpy.context.scene, temp_root_collection.name)
+        # generate contents of temporary scene
+        scene_filler_data = tempScene_filler(temp_root_collection)
+        # export the temporary scene
+        try:
+            if settings.auto_export.dry_run == "DISABLED":
+                export_gltf(gltf_output_path, gltf_export_settings)
+        except Exception as error:
+            print("failed to export gltf !", error)
+            raise error
+        # restore everything
+        tempScene_cleaner(temp_scene, scene_filler_data)
+
+    # reset active scene
+    bpy.context.window.scene = original_scene
+    # reset active collection
+    bpy.context.view_layer.active_layer_collection = original_collection
+    # reset mode
+    if original_mode != None:
+        bpy.ops.object.mode_set( mode = original_mode )
+
+
+
+# copies the contents of a collection into another one while replacing library instances with empties
+def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, settings={}):
+    collection_instances_combine_mode = getattr(settings.auto_export, "collection_instances_combine_mode")
+
+    for object in source_collection.objects:
+        if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
+            continue
+        if filter is not None and filter(object) is False:
+            continue
+        #check if a specific collection instance does not have an ovveride for combine_mode
+        combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
+        parent = parent_empty
+        duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data)
+
+    # for every child-collection of the source, copy its content into a new sub-collection of the destination
+    for collection in source_collection.children:
+        original_name = collection.name
+        collection.name = original_name + "____bak"
+        collection_placeholder = make_empty(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection)
+
+        if parent_empty is not None:
+            collection_placeholder.parent = parent_empty
+        copy_hollowed_collection_into(
+            source_collection = collection,
+            destination_collection = destination_collection,
+            parent_empty = collection_placeholder,
+            filter = filter,
+            blueprints_data = blueprints_data,
+            settings=settings
+        )
+    return {}
+
+
+# clear & remove "hollow scene"
+def clear_hollow_scene(temp_scene, original_root_collection):
+    def restore_original_names(collection):
+        if collection.name.endswith("____bak"):
+            collection.name = collection.name.replace("____bak", "")
+        for object in collection.objects:
+            if object.instance_type == 'COLLECTION':
+                if object.name.endswith("____bak"):
+                    object.name = object.name.replace("____bak", "")
+            else:
+                if object.name.endswith("____bak"):
+                    object.name = object.name.replace("____bak", "")
+        for child_collection in collection.children:
+            restore_original_names(child_collection)
+
+
+    # remove any data we created
+    temp_root_collection = temp_scene.collection
+    temp_scene_objects = [o for o in temp_root_collection.all_objects]
+    for object in temp_scene_objects:
+        #print("removing", object.name)
+        bpy.data.objects.remove(object, do_unlink=True)
+
+    # remove the temporary scene
+    bpy.data.scenes.remove(temp_scene, do_unlink=True)
+
+    # reset original names
+    restore_original_names(original_root_collection)
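The new common helper is driven entirely by its two callbacks: tempScene_filler builds the hollowed copy inside the temporary scene, and tempScene_cleaner tears it down again. A usage sketch of how an exporter can plug into it (the wrapper function and output-path handling are illustrative assumptions, not code from this commit):

```python
import os
from .generate_temporary_scene_and_export import (
    generate_temporary_scene_and_export,
    copy_hollowed_collection_into,
    clear_hollow_scene,
)

def export_collection_sketch(collection, settings, gltf_export_settings, blueprints_data, output_folder):
    # illustrative output path, not from this commit
    gltf_output_path = os.path.join(output_folder, collection.name)
    generate_temporary_scene_and_export(
        settings,
        gltf_export_settings=gltf_export_settings,
        gltf_output_path=gltf_output_path,
        temp_scene_name="__temp_scene_" + collection.name,
        # fill the temporary scene with a "hollowed" copy of the collection
        tempScene_filler=lambda temp_collection: copy_hollowed_collection_into(
            source_collection=collection,
            destination_collection=temp_collection,
            blueprints_data=blueprints_data,
            settings=settings,
        ),
        # restore original names and delete the temporary scene afterwards
        tempScene_cleaner=lambda temp_scene, params: clear_hollow_scene(temp_scene, collection),
    )
```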
@@ -1,6 +1,6 @@
 import bpy

-from .project_diff import get_changes_per_scene, project_diff, serialize_current
+from .project_diff import get_changes_per_scene
 from .auto_export import auto_export
 from .settings_diff import get_setting_changes

@@ -13,11 +13,11 @@ def prepare_and_export():
     if auto_export_settings.auto_export: # only do the actual exporting if auto export is actually enabled

         # determine changed objects
-        per_scene_changes = get_changes_per_scene()
+        per_scene_changes = get_changes_per_scene(settings=blenvy)
         # determine changed parameters
         setting_changes = get_setting_changes()
         # do the actual export
-        auto_export(per_scene_changes, setting_changes, blenvy)
+        # auto_export(per_scene_changes, setting_changes, blenvy)

         # cleanup
         # TODO: these are likely obsolete
@@ -1,6 +1,6 @@
 import json
 import bpy
-from ..helpers.serialize_scene import serialize_scene
+from .serialize_scene import serialize_scene
 from blenvy.settings import load_settings, upsert_settings

 def bubble_up_changes(object, changes_per_scene):
@@ -8,23 +8,7 @@ def bubble_up_changes(object, changes_per_scene):
         changes_per_scene[object.parent.name] = bpy.data.objects[object.parent.name]
         bubble_up_changes(object.parent, changes_per_scene)

-
-def foo():
-    current = json.loads(current)
-
-    previous_stored = bpy.data.texts[".TESTING"] if ".TESTING" in bpy.data.texts else None # bpy.data.texts.new(".TESTING")
-    if previous_stored == None:
-        previous_stored = bpy.data.texts.new(".TESTING")
-        previous_stored.write(current)
-        return {}
-    previous = json.loads(previous_stored.as_string())
-
-
-    previous_stored.clear()
-    previous_stored.write(json.dumps(current))
-
-
-def serialize_current():
+def serialize_current(settings):
     # sigh... you need to save & reset the frame otherwise it saves the values AT THE CURRENT FRAME WHICH CAN DIFFER ACROSS SCENES
     current_frames = [scene.frame_current for scene in bpy.data.scenes]
     for scene in bpy.data.scenes:
@@ -35,7 +19,7 @@ def serialize_current():
     #serialize scene at frame 0
     """with bpy.context.temp_override(scene=bpy.data.scenes[1]):
         bpy.context.scene.frame_set(0)"""
-    current = serialize_scene()
+    current = serialize_scene(settings)
     bpy.context.window.scene = current_scene

     # reset previous frames
@@ -44,9 +28,9 @@ def serialize_current():

     return current

-def get_changes_per_scene():
+def get_changes_per_scene(settings):
     previous = load_settings(".blenvy.project_serialized_previous")
-    current = serialize_current()
+    current = serialize_current(settings)

     # determine changes
     changes_per_scene = project_diff(previous, current)
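project_diff itself is untouched by this commit; for orientation, here is a minimal sketch of the comparison it has to perform, assuming the serialized form is a dict of scene name -> {object name: hash}, which is the shape serialize_scene builds below:

```python
def project_diff_sketch(previous, current):
    """Minimal sketch: returns {scene_name: [names of changed objects]}.
    Assumes both arguments are dicts of scene name -> {object name: hash}."""
    changes_per_scene = {}
    if previous is None:
        return changes_per_scene  # nothing stored yet, nothing to compare against
    for scene_name, current_objects in current.items():
        previous_objects = previous.get(scene_name, {})
        # modified or newly added objects
        changed = [name for name, object_hash in current_objects.items()
                   if previous_objects.get(name) != object_hash]
        # deleted objects
        changed += [name for name in previous_objects if name not in current_objects]
        if changed:
            changes_per_scene[scene_name] = changed
    return changes_per_scene
```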
@@ -4,11 +4,14 @@ import numpy as np
 import bpy
 from ..constants import TEMPSCENE_PREFIX

-fields_to_ignore_generic = ["tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users",
-    'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get',
-    'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original',
-    'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type',
-    'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data']
+
+fields_to_ignore_generic = [
+    "tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users",
+    'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get',
+    'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original',
+    'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type',
+    'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data'
+]

 # possible alternatives https://blender.stackexchange.com/questions/286010/bpy-detect-modified-mesh-data-vertices-edges-loops-or-polygons-for-cachin
 def mesh_hash(obj):
@@ -118,6 +121,13 @@ def lineart(lineart_data):
     return str(fields)

 def node_tree(nodetree_data):
+    print("SCANNING NODE TREE", nodetree_data)
+    # output node:
+    output = nodetree_data.get_output_node("ALL")
+    print("output", output)
+
+
+
     fields_to_ignore = fields_to_ignore_generic+ ['contains_tree','get_output_node', 'interface_update', 'override_template_create']
     all_field_names = dir(nodetree_data)
     fields = [getattr(nodetree_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
@@ -141,7 +151,6 @@ def material_hash(material):

     type_of = [type(getattr(material, prop, None)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
     names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]

     tutu = [t == Color for t in type_of] # bpy.types.MaterialLineArt bpy.types.ShaderNodeTree
     #print("fields", type_of)
-
@@ -152,11 +161,10 @@ def material_hash(material):
         print("types", type(bla) == bpy.types.bpy_prop_collection, type(bla) == bpy.types.FloatColorAttributeValue)"""

     # print("oooooh", material, material.bl_rna.properties.items())
-
     return str(fields)#str(hash(str(fields)))

 # TODO: this is partially taken from export_materials utilities, perhaps we could avoid having to fetch things multiple times ?
-def materials_hash(obj, cache):
+def materials_hash(obj, cache, settings):
     # print("materials")
     materials = []
     for material_slot in obj.material_slots:
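material_hash, node_tree and the new modifier_hash below all rely on the same scraping pattern: dir() the data-block, drop the generic ignore list, dunder attributes and UI-only 'show_*' properties, then getattr and stringify the rest. A condensed sketch of that shared pattern (the helper name is made up for illustration):

```python
def rna_fields_fingerprint_sketch(data, extra_fields_to_ignore=()):
    # shared pattern behind material_hash / node_tree / modifier_hash:
    # scrape every readable property except the ignored ones, then stringify
    fields_to_ignore = set(fields_to_ignore_generic) | set(extra_fields_to_ignore)
    field_names = [prop for prop in dir(data)
                   if not prop.startswith("__")
                   and prop not in fields_to_ignore
                   and not prop.startswith("show_")]
    fields = [getattr(data, prop, None) for prop in field_names]
    return str(fields)  # callers usually wrap this in str(hash(...))
```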
@@ -170,20 +178,54 @@ def materials_hash(obj, cache):
             cache['materials'][material.name] = mat
             materials.append(mat)
             # print("NOT CACHHH", mat)

     # materials = [material_hash(material_slot.material) if not material_slot.material.name in cache["materials"] else cache["materials"][material_slot.material.name] for material_slot in obj.material_slots]
     return str(hash(str(materials)))

 # TODO : we should also check for custom props on scenes, meshes, materials
 def custom_properties_hash(obj):
     custom_properties = {}
     for property_name in obj.keys():
         if property_name not in '_RNA_UI' and property_name != 'components_meta':
             custom_properties[property_name] = obj[property_name]

     return str(hash(str(custom_properties)))
+
+def modifier_hash(modifier_data, settings):
+    fields_to_ignore = fields_to_ignore_generic
+    all_field_names = dir(modifier_data)
+    fields = [getattr(modifier_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]

-def serialize_scene():
+    filtered_field_names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+    print("fields", fields, "field names", filtered_field_names)
+    node_group = getattr(modifier_data, "node_group", None)
+    if node_group is not None:
+        print("THIS IS A GEOMETRY NODE")
+        for node in node_group.nodes:
+            print("node", node)
+            print("node type", node.type)
+            try:
+                print("node value", node.values())
+            except:pass
+            for input in node.inputs:
+                print(" input", input, input.name, input.label)
+                if hasattr(input, "default_value"):
+                    print("YOHO", dict(input), input.default_value)
+
+    return str(fields)
+
+def modifiers_hash(object, settings):
+    print("modifiers", object.modifiers)
+
+    modifiers = []
+    for modifier in object.modifiers:
+        print("modifier", modifier )# modifier.node_group)
+        try:
+            print("MODIFIER FIEEEEEEELD", modifier.ratio) # apparently this only works for non geometry nodes ??
+        except: pass
+        modifiers.append(modifier_hash(modifier, settings))
+    return str(hash(str(modifiers)))
+
+def serialize_scene(settings):
     cache = {"materials":{}}
     print("serializing scene")
     data = {}
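The prints above only explore a geometry-nodes modifier's node tree; an actual hash along the same lines could collect each node input's default_value, roughly as sketched below (an assumption about what should feed the hash, not the final implementation):

```python
def geometry_nodes_inputs_hash_sketch(modifier):
    """Sketch: reduce a geometry-nodes modifier to a hashable string by
    collecting the default_value of every node input in its node group."""
    node_group = getattr(modifier, "node_group", None)
    if node_group is None:
        return None
    collected = []
    for node in node_group.nodes:
        for input in node.inputs:
            if hasattr(input, "default_value"):
                value = input.default_value
                # vectors / colors are array-like; flatten them into plain tuples
                if hasattr(value, "__len__") and not isinstance(value, str):
                    value = tuple(value)
                collected.append((node.name, input.name, str(value)))
    return str(hash(str(collected)))
```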
@@ -206,7 +248,8 @@ def serialize_scene():
         armature = armature_hash(object) if object.type == 'ARMATURE' else None
         parent = object.parent.name if object.parent else None
         collections = [collection.name for collection in object.users_collection]
-        materials = materials_hash(object, cache) if len(object.material_slots) > 0 else None
+        materials = materials_hash(object, cache, settings) if len(object.material_slots) > 0 else None
+        modifiers = modifiers_hash(object, settings) if len(object.modifiers) > 0 else None


         object_field_hashes = {
@@ -221,7 +264,8 @@ def serialize_scene():
             "armature": armature,
             "parent": parent,
             "collections": collections,
-            "materials": materials
+            "materials": materials,
+            "modifiers":modifiers
         }
         object_field_hashes_filtered = {key: object_field_hashes[key] for key in object_field_hashes.keys() if object_field_hashes[key] is not None}
         objectHash = str(hash(str(object_field_hashes_filtered)))
@@ -1 +1,4 @@
 TEMPSCENE_PREFIX = "__temp_scene"
+
+#hard coded custom properties to ignore
+custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']
@@ -1,59 +0,0 @@
-import bpy
-from blenvy.core.helpers_collections import (set_active_collection)
-from ..export.export_gltf import export_gltf
-
-"""
-generates a temporary scene, fills it with data, cleans up after itself
-    * named using temp_scene_name
-    * filled using the tempScene_filler
-    * written on disk to gltf_output_path, with the gltf export parameters in gltf_export_settings
-    * cleaned up using tempScene_cleaner
-
-"""
-def generate_and_export(settings, gltf_export_settings, gltf_output_path, temp_scene_name="__temp_scene", tempScene_filler=None, tempScene_cleaner=None):
-
-    temp_scene = bpy.data.scenes.new(name=temp_scene_name)
-    temp_root_collection = temp_scene.collection
-
-    # save active scene
-    original_scene = bpy.context.window.scene
-    # and selected collection
-    original_collection = bpy.context.view_layer.active_layer_collection
-    # and mode
-    original_mode = bpy.context.active_object.mode if bpy.context.active_object != None else None
-    # we change the mode to object mode, otherwise the gltf exporter is not happy
-    if original_mode != None and original_mode != 'OBJECT':
-        print("setting to object mode", original_mode)
-        bpy.ops.object.mode_set(mode='OBJECT')
-    # we set our active scene to be this one : this is needed otherwise the stand-in empties get generated in the wrong scene
-    bpy.context.window.scene = temp_scene
-
-    area = [area for area in bpy.context.screen.areas if area.type == "VIEW_3D"][0]
-    region = [region for region in area.regions if region.type == 'WINDOW'][0]
-    with bpy.context.temp_override(scene=temp_scene, area=area, region=region):
-        # detect scene mistmatch
-        scene_mismatch = bpy.context.scene.name != bpy.context.window.scene.name
-        if scene_mismatch:
-            raise Exception("Context scene mismatch, aborting", bpy.context.scene.name, bpy.context.window.scene.name)
-
-        set_active_collection(bpy.context.scene, temp_root_collection.name)
-        # generate contents of temporary scene
-        scene_filler_data = tempScene_filler(temp_root_collection)
-        # export the temporary scene
-        try:
-            if settings.auto_export.dry_run == "DISABLED":
-                export_gltf(gltf_output_path, gltf_export_settings)
-        except Exception as error:
-            print("failed to export gltf !", error)
-            raise error
-        # restore everything
-        tempScene_cleaner(temp_scene, scene_filler_data)
-
-    # reset active scene
-    bpy.context.window.scene = original_scene
-    # reset active collection
-    bpy.context.view_layer.active_layer_collection = original_collection
-    # reset mode
-    if original_mode != None:
-        bpy.ops.object.mode_set( mode = original_mode )
-
@@ -1,13 +1,9 @@
 import os
 import bpy
 from pathlib import Path
-from blenvy.blueprints.blueprint_helpers import inject_blueprints_list_into_main_scene, remove_blueprints_list_from_main_scene
-
 from ..constants import TEMPSCENE_PREFIX
-from ..helpers.generate_and_export import generate_and_export
-from .export_gltf import (generate_gltf_export_settings, export_gltf)
-from ..modules.bevy_dynamic import is_object_dynamic, is_object_static
-from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into
+from ..common.generate_temporary_scene_and_export import generate_temporary_scene_and_export, copy_hollowed_collection_into, clear_hollow_scene
+from ..common.export_gltf import (generate_gltf_export_settings, export_gltf)
+from .is_object_dynamic import is_object_dynamic, is_object_static
-
 def export_main_scene(scene, blend_file_path, settings, blueprints_data):
     gltf_export_settings = generate_gltf_export_settings(settings)
@@ -33,7 +29,7 @@ def export_main_scene(scene, blend_file_path, settings, blueprints_data):
     if export_separate_dynamic_and_static_objects:
         #print("SPLIT STATIC AND DYNAMIC")
         # first export static objects
-        generate_and_export(
+        generate_temporary_scene_and_export(
            settings,
            temp_scene_name=TEMPSCENE_PREFIX,
            gltf_export_settings=gltf_export_settings,
@@ -44,7 +40,7 @@ def export_main_scene(scene, blend_file_path, settings, blueprints_data):

        # then export all dynamic objects
        gltf_output_path = os.path.join(levels_path_full, scene.name+ "_dynamic")
-       generate_and_export(
+       generate_temporary_scene_and_export(
           settings,
          temp_scene_name=TEMPSCENE_PREFIX,
          gltf_export_settings=gltf_export_settings,
@@ -55,7 +51,7 @@ def export_main_scene(scene, blend_file_path, settings, blueprints_data):

    else:
        #print("NO SPLIT")
-       generate_and_export(
+       generate_temporary_scene_and_export(
          settings,
          temp_scene_name=TEMPSCENE_PREFIX,
          gltf_export_settings=gltf_export_settings,
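How the static/dynamic split selects objects is not visible in these hunks; presumably the level exporter passes the imported is_object_static / is_object_dynamic predicates as the filter of copy_hollowed_collection_into. A sketch under that assumption:

```python
# Sketch (assumption): export only the static part of a level scene by filtering the
# hollowed copy with is_object_static; the dynamic pass would use is_object_dynamic.
generate_temporary_scene_and_export(
    settings,
    temp_scene_name=TEMPSCENE_PREFIX,
    gltf_export_settings=gltf_export_settings,
    gltf_output_path=gltf_output_path,  # illustrative variable
    tempScene_filler=lambda temp_collection: copy_hollowed_collection_into(
        source_collection=scene.collection,
        destination_collection=temp_collection,
        filter=is_object_static,  # swap in is_object_dynamic for the second pass
        blueprints_data=blueprints_data,
        settings=settings,
    ),
    tempScene_cleaner=lambda temp_scene, params: clear_hollow_scene(temp_scene, scene.collection),
)
```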
@@ -34,7 +34,7 @@ class AutoExportSettings(PropertyGroup):

    materials_in_depth_scan : BoolProperty(
        name='In depth scan of materials (could be slow)',
-       description='serializes more details of materials in order to detect changes',
+       description='serializes more details of materials in order to detect changes (slower, but more accurate in detecting changes)',
        default=False,
        update=save_settings
    ) # type: ignore
@@ -32,6 +32,9 @@ def draw_settings_ui(layout, auto_export_settings):
        section.enabled = controls_enabled
        section.prop(auto_export_settings, "change_detection", text="Use change detection")
+
+       section.prop(auto_export_settings, "materials_in_depth_scan", text="Detailed materials scan")
+

    header, panel = layout.panel("Blueprints", default_closed=False)
    header.label(text="Blueprints")
    if panel: