Mirror of https://github.com/kaosat-dev/Blender_bevy_components_workflow.git
Synced 2024-11-23 04:10:53 +00:00

Compare commits — 3 commits: 2470736f81 ... cdab4c50f7

Commits (SHA1):
- cdab4c50f7
- a0b1959a43
- 2187fee4c8
@@ -11,7 +11,6 @@ Auto export
 - blueprints/levels/blueprints path => relative to assets path
 - [ ] add error handling for de/serialization of project, so that in case of error, the previous saved serialized project is thrown away

-
 - move out some parameters from auto export to a higher level (as they are now used in multiple places)
 - [x] main/ library scene names
 - [x] paths
@@ -91,9 +90,16 @@ General issues:


 - [x] addon-prefs => settings
-- [x] generate_gltf_export_preferences => should not use add-on prefs at all ? since we are not overriding gltf settings that way anymore ?
+- [x] generate_gltf_export_settings => should not use add-on prefs at all ? since we are not overriding gltf settings that way anymore ?
 - [x] remove hard coded path for standard gltf settings
 - [x] load settings on file load
 - [x] auto_export
 - [x] components
 - [ ] add handling of errors when trying to load settings
+
+
+- [ ] force overwrite of settings files instead of partial updates ?
+- [ ] add tests for disabled components
+- [x] fix auto export workflow
+- [ ] should we write the previous _xxx data only AFTER a sucessfull export only ?
+- [ ] add hashing of modifiers/ geometry nodes in serialize scene
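On the still-open item above about error handling when loading settings: the add-on persists its settings as JSON inside bpy.data.texts blocks, so one minimal approach is to treat a missing or unparseable text block as "no previous settings". This is a sketch only; the function name and the recovery behaviour shown here are assumptions, not what the add-on currently ships:

import json
import bpy

def load_settings_safe(name):
    # Hypothetical defensive variant of a settings loader:
    # return None ("no previous settings") instead of raising when the
    # stored text block is missing or contains invalid JSON.
    text = bpy.data.texts.get(name)
    if text is None:
        return None
    try:
        return json.loads(text.as_string())
    except json.JSONDecodeError:
        # throw away the corrupted serialized settings, as the TODO above suggests
        bpy.data.texts.remove(text)
        return None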
@@ -30,7 +30,7 @@ from .add_ons.bevy_components.settings import ComponentsSettings
 # auto export
 from .add_ons.auto_export import gltf_post_export_callback
-from .add_ons.auto_export.export.tracker import AutoExportTracker
+from .add_ons.auto_export.common.tracker import AutoExportTracker
 from .add_ons.auto_export.settings import AutoExportSettings

 # asset management
@@ -1,7 +1,9 @@
 import os
 import json
 import bpy
-from .helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_gltf
+from blenvy.settings import generate_complete_settings_dict
+from io_scene_gltf2 import ExportGLTF2_Base
+

 def cleanup_file():
     gltf_filepath = bpy.context.window_manager.auto_export_tracker.dummy_file_path
@@ -36,7 +38,8 @@ def gltf_post_export_callback(data):
     # now write new settings
     gltf_export_settings.clear()

-    current_gltf_settings = generate_complete_preferences_dict_gltf(dict(settings))
+    settings = dict(settings)
+    current_gltf_settings = generate_complete_settings_dict(settings, presets=ExportGLTF2_Base, ignore_list=["use_active_collection", "use_active_collection_with_nested", "use_active_scene", "use_selection", "will_save_settings", "gltf_export_id"], preset_defaults=False)
     gltf_export_settings.write(json.dumps(current_gltf_settings))
     # now reset the original gltf_settings
     if gltf_settings_backup != "":
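The generate_complete_settings_dict call above replaces the per-add-on helper generate_complete_preferences_dict_gltf (deleted at the end of this compare) with a shared utility in blenvy.settings. The idea, roughly — this is a sketch based on the removed helper, not the shared implementation itself — is to keep only values that differ from the gltf exporter's declared defaults and to drop purely contextual parameters:

from io_scene_gltf2 import ExportGLTF2_Base

def filter_to_non_default_settings(settings, presets=ExportGLTF2_Base, ignore_list=()):
    # Sketch of the default-stripping idea (the real code lives in blenvy.settings and may differ):
    # collect the defaults declared on the exporter operator's annotations,
    # then keep only keys whose value differs from that default and that are not ignored.
    defaults = {}
    for key, item in presets.__annotations__.items():
        defaults[key] = item.keywords.get('default', None)
    return {
        key: value
        for key, value in settings.items()
        if key not in ignore_list and key in defaults and value != defaults[key]
    }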
@@ -1,15 +1,12 @@
 import os
 import bpy

 from ..constants import TEMPSCENE_PREFIX
-from ..helpers.generate_and_export import generate_and_export
+from ..common.generate_temporary_scene_and_export import generate_temporary_scene_and_export, copy_hollowed_collection_into, clear_hollow_scene
-from .export_gltf import (generate_gltf_export_preferences)
+from ..common.export_gltf import generate_gltf_export_settings
-from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into


 def export_blueprints(blueprints, settings, blueprints_data):
     blueprints_path_full = getattr(settings, "blueprints_path_full")
-    gltf_export_preferences = generate_gltf_export_preferences(settings)
+    gltf_export_settings = generate_gltf_export_settings(settings)

     try:
         # save current active collection
@@ -19,7 +16,7 @@ def export_blueprints(blueprints, settings, blueprints_data):
         for blueprint in blueprints:
             print("exporting collection", blueprint.name)
             gltf_output_path = os.path.join(blueprints_path_full, blueprint.name)
-            gltf_export_settings = { **gltf_export_preferences, 'use_active_scene': True, 'use_active_collection': True, 'use_active_collection_with_nested':True}
+            gltf_export_settings = { **gltf_export_settings, 'use_active_scene': True, 'use_active_collection': True, 'use_active_collection_with_nested':True}

             # if we are using the material library option, do not export materials, use placeholder instead
             if export_materials_library:
@@ -27,7 +24,7 @@ def export_blueprints(blueprints, settings, blueprints_data):

             collection = bpy.data.collections[blueprint.name]
             # do the actual export
-            generate_and_export(
+            generate_temporary_scene_and_export(
                 settings,
                 temp_scene_name=TEMPSCENE_PREFIX+collection.name,
                 gltf_export_settings=gltf_export_settings,
@@ -7,14 +7,14 @@ from blenvy.core.scene_helpers import get_main_and_library_scenes
 from blenvy.blueprints.blueprints_scan import blueprints_scan
 from blenvy.blueprints.blueprint_helpers import inject_export_path_into_internal_blueprints

-from .get_blueprints_to_export import get_blueprints_to_export
+from ..blueprints.get_blueprints_to_export import get_blueprints_to_export
-from .get_levels_to_export import get_levels_to_export
+from ..levels.get_levels_to_export import get_levels_to_export
 from .export_gltf import get_standard_exporter_settings

-from .export_main_scenes import export_main_scene
+from ..levels.export_main_scenes import export_main_scene
-from .export_blueprints import export_blueprints
+from ..blueprints.export_blueprints import export_blueprints
 from .export_materials import cleanup_materials, export_materials
-from ..modules.bevy_scene_components import remove_scene_components, upsert_scene_components
+from ..levels.bevy_scene_components import remove_scene_components, upsert_scene_components


 """this is the main 'central' function for all auto export """
@@ -41,8 +41,9 @@ def auto_export(changes_per_scene, changed_export_parameters, settings):

         [main_scene_names, level_scenes, library_scene_names, library_scenes] = get_main_and_library_scenes(settings)

-        bpy.context.window_manager.blueprints_registry.refresh_blueprints()
-        blueprints_data = bpy.context.window_manager.blueprints_registry.blueprints_data
+        blueprints_data = bpy.context.window_manager.blueprints_registry.refresh_blueprints()
+        #blueprints_data = bpy.context.window_manager.blueprints_registry.blueprints_data
+        #print("blueprints_data", blueprints_data)
         blueprints_per_scene = blueprints_data.blueprints_per_scenes
         internal_blueprints = [blueprint.name for blueprint in blueprints_data.internal_blueprints]
         external_blueprints = [blueprint.name for blueprint in blueprints_data.external_blueprints]
@@ -1,10 +1,7 @@
 import json
 import bpy
 from blenvy.core.object_makers import (make_empty)
+from ..constants import custom_properties_to_filter_out

-# these are mostly for when using this add-on together with the bevy_components add-on
-custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']
-

 def is_component_valid_and_enabled(object, component_name):
     if "components_meta" in object or hasattr(object, "components_meta"):
@@ -90,26 +87,27 @@ def copy_animation_data(source, target):
         markers_formated += '}'
         target["AnimationMarkers"] = f'( {markers_formated} )'


 def duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data, nester=""):
     copy = None
     internal_blueprint_names = [blueprint.name for blueprint in blueprints_data.internal_blueprints]
     # print("COMBINE MODE", combine_mode)
     if object.instance_type == 'COLLECTION' and (combine_mode == 'Split' or (combine_mode == 'EmbedExternal' and (object.instance_collection.name in internal_blueprint_names)) ):
         #print("creating empty for", object.name, object.instance_collection.name, internal_blueprint_names, combine_mode)
-        collection_name = object.instance_collection.name
+        original_collection = object.instance_collection
         original_name = object.name
+        blueprint_name = original_collection.name
+        blueprint_path = original_collection['export_path'] if 'export_path' in original_collection else f'./{blueprint_name}' # TODO: the default requires the currently used extension !!


         object.name = original_name + "____bak"
         empty_obj = make_empty(original_name, object.location, object.rotation_euler, object.scale, destination_collection)

         """we inject the collection/blueprint name, as a component called 'BlueprintName', but we only do this in the empty, not the original object"""
-        empty_obj['BlueprintName'] = '("'+collection_name+'")'
+        empty_obj['BlueprintName'] = f'("{blueprint_name}")'
-        empty_obj["BlueprintPath"] = ''
+        empty_obj["BlueprintPath"] = f'("{blueprint_path}")'
         empty_obj['SpawnHere'] = '()'

         # we also inject a list of all sub blueprints, so that the bevy side can preload them
-        blueprint_name = collection_name
         children_per_blueprint = {}
         blueprint = blueprints_data.blueprints_per_name.get(blueprint_name, None)
         if blueprint:
@@ -131,10 +129,6 @@ def duplicate_object(object, parent, combine_mode, destination_collection, bluep

         destination_collection.objects.link(copy)

-        """if object.parent == None:
-        if parent_empty is not None:
-            copy.parent = parent_empty
-        """
         # do this both for empty replacements & normal copies
         if parent is not None:
             copy.parent = parent
@@ -143,64 +137,3 @@ def duplicate_object(object, parent, combine_mode, destination_collection, bluep

     for child in object.children:
         duplicate_object(child, copy, combine_mode, destination_collection, blueprints_data, nester+"  ")
-
-# copies the contents of a collection into another one while replacing library instances with empties
-def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, settings={}):
-    collection_instances_combine_mode = getattr(settings.auto_export, "collection_instances_combine_mode")
-
-    for object in source_collection.objects:
-        if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
-            continue
-        if filter is not None and filter(object) is False:
-            continue
-        #check if a specific collection instance does not have an ovveride for combine_mode
-        combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
-        parent = parent_empty
-        duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data)
-
-    # for every child-collection of the source, copy its content into a new sub-collection of the destination
-    for collection in source_collection.children:
-        original_name = collection.name
-        collection.name = original_name + "____bak"
-        collection_placeholder = make_empty(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection)
-
-        if parent_empty is not None:
-            collection_placeholder.parent = parent_empty
-        copy_hollowed_collection_into(
-            source_collection = collection,
-            destination_collection = destination_collection,
-            parent_empty = collection_placeholder,
-            filter = filter,
-            blueprints_data = blueprints_data,
-            settings=settings
-        )
-    return {}
-
-# clear & remove "hollow scene"
-def clear_hollow_scene(temp_scene, original_root_collection):
-    def restore_original_names(collection):
-        if collection.name.endswith("____bak"):
-            collection.name = collection.name.replace("____bak", "")
-        for object in collection.objects:
-            if object.instance_type == 'COLLECTION':
-                if object.name.endswith("____bak"):
-                    object.name = object.name.replace("____bak", "")
-            else:
-                if object.name.endswith("____bak"):
-                    object.name = object.name.replace("____bak", "")
-        for child_collection in collection.children:
-            restore_original_names(child_collection)
-
-    # remove any data we created
-    temp_root_collection = temp_scene.collection
-    temp_scene_objects = [o for o in temp_root_collection.all_objects]
-    for object in temp_scene_objects:
-        #print("removing", object.name)
-        bpy.data.objects.remove(object, do_unlink=True)
-
-    # remove the temporary scene
-    bpy.data.scenes.remove(temp_scene, do_unlink=True)
-
-    # reset original names
-    restore_original_names(original_root_collection)
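For reference, the stand-in empty generated above now carries a filled-in BlueprintPath next to BlueprintName and SpawnHere. With a hypothetical blueprint collection named "Enemy" whose export_path resolves to ./Enemy.glb (illustrative values only; the default extension handling is still an open TODO in the code above), the injected custom properties look like:

# Shape of the custom properties injected on the stand-in empty (illustrative values):
empty_obj['BlueprintName'] = '("Enemy")'
empty_obj['BlueprintPath'] = '("./Enemy.glb")'   # now filled in, instead of the previous empty string
empty_obj['SpawnHere'] = '()'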
@@ -8,9 +8,9 @@ def get_standard_exporter_settings():
     standard_gltf_exporter_settings = load_settings(".blenvy_gltf_settings")
     return standard_gltf_exporter_settings if standard_gltf_exporter_settings is not None else {}

-def generate_gltf_export_preferences(settings):
+def generate_gltf_export_settings(settings):
     # default values
-    gltf_export_preferences = dict(
+    gltf_export_settings = dict(
         # export_format= 'GLB', #'GLB', 'GLTF_SEPARATE', 'GLTF_EMBEDDED'
         check_existing=False,

@@ -49,7 +49,7 @@ def generate_gltf_export_preferences(settings):
     """for key in settings.__annotations__.keys():
         if str(key) not in AutoExportGltfPreferenceNames:
             #print("overriding setting", key, "value", getattr(settings,key))
-            pass#gltf_export_preferences[key] = getattr(settings, key)"""
+            pass#gltf_export_settings[key] = getattr(settings, key)"""


     standard_gltf_exporter_settings = get_standard_exporter_settings()
@@ -68,8 +68,10 @@ def generate_gltf_export_preferences(settings):
     # a certain number of essential params should NEVER be overwritten , no matter the settings of the standard exporter
     for key in standard_gltf_exporter_settings.keys():
         if str(key) not in constant_keys:
-            gltf_export_preferences[key] = standard_gltf_exporter_settings.get(key)
+            gltf_export_settings[key] = standard_gltf_exporter_settings.get(key)
-    return gltf_export_preferences
+
+    print("GLTF EXPORT SETTINGS", gltf_export_settings)
+    return gltf_export_settings


 #https://docs.blender.org/api/current/bpy.ops.export_scene.html#bpy.ops.export_scene.gltf
@@ -5,8 +5,8 @@ from pathlib import Path
 from blenvy.core.helpers_collections import (traverse_tree)
 from blenvy.core.object_makers import make_cube
 from blenvy.materials.materials_helpers import get_all_materials
-from ..helpers.generate_and_export import generate_and_export
+from .generate_temporary_scene_and_export import generate_temporary_scene_and_export
-from .export_gltf import (generate_gltf_export_preferences)
+from .export_gltf import (generate_gltf_export_settings)

 def clear_material_info(collection_names, library_scenes):
     for scene in library_scenes:
@@ -60,13 +60,13 @@ def clear_materials_scene(temp_scene):
 # exports the materials used inside the current project:
 # the name of the output path is <materials_folder>/<name_of_your_blend_file>_materials_library.gltf/glb
 def export_materials(collections, library_scenes, settings):
-    gltf_export_preferences = generate_gltf_export_preferences(settings)
+    gltf_export_settings = generate_gltf_export_settings(settings)
     materials_path_full = getattr(settings,"materials_path_full")

     used_material_names = get_all_materials(collections, library_scenes)
     current_project_name = Path(bpy.context.blend_data.filepath).stem

-    gltf_export_settings = { **gltf_export_preferences,
+    gltf_export_settings = { **gltf_export_settings,
         'use_active_scene': True,
         'use_active_collection':True,
         'use_active_collection_with_nested':True,
@@ -79,7 +79,7 @@ def export_materials(collections, library_scenes, settings):

     print(" exporting Materials to", gltf_output_path, ".gltf/glb")

-    generate_and_export(
+    generate_temporary_scene_and_export(
         settings=settings,
         gltf_export_settings=gltf_export_settings,
         temp_scene_name="__materials_scene",
@@ -0,0 +1,124 @@
+import bpy
+from blenvy.core.helpers_collections import (set_active_collection)
+from blenvy.core.object_makers import (make_empty)
+from .duplicate_object import duplicate_object
+from .export_gltf import export_gltf
+
+"""
+generates a temporary scene, fills it with data, cleans up after itself
+    * named using temp_scene_name
+    * filled using the tempScene_filler
+    * written on disk to gltf_output_path, with the gltf export parameters in gltf_export_settings
+    * cleaned up using tempScene_cleaner
+
+"""
+def generate_temporary_scene_and_export(settings, gltf_export_settings, gltf_output_path, temp_scene_name="__temp_scene", tempScene_filler=None, tempScene_cleaner=None):
+
+    temp_scene = bpy.data.scenes.new(name=temp_scene_name)
+    temp_root_collection = temp_scene.collection
+
+    # save active scene
+    original_scene = bpy.context.window.scene
+    # and selected collection
+    original_collection = bpy.context.view_layer.active_layer_collection
+    # and mode
+    original_mode = bpy.context.active_object.mode if bpy.context.active_object != None else None
+    # we change the mode to object mode, otherwise the gltf exporter is not happy
+    if original_mode != None and original_mode != 'OBJECT':
+        print("setting to object mode", original_mode)
+        bpy.ops.object.mode_set(mode='OBJECT')
+    # we set our active scene to be this one : this is needed otherwise the stand-in empties get generated in the wrong scene
+    bpy.context.window.scene = temp_scene
+
+    area = [area for area in bpy.context.screen.areas if area.type == "VIEW_3D"][0]
+    region = [region for region in area.regions if region.type == 'WINDOW'][0]
+    with bpy.context.temp_override(scene=temp_scene, area=area, region=region):
+        # detect scene mistmatch
+        scene_mismatch = bpy.context.scene.name != bpy.context.window.scene.name
+        if scene_mismatch:
+            raise Exception("Context scene mismatch, aborting", bpy.context.scene.name, bpy.context.window.scene.name)
+
+        set_active_collection(bpy.context.scene, temp_root_collection.name)
+        # generate contents of temporary scene
+        scene_filler_data = tempScene_filler(temp_root_collection)
+        # export the temporary scene
+        try:
+            if settings.auto_export.dry_run == "DISABLED":
+                export_gltf(gltf_output_path, gltf_export_settings)
+        except Exception as error:
+            print("failed to export gltf !", error)
+            raise error
+        # restore everything
+        tempScene_cleaner(temp_scene, scene_filler_data)
+
+    # reset active scene
+    bpy.context.window.scene = original_scene
+    # reset active collection
+    bpy.context.view_layer.active_layer_collection = original_collection
+    # reset mode
+    if original_mode != None:
+        bpy.ops.object.mode_set( mode = original_mode )
+
+
+
+# copies the contents of a collection into another one while replacing library instances with empties
+def copy_hollowed_collection_into(source_collection, destination_collection, parent_empty=None, filter=None, blueprints_data=None, settings={}):
+    collection_instances_combine_mode = getattr(settings.auto_export, "collection_instances_combine_mode")
+
+    for object in source_collection.objects:
+        if object.name.endswith("____bak"): # some objects could already have been handled, ignore them
+            continue
+        if filter is not None and filter(object) is False:
+            continue
+        #check if a specific collection instance does not have an ovveride for combine_mode
+        combine_mode = object['_combine'] if '_combine' in object else collection_instances_combine_mode
+        parent = parent_empty
+        duplicate_object(object, parent, combine_mode, destination_collection, blueprints_data)
+
+    # for every child-collection of the source, copy its content into a new sub-collection of the destination
+    for collection in source_collection.children:
+        original_name = collection.name
+        collection.name = original_name + "____bak"
+        collection_placeholder = make_empty(original_name, [0,0,0], [0,0,0], [1,1,1], destination_collection)
+
+        if parent_empty is not None:
+            collection_placeholder.parent = parent_empty
+        copy_hollowed_collection_into(
+            source_collection = collection,
+            destination_collection = destination_collection,
+            parent_empty = collection_placeholder,
+            filter = filter,
+            blueprints_data = blueprints_data,
+            settings=settings
+        )
+    return {}
+
+
+# clear & remove "hollow scene"
+def clear_hollow_scene(temp_scene, original_root_collection):
+    def restore_original_names(collection):
+        if collection.name.endswith("____bak"):
+            collection.name = collection.name.replace("____bak", "")
+        for object in collection.objects:
+            if object.instance_type == 'COLLECTION':
+                if object.name.endswith("____bak"):
+                    object.name = object.name.replace("____bak", "")
+            else:
+                if object.name.endswith("____bak"):
+                    object.name = object.name.replace("____bak", "")
+        for child_collection in collection.children:
+            restore_original_names(child_collection)
+
+    # remove any data we created
+    temp_root_collection = temp_scene.collection
+    temp_scene_objects = [o for o in temp_root_collection.all_objects]
+    for object in temp_scene_objects:
+        #print("removing", object.name)
+        bpy.data.objects.remove(object, do_unlink=True)
+
+    # remove the temporary scene
+    bpy.data.scenes.remove(temp_scene, do_unlink=True)
+
+    # reset original names
+    restore_original_names(original_root_collection)
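Call sites such as export_blueprints and export_materials above wire this helper up with a filler and a cleaner callback. A simplified sketch of that pattern follows; the lambda bodies are condensed from the (truncated) blueprint call site, so treat them as an approximation rather than the exact code. All names (collection, blueprint, gltf_export_settings, gltf_output_path, blueprints_data, TEMPSCENE_PREFIX) come from the modules shown above:

# Sketch of how a call site uses the new helper (simplified from export_blueprints):
collection = bpy.data.collections[blueprint.name]
generate_temporary_scene_and_export(
    settings,
    gltf_export_settings=gltf_export_settings,
    gltf_output_path=gltf_output_path,
    temp_scene_name=TEMPSCENE_PREFIX + collection.name,
    # fill the temp scene with hollowed-out copies of the blueprint's collection ...
    tempScene_filler=lambda temp_collection: copy_hollowed_collection_into(
        source_collection=collection,
        destination_collection=temp_collection,
        blueprints_data=blueprints_data,
        settings=settings,
    ),
    # ... and restore names / remove the temp scene afterwards
    tempScene_cleaner=lambda temp_scene, params: clear_hollow_scene(temp_scene, collection),
)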
@@ -1,6 +1,6 @@
 import bpy

-from .project_diff import get_changes_per_scene, project_diff, serialize_current
+from .project_diff import get_changes_per_scene
 from .auto_export import auto_export
 from .settings_diff import get_setting_changes

@@ -13,11 +13,11 @@ def prepare_and_export():
     if auto_export_settings.auto_export: # only do the actual exporting if auto export is actually enabled

         # determine changed objects
-        per_scene_changes = get_changes_per_scene()
+        per_scene_changes = get_changes_per_scene(settings=blenvy)
         # determine changed parameters
-        setting_changes = get_setting_changes(auto_export_settings)
+        setting_changes = get_setting_changes()
         # do the actual export
-        auto_export(per_scene_changes, setting_changes, blenvy)
+        # auto_export(per_scene_changes, setting_changes, blenvy)

         # cleanup
         # TODO: these are likely obsolete
@@ -1,29 +1,14 @@
 import json
 import bpy
-from ..helpers.serialize_scene import serialize_scene
+from .serialize_scene import serialize_scene
+from blenvy.settings import load_settings, upsert_settings

 def bubble_up_changes(object, changes_per_scene):
     if object.parent:
         changes_per_scene[object.parent.name] = bpy.data.objects[object.parent.name]
         bubble_up_changes(object.parent, changes_per_scene)

-def foo():
-    current = json.loads(current)
-
-    previous_stored = bpy.data.texts[".TESTING"] if ".TESTING" in bpy.data.texts else None # bpy.data.texts.new(".TESTING")
-    if previous_stored == None:
-        previous_stored = bpy.data.texts.new(".TESTING")
-        previous_stored.write(current)
-        return {}
-    previous = json.loads(previous_stored.as_string())
-
-    previous_stored.clear()
-    previous_stored.write(json.dumps(current))
-
-
-def serialize_current():
+def serialize_current(settings):
     # sigh... you need to save & reset the frame otherwise it saves the values AT THE CURRENT FRAME WHICH CAN DIFFER ACROSS SCENES
     current_frames = [scene.frame_current for scene in bpy.data.scenes]
     for scene in bpy.data.scenes:
@@ -34,7 +19,7 @@ def serialize_current():
     #serialize scene at frame 0
     """with bpy.context.temp_override(scene=bpy.data.scenes[1]):
         bpy.context.scene.frame_set(0)"""
-    current = serialize_scene()
+    current = serialize_scene(settings)
     bpy.context.window.scene = current_scene

     # reset previous frames
@@ -43,27 +28,26 @@ def serialize_current():

     return current

-def get_changes_per_scene():
-    current = serialize_current()
-
-    previous_stored = bpy.data.texts[".blenvy.project.serialized"] if ".blenvy.project.serialized" in bpy.data.texts else None
-    if previous_stored == None:
-        previous_stored = bpy.data.texts.new(".blenvy.project.serialized")
-        previous_stored.write(json.dumps(current))
-        return {}
-    previous = json.loads(previous_stored.as_string())
-
-    # determin changes
+def get_changes_per_scene(settings):
+    previous = load_settings(".blenvy.project_serialized_previous")
+    current = serialize_current(settings)
+
+    # determine changes
     changes_per_scene = project_diff(previous, current)

     # save the current project as previous
-    previous_stored.clear()
-    previous_stored.write(json.dumps(current))
+    upsert_settings(".blenvy.project_serialized_previous", current, overwrite=True)
+
+    print("changes per scene", changes_per_scene)
     return changes_per_scene


 def project_diff(previous, current):
+    """print("previous", previous)
+    print("current", current)"""
+    if previous is None or current is None:
+        return {}
+    print("HERE")

     changes_per_scene = {}

@@ -100,5 +84,4 @@ def project_diff(previous, current):
                 bubble_up_changes(bpy.data.objects[object_name], changes_per_scene[scene])
             # now bubble up for instances & parents

-    print("changes per scene", changes_per_scene)
     return changes_per_scene
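For context, serialize_current returns, per scene, a map of object name to hash (see serialize_scene below), so project_diff essentially compares those maps. A rough sketch of that comparison — the shipped function also handles the temporary export scenes and bubbles changes up to parents via bubble_up_changes, which is omitted here:

import bpy

def project_diff_sketch(previous, current):
    # previous / current: {scene_name: {object_name: object_hash}}
    # Illustrative only: report objects whose hash changed or that are new.
    changes_per_scene = {}
    for scene_name, current_objects in current.items():
        previous_objects = previous.get(scene_name, {})
        for object_name, object_hash in current_objects.items():
            if previous_objects.get(object_name) != object_hash:
                changes_per_scene.setdefault(scene_name, {})[object_name] = bpy.data.objects.get(object_name)
    return changes_per_scene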
@@ -4,11 +4,14 @@ import numpy as np
 import bpy
 from ..constants import TEMPSCENE_PREFIX

-fields_to_ignore_generic = ["tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users",
-                'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get',
-                'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original',
-                'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type',
-                'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data']
+fields_to_ignore_generic = [
+    "tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users",
+    'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get',
+    'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original',
+    'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type',
+    'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data'
+]

 # possible alternatives https://blender.stackexchange.com/questions/286010/bpy-detect-modified-mesh-data-vertices-edges-loops-or-polygons-for-cachin
 def mesh_hash(obj):
@@ -118,6 +121,13 @@ def lineart(lineart_data):
     return str(fields)

 def node_tree(nodetree_data):
+    print("SCANNING NODE TREE", nodetree_data)
+    # output node:
+    output = nodetree_data.get_output_node("ALL")
+    print("output", output)
+
+
+
     fields_to_ignore = fields_to_ignore_generic+ ['contains_tree','get_output_node', 'interface_update', 'override_template_create']
     all_field_names = dir(nodetree_data)
     fields = [getattr(nodetree_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
@@ -141,7 +151,6 @@ def material_hash(material):

     type_of = [type(getattr(material, prop, None)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
     names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]

     tutu = [t == Color for t in type_of] # bpy.types.MaterialLineArt bpy.types.ShaderNodeTree
     #print("fields", type_of)
-
@@ -152,11 +161,10 @@ def material_hash(material):
         print("types", type(bla) == bpy.types.bpy_prop_collection, type(bla) == bpy.types.FloatColorAttributeValue)"""

     # print("oooooh", material, material.bl_rna.properties.items())
-
     return str(fields)#str(hash(str(fields)))

 # TODO: this is partially taken from export_materials utilities, perhaps we could avoid having to fetch things multiple times ?
-def materials_hash(obj, cache):
+def materials_hash(obj, cache, settings):
     # print("materials")
     materials = []
     for material_slot in obj.material_slots:
@@ -170,20 +178,54 @@ def materials_hash(obj, cache):
             cache['materials'][material.name] = mat
             materials.append(mat)
             # print("NOT CACHHH", mat)

-    # materials = [material_hash(material_slot.material) if not material_slot.material.name in cache["materials"] else cache["materials"][material_slot.material.name] for material_slot in obj.material_slots]
     return str(hash(str(materials)))

+# TODO : we should also check for custom props on scenes, meshes, materials
 def custom_properties_hash(obj):
     custom_properties = {}
     for property_name in obj.keys():
         if property_name not in '_RNA_UI' and property_name != 'components_meta':
             custom_properties[property_name] = obj[property_name]

     return str(hash(str(custom_properties)))

+def modifier_hash(modifier_data, settings):
+    fields_to_ignore = fields_to_ignore_generic
+    all_field_names = dir(modifier_data)
+    fields = [getattr(modifier_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+
+    filtered_field_names = [prop for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+    print("fields", fields, "field names", filtered_field_names)
+    node_group = getattr(modifier_data, "node_group", None)
+    if node_group is not None:
+        print("THIS IS A GEOMETRY NODE")
+        for node in node_group.nodes:
+            print("node", node)
+            print("node type", node.type)
+            try:
+                print("node value", node.values())
+            except:pass
+            for input in node.inputs:
+                print(" input", input, input.name, input.label)
+                if hasattr(input, "default_value"):
+                    print("YOHO", dict(input), input.default_value)
+
+
+    return str(fields)
+
+def modifiers_hash(object, settings):
+    print("modifiers", object.modifiers)
+    modifiers = []
+    for modifier in object.modifiers:
+        print("modifier", modifier )# modifier.node_group)
+        try:
+            print("MODIFIER FIEEEEEEELD", modifier.ratio) # apparently this only works for non geometry nodes ??
+        except: pass
+        modifiers.append(modifier_hash(modifier, settings))
+    return str(hash(str(modifiers)))
+
-def serialize_scene():
+def serialize_scene(settings):
     cache = {"materials":{}}
     print("serializing scene")
     data = {}
@@ -206,9 +248,11 @@ def serialize_scene():
             armature = armature_hash(object) if object.type == 'ARMATURE' else None
             parent = object.parent.name if object.parent else None
             collections = [collection.name for collection in object.users_collection]
-            materials = materials_hash(object, cache) if len(object.material_slots) > 0 else None
+            materials = materials_hash(object, cache, settings) if len(object.material_slots) > 0 else None
+            modifiers = modifiers_hash(object, settings) if len(object.modifiers) > 0 else None

-            data[scene.name][object.name] = {
+            object_field_hashes = {
                 "name": object.name,
                 "transforms": transform,
                 "visibility": visibility,
@@ -220,8 +264,12 @@ def serialize_scene():
                 "armature": armature,
                 "parent": parent,
                 "collections": collections,
-                "materials": materials
+                "materials": materials,
+                "modifiers":modifiers
             }
+            object_field_hashes_filtered = {key: object_field_hashes[key] for key in object_field_hashes.keys() if object_field_hashes[key] is not None}
+            objectHash = str(hash(str(object_field_hashes_filtered)))
+            data[scene.name][object.name] = objectHash

     """print("data", data)
     print("")
tools/blenvy/add_ons/auto_export/common/settings_diff.py (new file, 57 lines)
@@ -0,0 +1,57 @@
+import bpy
+
+from blenvy.settings import are_settings_identical, load_settings, upsert_settings
+
+# which settings are specific to auto_export # TODO: can we infer this ?
+parameter_names_whitelist_common = [
+    # blenvy core
+    'project_root_path',
+    'assets_path',
+    'blueprints_path',
+    'levels_path',
+    'materials_path',
+    'main_scene_names',
+    'library_scene_names',
+]
+
+parameter_names_whitelist_auto_export = [
+    # auto export
+    'export_scene_settings',
+    'export_blueprints',
+    'export_separate_dynamic_and_static_objects',
+    'export_materials_library',
+    'collection_instances_combine_mode',
+    'export_marked_assets'
+]
+
+def get_setting_changes():
+    print("get setting changes")
+
+    previous_common_settings = load_settings(".blenvy_common_settings_previous")
+    current_common_settings = load_settings(".blenvy_common_settings")
+    common_settings_changed = not are_settings_identical(previous_common_settings, current_common_settings, white_list=parameter_names_whitelist_common)
+
+    previous_export_settings = load_settings(".blenvy_export_settings_previous")
+    current_export_settings = load_settings(".blenvy_export_settings")
+    export_settings_changed = not are_settings_identical(previous_export_settings, current_export_settings, white_list=parameter_names_whitelist_auto_export)
+
+    previous_gltf_settings = load_settings(".blenvy_gltf_settings_previous")
+    current_gltf_settings = load_settings(".blenvy_gltf_settings")
+    gltf_settings_changed = not are_settings_identical(previous_gltf_settings, current_gltf_settings)
+
+    # write the new settings to the old settings
+    upsert_settings(".blenvy_common_settings_previous", current_common_settings, overwrite=True)
+    upsert_settings(".blenvy_export_settings_previous", current_export_settings, overwrite=True)
+    upsert_settings(".blenvy_gltf_settings_previous", current_gltf_settings, overwrite=True)
+
+    print("common_settings_changed", common_settings_changed,"export_settings_changed", export_settings_changed, "gltf_settings_changed", gltf_settings_changed, )
+
+    # if there were no setting before, it is new, we need export # TODO: do we even need this ? I guess in the case where both the previous & the new one are both none ? very unlikely, but still
+    if previous_common_settings is None:
+        return True
+    if previous_export_settings is None:
+        return True
+    if previous_gltf_settings is None:
+        return True
+
+    return common_settings_changed or gltf_settings_changed or export_settings_changed
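The comparisons above rely on blenvy.settings.are_settings_identical, whose implementation is not part of this compare. Conceptually — a sketch under the assumption that the helper simply compares whitelisted keys — it behaves like:

def are_settings_identical_sketch(previous, current, white_list=None):
    # Conceptual sketch of the helper used above (not the actual blenvy.settings code):
    # treat "both missing" as identical, "one missing" as different, and otherwise
    # compare only the whitelisted keys (or every key when no whitelist is given).
    if previous is None and current is None:
        return True
    if previous is None or current is None:
        return False
    keys = white_list if white_list is not None else set(previous) | set(current)
    return all(previous.get(key) == current.get(key) for key in keys)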
@@ -1 +1,4 @@
 TEMPSCENE_PREFIX = "__temp_scene"
+
+#hard coded custom properties to ignore
+custom_properties_to_filter_out = ['_combine', 'template', 'components_meta']
@@ -1,97 +0,0 @@
-import bpy
-
-from blenvy.settings import are_settings_identical, load_settings, upsert_settings
-
-# which settings are specific to auto_export # TODO: can we infer this ?
-auto_export_parameter_names = [
-    # blenvy core
-    'project_root_path',
-    'assets_path',
-    'blueprints_path',
-    'levels_path',
-    'materials_path',
-    #'main_scene_names',
-    #'library_scene_names',
-
-    # auto export
-    'export_scene_settings',
-    'export_blueprints',
-    'export_separate_dynamic_and_static_objects',
-    'export_materials_library',
-    'collection_instances_combine_mode',
-    'export_marked_assets'
-]
-
-def get_setting_changes(auto_export_settings):
-    print("get setting changes", dict(auto_export_settings))
-    previous_gltf_settings = load_settings(".blenvy_gltf_settings_previous")
-    current_gltf_settings = load_settings(".blenvy_gltf_settings")
-    gltf_settings_changed = not are_settings_identical(previous_gltf_settings, current_gltf_settings)
-
-    previous_export_settings = load_settings(".blenvy_export_settings_previous")
-    current_export_settings = dict(auto_export_settings) #load_settings(".blenvy_export_settings")
-    export_settings_changed = not are_settings_identical(previous_export_settings, current_export_settings)
-
-    # if there were no setting before, it is new, we need export
-    if previous_gltf_settings is None:
-        pass
-    if previous_export_settings is None:
-        pass
-
-    # write the new settings to the old settings
-    upsert_settings(".blenvy_gltf_settings_previous", current_gltf_settings)
-    upsert_settings(".blenvy_export_settings_previous", current_export_settings)
-
-    return gltf_settings_changed or export_settings_changed
-
-
-def did_export_settings_change(self):
-    return True
-    # compare both the auto export settings & the gltf settings
-    previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else None
-    previous_gltf_settings = bpy.data.texts[".blenvy_gltf_settings_previous"] if ".blenvy_gltf_settings_previous" in bpy.data.texts else None
-
-    current_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else None
-    current_gltf_settings = bpy.data.texts[".blenvy_gltf_settings"] if ".blenvy_gltf_settings" in bpy.data.texts else None
-
-    #check if params have changed
-
-    # if there were no setting before, it is new, we need export
-    changed = False
-    if previous_auto_settings == None:
-        #print("previous settings missing, exporting")
-        changed = True
-    elif previous_gltf_settings == None:
-        #print("previous gltf settings missing, exporting")
-        previous_gltf_settings = bpy.data.texts.new(".blenvy_gltf_settings_previous")
-        previous_gltf_settings.write(json.dumps({}))
-        if current_gltf_settings == None:
-            current_gltf_settings = bpy.data.texts.new(".blenvy_gltf_settings")
-            current_gltf_settings.write(json.dumps({}))
-
-        changed = True
-
-    else:
-        auto_settings_changed = sorted(json.loads(previous_auto_settings.as_string()).items()) != sorted(json.loads(current_auto_settings.as_string()).items()) if current_auto_settings != None else False
-        gltf_settings_changed = sorted(json.loads(previous_gltf_settings.as_string()).items()) != sorted(json.loads(current_gltf_settings.as_string()).items()) if current_gltf_settings != None else False
-
-        """print("auto settings previous", sorted(json.loads(previous_auto_settings.as_string()).items()))
-        print("auto settings current", sorted(json.loads(current_auto_settings.as_string()).items()))
-        print("auto_settings_changed", auto_settings_changed)
-
-        print("gltf settings previous", sorted(json.loads(previous_gltf_settings.as_string()).items()))
-        print("gltf settings current", sorted(json.loads(current_gltf_settings.as_string()).items()))
-        print("gltf_settings_changed", gltf_settings_changed)"""
-
-        changed = auto_settings_changed or gltf_settings_changed
-        # now write the current settings to the "previous settings"
-        if current_auto_settings != None:
-            previous_auto_settings = bpy.data.texts[".gltf_auto_export_settings_previous"] if ".gltf_auto_export_settings_previous" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings_previous")
-            previous_auto_settings.clear()
-            previous_auto_settings.write(current_auto_settings.as_string()) # TODO : check if this is always valid
-
-        if current_gltf_settings != None:
-            previous_gltf_settings = bpy.data.texts[".blenvy_gltf_settings_previous"] if ".blenvy_gltf_settings_previous" in bpy.data.texts else bpy.data.texts.new(".blenvy_gltf_settings_previous")
-            previous_gltf_settings.clear()
-            previous_gltf_settings.write(current_gltf_settings.as_string())
-
-    return changed
@ -1,59 +0,0 @@
import bpy
from blenvy.core.helpers_collections import (set_active_collection)
from ..export.export_gltf import export_gltf

"""
generates a temporary scene, fills it with data, cleans up after itself
* named using temp_scene_name
* filled using the tempScene_filler
* written on disk to gltf_output_path, with the gltf export parameters in gltf_export_settings
* cleaned up using tempScene_cleaner

"""
def generate_and_export(settings, gltf_export_settings, gltf_output_path, temp_scene_name="__temp_scene", tempScene_filler=None, tempScene_cleaner=None):

temp_scene = bpy.data.scenes.new(name=temp_scene_name)
temp_root_collection = temp_scene.collection

# save active scene
original_scene = bpy.context.window.scene
# and selected collection
original_collection = bpy.context.view_layer.active_layer_collection
# and mode
original_mode = bpy.context.active_object.mode if bpy.context.active_object != None else None
# we change the mode to object mode, otherwise the gltf exporter is not happy
if original_mode != None and original_mode != 'OBJECT':
print("setting to object mode", original_mode)
bpy.ops.object.mode_set(mode='OBJECT')
# we set our active scene to be this one : this is needed otherwise the stand-in empties get generated in the wrong scene
bpy.context.window.scene = temp_scene

area = [area for area in bpy.context.screen.areas if area.type == "VIEW_3D"][0]
region = [region for region in area.regions if region.type == 'WINDOW'][0]
with bpy.context.temp_override(scene=temp_scene, area=area, region=region):
# detect scene mistmatch
scene_mismatch = bpy.context.scene.name != bpy.context.window.scene.name
if scene_mismatch:
raise Exception("Context scene mismatch, aborting", bpy.context.scene.name, bpy.context.window.scene.name)

set_active_collection(bpy.context.scene, temp_root_collection.name)
# generate contents of temporary scene
scene_filler_data = tempScene_filler(temp_root_collection)
# export the temporary scene
try:
if settings.auto_export.dry_run == "DISABLED":
export_gltf(gltf_output_path, gltf_export_settings)
except Exception as error:
print("failed to export gltf !", error)
raise error
# restore everything
tempScene_cleaner(temp_scene, scene_filler_data)

# reset active scene
bpy.context.window.scene = original_scene
# reset active collection
bpy.context.view_layer.active_layer_collection = original_collection
# reset mode
if original_mode != None:
bpy.ops.object.mode_set( mode = original_mode )
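This deleted helper lives on as generate_temporary_scene_and_export in the common module (see the import changes further down). A minimal usage sketch of the callback contract, assuming the renamed helper keeps the old signature; the filler/cleaner functions below are hypothetical, while the add-on itself wires in copy_hollowed_collection_into / clear_hollow_scene at this point:

    import os
    import bpy

    def fill_temp_scene(temp_root_collection):
        # hypothetical filler: link the objects to export into the temporary collection
        # and return whatever bookkeeping data the cleaner will need afterwards
        linked = [obj for obj in bpy.data.objects if obj.name.startswith("Level_")]
        for obj in linked:
            temp_root_collection.objects.link(obj)
        return linked

    def clean_temp_scene(temp_scene, filler_data):
        # hypothetical cleaner: undo what the filler did; the helper itself restores
        # the active scene, collection and mode
        for obj in filler_data:
            temp_scene.collection.objects.unlink(obj)

    # settings, gltf_export_settings, levels_path_full and scene come from the calling export code
    generate_temporary_scene_and_export(
        settings,
        temp_scene_name=TEMPSCENE_PREFIX,
        gltf_export_settings=gltf_export_settings,
        gltf_output_path=os.path.join(levels_path_full, scene.name),
        tempScene_filler=fill_temp_scene,
        tempScene_cleaner=clean_temp_scene,
    )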
@ -1,28 +0,0 @@

from io_scene_gltf2 import (ExportGLTF2_Base)

# given the input (actual) gltf settings, filters out any invalid/useless params & params that are equal to defaults
def generate_complete_preferences_dict_gltf(settings):
complete_preferences = {}
defaults = {}
gltf_parameters_to_ignore = ["use_active_collection", "use_active_collection_with_nested", "use_active_scene", "use_selection", "will_save_settings", "gltf_export_id"]
def filter_out(pair):
key, value = pair
if key in gltf_parameters_to_ignore:
return False
return True

for k in ExportGLTF2_Base.__annotations__: # we use parameters from the base class of the standard gltf exporter, that contains all relevant parameters
item = ExportGLTF2_Base.__annotations__[k]
#print("item", item)
default = item.keywords.get('default', None)
#complete_preferences[k] = default
defaults[k] = default

for key in list(settings.keys()):
if key in defaults and settings[key] != defaults[key]: # only write out values different from defaults
complete_preferences[key] = settings[key]

complete_preferences = dict(filter(filter_out, dict(complete_preferences).items()))
return complete_preferences
@ -1,23 +1,19 @@
import os
import bpy
from pathlib import Path
from blenvy.blueprints.blueprint_helpers import inject_blueprints_list_into_main_scene, remove_blueprints_list_from_main_scene

from ..constants import TEMPSCENE_PREFIX
from ..helpers.generate_and_export import generate_and_export
from ..common.generate_temporary_scene_and_export import generate_temporary_scene_and_export, copy_hollowed_collection_into, clear_hollow_scene
from .export_gltf import (generate_gltf_export_preferences, export_gltf)
from ..common.export_gltf import (generate_gltf_export_settings, export_gltf)
from ..modules.bevy_dynamic import is_object_dynamic, is_object_static
from .is_object_dynamic import is_object_dynamic, is_object_static
from ..helpers.helpers_scenes import clear_hollow_scene, copy_hollowed_collection_into

def export_main_scene(scene, blend_file_path, settings, blueprints_data):
gltf_export_preferences = generate_gltf_export_preferences(settings)
gltf_export_settings = generate_gltf_export_settings(settings)
assets_path_full = getattr(settings,"assets_path_full")
levels_path_full = getattr(settings,"levels_path_full")

export_blueprints = getattr(settings.auto_export,"export_blueprints")
export_separate_dynamic_and_static_objects = getattr(settings.auto_export, "export_separate_dynamic_and_static_objects")

gltf_export_settings = { **gltf_export_preferences,
gltf_export_settings = { **gltf_export_settings,
'use_active_scene': True,
'use_active_collection':True,
'use_active_collection_with_nested':True,
@ -33,7 +29,7 @@ def export_main_scene(scene, blend_file_path, settings, blueprints_data):
if export_separate_dynamic_and_static_objects:
#print("SPLIT STATIC AND DYNAMIC")
# first export static objects
generate_and_export(
generate_temporary_scene_and_export(
settings,
temp_scene_name=TEMPSCENE_PREFIX,
gltf_export_settings=gltf_export_settings,
@ -44,7 +40,7 @@ def export_main_scene(scene, blend_file_path, settings, blueprints_data):

# then export all dynamic objects
gltf_output_path = os.path.join(levels_path_full, scene.name+ "_dynamic")
generate_and_export(
generate_temporary_scene_and_export(
settings,
temp_scene_name=TEMPSCENE_PREFIX,
gltf_export_settings=gltf_export_settings,
@ -55,7 +51,7 @@ def export_main_scene(scene, blend_file_path, settings, blueprints_data):

else:
#print("NO SPLIT")
generate_and_export(
generate_temporary_scene_and_export(
settings,
temp_scene_name=TEMPSCENE_PREFIX,
gltf_export_settings=gltf_export_settings,
@ -1,16 +1,16 @@
import bpy
from bpy_types import (PropertyGroup)
from bpy.props import (EnumProperty, PointerProperty, StringProperty, BoolProperty, CollectionProperty, IntProperty)
from blenvy.settings import load_settings, upsert_settings, generate_complete_preferences_dict
from blenvy.settings import load_settings, upsert_settings, generate_complete_settings_dict

# list of settings we do NOT want to save
settings_black_list = ['settings_save_enabled', 'dry_run']

def save_settings(settings, context):
if settings.settings_save_enabled:
settings_dict = generate_complete_preferences_dict(settings, AutoExportSettings, [])
settings_dict = generate_complete_settings_dict(settings, AutoExportSettings, [])
print("save settings", settings, context, settings_dict)
upsert_settings(settings.settings_save_path, {key: settings_dict[key] for key in settings_dict.keys() if key not in settings_black_list})
upsert_settings(settings.settings_save_path, {key: settings_dict[key] for key in settings_dict.keys() if key not in settings_black_list}, overwrite=True)

class AutoExportSettings(PropertyGroup):

@ -24,7 +24,7 @@ class AutoExportSettings(PropertyGroup):
update=save_settings
) # type: ignore

#### general
#### change detection
change_detection: BoolProperty(
name='Change detection',
description='Use change detection to determine what/if should be exported',
@ -32,6 +32,13 @@ class AutoExportSettings(PropertyGroup):
update=save_settings
) # type: ignore

materials_in_depth_scan : BoolProperty(
name='In depth scan of materials (could be slow)',
description='serializes more details of materials in order to detect changes (slower, but more accurate in detecting changes)',
default=False,
update=save_settings
) # type: ignore

# scenes

# scene components
@ -32,6 +32,9 @@ def draw_settings_ui(layout, auto_export_settings):
section.enabled = controls_enabled
section.prop(auto_export_settings, "change_detection", text="Use change detection")

section.prop(auto_export_settings, "materials_in_depth_scan", text="Detailed materials scan")

header, panel = layout.panel("Blueprints", default_closed=False)
header.label(text="Blueprints")
if panel:
@ -193,7 +193,7 @@ class OT_OpenSchemaFileBrowser(Operator, ImportHelper):

blenvy = context.window_manager.blenvy
blenvy.components.schema_path = relative_path
upsert_settings(blenvy.settings_save_path, {"components_schemaPath": relative_path})
upsert_settings(blenvy.components.settings_save_path, {"schema_path": relative_path})

return {'FINISHED'}

@ -2,7 +2,7 @@ import os
import bpy
from bpy_types import (PropertyGroup)
from bpy.props import (EnumProperty, PointerProperty, StringProperty, BoolProperty, CollectionProperty, IntProperty)
from blenvy.settings import load_settings, upsert_settings, generate_complete_preferences_dict
from blenvy.settings import load_settings, upsert_settings, generate_complete_settings_dict
from .propGroups.prop_groups import generate_propertyGroups_for_components
from .components.metadata import ensure_metadata_for_all_items

@ -11,9 +11,9 @@ settings_black_list = ['settings_save_enabled', 'watcher_active']

def save_settings(settings, context):
if settings.settings_save_enabled:
settings_dict = generate_complete_preferences_dict(settings, ComponentsSettings, [])
settings_dict = generate_complete_settings_dict(settings, ComponentsSettings, [])
print("save settings", settings, context,settings_dict)
upsert_settings(settings.settings_save_path, {key: settings_dict[key] for key in settings_dict.keys() if key not in settings_black_list})
upsert_settings(settings.settings_save_path, {key: settings_dict[key] for key in settings_dict.keys() if key not in settings_black_list}, overwrite=True)

# helper function to deal with timer
def toggle_watcher(self, context):
@ -17,14 +17,13 @@ def refresh_blueprints():
try:
blueprints_registry = bpy.context.window_manager.blueprints_registry
blueprints_registry.refresh_blueprints()
#print('refresh blueprints')
except:pass

return 3

# this is where we store the information for all available Blueprints
class BlueprintsRegistry(PropertyGroup):
blueprints_data = {}
blueprints_data = None
blueprints_list = []

asset_name_selector: StringProperty(
@ -66,10 +65,13 @@ class BlueprintsRegistry(PropertyGroup):
self.blueprints_list.append(blueprint)

def refresh_blueprints(self):
#print("titi", self)
blenvy = bpy.context.window_manager.blenvy
settings = blenvy
[main_scene_names, level_scenes, library_scene_names, library_scenes] = get_main_and_library_scenes(settings)
blueprints_data = blueprints_scan(level_scenes, library_scenes, settings)
self.blueprints_data = blueprints_data
return blueprints_data
#print("bla", self.blueprints_data)
"""for blueprint in blueprints_data.blueprints:
self.add_blueprint(blueprint)"""
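Since refresh_blueprints() now returns the scanned data as well as caching it on the registry, callers can consume the result directly; a minimal sketch, assuming it runs inside Blender with the add-on loaded:

    import bpy

    registry = bpy.context.window_manager.blueprints_registry
    blueprints_data = registry.refresh_blueprints()
    # blueprints_data is the result of blueprints_scan; e.g. iterate its blueprints
    for blueprint in blueprints_data.blueprints:
        print("found blueprint", blueprint)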
@ -3,7 +3,7 @@ import bpy
from bpy_types import (PropertyGroup)
from bpy.props import (BoolProperty, EnumProperty, PointerProperty, StringProperty, CollectionProperty, IntProperty)
from .scene_helpers import SceneSelector
from ..settings import upsert_settings, load_settings, generate_complete_preferences_dict
from ..settings import upsert_settings, load_settings, generate_complete_settings_dict
import blenvy.add_ons.auto_export.settings as auto_export_settings
import blenvy.add_ons.bevy_components.settings as component_settings

@ -15,15 +15,15 @@ settings_black_list = ['settings_save_enabled', 'main_scene_selector', 'main_sce

def save_settings(settings, context):
if settings.settings_save_enabled:
settings_dict = generate_complete_preferences_dict(settings, BlenvyManager, [])
settings_dict = generate_complete_settings_dict(settings, BlenvyManager, [])
print("save settings", settings, context, settings_dict)
# upsert_settings(settings.settings_save_path, {key: settings_dict[key] for key in settings_dict.keys() if key not in settings_black_list})

def update_scene_lists(blenvy, context):
blenvy.main_scene_names = [scene.name for scene in blenvy.main_scenes] # FIXME: unsure
blenvy.library_scene_names = [scene.name for scene in blenvy.library_scenes] # FIXME: unsure
upsert_settings(blenvy.settings_save_path, {"common_main_scene_names": [scene.name for scene in blenvy.main_scenes]})
upsert_settings(blenvy.settings_save_path, {"main_scene_names": [scene.name for scene in blenvy.main_scenes]})
upsert_settings(blenvy.settings_save_path, {"common_library_scene_names": [scene.name for scene in blenvy.library_scenes]})
upsert_settings(blenvy.settings_save_path, {"library_scene_names": [scene.name for scene in blenvy.library_scenes]})

def update_asset_folders(blenvy, context):
asset_path_names = ['project_root_path', 'assets_path', 'blueprints_path', 'levels_path', 'materials_path']
@ -145,12 +145,12 @@ class BlenvyManager(PropertyGroup):
if settings is not None:
if "mode" in settings:
self.mode = settings["mode"]
if "common_main_scene_names" in settings:
if "main_scene_names" in settings:
for main_scene_name in settings["common_main_scene_names"]:
for main_scene_name in settings["main_scene_names"]:
added = self.main_scenes.add()
added.name = main_scene_name
if "common_library_scene_names" in settings:
if "library_scene_names" in settings:
for main_scene_name in settings["common_library_scene_names"]:
for main_scene_name in settings["library_scene_names"]:
added = self.library_scenes.add()
added.name = main_scene_name

@ -1,16 +1,20 @@
import json
import bpy

def upsert_settings(name, data):
def upsert_settings(name, data, overwrite=False):
stored_settings = bpy.data.texts[name] if name in bpy.data.texts else None
if stored_settings is None:
stored_settings = bpy.data.texts.new(name)
stored_settings.write(json.dumps(data))
else:
current_settings = json.loads(stored_settings.as_string())
if overwrite:
current_settings = {**current_settings, **data}
stored_settings.clear()
stored_settings.clear()
stored_settings.write(json.dumps(data))
stored_settings.write(json.dumps(current_settings))
else:
current_settings = json.loads(stored_settings.as_string())
stored_settings.clear()
current_settings = {**current_settings, **data}
stored_settings.write(json.dumps(current_settings))

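With the new overwrite flag, callers choose between merging into the stored settings and replacing them wholesale; a short usage sketch (the text datablock name is just an example):

    # merge: only the given keys are updated, any other stored keys are kept
    upsert_settings(".example_settings", {"change_detection": False})

    # overwrite: the stored text ends up containing exactly this dict
    upsert_settings(".example_settings", {"change_detection": False, "export_blueprints": True}, overwrite=True)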
def load_settings(name):
stored_settings = bpy.data.texts[name] if name in bpy.data.texts else None
@ -23,7 +27,7 @@ def load_settings(name):

# given the input (actual) settings, filters out any invalid/useless params & params that are equal to defaults
def generate_complete_preferences_dict(settings, presets, ignore_list=[], preset_defaults=True):
def generate_complete_settings_dict(settings, presets, ignore_list=[], preset_defaults=True):
complete_preferences = {}
defaults = {}

@ -40,12 +44,18 @@ def generate_complete_preferences_dict(settings, presets, ignore_list=[], preset
defaults[k] = default
if preset_defaults:
complete_preferences[k] = default
# print("defaults", defaults)
#print("defaults", defaults)

for key in list(settings.keys()):
if key in defaults and settings[key] != defaults[key]: # only write out values different from defaults
complete_preferences[key] = getattr(settings, key, None)
value = getattr(settings, key, None) # this is needed for most of our settings (PropertyGroups)
if value is None:
value = settings[key] # and this for ...gltf settings
complete_preferences[key] = value
#print("setting", key, value, settings[key], settings)

complete_preferences = dict(filter(filter_out, dict(complete_preferences).items()))

return complete_preferences
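The getattr-then-subscript fallback added above matters because not every settings object passed in is a PropertyGroup: the gltf export settings travel as a plain dict-like mapping, where getattr() returns None and the value has to be read by key instead. A sketch of the two call shapes (variable names are illustrative; AutoExportSettings and ExportGLTF2_Base are the classes used elsewhere in this changeset, the latter presumably for the gltf path):

    # PropertyGroup-backed settings: values are read via getattr(settings, key)
    settings_dict = generate_complete_settings_dict(auto_export_settings_instance, AutoExportSettings, [])

    # gltf export settings kept as a plain dict-like object: the settings[key] fallback kicks in
    gltf_dict = generate_complete_settings_dict(gltf_settings, ExportGLTF2_Base, [])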
@ -60,17 +70,19 @@ def are_settings_identical(old, new, white_list=None):
if old is not None and new is None:
return False

#print("TUTU", old_items, new_items)
old_items = sorted(old.items())
new_items = sorted(new.items())

if white_list is not None:
old_items_override = {}
new_items_override = {}
for key in white_list:
if key in old_items:
if key in old:
old_items_override[key] = old_items[key]
old_items_override[key] = old[key]
if key in new_items:
if key in new:
new_items_override[key] = new_items[key]
new_items_override[key] = new[key]
old_items = old_items_override
old_items = sorted(old_items_override.items())
new_items = new_items_override
new_items = sorted(new_items_override.items())

return old_items != new_items if new is not None else False
return old_items == new_items