feat(auto_export): since all variations of using depsgraph_update & co failed to be reliable, switching to brute force
* aka added the basics of scene serialization & a brute-force compare
* added the basics for transforms, custom properties, meshes, cameras & lights, & armatures
* should only run on save, as I can imagine it could get quite costly, computation wise
* added some boilerplate for determining changed objects
* not hooked up to the actual exporter yet
* already way more reliable than the depsgraph:
  * ignores selections & related weird issues
  * undo/redo proof
  * expandable
parent 9ec60a783b
commit 2f54bea7c9
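The core idea of the commit is to serialize every scene to JSON, keep the previous snapshot in a hidden text datablock (the commit uses one named ".TESTING"), and diff a fresh snapshot against it on save. Below is a minimal sketch of the snapshot-and-store half, with a placeholder datablock name and a deliberately coarse stand-in for serialize_scene(); it is not part of the commit itself.

import json
import bpy

SNAPSHOT_NAME = ".AUTO_EXPORT_SNAPSHOT"  # placeholder; the commit itself uses ".TESTING"

def snapshot_scenes():
    # stand-in for serialize_scene(): a coarse per-object summary per scene
    data = {}
    for scene in bpy.data.scenes:
        data[scene.name] = {
            obj.name: str((tuple(obj.location), tuple(obj.rotation_euler), tuple(obj.scale)))
            for obj in scene.objects
        }
    return json.dumps(data)

def store_snapshot(serialized):
    # text datablocks are saved inside the .blend file, which is what lets the previous
    # snapshot persist between saves (and is part of why the commit message calls the
    # approach undo/redo proof)
    text = bpy.data.texts.get(SNAPSHOT_NAME) or bpy.data.texts.new(SNAPSHOT_NAME)
    text.clear()
    text.write(serialized)

def load_snapshot():
    text = bpy.data.texts.get(SNAPSHOT_NAME)
    return json.loads(text.as_string()) if text else None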
@@ -188,7 +188,11 @@ from bpy.app.handlers import persistent
 @persistent
 def post_update(scene, depsgraph):
-    bpy.context.window_manager.auto_export_tracker.deps_update_handler( scene, depsgraph)
+    bpy.context.window_manager.auto_export_tracker.deps_post_update_handler( scene, depsgraph)
 
+@persistent
+def pre_update(scene, depsgraph):
+    bpy.context.window_manager.auto_export_tracker.deps_pre_update_handler( scene, depsgraph)
+
 @persistent
 def post_save(scene, depsgraph):
@@ -198,6 +202,7 @@ def register():
     for cls in classes:
         bpy.utils.register_class(cls)
     # for some reason, adding these directly to the tracker class in register() do not work reliably
+    bpy.app.handlers.depsgraph_update_pre.append(pre_update)
     bpy.app.handlers.depsgraph_update_post.append(post_update)
     bpy.app.handlers.save_post.append(post_save)
 
@@ -214,6 +219,7 @@ def unregister():
         bpy.utils.unregister_class(cls)
     bpy.types.TOPBAR_MT_file_export.remove(menu_func_import)
 
+    bpy.app.handlers.depsgraph_update_pre.remove(pre_update)
    bpy.app.handlers.depsgraph_update_post.remove(post_update)
    bpy.app.handlers.save_post.remove(post_save)
 
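register() above appends the new pre_update handler unconditionally, so re-running registration in the same session (for example reloading the add-on without a clean unregister) would append it twice. A small defensive variant is sketched below; it assumes the pre_update/post_update/post_save functions from the hunks above and is not part of this commit.

import bpy

def register_handlers():
    # bpy.app.handlers.* are plain Python lists, so a membership check is enough
    # to avoid double-appending when registration runs twice (illustrative guard,
    # not part of this commit)
    if pre_update not in bpy.app.handlers.depsgraph_update_pre:
        bpy.app.handlers.depsgraph_update_pre.append(pre_update)
    if post_update not in bpy.app.handlers.depsgraph_update_post:
        bpy.app.handlers.depsgraph_update_post.append(post_update)
    if post_save not in bpy.app.handlers.save_post:
        bpy.app.handlers.save_post.append(post_save)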
@@ -4,9 +4,11 @@ from bpy.types import Operator
 from bpy_extras.io_utils import ExportHelper
 from bpy.props import (IntProperty)
 
 
 from .preferences import (AutoExportGltfAddonPreferences, AutoExportGltfPreferenceNames)
 from .auto_export import auto_export
 from ..helpers.generate_complete_preferences_dict import generate_complete_preferences_dict_auto
+from ..helpers.serialize_scene import serialize_scene
 
 class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
     """auto export gltf"""
@@ -17,6 +19,7 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
     # ExportHelper mixin class uses this
     filename_ext = ''
 
 
     #list of settings (other than purely gltf settings) whose change should trigger a re-generation of gltf files
     white_list = ['auto_export',
         'export_main_scene_name',
@@ -199,19 +202,83 @@ class AutoExportGLTF(Operator, AutoExportGltfAddonPreferences, ExportHelper):
 
         return changed
 
+    def did_objects_change(self):
+        previous_stored = bpy.data.texts[".TESTING"] if ".TESTING" in bpy.data.texts else None # bpy.data.texts.new(".TESTING")
+        current = serialize_scene()
+
+        if previous_stored == None:
+            print("setting bla")
+            previous_stored = bpy.data.texts.new(".TESTING")
+            previous_stored.write(current)
+            return {}
+
+        previous = json.loads(previous_stored.as_string())
+        current = json.loads(current)
+
+        changes_per_scene = {}
+        # TODO : how do we deal with changed scene names ???
+        for scene in current:
+            print('scene', scene)
+            changes_per_scene[scene] = {}
+            previous_object_names = list(previous[scene].keys())
+            current_object_names =list(current[scene].keys())
+            #print("previous_object_names", len(previous_object_names), previous_object_names)
+            #print("current_object_names", len(current_object_names), current_object_names)
+
+            """if len(previous_object_names) > len(current_object_names):
+                print("removed")
+            if len(current_object_names) > len(previous_object_names):
+                print("added")"""
+            added = list(set(current_object_names) - set(previous_object_names))
+            removed = list(set(previous_object_names) - set(current_object_names))
+            print("removed", removed)
+            print("added",added)
+            for obj in added:
+                changes_per_scene[scene][obj] = bpy.data.objects[obj]
+            # TODO: how do we deal with this, as we obviously do not have data for removed objects ?
+            for obj in removed:
+                changes_per_scene[scene][obj] = None # bpy.data.objects[obj]
+
+            for object_name in list(current[scene].keys()): # todo : exclude directly added/removed objects
+                #print("ob", object_name)
+                if object_name in previous[scene]:
+                    # print("object", object_name,"in previous scene, comparing")
+                    current_obj = current[scene][object_name]
+                    prev_obj = previous[scene][object_name]
+                    same = str(current_obj) == str(prev_obj)
+
+                    if "Camera" in object_name:
+                        pass#print(" current", current_obj, prev_obj)
+                    if "Fox" in object_name:
+                        print(" current", current_obj)
+                        print(" previou", prev_obj)
+                        print(" same?", same)
+                    #print("foo", same)
+                    if not same:
+                        changes_per_scene[scene][object_name] = bpy.data.objects[object_name]
+
+            """if len(current[scene]) != len(previous[scene]) :
+                print("toto")"""
+        previous_stored.clear()
+        previous_stored.write(json.dumps(current))
+        print("changes per scene alternative", changes_per_scene)
+
+
     def execute(self, context):
         #print("execute")
         bpy.context.window_manager.auto_export_tracker.disable_change_detection()
         if self.direct_mode:
             self.load_settings(context)
-            print("toto", self.will_save_settings)
         if self.will_save_settings:
             self.save_settings(context)
 
         if self.auto_export: # only do the actual exporting if auto export is actually enabled
             changes_per_scene = context.window_manager.auto_export_tracker.changed_objects_per_scene
+            changes_per_scene_2 = self.did_objects_change()
+
             #& do the export
             if self.direct_mode: #Do not auto export when applying settings in the menu, do it on save only
+                # determine changed objects
                 # determine changed parameters
                 params_changed = self.did_export_settings_change()
                 auto_export(changes_per_scene, params_changed, self)
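At its core, did_objects_change reduces change detection to set differences over per-scene object names plus a string comparison of each object's serialized entry. A tiny standalone illustration of that comparison, using hypothetical snapshot data shaped like serialize_scene()'s output (no bpy required):

# hypothetical snapshots: {scene_name: {object_name: serialized_entry}}
previous = {"World": {"Cube": {"transforms": "a"},  "Fox": {"transforms": "b"}}}
current  = {"World": {"Cube": {"transforms": "a2"}, "Lamp": {"transforms": "c"}}}

for scene in current:
    prev_names = set(previous.get(scene, {}))
    curr_names = set(current[scene])
    added = curr_names - prev_names        # {'Lamp'}
    removed = prev_names - curr_names      # {'Fox'}
    changed = {name for name in curr_names & prev_names
               if str(current[scene][name]) != str(previous[scene][name])}  # {'Cube'}
    print(scene, "added:", added, "removed:", removed, "changed:", changed)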
@@ -74,7 +74,32 @@ class AutoExportTracker(PropertyGroup):
         # all our logic is done, mark this as done
 
     @classmethod
-    def deps_update_handler(cls, scene, depsgraph):
+    def deps_pre_update_handler(cls, scene, depsgraph):
+        pass
+        #print("before depsgraph update", scene, depsgraph)
+
+        # only deal with changes if we are NOT in the mids of saving/exporting
+        """if cls.change_detection_enabled:
+            # ignore anything going on with temporary scenes
+            if not scene.name.startswith(TEMPSCENE_PREFIX):
+                print("depsgraph_update_post", scene.name)
+                changed_scene = scene.name or ""
+                #print("-------------")
+
+                # print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
+                # depsgraph = bpy.context.evaluated_depsgraph_get()
+                for obj in depsgraph.updates:
+                    #print("depsgraph update", obj)
+                    if isinstance(obj.id, bpy.types.Object):
+                        # get the actual object
+                        object = bpy.data.objects[obj.id.name]
+                        print(" changed object", obj.id.name, "changes", obj, "evalutated", obj.id.is_evaluated, "transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
+                    elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
+                        # print(" changed material", obj.id, "scene", scene.name,)
+                        pass"""
+
+    @classmethod
+    def deps_post_update_handler(cls, scene, depsgraph):
         # print("change detection enabled", cls.change_detection_enabled)
 
         """ops = bpy.context.window_manager.operators
@@ -96,17 +121,21 @@ class AutoExportTracker(PropertyGroup):
                 # we set the last operator here so we can clear the specific settings (yeah for overly complex logic)
                 cls.last_operator = active_operator
                 #print("active_operator", active_operator.has_active_exporter_extensions, active_operator.__annotations__.keys(), active_operator.filepath, active_operator.gltf_export_id)
+                return
+
             if active_operator.bl_idname == "EXPORT_SCENES_OT_auto_gltf":
                 # we force saving params
                 active_operator.will_save_settings = True
                 active_operator.auto_export = True
+                # if we are using the operator, bail out for the rest
                 print("setting stuff for auto_export")
+                return
+
         # only deal with changes if we are NOT in the mids of saving/exporting
         if cls.change_detection_enabled:
             # ignore anything going on with temporary scenes
             if not scene.name.startswith(TEMPSCENE_PREFIX):
-                print("depsgraph_update_post", scene.name)
+                #print("depsgraph_update_post", scene.name)
                 changed_scene = scene.name or ""
                 #print("-------------")
                 if not changed_scene in cls.changed_objects_per_scene:
@@ -118,7 +147,7 @@ class AutoExportTracker(PropertyGroup):
                     if isinstance(obj.id, bpy.types.Object):
                         # get the actual object
                         object = bpy.data.objects[obj.id.name]
-                        print(" changed object", obj.id.name, "changes", obj, "evalutated", obj.id.is_evaluated, "transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
+                        #print(" changed object", obj.id.name, "changes", obj, "evalutated", obj.id.is_evaluated, "transforms", obj.is_updated_transform, "geometry", obj.is_updated_geometry)
                         if obj.is_updated_transform or obj.is_updated_geometry:
                             cls.changed_objects_per_scene[scene.name][obj.id.name] = object
                     elif isinstance(obj.id, bpy.types.Material): # or isinstance(obj.id, bpy.types.ShaderNodeTree):
@@ -175,9 +204,9 @@ class AutoExportTracker(PropertyGroup):
 
         addon_prefs = SimpleNamespace(**tmp)
 
-        print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
+        #print("cls.changed_objects_per_scene", cls.changed_objects_per_scene)
         (collections, collections_to_export, library_collections, collections_per_scene) = get_collections_to_export(cls.changed_objects_per_scene, export_settings_changed, addon_prefs)
-        print("collections to export", collections_to_export)
+        #print("collections to export", collections_to_export)
         try:
             # we save this list of collections in the context
             bpy.context.window_manager.exportedCollections.clear()
@@ -0,0 +1,146 @@
+import json
+import numpy as np
+
+import bpy
+
+
+fields_to_ignore_generic = ["tag", "type", "update_tag", "use_extra_user", "use_fake_user", "user_clear", "user_of_id", "user_remap", "users",
+    'animation_data_clear', 'animation_data_create', 'asset_clear', 'asset_data', 'asset_generate_preview', 'asset_mark', 'bl_rna', 'evaluated_get',
+    'library', 'library_weak_reference', 'make_local','name', 'name_full', 'original',
+    'override_create', 'override_hierarchy_create', 'override_library', 'preview', 'preview_ensure', 'rna_type',
+    'session_uid', 'copy', 'id_type', 'is_embedded_data', 'is_evaluated', 'is_library_indirect', 'is_missing', 'is_runtime_data']
+
+# possible alternatives https://blender.stackexchange.com/questions/286010/bpy-detect-modified-mesh-data-vertices-edges-loops-or-polygons-for-cachin
+def mesh_hash(obj):
+    # this is incomplete, how about edges ?
+    vertex_count = len(obj.data.vertices)
+    vertices_np = np.empty(vertex_count * 3, dtype=np.float32)
+    obj.data.vertices.foreach_get("co", vertices_np)
+    h = str(hash(vertices_np.tobytes()))
+    return h
+
+def animation_hash(obj):
+    animation_data = obj.animation_data
+    if not animation_data:
+        return None
+    return None
+
+    blender_actions = []
+    blender_tracks = {}
+
+    # TODO: this might need to be modified/ adapted to match the standard gltf exporter settings
+    for track in animation_data.nla_tracks:
+        non_muted_strips = [strip for strip in track.strips if strip.action is not None and strip.mute is False]
+        for strip in non_muted_strips: #t.strips:
+            # print(" ", source.name,'uses',strip.action.name, "active", strip.active, "action", strip.action)
+            blender_actions.append(strip.action)
+            blender_tracks[strip.action.name] = track.name
+
+    # Remove duplicate actions.
+    blender_actions = list(set(blender_actions))
+    # sort animations alphabetically (case insensitive) so they have a defined order and match Blender's Action list
+    blender_actions.sort(key = lambda a: a.name.lower())
+
+    markers_per_animation = {}
+    animations_infos = []
+
+    for action in blender_actions:
+        animation_name = blender_tracks[action.name]
+        animations_infos.append(
+            f'(name: "{animation_name}", frame_start: {action.frame_range[0]}, frame_end: {action.frame_range[1]}, frames_length: {action.frame_range[1] - action.frame_range[0]}, frame_start_override: {action.frame_start}, frame_end_override: {action.frame_end})'
+        )
+        markers_per_animation[animation_name] = {}
+
+        for marker in action.pose_markers:
+            if marker.frame not in markers_per_animation[animation_name]:
+                markers_per_animation[animation_name][marker.frame] = []
+            markers_per_animation[animation_name][marker.frame].append(marker.name)
+
+
+def camera_hash(obj):
+    camera_fields = ["angle", "angle_x", "angle_y", "animation_data", "background_images", "clip_end", "clip_start", "display_size", "dof", "fisheye_fov"]
+    camera_data = obj.data
+    fields_to_ignore= fields_to_ignore_generic
+
+    all_field_names = dir(camera_data)
+    fields = [getattr(camera_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+    # TODO: the above is not enough, certain fields are left as bpy.data.xx
+    #print("camera", obj, fields)
+    return str(fields)
+
+def light_hash(obj):
+    light_data = obj.data
+    fields_to_ignore = fields_to_ignore_generic
+
+    all_field_names = dir(light_data)
+    fields = [getattr(light_data, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+    return str(fields)
+
+def bones_hash(bones):
+    fields_to_ignore = fields_to_ignore_generic + ['AxisRollFromMatrix', 'MatrixFromAxisRoll', 'evaluate_envelope', 'convert_local_to_pose', 'foreach_get', 'foreach_set', 'get', 'set', 'find', 'items', 'keys', 'values']
+
+    bones_result = []
+    for bone in bones:
+        all_field_names = dir(bone)
+        fields = [getattr(bone, prop, None) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+        bones_result.append(fields)
+    print("fields of bone", bones_result)
+    return str(hash(str(bones_result)))
+
+# fixme: not good enough ?
+def armature_hash(obj):
+    fields_to_ignore = fields_to_ignore_generic + ['display_type', 'is_editmode', 'pose_position', 'foreach_get', 'get']
+    fields_to_convert = {'bones': bones_hash}#, 'collections_all': bones_hash}
+    armature_data = obj.data
+    all_field_names = dir(armature_data)
+
+    fields = [getattr(armature_data, prop, None) if not prop in fields_to_convert.keys() else fields_to_convert[prop](getattr(armature_data, prop)) for prop in all_field_names if not prop.startswith("__") and not prop in fields_to_ignore and not prop.startswith("show_")]
+    #print("ARMATURE", fields)
+
+    """for bone in armature_data.bones:
+        print("bone", bone, bone_hash(bone))"""
+    return str(fields)
+
+def serialize_scene():
+    print("serializing scene")
+    data = {}
+    for scene in bpy.data.scenes:
+        data[scene.name] = {}
+        for object in scene.objects:
+            #print("object", object.name, object.location)
+            transform = str((object.location, object.rotation_euler, object.scale))
+            visibility = object.visible_get()
+
+            print("object type", object.type)
+            custom_properties = {}
+            for K in object.keys():
+                if K not in '_RNA_UI' and K != 'components_meta':
+                    #print( K , "-" , object[K] )
+                    custom_properties[K] = object[K]
+
+            animations = animation_hash(object)
+            mesh = mesh_hash(object) if object.type == 'MESH' else None
+            camera = camera_hash(object) if object.type == 'CAMERA' else None
+            light = light_hash(object) if object.type == 'LIGHT' else None
+            armature = armature_hash(object) if object.type == 'ARMATURE' else None
+
+            data[scene.name][object.name] = {
+                "name": object.name,
+                "transforms": transform,
+                "visibility": visibility,
+                "custom_properties": custom_properties,
+                "animations": animations,
+                "mesh": mesh,
+                "camera": camera,
+                "light": light,
+                "armature": armature
+            }
+
+    """print("data", data)
+    print("")
+    print("")
+    print("data json", json.dumps(data))"""
+
+    return json.dumps(data)
+
+#loc, rot, scale = bpy.context.object.matrix_world.decompose()
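As the "how about edges ?" comment in mesh_hash notes, hashing only vertex coordinates misses topology-only edits. A possible extension, sketched below and not part of this commit, folds edge and polygon connectivity into the hash via the same foreach_get pattern:

import numpy as np

def mesh_hash_full(obj):
    # hash vertex positions plus edge and polygon connectivity, so edits that
    # change topology without moving vertices are detected as well
    mesh = obj.data
    vertices_np = np.empty(len(mesh.vertices) * 3, dtype=np.float32)
    mesh.vertices.foreach_get("co", vertices_np)

    edges_np = np.empty(len(mesh.edges) * 2, dtype=np.int32)
    mesh.edges.foreach_get("vertices", edges_np)

    loop_totals_np = np.empty(len(mesh.polygons), dtype=np.int32)
    mesh.polygons.foreach_get("loop_total", loop_totals_np)

    return str(hash(vertices_np.tobytes() + edges_np.tobytes() + loop_totals_np.tobytes()))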