chore: cleanup

This commit is contained in:
kaosat.dev 2024-03-15 10:37:35 +01:00
parent fa4386a185
commit 2c4312b5bb
4 changed files with 41 additions and 19 deletions

View File

@ -46,13 +46,13 @@ def get_sub_collections(collections, parent=None, children_per_collection=None):
used_collections = []
for root_collection in collections:
print("collections", collections)
#print("collections", collections)
node = CollectionNode(name=root_collection.name, parent=parent)
parent.children.append(node)
#print("root collection", root_collection.name)
for collection in traverse_tree(root_collection): # TODO: filter out COLLECTIONS that have the flatten flag (unlike the flatten flag on collection instances themselves)
print("sub", collection)
#print("sub", collection)
node_name = collection.name
children_per_collection[node_name] = []
#print(" scanning", collection.name)
@ -60,14 +60,12 @@ def get_sub_collections(collections, parent=None, children_per_collection=None):
#print("FLATTEN", object.name, 'Flatten' in object)
if object.instance_type == 'COLLECTION' : # and not 'Flatten' in object:
collection_name = object.instance_collection.name
print("sub obj", collection_name)
#print("sub obj", collection_name)
# FIXME: not sure:
children_per_collection[node_name].append(collection_name)
(sub_names, sub_collections) = get_sub_collections([object.instance_collection], node, children_per_collection)
print("gna", sub_names, sub_collections)
if len(list(sub_names)) > 0:
print("toto")
children_per_collection[node_name] += (list(sub_names))
#print(" found sub collection in use", object.name, object.instance_collection)

View File

@ -160,7 +160,7 @@ def get_scenes(addon_prefs):
def inject_blueprints_list_into_main_scene(scene):
print("injecting assets data")
print("injecting assets/blueprints data into scene")
root_collection = scene.collection
assets_list = None
for object in scene.objects:
@ -187,5 +187,5 @@ def inject_blueprints_list_into_main_scene(scene):
#assets_list["blueprints_direct"] = list(collection_names)
assets_list["BlueprintsList"] = f"({json.dumps(dict(children_per_collection))})"
assets_list["Materials"]= '()'
print("assets list", assets_list["BlueprintsList"], children_per_collection)
#assets_list["Materials"]= '()'
# print("assets list", assets_list["BlueprintsList"], children_per_collection)

View File

@ -19,6 +19,8 @@ def setup_data(request):
def finalizer():
print("\nPerforming teardown...")
get_orphan_data()
if os.path.exists(models_path):
shutil.rmtree(models_path)
@ -33,9 +35,14 @@ def setup_data(request):
return None
def get_orphan_data():
orphan_meshes = [m.name for m in bpy.data.meshes if m.users == 0]
# print("orphan meshes before", orphan_meshes)
def test_export_do_not_export_blueprints(setup_data):
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# first, configure things
# we use the global settings for that
export_props = {
@ -57,7 +64,6 @@ def test_export_do_not_export_blueprints(setup_data):
def test_export_custom_blueprints_path(setup_data):
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# first, configure things
# we use the global settings for that
export_props = {
@ -210,3 +216,27 @@ def test_export_separate_dynamic_and_static_objects(setup_data):
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "World_dynamic.glb")) == True
def test_export_should_not_generate_orphan_data(setup_data):
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library']
}
stored_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_settings.clear()
stored_settings.write(json.dumps(export_props))
auto_export_operator(
direct_mode=True,
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=False,
)
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == False

View File

@ -20,21 +20,19 @@ def setup_data(request):
#other_materials_path = os.path.join("../../testing", "other_materials")
print("\nPerforming teardown...")
'''if os.path.exists(models_path):
if os.path.exists(models_path):
shutil.rmtree(models_path)
"""if os.path.exists(materials_path):
if os.path.exists(materials_path):
shutil.rmtree(materials_path)
if os.path.exists(other_materials_path):
shutil.rmtree(other_materials_path)"""
diagnostics_file_path = os.path.join(root_path, "bevy_diagnostics.json")
if os.path.exists(diagnostics_file_path):
os.remove(diagnostics_file_path)
screenshot_observed_path = os.path.join(root_path, "screenshot.png")
if os.path.exists(screenshot_observed_path):
os.remove(screenshot_observed_path)'''
os.remove(screenshot_observed_path)
request.addfinalizer(finalizer)
@ -96,11 +94,7 @@ def test_export_complex(setup_data):
# now run bevy
command = "cargo run --features bevy/dynamic_linking"
# assert getattr(propertyGroup, 'a') == 0.5714026093482971
FNULL = open(os.devnull, 'w') #use this if you want to suppress output to stdout from the subprocess
filename = "my_file.dat"
args = command
#subprocess.call(args, stdout=FNULL, stderr=FNULL, shell=False, cwd=bevy_run_exec_path)
return_code = subprocess.call(["cargo", "run", "--features", "bevy/dynamic_linking"], cwd=root_path)
print("RETURN CODE OF BEVY APP", return_code)
assert return_code == 0