test(auto_export): overhauled & cleaned up tests

kaosat.dev 2024-04-30 11:05:24 +02:00
parent f387fbec48
commit eda18b7d25
5 changed files with 174 additions and 129 deletions

Binary file not shown.

View File

@@ -6,6 +6,8 @@ import shutil
import pathlib
import mathutils
from .test_helpers import prepare_auto_export, run_auto_export_and_compare
@pytest.fixture
def setup_data(request):
print("\nSetting up resources...")
@@ -56,85 +58,6 @@ def setup_data(request):
return None
def prepare_auto_export(auto_export_overrides={}):
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
**auto_export_overrides
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = {
"export_animations": False,
"export_optimize_animation_size": False
}
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
def run_auto_export(setup_data):
auto_export_operator = bpy.ops.export_scenes.auto_gltf
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
levels_path = setup_data["levels_path"]
level_file_paths = list(map(lambda file_name: os.path.join(levels_path, file_name), sorted(os.listdir(levels_path)))) if os.path.exists(levels_path) else []
blueprints_path = setup_data["blueprints_path"]
blueprints_file_paths = list(map(lambda file_name: os.path.join(blueprints_path, file_name), sorted(os.listdir(blueprints_path)))) if os.path.exists(blueprints_path) else []
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), blueprints_file_paths + level_file_paths))
# assert os.path.exists(world_file_path) == True
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(blueprints_file_paths + level_file_paths):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times[index], index)
return (modification_times, mapped_files_to_timestamps_and_index)
def run_auto_export_and_compare(setup_data, changes, expected_changed_files = []):
(modification_times_first, mapped ) = run_auto_export(setup_data)
for index, change in enumerate(changes):
change()
(modification_times, mapped ) = run_auto_export(setup_data)
changed_files = expected_changed_files[index]
changed_file_indices = [mapped[changed_file][1] for changed_file in changed_files]
#print("changed files", changed_files, changed_file_indices, "mapped", mapped)
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in changed_file_indices]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in changed_file_indices]
print("other_files_modification_times_new ", other_files_modification_times)
print("other_files_modification_times_first", other_files_modification_times_first)
for changed_file_index in changed_file_indices:
#print("modification_times_new [changed_file_index]", modification_times[changed_file_index])
#print("modification_times_first[changed_file_index]", modification_times_first[changed_file_index])
if changed_file_index in modification_times_first and changed_file_index in modification_times:
assert modification_times[changed_file_index] != modification_times_first[changed_file_index], f"failure in change: {index}, at file {changed_file_index}"
# TODO: we should throw an error in the "else" case ?
assert other_files_modification_times == other_files_modification_times_first , f"failure in change: {index}"
# reset the comparing
modification_times_first = modification_times
def test_export_change_tracking_custom_properties(setup_data):
# set things up
prepare_auto_export()
@@ -185,7 +108,7 @@ def test_export_change_tracking_custom_properties_collection_instances_combine_m
run_auto_export_and_compare(
setup_data=setup_data,
changes=[first_change, second_change, third_change, fourth_change],
expected_changed_files = [[], ["World"], ["World"], ["World"]] # only the "world" file should have changed
expected_changed_files = [[], ["World"], ["World","Blueprint1"], ["World"]] # only the "world" file should have changed
)
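For reference, first_change through fourth_change are callbacks defined earlier in this test (outside the hunk shown above); each one mutates the scene, and run_auto_export_and_compare re-exports after every callback and checks which files were rewritten. A minimal sketch of what one such callback could look like, assuming a test object named "Cube" with a custom property (both the object name and the property are placeholders, not taken from this commit):

import bpy

def example_change():
    # hypothetical scene mutation: set a custom property on an object that lives
    # in the World scene, so only the "World" level file should be re-exported
    # on the next run
    bpy.data.objects["Cube"]["test_property"] = 42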

View File

@@ -4,18 +4,38 @@ import json
import pytest
import shutil
from .test_helpers import prepare_auto_export
@pytest.fixture
def setup_data(request):
print("\nSetting up resources...")
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
blueprints_path = os.path.join(assets_root_path, "blueprints")
levels_path = os.path.join(assets_root_path, "levels")
models_path = os.path.join(assets_root_path, "models")
materials_path = os.path.join(assets_root_path, "materials")
#other_materials_path = os.path.join("../../testing", "other_materials")
yield {
"root_path": root_path,
"models_path": models_path,
"blueprints_path": blueprints_path,
"levels_path": levels_path,
"materials_path":materials_path
}
def finalizer():
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
materials_path = os.path.join(assets_root_path, "materials")
#other_materials_path = os.path.join("../../testing", "other_materials")
print("\nPerforming teardown...")
if os.path.exists(blueprints_path):
shutil.rmtree(blueprints_path)
if os.path.exists(levels_path):
shutil.rmtree(levels_path)
if os.path.exists(models_path):
shutil.rmtree(models_path)
@@ -49,11 +69,7 @@ def setup_data(request):
- removes generated files
"""
def test_export_no_parameters(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# make sure to clear any parameters first
@@ -61,23 +77,20 @@ def test_export_no_parameters(setup_data):
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps({}))
# first test exporting withouth any parameters set, this should not export anything
# first test exporting without any parameters set, this should not export anything
auto_export_operator(
auto_export=True,
direct_mode=True,
export_materials_library=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_materials_library=True
)
world_file_path = os.path.join(models_path, "World.glb")
world_file_path = os.path.join(setup_data["levels_path"], "World.glb")
assert os.path.exists(world_file_path) != True
def test_export_auto_export_parameters_only(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
@@ -91,17 +104,15 @@ def test_export_auto_export_parameters_only(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_materials_library=True
)
world_file_path = os.path.join(models_path, "World.glb")
world_file_path = os.path.join(setup_data["levels_path"], "World.glb")
assert os.path.exists(world_file_path) == True
def test_export_changed_parameters(setup_data):
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
models_path = os.path.join(assets_root_path, "models")
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# with change detection
@@ -129,27 +140,26 @@ def test_export_changed_parameters(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=True
)
world_file_path = os.path.join(models_path, "World.glb")
world_file_path = os.path.join(setup_data["levels_path"], "World.glb")
assert os.path.exists(world_file_path) == True
models_library_path = os.path.join(models_path, "library")
model_library_file_paths = list(map(lambda file_name: os.path.join(models_library_path, file_name), sorted(os.listdir(models_library_path))))
blueprints_path = setup_data["blueprints_path"]
model_library_file_paths = list(map(lambda file_name: os.path.join(blueprints_path, file_name), sorted(os.listdir(blueprints_path))))
modification_times_first = list(map(lambda file_path: os.path.getmtime(file_path), model_library_file_paths))
print("files", model_library_file_paths)
print("mod times", modification_times_first)
# export again, with no param changes: this should NOT export anything again, ie, modification times should be the same
print("second export")
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
@@ -174,6 +184,7 @@ def test_export_changed_parameters(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
@@ -189,6 +200,7 @@ def test_export_changed_parameters(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
@@ -217,6 +229,7 @@ def test_export_changed_parameters(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
@@ -232,6 +245,7 @@ def test_export_changed_parameters(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,

View File

@@ -11,15 +11,34 @@ def setup_data(request):
print("\nSetting up resources...")
root_path = "../../testing/bevy_example"
assets_root_path = os.path.join(root_path, "assets")
blueprints_path = os.path.join(assets_root_path, "blueprints")
levels_path = os.path.join(assets_root_path, "levels")
models_path = os.path.join(assets_root_path, "models")
materials_path = os.path.join(assets_root_path, "materials")
other_materials_path = os.path.join(assets_root_path, "other_materials")
yield {"root_path": root_path, "assets_root_path": assets_root_path, "models_path": models_path, "materials_path": materials_path, "other_materials_path": other_materials_path}
other_blueprints_path = os.path.join(assets_root_path, "other_blueprints")
yield {
"root_path": root_path,
"models_path": models_path,
"blueprints_path": blueprints_path,
"levels_path": levels_path,
"materials_path":materials_path,
"other_materials_path":other_materials_path,
"other_blueprints_path":other_blueprints_path
}
def finalizer():
print("\nPerforming teardown...")
if os.path.exists(blueprints_path):
shutil.rmtree(blueprints_path)
if os.path.exists(levels_path):
shutil.rmtree(levels_path)
if os.path.exists(models_path):
shutil.rmtree(models_path)
@@ -28,6 +47,9 @@ def setup_data(request):
if os.path.exists(other_materials_path):
shutil.rmtree(other_materials_path)
if os.path.exists(other_blueprints_path):
shutil.rmtree(other_blueprints_path)
request.addfinalizer(finalizer)
@@ -58,12 +80,13 @@ def test_export_do_not_export_blueprints(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_output_folder="./models",
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="assets/models",
export_scene_settings=True,
export_blueprints=False,
)
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == False
assert os.path.exists(os.path.join(setup_data["blueprints_path"],"Blueprint1.glb")) == False
orphan_data = get_orphan_data()
assert len(orphan_data) == 0
@@ -84,13 +107,14 @@ def test_export_custom_blueprints_path(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_blueprints_path = "another_library_path"
export_blueprints_path = "assets/other_blueprints"
)
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "another_library_path", "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["root_path"],"assets", "other_blueprints", "Blueprint1.glb")) == True
assert len(get_orphan_data()) == 0
def test_export_materials_library(setup_data):
@@ -109,13 +133,14 @@ def test_export_materials_library(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library = True
)
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb")) == True
assert len(get_orphan_data()) == 0
@@ -135,19 +160,20 @@ def test_export_materials_library_custom_path(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library = True,
export_materials_path="other_materials"
export_materials_path="assets/other_materials"
)
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["materials_path"], "testing_materials_library.glb")) == False
assert os.path.exists(os.path.join(setup_data["other_materials_path"], "testing_materials_library.glb")) == True
assert len(get_orphan_data()) == 0
def test_export_collection_instances_combine_mode(setup_data): # TODO: change & check this
def test_export_collection_instances_combine_mode(setup_data): # There is more in-depth testing of this in the "change_tracking" tests
auto_export_operator = bpy.ops.export_scenes.auto_gltf
# first, configure things
@ -166,13 +192,14 @@ def test_export_collection_instances_combine_mode(setup_data): # TODO: change &
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_blueprints=True,
collection_instances_combine_mode = 'Embed'
)
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "World_dynamic.glb")) == False
assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["levels_path"], "World_dynamic.glb")) == False
assert len(get_orphan_data()) == 0
@@ -192,17 +219,18 @@ def test_export_do_not_export_marked_assets(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_marked_assets = False
)
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint2.glb")) == False
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint3.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint4_nested.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint5.glb")) == False
assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"], "Blueprint1.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"],"Blueprint2.glb")) == False
assert os.path.exists(os.path.join(setup_data["blueprints_path"],"Blueprint3.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"],"Blueprint4_nested.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"],"Blueprint5.glb")) == False
assert len(get_orphan_data()) == 0
@@ -225,14 +253,15 @@ def test_export_separate_dynamic_and_static_objects(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_separate_dynamic_and_static_objects = True
)
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "World_dynamic.glb")) == True
assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["levels_path"], "World_dynamic.glb")) == True
assert len(get_orphan_data()) == 0
@@ -252,11 +281,12 @@ def test_export_should_not_generate_orphan_data(setup_data):
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=False,
export_blueprints=True,
)
assert os.path.exists(os.path.join(setup_data["models_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["models_path"], "library", "Blueprint1.glb")) == False
assert os.path.exists(os.path.join(setup_data["levels_path"], "World.glb")) == True
assert os.path.exists(os.path.join(setup_data["blueprints_path"],"Blueprint1.glb")) == True
assert len(get_orphan_data()) == 0
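A recurring pattern in the updated assertions above: the operator now receives an absolute export_root_folder, and values such as export_output_folder, export_blueprints_path and export_materials_path appear to be resolved relative to that root (the assertions join setup_data["root_path"] with "assets/other_blueprints", for instance). A tiny sketch of that resolution as the tests seem to assume it; the helper name and example path are placeholders, not part of the add-on:

import os

def resolve_against_root(export_root_folder, relative_path):
    # hypothetical helper mirroring the tests' expectations: paths handed to the
    # exporter are interpreted relative to the absolute export_root_folder
    return os.path.normpath(os.path.join(export_root_folder, relative_path))

# e.g. resolve_against_root("/abs/testing/bevy_example", "assets/other_blueprints")
#      -> "/abs/testing/bevy_example/assets/other_blueprints"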

View File

@@ -0,0 +1,78 @@
import bpy
import os
import json
import pathlib
def prepare_auto_export(auto_export_overrides={}, gltf_export_settings = {"export_animations": False, "export_optimize_animation_size": False}):
# with change detection
# first, configure things
# we use the global settings for that
export_props = {
"main_scene_names" : ['World'],
"library_scene_names": ['Library'],
**auto_export_overrides
}
# store settings for the auto_export part
stored_auto_settings = bpy.data.texts[".gltf_auto_export_settings"] if ".gltf_auto_export_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_settings")
stored_auto_settings.clear()
stored_auto_settings.write(json.dumps(export_props))
gltf_settings = gltf_export_settings
# and store settings for the gltf part
stored_gltf_settings = bpy.data.texts[".gltf_auto_export_gltf_settings"] if ".gltf_auto_export_gltf_settings" in bpy.data.texts else bpy.data.texts.new(".gltf_auto_export_gltf_settings")
stored_gltf_settings.clear()
stored_gltf_settings.write(json.dumps(gltf_settings))
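prepare_auto_export communicates the exporter configuration to the add-on by serializing it as JSON into Blender text blocks (.gltf_auto_export_settings and .gltf_auto_export_gltf_settings). A small sketch of reading one of those blocks back, which can be handy when debugging these tests interactively; the reader shown here is only an illustration, not the add-on's own settings loader:

import bpy
import json

def read_stored_settings(name=".gltf_auto_export_settings"):
    # return the stored settings dict, or {} when the text block is missing or empty
    if name not in bpy.data.texts:
        return {}
    raw = bpy.data.texts[name].as_string()
    return json.loads(raw) if raw.strip() else {}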
def run_auto_export(setup_data):
auto_export_operator = bpy.ops.export_scenes.auto_gltf
auto_export_operator(
auto_export=True,
direct_mode=True,
export_root_folder = os.path.abspath(setup_data["root_path"]),
export_output_folder="./models",
export_scene_settings=True,
export_blueprints=True,
export_materials_library=False
)
levels_path = setup_data["levels_path"]
level_file_paths = list(map(lambda file_name: os.path.join(levels_path, file_name), sorted(os.listdir(levels_path)))) if os.path.exists(levels_path) else []
blueprints_path = setup_data["blueprints_path"]
blueprints_file_paths = list(map(lambda file_name: os.path.join(blueprints_path, file_name), sorted(os.listdir(blueprints_path)))) if os.path.exists(blueprints_path) else []
modification_times = list(map(lambda file_path: os.path.getmtime(file_path), blueprints_file_paths + level_file_paths))
# assert os.path.exists(world_file_path) == True
mapped_files_to_timestamps_and_index = {}
for (index, file_path) in enumerate(blueprints_file_paths + level_file_paths):
file_path = pathlib.Path(file_path).stem
mapped_files_to_timestamps_and_index[file_path] = (modification_times[index], index)
return (modification_times, mapped_files_to_timestamps_and_index)
def run_auto_export_and_compare(setup_data, changes, expected_changed_files = []):
(modification_times_first, mapped ) = run_auto_export(setup_data)
for index, change in enumerate(changes):
change()
(modification_times, mapped ) = run_auto_export(setup_data)
changed_files = expected_changed_files[index]
changed_file_indices = [mapped[changed_file][1] for changed_file in changed_files]
print("changed files", changed_files, changed_file_indices, "mapped", mapped)
other_files_modification_times = [value for index, value in enumerate(modification_times) if index not in changed_file_indices]
other_files_modification_times_first = [value for index, value in enumerate(modification_times_first) if index not in changed_file_indices]
print("other_files_modification_times_new ", other_files_modification_times)
print("other_files_modification_times_first", other_files_modification_times_first)
for changed_file_index in changed_file_indices:
#print("modification_times_new [changed_file_index]", modification_times[changed_file_index])
#print("modification_times_first[changed_file_index]", modification_times_first[changed_file_index])
if changed_file_index < len(modification_times_first) and changed_file_index < len(modification_times):
assert modification_times[changed_file_index] != modification_times_first[changed_file_index], f"failure in change: {index}, at file {changed_file_index}"
# the indices come from the same lists the modification times were built from, so the bounds check above should always hold; consider raising in an else branch if it ever does not
assert other_files_modification_times == other_files_modification_times_first , f"failure in change: {index}"
# reset the comparing
modification_times_first = modification_times
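Taken together, a change-tracking test built on these helpers configures the exporter once, then hands run_auto_export_and_compare a list of scene mutations plus the files each mutation is expected to rewrite. A minimal sketch under the same assumptions as above, using a setup_data fixture like the ones in this commit and a placeholder object named "Cube":

import bpy
from .test_helpers import prepare_auto_export, run_auto_export_and_compare

def test_change_tracking_sketch(setup_data):
    prepare_auto_export()  # defaults: main scene "World", library scene "Library"

    def move_object():
        # hypothetical change inside the World scene
        bpy.data.objects["Cube"].location.x += 1.0

    run_auto_export_and_compare(
        setup_data=setup_data,
        changes=[move_object],
        expected_changed_files=[["World"]],  # only the World level file should be rewritten
    )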