sync addons and tests with trunk
author    Benoit Bolsee <benoit.bolsee@online.be>  Tue, 7 Jun 2011 20:34:23 +0000 (20:34 +0000)
committer Benoit Bolsee <benoit.bolsee@online.be>  Tue, 7 Jun 2011 20:34:23 +0000 (20:34 +0000)
source/tests/CMakeLists.txt [new file with mode: 0644]
source/tests/batch_import.py [new file with mode: 0644]
source/tests/bl_load_addons.py [new file with mode: 0644]
source/tests/bl_load_py_modules.py [new file with mode: 0644]
source/tests/bl_run_operators.py [new file with mode: 0644]
source/tests/bl_test.py [new file with mode: 0644]
source/tests/pep8.py [new file with mode: 0644]
source/tests/rna_array.py [new file with mode: 0644]
source/tests/rna_info_dump.py [new file with mode: 0644]

diff --git a/source/tests/CMakeLists.txt b/source/tests/CMakeLists.txt
new file mode 100644 (file)
index 0000000..7abac7b
--- /dev/null
@@ -0,0 +1,293 @@
+# -*- mode: cmake; indent-tabs-mode: t; -*-
+# $Id: CMakeLists.txt 34198 2011-01-09 15:12:08Z campbellbarton $
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# Contributor(s): Jacques Beaurain.
+#
+# ***** END GPL LICENSE BLOCK *****
+
+# --env-system-scripts allows running without the install target.
+
+# Use '--write-blend=/tmp/test.blend' to view output
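+#
+# A single test can also be run by hand, mirroring what the add_test() calls
+# below generate (paths here are only illustrative):
+#   blender --background --factory-startup --env-system-scripts release/scripts \
+#     --python source/tests/bl_test.py -- \
+#     "--run={'FINISHED'}&bpy.ops.import_scene.obj(filepath='/path/to/cube.obj')" \
+#     --md5=<expected md5> --md5_method=SCENE --write-blend=/tmp/test.blend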
+
+
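+# TEST_SRC_DIR is expected to point at a 'lib/tests' data directory checked out
+# next to the blender source tree; TEST_OUT_DIR collects the .blend files the
+# tests write.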
+set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
+set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)
+
+# ugh, is there a better way to do this only when testing?
+execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory ${TEST_OUT_DIR}) 
+
+#~ if(NOT IS_DIRECTORY ${TEST_SRC_DIR})
+#~     message(FATAL_ERROR "CMake test directory not found!")
+#~ endif()
+
+# all calls to blender use this
+if(APPLE)
+       set(TEST_BLENDER_EXE ${EXECUTABLE_OUTPUT_PATH}/blender.app/Contents/MacOS/blender)
+else()
+       set(TEST_BLENDER_EXE ${EXECUTABLE_OUTPUT_PATH}/blender)
+endif()
+
+# for testing with valgrind, prefix the command with: valgrind --track-origins=yes --error-limit=no
+set(TEST_BLENDER_EXE ${TEST_BLENDER_EXE} --background --factory-startup --env-system-scripts ${CMAKE_SOURCE_DIR}/release/scripts)
+
+
+# ------------------------------------------------------------------------------
+# GENERAL PYTHON CORRECTNESS TESTS
+add_test(script_load_addons ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_load_addons.py
+)
+
+add_test(script_load_modules ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_load_py_modules.py
+)
+
+# test running operators doesn't segfault under various conditions
+add_test(script_run_operators ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_run_operators.py
+)
+
+# ------------------------------------------------------------------------------
+# IO TESTS
+
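+# All IO tests below drive bl_test.py (added in this commit): the --run
+# expression is eval()'d, and {'FINISHED'}&bpy.ops...() is only truthy when the
+# operator reports FINISHED. --md5_method=SCENE hashes object matrices and
+# geometry of the resulting scene; --md5_method=FILE hashes the files listed
+# with --md5_source. The digest must match --md5 or the test fails.
+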
+# OBJ Import tests
+add_test(import_obj_cube ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.obj\(filepath='${TEST_SRC_DIR}/io_tests/obj/cube.obj'\)
+       --md5=39cce4bacac2d1b18fc470380279bc15 --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_obj_cube.blend
+)
+
+add_test(import_obj_nurbs_cyclic ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.obj\(filepath='${TEST_SRC_DIR}/io_tests/obj/nurbs_cyclic.obj'\)
+       --md5=ad3c307e5883224a0492378cd32691ab --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_obj_nurbs_cyclic.blend
+)
+
+add_test(import_obj_makehuman ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.obj\(filepath='${TEST_SRC_DIR}/io_tests/obj/makehuman.obj'\)
+       --md5=c9f78b185e58358daa4ecaecfa75464e --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_obj_makehuman.blend
+)
+
+# OBJ Export tests
+add_test(export_obj_cube ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.obj\(filepath='${TEST_OUT_DIR}/export_obj_cube.obj',use_selection=False\)
+       --md5_source=${TEST_OUT_DIR}/export_obj_cube.obj
+       --md5_source=${TEST_OUT_DIR}/export_obj_cube.mtl
+       --md5=70bdc394c2726203ad26c085176e3484 --md5_method=FILE
+)
+
+add_test(export_obj_nurbs ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.obj\(filepath='${TEST_OUT_DIR}/export_obj_nurbs.obj',use_selection=False,use_nurbs=True\)
+       --md5_source=${TEST_OUT_DIR}/export_obj_nurbs.obj
+       --md5_source=${TEST_OUT_DIR}/export_obj_nurbs.mtl
+       --md5=a733ae4fa4a591ea9b0912da3af042de --md5_method=FILE
+)
+
+add_test(export_obj_all_objects ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.obj\(filepath='${TEST_OUT_DIR}/export_obj_all_objects.obj',use_selection=False,use_nurbs=True\)
+       --md5_source=${TEST_OUT_DIR}/export_obj_all_objects.obj
+       --md5_source=${TEST_OUT_DIR}/export_obj_all_objects.mtl
+       --md5=d06bd49e6c084e4e3348fa397a88790c --md5_method=FILE
+)
+
+
+
+# PLY Import tests
+add_test(import_ply_cube ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_mesh.ply\(filepath='${TEST_SRC_DIR}/io_tests/ply/cube_ascii.ply'\)
+       --md5=527134343c27fc0ea73115b85fbfd3ac --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_ply_cube.blend
+)
+
+add_test(import_ply_bunny ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_mesh.ply\(filepath='${TEST_SRC_DIR}/io_tests/ply/bunny2.ply'\)
+       --md5=6ea5b8533400a17accf928b8fd024eaa --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_ply_bunny.blend
+)
+
+add_test(import_ply_small_holes ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_mesh.ply\(filepath='${TEST_SRC_DIR}/io_tests/ply/many_small_holes.ply'\)
+       --md5=c3093e26ecae5b6d59fbbcf2a0d0b39f --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_ply_small_holes.blend
+)
+
+# PLY Export tests (TODO)
+
+
+
+# STL Import tests
+add_test(import_stl_cube ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_mesh.stl\(filepath='${TEST_SRC_DIR}/io_tests/stl/cube.stl'\)
+       --md5=8ceb5bb7e1cb5f4342fa1669988c66b4 --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_stl_cube.blend
+)
+
+add_test(import_stl_conrod ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_mesh.stl\(filepath='${TEST_SRC_DIR}/io_tests/stl/conrod.stl'\)
+       --md5=690a4b8eb9002dcd8631c5a575ea7348 --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_stl_conrod.blend
+)
+
+add_test(import_stl_knot_max_simplified ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_mesh.stl\(filepath='${TEST_SRC_DIR}/io_tests/stl/knot_max_simplified.stl'\)
+       --md5=baf82803f45a84ec4ddbad9cef57dd3e --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_stl_knot_max_simplified.blend
+)
+
+# STL Export tests (TODO)
+
+
+
+# X3D Import
+add_test(import_x3d_cube ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.x3d\(filepath='${TEST_SRC_DIR}/io_tests/x3d/color_cube.x3d'\)
+       --md5=2ed64325dd3d62be6ce43c64219376ec --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_x3d_cube.blend
+)
+
+add_test(import_x3d_teapot ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.x3d\(filepath='${TEST_SRC_DIR}/io_tests/x3d/teapot.x3d'\)
+       --md5=8b8b386900b8e3d2c036a38c625f4079 --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_x3d_teapot.blend
+)
+
+add_test(import_x3d_suzanne_material ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.x3d\(filepath='${TEST_SRC_DIR}/io_tests/x3d/suzanne_material.x3d'\)
+       --md5=999129ba835f0ccb98c4bb299f6c2fef --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_x3d_suzanne_material.blend
+)
+
+# X3D Export
+add_test(export_x3d_cube ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.x3d\(filepath='${TEST_OUT_DIR}/export_x3d_cube.x3d',use_selection=False\)
+       --md5_source=${TEST_OUT_DIR}/export_x3d_cube.x3d
+       --md5=30d2b056c004144cd4a0d172484a66f3 --md5_method=FILE
+)
+
+add_test(export_x3d_nurbs ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.x3d\(filepath='${TEST_OUT_DIR}/export_x3d_nurbs.x3d',use_selection=False\)
+       --md5_source=${TEST_OUT_DIR}/export_x3d_nurbs.x3d
+       --md5=7c15afe7b0cf007b842a925508b7d966 --md5_method=FILE
+)
+
+add_test(export_x3d_all_objects ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.x3d\(filepath='${TEST_OUT_DIR}/export_x3d_all_objects.x3d',use_selection=False\)
+       --md5_source=${TEST_OUT_DIR}/export_x3d_all_objects.x3d
+       --md5=cef017805f684f27c311fdf4ba87462a --md5_method=FILE
+)
+
+
+
+# 3DS Import
+add_test(import_3ds_cube ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.autodesk_3ds\(filepath='${TEST_SRC_DIR}/io_tests/3ds/cube.3ds'\)
+       --md5=cb5a45c35a343c3f5beca2a918472951 --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_3ds_cube.blend
+)
+
+add_test(import_3ds_hierarchy_lara ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.autodesk_3ds\(filepath='${TEST_SRC_DIR}/io_tests/3ds/hierarchy_lara.3ds'\)
+       --md5=766c873d9fdb5f190e43796cfbae63b6 --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_3ds_hierarchy_lara.blend
+)
+
+add_test(import_3ds_hierarchy_greek_trireme ${TEST_BLENDER_EXE}
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.import_scene.autodesk_3ds\(filepath='${TEST_SRC_DIR}/io_tests/3ds/hierarchy_greek_trireme.3ds'\)
+       --md5=b62ee30101e8999cb91ef4f8a8760056 --md5_method=SCENE
+       --write-blend=${TEST_OUT_DIR}/import_3ds_hierarchy_greek_trireme.blend
+)
+
+# 3DS Export
+add_test(export_3ds_cube ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.autodesk_3ds\(filepath='${TEST_OUT_DIR}/export_3ds_cube.3ds',use_selection=False\)
+       --md5_source=${TEST_OUT_DIR}/export_3ds_cube.3ds
+       --md5=0df6cfb130052d01e31ef77d391d4cc0 --md5_method=FILE
+)
+
+add_test(export_3ds_nurbs ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.autodesk_3ds\(filepath='${TEST_OUT_DIR}/export_3ds_nurbs.3ds',use_selection=False\)
+       --md5_source=${TEST_OUT_DIR}/export_3ds_nurbs.3ds
+       --md5=ba1a6d43346fee3bcadc7e30e3c95935 --md5_method=FILE
+)
+
+add_test(export_3ds_all_objects ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.autodesk_3ds\(filepath='${TEST_OUT_DIR}/export_3ds_all_objects.3ds',use_selection=False\)
+       --md5_source=${TEST_OUT_DIR}/export_3ds_all_objects.3ds
+       --md5=cdf8fa8475fda0b9ef565ac09339254b --md5_method=FILE
+)
+
+
+
+# FBX Export
+# 'use_metadata=False' for reliable md5's
+add_test(export_fbx_cube ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.fbx\(filepath='${TEST_OUT_DIR}/export_fbx_cube.fbx',use_selection=False,use_metadata=False\)
+       --md5_source=${TEST_OUT_DIR}/export_fbx_cube.fbx
+       --md5=642a5a1fa199d5b9bbf1643519ae974d --md5_method=FILE
+)
+
+add_test(export_fbx_nurbs ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.fbx\(filepath='${TEST_OUT_DIR}/export_fbx_nurbs.fbx',use_selection=False,use_metadata=False\)
+       --md5_source=${TEST_OUT_DIR}/export_fbx_nurbs.fbx
+       --md5=ec1e8965bdbc3bf70707d77f82c2cb9c --md5_method=FILE
+)
+
+add_test(export_fbx_all_objects ${TEST_BLENDER_EXE}
+       ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
+       --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
+       --run={'FINISHED'}&bpy.ops.export_scene.fbx\(filepath='${TEST_OUT_DIR}/export_fbx_all_objects.fbx',use_selection=False,use_metadata=False\)
+       --md5_source=${TEST_OUT_DIR}/export_fbx_all_objects.fbx
+       --md5=af3b65665687ac92e4aba07b017d87fe --md5_method=FILE
+)
diff --git a/source/tests/batch_import.py b/source/tests/batch_import.py
new file mode 100644 (file)
index 0000000..5c228c0
--- /dev/null
@@ -0,0 +1,202 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+"""
+Example Usage:
+
+./blender.bin --background --python source/tests/batch_import.py -- \
+    --operator="bpy.ops.import_scene.obj" \
+    --path="/fe/obj" \
+    --match="*.obj" \
+    --start=0 --end=10 \
+    --save_path=/tmp/test
+
+./blender.bin --background --python source/tests/batch_import.py -- \
+    --operator="bpy.ops.import_scene.autodesk_3ds" \
+    --path="/fe/" \
+    --match="*.3ds" \
+    --start=0 --end=1000 \
+    --save_path=/tmp/test
+
+./blender.bin --background --addons io_curve_svg --python source/tests/batch_import.py -- \
+    --operator="bpy.ops.import_curve.svg" \
+    --path="/usr/" \
+    --match="*.svg" \
+    --start=0 --end=1000 \
+    --save_path=/tmp/test
+
+"""
+
+import os
+import sys
+
+
+def clear_scene():
+    import bpy
+    unique_obs = set()
+    for scene in bpy.data.scenes:
+        for obj in scene.objects[:]:
+            scene.objects.unlink(obj)
+            unique_obs.add(obj)
+
+    # remove obdata, for now only worry about the startup scene
+    for bpy_data_iter in (bpy.data.objects, bpy.data.meshes, bpy.data.lamps, bpy.data.cameras):
+        for id_data in bpy_data_iter:
+            bpy_data_iter.remove(id_data)
+
+
+def batch_import(operator="",
+                   path="",
+                   save_path="",
+                   match="",
+                   start=0,
+                   end=sys.maxsize,
+                   ):
+    import addon_utils
+    _reset_all = addon_utils.reset_all  # XXX, hack
+
+    import fnmatch
+
+    path = os.path.normpath(path)
+    path = os.path.abspath(path)
+
+    match_upper = match.upper()
+    pattern_match = lambda a: fnmatch.fnmatchcase(a.upper(), match_upper)
+
+    def file_generator(path):
+        for dirpath, dirnames, filenames in os.walk(path):
+
+            # skip '.svn'
+            if dirpath.startswith("."):
+                continue
+
+            for filename in filenames:
+                if pattern_match(filename):
+                    yield os.path.join(dirpath, filename)
+
+    print("Collecting %r files in %s" % (match, path), end="")
+
+    files = list(file_generator(path))
+    files_len = len(files)
+    end = min(end, len(files))
+    print(" found %d" % files_len, end="")
+
+    files.sort()
+    files = files[start:end]
+    if len(files) != files_len:
+        print(" using a subset in (%d, %d), total %d" % (start, end, len(files)), end="")
+
+    import bpy
+    op = eval(operator)
+
+    tot_done = 0
+    tot_fail = 0
+
+    for i, f in enumerate(files):
+        print("    %s(filepath=%r) # %d of %d" % (operator, f, i + start, len(files)))
+
+        # hack so loading the new file doesn't undo our loaded addons
+        addon_utils.reset_all = lambda: None  # XXX, hack
+
+        bpy.ops.wm.read_factory_settings()
+
+        addon_utils.reset_all = _reset_all  # XXX, hack
+        clear_scene()
+
+        result = op(filepath=f)
+
+        if 'FINISHED' in result:
+            tot_done += 1
+        else:
+            tot_fail += 1
+
+        if save_path:
+            fout = os.path.join(save_path, os.path.relpath(f, path))
+            fout_blend = os.path.splitext(fout)[0] + ".blend"
+
+            print("\tSaving: %r" % fout_blend)
+
+            fout_dir = os.path.dirname(fout_blend)
+            if not os.path.exists(fout_dir):
+                os.makedirs(fout_dir)
+
+            bpy.ops.wm.save_as_mainfile(filepath=fout_blend)
+
+    print("finished, done:%d,  fail:%d" % (tot_done, tot_fail))
+
+
+def main():
+    import optparse
+
+    # get the args passed to blender after "--", all of which are ignored by blender specifically
+    # so python may receive its own arguments
+    argv = sys.argv
+
+    if "--" not in argv:
+        argv = []  # as if no args are passed
+    else:
+        argv = argv[argv.index("--") + 1:]  # get all args after "--"
+
+    # When --help or no args are given, print this help
+    usage_text = "Run blender in background mode with this script:\n"
+    usage_text += "  blender --background --python " + __file__ + " -- [options]"
+
+    parser = optparse.OptionParser(usage=usage_text)
+
+    # Possible optparse option types are: string, int, long, choice, float and complex.
+    parser.add_option("-o", "--operator", dest="operator", help="Operator to run on each matched file, eg: 'bpy.ops.import_scene.obj'", type="string")
+    parser.add_option("-p", "--path", dest="path", help="Path to use for searching for files", type='string')
+    parser.add_option("-m", "--match", dest="match", help="Wildcard to match filename", type="string")
+    parser.add_option("-s", "--save_path", dest="save_path", help="Save the input file to a blend file in a new location", metavar='string')
+    parser.add_option("-S", "--start", dest="start", help="From collected files, start with this index", metavar='int')
+    parser.add_option("-E", "--end", dest="end", help="From collected files, end with this index", metavar='int')
+
+    options, args = parser.parse_args(argv)  # In this example we won't use the args
+
+    if not argv:
+        parser.print_help()
+        return
+
+    if not options.operator:
+        print("Error: --operator=\"some string\" argument not given, aborting.")
+        parser.print_help()
+        return
+
+    if options.start is None:
+        options.start = 0
+
+    if options.end is None:
+        options.end = sys.maxsize
+
+    # Run the example function
+    batch_import(operator=options.operator,
+                 path=options.path,
+                 save_path=options.save_path,
+                 match=options.match,
+                 start=int(options.start),
+                 end=int(options.end),
+                 )
+
+    print("batch job finished, exiting")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/source/tests/bl_load_addons.py b/source/tests/bl_load_addons.py
new file mode 100644 (file)
index 0000000..5bd83ab
--- /dev/null
@@ -0,0 +1,78 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+# simple script to enable all addons, then disable (and optionally reload) them
+
+import bpy
+import addon_utils
+
+import sys
+import imp
+
+
+def reload_addons(do_reload=True, do_reverse=True):
+    modules = addon_utils.modules({})
+    modules.sort(key=lambda mod: mod.__name__)
+    addons = bpy.context.user_preferences.addons
+
+    # first disable all
+    for mod_name in list(addons.keys()):
+        addon_utils.disable(mod_name)
+
+    assert(bool(addons) == False)
+
+    # Run twice each time.
+    for i in (0, 1):
+        for mod in modules:
+            mod_name = mod.__name__
+            print("\tenabling:", mod_name)
+            addon_utils.enable(mod_name)
+            assert(mod_name in addons)
+
+        for mod in addon_utils.modules({}):
+            mod_name = mod.__name__
+            print("\tdisabling:", mod_name)
+            addon_utils.disable(mod_name)
+            assert(not (mod_name in addons))
+
+            # now test reloading
+            if do_reload:
+                imp.reload(sys.modules[mod_name])
+
+            if do_reverse:
+                # in case order matters when it shouldn't
+                modules.reverse()
+
+
+def main():
+    reload_addons(do_reload=False, do_reverse=False)
+    reload_addons(do_reload=False, do_reverse=True)
+    reload_addons(do_reload=True, do_reverse=True)
+
+
+if __name__ == "__main__":
+
+    # So a python error exits(1)
+    try:
+        main()
+    except:
+        import traceback
+        traceback.print_exc()
+        sys.exit(1)
diff --git a/source/tests/bl_load_py_modules.py b/source/tests/bl_load_py_modules.py
new file mode 100644 (file)
index 0000000..5a65578
--- /dev/null
@@ -0,0 +1,145 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+# simple script to enable all addons and then import every python module on blender's script paths
+
+import bpy
+import addon_utils
+
+import sys
+import os
+import imp
+
+
+def source_list(path, filename_check=None):
+    from os.path import join
+    for dirpath, dirnames, filenames in os.walk(path):
+        # skip '.svn'
+        if dirpath.startswith("."):
+            continue
+
+        for filename in filenames:
+            filepath = join(dirpath, filename)
+            if filename_check is None or filename_check(filepath):
+                yield filepath
+
+
+def load_addons():
+    modules = addon_utils.modules({})
+    modules.sort(key=lambda mod: mod.__name__)
+    addons = bpy.context.user_preferences.addons
+
+    # first disable all
+    for mod_name in list(addons.keys()):
+        addon_utils.disable(mod_name)
+
+    assert(bool(addons) == False)
+
+    for mod in modules:
+        mod_name = mod.__name__
+        addon_utils.enable(mod_name)
+        assert(mod_name in addons)
+
+
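+# Import every top-level module on blender's script paths, then the submodules
+# of any packages found, and finally verify that no .py file (outside presets
+# and templates) was left unloaded.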
+def load_modules():
+    modules = []
+    module_paths = []
+
+    # paths blender stores scripts in.
+    paths = bpy.utils.script_paths()
+
+    #
+    # find all sys.path we added
+    for script_path in paths:
+        for mod_dir in sys.path:
+            if mod_dir.startswith(script_path):
+                module_paths.append(mod_dir)
+
+    #
+    # collect modules from our paths.
+    for mod_dir in module_paths:
+        # print("mod_dir", mod_dir)
+        for mod, mod_full in bpy.path.module_names(mod_dir):
+            modules.append(__import__(mod))
+
+    #
+    # now submodules
+    for m in modules:
+        filepath = m.__file__
+        if os.path.basename(filepath).startswith("__init__."):
+            mod_dir = os.path.dirname(filepath)
+            for submod, submod_full in bpy.path.module_names(mod_dir):
+                # fromlist is ignored, ugh.
+                mod_name_full = m.__name__ + "." + submod
+                __import__(mod_name_full)
+                mod_imp = sys.modules[mod_name_full]
+
+                # check we load what we ask for.
+                assert(os.path.samefile(mod_imp.__file__, submod_full))
+
+                modules.append(mod_imp)
+
+    #
+    # check which filepaths we didn't load
+    source_files = []
+    for mod_dir in module_paths:
+        source_files.extend(source_list(mod_dir, filename_check=lambda f: f.endswith(".py")))
+
+    source_files = list(set(source_files))
+    source_files.sort()
+
+    #
+    # remove loaded files
+    loaded_files = list({m.__file__ for m in modules})
+    loaded_files.sort()
+
+    for f in loaded_files:
+        source_files.remove(f)
+
+    #
+    # test we tested all files except for presets and templates
+    ignore_paths = [
+        os.sep + "presets" + os.sep,
+        os.sep + "templates" + os.sep,
+    ]
+
+    for f in source_files:
+        ok = False
+        for ignore in ignore_paths:
+            if ignore in f:
+                ok = True
+        if not ok:
+            raise Exception("Source file %r not loaded in test" % f)
+
+    print("loaded %d modules" % len(loaded_files))
+
+
+def main():
+    load_addons()
+    load_modules()
+
+if __name__ == "__main__":
+    # So a python error exits(1)
+    try:
+        main()
+    except:
+        import traceback
+        traceback.print_exc()
+        sys.exit(1)
diff --git a/source/tests/bl_run_operators.py b/source/tests/bl_run_operators.py
new file mode 100644 (file)
index 0000000..668b4e6
--- /dev/null
@@ -0,0 +1,176 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+# semi-useful script: runs all operators in a number of different contexts,
+# a cheap way to find misc small bugs, but in no way a complete test.
+#
+# the only error checked for here is a segfault.
+
+import bpy
+import sys
+
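+# fnmatch patterns for operators that must not be run by this test: anything
+# that reads or writes files, opens URLs/docs or renders would block or
+# clobber state.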
+op_blacklist = (
+    "script.reload",
+    "export*.*",
+    "import*.*",
+    "*.save_*",
+    "*.read_*",
+    "*.open_*",
+    "*.link_append",
+    "render.render",
+    "*.*_export",
+    "*.*_import",
+    "wm.url_open",
+    "wm.doc_view",
+    "wm.path_open",
+    "help.operator_cheat_sheet",
+    )
+
+
+def filter_op_list(operators):
+    from fnmatch import fnmatchcase
+
+    def is_op_ok(op):
+        for op_match in op_blacklist:
+            if fnmatchcase(op, op_match):
+                print("    skipping: %s (%s)" % (op, op_match))
+                return False
+        return True
+
+    operators[:] = [op for op in operators if is_op_ok(op[0])]
+
+
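+# For each operator whose poll() passes: reset to factory settings, run the
+# context setup function, then call the operator in EXEC_DEFAULT and
+# INVOKE_DEFAULT modes. Exceptions are deliberately ignored; only a crash
+# fails the test.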
+def run_ops(operators, setup_func=None):
+    print("\ncontext:", setup_func.__name__)
+    # first invoke
+    for op_id, op in operators:
+        if op.poll():
+            print("    operator:", op_id)
+            sys.stdout.flush()  # in case of crash
+
+            # disabling this reset will get blender in a bad state and crash easily!
+            bpy.ops.wm.read_factory_settings()
+
+            setup_func()
+
+            for mode in ('EXEC_DEFAULT', 'INVOKE_DEFAULT'):
+                try:
+                    op(mode)
+                except:
+                    #import traceback
+                    #traceback.print_exc()
+                    pass
+
+
+# contexts
+def ctx_clear_scene():  # copied from batch_import.py
+    unique_obs = set()
+    for scene in bpy.data.scenes:
+        for obj in scene.objects[:]:
+            scene.objects.unlink(obj)
+            unique_obs.add(obj)
+
+    # remove obdata, for now only worry about the startup scene
+    for bpy_data_iter in (bpy.data.objects, bpy.data.meshes, bpy.data.lamps, bpy.data.cameras):
+        for id_data in bpy_data_iter:
+            bpy_data_iter.remove(id_data)
+
+
+def ctx_editmode_mesh():
+    bpy.ops.object.mode_set(mode='EDIT')
+    bpy.ops.object.vertex_group_add()
+
+
+def ctx_editmode_curves():
+    bpy.ops.curve.primitive_nurbs_circle_add()
+    bpy.ops.object.mode_set(mode='EDIT')
+
+
+def ctx_editmode_surface():
+    bpy.ops.surface.primitive_nurbs_surface_torus_add()
+    bpy.ops.object.mode_set(mode='EDIT')
+
+
+def ctx_editmode_mball():
+    bpy.ops.object.metaball_add()
+    bpy.ops.object.mode_set(mode='EDIT')
+
+
+def ctx_editmode_text():
+    bpy.ops.object.text_add()
+    bpy.ops.object.mode_set(mode='EDIT')
+
+
+def ctx_editmode_armature():
+    bpy.ops.object.armature_add()
+    bpy.ops.object.mode_set(mode='EDIT')
+
+
+def ctx_editmode_lattice():
+    bpy.ops.object.add(type='LATTICE')
+    bpy.ops.object.mode_set(mode='EDIT')
+    # bpy.ops.object.vertex_group_add()
+
+
+def ctx_object_empty():
+    bpy.ops.object.add(type='EMPTY')
+
+
+def ctx_weightpaint():
+    bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
+
+
+def main():
+    # bpy.ops.wm.read_factory_settings()
+    import bpy
+    operators = []
+    for mod_name in dir(bpy.ops):
+        mod = getattr(bpy.ops, mod_name)
+        for submod_name in dir(mod):
+            op = getattr(mod, submod_name)
+            operators.append(("%s.%s" % (mod_name, submod_name), op))
+
+    operators.sort(key=lambda op: op[0])
+
+    filter_op_list(operators)
+
+    # for testing, mix the list up.
+    #operators.reverse()
+
+    #import random
+    #random.shuffle(operators)
+
+    # Run the operator tests in different contexts
+    run_ops(operators, setup_func=lambda: None)
+    run_ops(operators, setup_func=ctx_editmode_surface)
+    run_ops(operators, setup_func=ctx_object_empty)
+    run_ops(operators, setup_func=ctx_editmode_armature)
+    run_ops(operators, setup_func=ctx_editmode_mesh)
+    run_ops(operators, setup_func=ctx_clear_scene)
+    run_ops(operators, setup_func=ctx_editmode_curves)
+    run_ops(operators, setup_func=ctx_editmode_mball)
+    run_ops(operators, setup_func=ctx_editmode_text)
+    run_ops(operators, setup_func=ctx_weightpaint)
+    run_ops(operators, setup_func=ctx_editmode_lattice)
+
+    print("finished")
+
+if __name__ == "__main__":
+    main()
diff --git a/source/tests/bl_test.py b/source/tests/bl_test.py
new file mode 100644 (file)
index 0000000..cfe9135
--- /dev/null
@@ -0,0 +1,197 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+import sys
+import os
+
+
+# may split this out into a new file
+def replace_bpy_app_version():
+    """ So MD5's are predictable from output which uses blenders versions.
+    """
+
+    import bpy
+
+    app = bpy.app
+    app_fake = type(bpy)("bpy.app")
+
+    for attr in dir(app):
+        if not attr.startswith("_"):
+            setattr(app_fake, attr, getattr(app, attr))
+
+    app_fake.version = 0, 0, 0
+    app_fake.version_string = "0.00 (sub 0)"
+    bpy.app = app_fake
+
+
+def clear_startup_blend():
+    import bpy
+
+    for scene in bpy.data.scenes:
+        for obj in scene.objects:
+            scene.objects.unlink(obj)
+
+
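+# Fingerprint the current scene: object world matrices and mesh/curve
+# coordinates are rounded to 4 decimals and fed into an MD5, so the digest
+# depends only on the imported geometry.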
+def blend_to_md5():
+    import bpy
+    scene = bpy.context.scene
+    ROUND = 4
+
+    def matrix2str(matrix):
+        return "".join([str(round(axis, ROUND)) for vector in matrix for axis in vector]).encode('ASCII')
+
+    def coords2str(seq, attr):
+        return "".join([str(round(axis, ROUND)) for vertex in seq for axis in getattr(vertex, attr)]).encode('ASCII')
+
+    import hashlib
+
+    md5 = hashlib.new("md5")
+    md5_update = md5.update
+
+    for obj in scene.objects:
+        md5_update(matrix2str(obj.matrix_world))
+        data = obj.data
+
+        if type(data) == bpy.types.Mesh:
+            md5_update(coords2str(data.vertices, "co"))
+        elif type(data) == bpy.types.Curve:
+            for spline in data.splines:
+                md5_update(coords2str(spline.bezier_points, "co"))
+                md5_update(coords2str(spline.points, "co"))
+
+    return md5.hexdigest()
+
+
+def main():
+    argv = sys.argv
+    print("  args:", " ".join(argv))
+    argv = argv[argv.index("--") + 1:]
+
+    def arg_extract(arg, optional=True, array=False):
+        arg += "="
+        if array:
+            value = []
+        else:
+            value = None
+
+        i = 0
+        while i < len(argv):
+            if argv[i].startswith(arg):
+                item = argv[i][len(arg):]
+                del argv[i]
+                i -= 1
+
+                if array:
+                    value.append(item)
+                else:
+                    value = item
+                    break
+
+            i += 1
+
+        if (not value) and (not optional):
+            print("  '%s' not set" % arg)
+            sys.exit(1)
+
+        return value
+
+    run = arg_extract("--run", optional=False)
+    md5 = arg_extract("--md5", optional=False)
+    md5_method = arg_extract("--md5_method", optional=False)  # 'SCENE' / 'FILE'
+
+    # only when md5_method is 'FILE'
+    md5_source = arg_extract("--md5_source", optional=True, array=True)
+
+    # save blend file, for testing
+    write_blend = arg_extract("--write-blend", optional=True)
+
+    # ensure files are written anew
+    for f in md5_source:
+        if os.path.exists(f):
+            os.remove(f)
+
+    import bpy
+
+    replace_bpy_app_version()
+    if not bpy.data.filepath:
+        clear_startup_blend()
+
+    print("  Running: '%s'" % run)
+    print("  MD5: '%s'!" % md5)
+
+    try:
+        result = eval(run)
+    except:
+        import traceback
+        traceback.print_exc()
+        sys.exit(1)
+
+    if write_blend is not None:
+        print("  Writing Blend: %s" % write_blend)
+        bpy.ops.wm.save_mainfile(filepath=write_blend, check_existing=False)
+
+    print("  Result: '%s'" % str(result))
+    if not result:
+        print("  Running: %s -> False" % run)
+        sys.exit(1)
+
+    if md5_method == 'SCENE':
+        md5_new = blend_to_md5()
+    elif md5_method == 'FILE':
+        if not md5_source:
+            print("  Missing --md5_source argument")
+            sys.exit(1)
+
+        for f in md5_source:
+            if not os.path.exists(f):
+                print("  Missing --md5_source=%r argument does not point to a file")
+                sys.exit(1)
+
+        import hashlib
+
+        md5_instance = hashlib.new("md5")
+        md5_update = md5_instance.update
+
+        for f in md5_source:
+            filehandle = open(f, "rb")
+            md5_update(filehandle.read())
+            filehandle.close()
+
+        md5_new = md5_instance.hexdigest()
+
+    else:
+        print("  Invalid --md5_method=%s argument is not a valid source")
+        sys.exit(1)
+
+    if md5 != md5_new:
+        print("  Running: %s\n    MD5 Recieved: %s\n    MD5 Expected: %s" % (run, md5_new, md5))
+        sys.exit(1)
+
+    print("  Success: %s" % run)
+
+
+if __name__ == "__main__":
+    # So a python error exits(1)
+    try:
+        main()
+    except:
+        import traceback
+        traceback.print_exc()
+        sys.exit(1)
diff --git a/source/tests/pep8.py b/source/tests/pep8.py
new file mode 100644 (file)
index 0000000..f7c4165
--- /dev/null
@@ -0,0 +1,118 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+import os
+
+# depends on pep8, pyflakes, pylint
+# for ubuntu
+#
+#   sudo apt-get install pylint pyflakes
+#
+#   sudo apt-get install python-setuptools python-pip
+#   sudo pip install pep8
+#
+# in debian install pylint pyflakes pep8 with apt-get/aptitude/etc
+#
+# on *nix run
+#   python source/tests/pep8.py > test_pep8.log 2>&1
+
+# how many lines to read from the start of each file; the pep8 comment
+# should be directly after the licence header, ~20 lines in most cases
+PEP8_SEEK_COMMENT = 40
+SKIP_PREFIX = "./tools", "./config", "./scons", "./extern"
+
+
+def file_list_py(path):
+    for dirpath, dirnames, filenames in os.walk(path):
+        for filename in filenames:
+            if filename.endswith(".py") or filename.endswith(".cfg"):
+                yield os.path.join(dirpath, filename)
+
+
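+# Returns 0 when no pep8 tag is found (the file is skipped), 1 for
+# '# <pep8 compliant>' (long lines allowed, E501 ignored) and 2 for
+# '# <pep8-80 compliant>' (line length checked as well).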
+def is_pep8(path):
+    print(path)
+    if open(path, 'rb').read(3) == b'\xef\xbb\xbf':
+        print("\nfile contains BOM, remove first 3 bytes: %r\n" % path)
+
+    # templates don't have a header but should be pep8
+    for d in ("presets", "templates", "examples"):
+        if ("%s%s%s" % (os.sep, d, os.sep)) in path:
+            return 1
+
+    f = open(path, 'r', encoding="utf8")
+    for i in range(PEP8_SEEK_COMMENT):
+        line = f.readline()
+        if line.startswith("# <pep8"):
+            if line.startswith("# <pep8 compliant>"):
+                return 1
+            elif line.startswith("# <pep8-80 compliant>"):
+                return 2
+    f.close()
+    return 0
+
+
+def main():
+    files = []
+    files_skip = []
+    for f in file_list_py("."):
+        if [None for prefix in SKIP_PREFIX if f.startswith(prefix)]:
+            continue
+
+        pep8_type = is_pep8(f)
+
+        if pep8_type:
+            # so we can batch them for each tool.
+            files.append((os.path.abspath(f), pep8_type))
+        else:
+            files_skip.append(f)
+
+    print("\nSkipping...")
+    for f in files_skip:
+        print("    %s" % f)
+
+    # strict imports
+    print("\n\n\n# running pep8...")
+    import re
+    import_check = re.compile(r"\s*from\s+[A-z\.]+\s+import \*\s*")
+    for f, pep8_type in files:
+        for i, l in enumerate(open(f, 'r', encoding='utf8')):
+            if import_check.match(l):
+                print("%s:%d:0: global import bad practice" % (f, i + 1))
+
+    print("\n\n\n# running pep8...")
+    for f, pep8_type in files:
+        if pep8_type == 1:
+            # E501:80 line length
+            os.system("pep8 --repeat --ignore=E501 '%s'" % (f))
+        else:
+            os.system("pep8 --repeat '%s'" % (f))
+
+    # pyflakes
+    print("\n\n\n# running pyflakes...")
+    for f, pep8_type in files:
+        os.system("pyflakes '%s'" % f)
+
+    print("\n\n\n# running pylint...")
+    for f, pep8_type in files:
+        # let pep8 complain about line length
+        os.system("pylint --reports=n --max-line-length=1000 '%s'" % f)
+
+if __name__ == "__main__":
+    main()
diff --git a/source/tests/rna_array.py b/source/tests/rna_array.py
new file mode 100644 (file)
index 0000000..06b4735
--- /dev/null
@@ -0,0 +1,297 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+# 
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+# 
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+import unittest
+import random
+
+import bpy
+
+test = bpy.data.test
+
+# farr - 1-dimensional array of float
+# fdarr - dynamic 1-dimensional array of float
+# fmarr - 3-dimensional ([3][4][5]) array of float
+# fdmarr - dynamic 3-dimensional (ditto size) array of float
+
+# same as above for other types except that the first letter is "i" for int and "b" for bool
+
+class TestArray(unittest.TestCase):
+    # test that assignment works by: assign -> test value
+    # - rvalue = list of float
+    # - rvalue = list of numbers
+    # test.object
+    # bpy.data.test.farr[3], iarr[3], barr[...], fmarr, imarr, bmarr
+
+    def setUp(self):
+        test.farr= (1.0, 2.0, 3.0)
+        test.iarr= (7, 8, 9)
+        test.barr= (False, True, False)
+    
+    # test access
+    # test slice access, negative indices
+    def test_access(self):
+        rvals= ([1.0, 2.0, 3.0], [7, 8, 9], [False, True, False])
+        for arr, rval in zip((test.farr, test.iarr, test.barr), rvals):
+            self.assertEqual(prop_to_list(arr), rval)
+            self.assertEqual(arr[0:3], rval)
+            self.assertEqual(arr[1:2], rval[1:2])
+            self.assertEqual(arr[-1], arr[2])
+            self.assertEqual(arr[-2], arr[1])
+            self.assertEqual(arr[-3], arr[0])
+
+    # fail when index out of bounds
+    def test_access_fail(self):
+        for arr in (test.farr, test.iarr, test.barr):
+            self.assertRaises(IndexError, lambda : arr[4])
+    
+    # test assignment of a whole array
+    def test_assign_array(self):
+        # should accept int as float
+        test.farr= (1, 2, 3)
+
+    # fail when: unexpected no. of items, invalid item type
+    def test_assign_array_fail(self):
+        def assign_empty_list(arr):
+            setattr(test, arr, ())
+
+        for arr in ("farr", "iarr", "barr"):
+            self.assertRaises(ValueError, assign_empty_list, arr)
+
+        def assign_invalid_float():
+            test.farr= (1.0, 2.0, "3.0")
+
+        def assign_invalid_int():
+            test.iarr= ("1", 2, 3)
+
+        def assign_invalid_bool():
+            test.barr= (True, 0.123, False)
+
+        for func in [assign_invalid_float, assign_invalid_int, assign_invalid_bool]:
+            self.assertRaises(TypeError, func)
+
+        # shouldn't accept float as int
+        def assign_float_as_int():
+            test.iarr= (1, 2, 3.0)
+        self.assertRaises(TypeError, assign_float_as_int)
+
+        # non-dynamic arrays cannot change size
+        def assign_different_size(arr, val):
+            setattr(test, arr, val)
+        for arr, val in zip(("iarr", "farr", "barr"), ((1, 2), (1.0, 2.0), (True, False))):
+            self.assertRaises(ValueError, assign_different_size, arr, val)
+
+    # test assignment of specific items
+    def test_assign_item(self):
+        for arr, rand_func in zip((test.farr, test.iarr, test.barr), (rand_float, rand_int, rand_bool)):
+            for i in range(len(arr)):
+                val= rand_func()
+                arr[i]= val
+                
+                self.assertEqual(arr[i], val)
+
+        # float prop should accept also int
+        for i in range(len(test.farr)):
+            val= rand_int()
+            test.farr[i]= val
+            self.assertEqual(test.farr[i], float(val))
+
+        # 
+
+    def test_assign_item_fail(self):
+        def assign_bad_index(arr):
+            arr[4] = 1.0
+
+        def assign_bad_type(arr):
+            arr[1]= "123"
+            
+        for arr in [test.farr, test.iarr, test.barr]:
+            self.assertRaises(IndexError, assign_bad_index, arr)
+
+        # not testing bool because bool allows not only (True|False)
+        for arr in [test.farr, test.iarr]:    
+            self.assertRaises(TypeError, assign_bad_type, arr)
+
+    def test_dynamic_assign_array(self):
+        # test various lengths here
+        for arr, rand_func in zip(("fdarr", "idarr", "bdarr"), (rand_float, rand_int, rand_bool)):
+            for length in range(1, 64):
+                rval= make_random_array(length, rand_func)
+                setattr(test, arr, rval)
+                self.assertEqual(prop_to_list(getattr(test, arr)), rval)
+
+    def test_dynamic_assign_array_fail(self):
+        # could also test too big length here
+        
+        def assign_empty_list(arr):
+            setattr(test, arr, ())
+
+        for arr in ("fdarr", "idarr", "bdarr"):
+            self.assertRaises(ValueError, assign_empty_list, arr)
+
+
+class TestMArray(unittest.TestCase):
+    def setUp(self):
+        # reset dynamic array sizes
+        for arr, func in zip(("fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool)):
+            setattr(test, arr, make_random_3d_array((3, 4, 5), func))
+
+    # test assignment
+    def test_assign_array(self):
+        for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
+            # assignment of [3][4][5]
+            rval= make_random_3d_array((3, 4, 5), func)
+            setattr(test, arr, rval)
+            self.assertEqual(prop_to_list(getattr(test, arr)), rval)
+
+        # test assignment of [2][4][5], [1][4][5] should work on dynamic arrays
+
+    def test_assign_array_fail(self):
+        def assign_empty_array():
+            test.fmarr= ()
+        self.assertRaises(ValueError, assign_empty_array)
+
+        def assign_invalid_size(arr, rval):
+            setattr(test, arr, rval)
+
+        # assignment of 3,4,4 or 3,3,5 should raise ex
+        for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
+            rval= make_random_3d_array((3, 4, 4), func)
+            self.assertRaises(ValueError, assign_invalid_size, arr, rval)
+
+            rval= make_random_3d_array((3, 3, 5), func)
+            self.assertRaises(ValueError, assign_invalid_size, arr, rval)
+
+            rval= make_random_3d_array((3, 3, 3), func)
+            self.assertRaises(ValueError, assign_invalid_size, arr, rval)
+
+    def test_assign_item(self):
+        # arr[i] = x
+        for arr, func in zip(("fmarr", "imarr", "bmarr", "fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool) * 2):
+            rval= make_random_2d_array((4, 5), func)
+
+            for i in range(3):
+                getattr(test, arr)[i]= rval
+                self.assertEqual(prop_to_list(getattr(test, arr)[i]), rval)
+
+        # arr[i][j] = x
+        for arr, func in zip(("fmarr", "imarr", "bmarr", "fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool) * 2):
+
+            arr= getattr(test, arr)
+            rval= make_random_array(5, func)
+
+            for i in range(3):
+                for j in range(4):
+                    arr[i][j]= rval
+                    self.assertEqual(prop_to_list(arr[i][j]), rval)
+
+
+    def test_assign_item_fail(self):
+        def assign_wrong_size(arr, i, rval):
+            getattr(test, arr)[i]= rval
+
+        # assign wrong size at level 2
+        for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
+            rval1= make_random_2d_array((3, 5), func)
+            rval2= make_random_2d_array((4, 3), func)
+
+            for i in range(3):
+                self.assertRaises(ValueError, assign_wrong_size, arr, i, rval1)
+                self.assertRaises(ValueError, assign_wrong_size, arr, i, rval2)
+
+    def test_dynamic_assign_array(self):
+        for arr, func in zip(("fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool)):
+            # assignment of [3][4][5]
+            rval= make_random_3d_array((3, 4, 5), func)
+            setattr(test, arr, rval)
+            self.assertEqual(prop_to_list(getattr(test, arr)), rval)
+
+            # [2][4][5]
+            rval= make_random_3d_array((2, 4, 5), func)
+            setattr(test, arr, rval)
+            self.assertEqual(prop_to_list(getattr(test, arr)), rval)
+
+            # [1][4][5]
+            rval= make_random_3d_array((1, 4, 5), func)
+            setattr(test, arr, rval)
+            self.assertEqual(prop_to_list(getattr(test, arr)), rval)
+
+
+    # test access
+    def test_access(self):
+        pass
+
+    # test slice access, negative indices
+    def test_access_fail(self):
+        pass
+
+random.seed()
+
+def rand_int():
+    return random.randint(-1000, 1000)
+
+def rand_float():
+    return float(rand_int())
+
+def rand_bool():
+    return bool(random.randint(0, 1))
+
+def make_random_array(length, rand_func):
+    arr= []
+    for i in range(length):
+        arr.append(rand_func())
+        
+    return arr
+
+def make_random_2d_array(dimsize, rand_func):
+    marr= []
+    for i in range(dimsize[0]):
+        marr.append([])
+
+        for j in range(dimsize[1]):
+            marr[-1].append(rand_func())
+
+    return marr
+
+def make_random_3d_array(dimsize, rand_func):
+    marr= []
+    for i in range(dimsize[0]):
+        marr.append([])
+
+        for j in range(dimsize[1]):
+            marr[-1].append([])
+
+            for k in range(dimsize[2]):
+                marr[-1][-1].append(rand_func())
+
+    return marr
+
+def prop_to_list(prop):
+    ret= []
+
+    for x in prop:
+        if type(x) not in (bool, int, float):
+            ret.append(prop_to_list(x))
+        else:
+            ret.append(x)
+
+    return ret
+
+def suite():
+    return unittest.TestSuite([unittest.TestLoader().loadTestsFromTestCase(TestArray), unittest.TestLoader().loadTestsFromTestCase(TestMArray)])
+
+if __name__ == "__main__":
+    unittest.TextTestRunner(verbosity=2).run(suite())
+
diff --git a/source/tests/rna_info_dump.py b/source/tests/rna_info_dump.py
new file mode 100644 (file)
index 0000000..62c1248
--- /dev/null
@@ -0,0 +1,131 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+# Used for generating API diff's between releases
+#  ./blender.bin --background --python source/tests/rna_info_dump.py
+
+import bpy
+
+
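+# Write one sorted line per RNA property/function so two dumps can be diff'ed
+# between releases; in background mode the dump goes to stderr, otherwise it is
+# stored in a new text datablock.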
+def api_dump(use_properties=True, use_functions=True):
+
+    def prop_type(prop):
+        if prop.type == "pointer":
+            return prop.fixed_type.identifier
+        else:
+            return prop.type
+
+    def func_to_str(struct_id_str, func_id, func):
+
+        args = []
+        for prop in func.args:
+            data_str = "%s %s" % (prop_type(prop), prop.identifier)
+            if prop.array_length:
+                data_str += "[%d]" % prop.array_length
+            if not prop.is_required:
+                data_str += "=%s" % prop.default_str
+            args.append(data_str)
+
+        data_str = "%s.%s(%s)" % (struct_id_str, func_id, ", ".join(args))
+        if func.return_values:
+            return_args = ", ".join(prop_type(arg) for arg in func.return_values)
+            if len(func.return_values) > 1:
+                data_str += "  -->  (%s)" % return_args
+            else:
+                data_str += "  -->  %s" % return_args
+        return data_str
+
+    def prop_to_str(struct_id_str, prop_id, prop):
+
+        prop_str = "  <--  %s" % prop_type(prop)
+        if prop.array_length:
+            prop_str += "[%d]" % prop.array_length
+
+        data_str = "%s.%s %s" % (struct_id_str, prop_id, prop_str)
+        return data_str
+
+    def struct_full_id(v):
+        struct_id_str = v.identifier  # "".join(sid for sid in struct_id if struct_id)
+
+        for base in v.get_bases():
+            struct_id_str = base.identifier + "|" + struct_id_str
+
+        return struct_id_str
+
+    def dump_funcs():
+        data = []
+        for struct_id, v in sorted(struct.items()):
+            struct_id_str = struct_full_id(v)
+
+            funcs = [(func.identifier, func) for func in v.functions]
+
+            for func_id, func in funcs:
+                data.append(func_to_str(struct_id_str, func_id, func))
+
+            for prop in v.properties:
+                if prop.collection_type:
+                    funcs = [(prop.identifier + "." + func.identifier, func) for func in prop.collection_type.functions]
+                    for func_id, func in funcs:
+                        data.append(func_to_str(struct_id_str, func_id, func))
+        data.sort()
+        data.append("# * functions *")
+        return data
+
+    def dump_props():
+        data = []
+        for struct_id, v in sorted(struct.items()):
+            struct_id_str = struct_full_id(v)
+
+            props = [(prop.identifier, prop) for prop in v.properties]
+
+            for prop_id, prop in props:
+                data.append(prop_to_str(struct_id_str, prop_id, prop))
+
+            for prop in v.properties:
+                if prop.collection_type:
+                    props = [(prop.identifier + "." + prop_sub.identifier, prop_sub) for prop_sub in prop.collection_type.properties]
+                    for prop_sub_id, prop_sub in props:
+                        data.append(prop_to_str(struct_id_str, prop_sub_id, prop_sub))
+        data.sort()
+        data.insert(0, "# * properties *")
+        return data
+
+    import rna_info
+    struct = rna_info.BuildRNAInfo()[0]
+    data = []
+
+    if use_functions:
+        data.extend(dump_funcs())
+
+    if use_properties:
+        data.extend(dump_props())
+
+    if bpy.app.background:
+        import sys
+        sys.stderr.write("\n".join(data))
+        sys.stderr.write("\n\nEOF\n")
+    else:
+        text = bpy.data.texts.new(name="api.py")
+        text.from_string("\n".join(data))
+
+    print("END")
+
+if __name__ == "__main__":
+    api_dump()