Cycles: camera motion blur enabled.
author: Brecht Van Lommel <brechtvanlommel@pandora.be>
Tue, 9 Oct 2012 18:37:14 +0000 (18:37 +0000)
committer: Brecht Van Lommel <brechtvanlommel@pandora.be>
Tue, 9 Oct 2012 18:37:14 +0000 (18:37 +0000)
Still more work needed to get object motion blur ready.

12 files changed:
intern/cycles/blender/addon/ui.py
intern/cycles/blender/blender_object.cpp
intern/cycles/blender/blender_sync.cpp
intern/cycles/kernel/kernel_bvh.h
intern/cycles/kernel/kernel_camera.h
intern/cycles/kernel/kernel_object.h
intern/cycles/kernel/kernel_path.h
intern/cycles/kernel/kernel_shader.h
intern/cycles/kernel/kernel_types.h
intern/cycles/render/camera.cpp
intern/cycles/render/mesh.cpp
intern/cycles/render/object.cpp

index 4f4b03718393983cad1783b620b406c5ad74168f..ca43c345bfa6ad3ad1c3a076ae8d0eda2374d19a 100644 (file)
@@ -131,13 +131,9 @@ class CyclesRender_PT_light_paths(CyclesButtonsPanel, Panel):
 
 
 class CyclesRender_PT_motion_blur(CyclesButtonsPanel, Panel):
-    bl_label = "Motion Blur"
+    bl_label = "Camera Motion Blur"
     bl_options = {'DEFAULT_CLOSED'}
 
-    @classmethod
-    def poll(cls, context):
-        return False
-
     def draw_header(self, context):
         rd = context.scene.render
 
index 27301026d355175d39f67d3c67bae3f7c6a10b0d..3d74c57288b742fc912ada48a56332ae19835956 100644 (file)
@@ -17,6 +17,7 @@
  */
 
 #include "camera.h"
+#include "integrator.h"
 #include "graph.h"
 #include "light.h"
 #include "mesh.h"
@@ -227,7 +228,9 @@ void BlenderSync::sync_object(BL::Object b_parent, int b_index, BL::DupliObject
                                object->use_motion = true;
                        }
 
-                       sync_mesh_motion(b_ob, object->mesh, motion);
+                       /* mesh deformation blur not supported yet */
+                       if(!scene->integrator->motion_blur)
+                               sync_mesh_motion(b_ob, object->mesh, motion);
                }
 
                return;
index b4990eb815aeb66ee1e8fe7044306b74e29acf92..3d36eba0c4bde76a7e6439cce5fde4f23243e43f 100644 (file)
@@ -149,6 +149,9 @@ void BlenderSync::sync_data(BL::SpaceView3D b_v3d, BL::Object b_override, const
 
 void BlenderSync::sync_integrator()
 {
+#ifdef __CAMERA_MOTION__
+       BL::RenderSettings r = b_scene.render();
+#endif
        PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
 
        experimental = (RNA_enum_get(&cscene, "feature_set") != 0);
@@ -175,7 +178,7 @@ void BlenderSync::sync_integrator()
        integrator->layer_flag = render_layer.layer;
 
        integrator->sample_clamp = get_float(cscene, "sample_clamp");
-#ifdef __MOTION__
+#ifdef __CAMERA_MOTION__
        integrator->motion_blur = (!preview && r.use_motion_blur());
 #endif
 
index 34a44af8b8d974c8a0b18ee6d54280941ca92c6e..90aec2e46b3b01284aaea168a1db76cf7cbd0e4f 100644 (file)
@@ -349,7 +349,7 @@ __device_inline float3 bvh_triangle_refine(KernelGlobals *kg, ShaderData *sd, co
 
 #ifdef __INTERSECTION_REFINE__
        if(isect->object != ~0) {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                Transform tfm = sd->ob_itfm;
 #else
                Transform tfm = object_fetch_transform(kg, isect->object, ray->time, OBJECT_INVERSE_TRANSFORM);
@@ -370,7 +370,7 @@ __device_inline float3 bvh_triangle_refine(KernelGlobals *kg, ShaderData *sd, co
        P = P + D*rt;
 
        if(isect->object != ~0) {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                Transform tfm = sd->ob_tfm;
 #else
                Transform tfm = object_fetch_transform(kg, isect->object, ray->time, OBJECT_TRANSFORM);
index 7fa987197c921161b386843d7bbf7dddf40a878d..08674d0e379e6f0813d3e7d540c3ff3529b40a3b 100644 (file)
@@ -63,7 +63,7 @@ __device void camera_sample_perspective(KernelGlobals *kg, float raster_x, float
        /* transform ray from camera to world */
        Transform cameratoworld = kernel_data.cam.cameratoworld;
 
-#ifdef __MOTION__
+#ifdef __CAMERA_MOTION__
        if(kernel_data.cam.have_motion)
                transform_motion_interpolate(&cameratoworld, &kernel_data.cam.motion, ray->time);
 #endif
@@ -106,7 +106,7 @@ __device void camera_sample_orthographic(KernelGlobals *kg, float raster_x, floa
        /* transform ray from camera to world */
        Transform cameratoworld = kernel_data.cam.cameratoworld;
 
-#ifdef __MOTION__
+#ifdef __CAMERA_MOTION__
        if(kernel_data.cam.have_motion)
                transform_motion_interpolate(&cameratoworld, &kernel_data.cam.motion, ray->time);
 #endif
@@ -180,7 +180,7 @@ __device void camera_sample_panorama(KernelGlobals *kg, float raster_x, float ra
        /* transform ray from camera to world */
        Transform cameratoworld = kernel_data.cam.cameratoworld;
 
-#ifdef __MOTION__
+#ifdef __CAMERA_MOTION__
        if(kernel_data.cam.have_motion)
                transform_motion_interpolate(&cameratoworld, &kernel_data.cam.motion, ray->time);
 #endif
@@ -212,7 +212,7 @@ __device void camera_sample(KernelGlobals *kg, int x, int y, float filter_u, flo
        float raster_x = x + kernel_tex_interp(__filter_table, filter_u, FILTER_TABLE_SIZE);
        float raster_y = y + kernel_tex_interp(__filter_table, filter_v, FILTER_TABLE_SIZE);
 
-#ifdef __MOTION__
+#ifdef __CAMERA_MOTION__
        /* motion blur */
        if(kernel_data.cam.shuttertime == 0.0f)
                ray->time = TIME_INVALID;
index 01da5050c8d5d151e48c70d80d7c4c7f4034c326..ad43120146a607ce9b07e3a82a4e7ffe91203e34 100644 (file)
@@ -31,7 +31,7 @@ __device_inline Transform object_fetch_transform(KernelGlobals *kg, int object,
 {
        Transform tfm;
 
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        /* if we do motion blur */
        if(sd->flag & SD_OBJECT_MOTION) {
                /* fetch motion transforms */
@@ -70,7 +70,7 @@ __device_inline Transform object_fetch_transform(KernelGlobals *kg, int object,
 
 __device_inline void object_position_transform(KernelGlobals *kg, ShaderData *sd, float3 *P)
 {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        *P = transform_point(&sd->ob_tfm, *P);
 #else
        Transform tfm = object_fetch_transform(kg, sd->object, TIME_INVALID, OBJECT_TRANSFORM);
@@ -80,7 +80,7 @@ __device_inline void object_position_transform(KernelGlobals *kg, ShaderData *sd
 
 __device_inline void object_inverse_position_transform(KernelGlobals *kg, ShaderData *sd, float3 *P)
 {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        *P = transform_point(&sd->ob_itfm, *P);
 #else
        Transform tfm = object_fetch_transform(kg, sd->object, TIME_INVALID, OBJECT_INVERSE_TRANSFORM);
@@ -90,7 +90,7 @@ __device_inline void object_inverse_position_transform(KernelGlobals *kg, Shader
 
 __device_inline void object_inverse_normal_transform(KernelGlobals *kg, ShaderData *sd, float3 *N)
 {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        *N = normalize(transform_direction_transposed(&sd->ob_tfm, *N));
 #else
        Transform tfm = object_fetch_transform(kg, sd->object, TIME_INVALID, OBJECT_TRANSFORM);
@@ -100,7 +100,7 @@ __device_inline void object_inverse_normal_transform(KernelGlobals *kg, ShaderDa
 
 __device_inline void object_normal_transform(KernelGlobals *kg, ShaderData *sd, float3 *N)
 {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        *N = normalize(transform_direction_transposed(&sd->ob_itfm, *N));
 #else
        Transform tfm = object_fetch_transform(kg, sd->object, TIME_INVALID, OBJECT_INVERSE_TRANSFORM);
@@ -110,7 +110,7 @@ __device_inline void object_normal_transform(KernelGlobals *kg, ShaderData *sd,
 
 __device_inline void object_dir_transform(KernelGlobals *kg, ShaderData *sd, float3 *D)
 {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        *D = transform_direction(&sd->ob_tfm, *D);
 #else
        Transform tfm = object_fetch_transform(kg, sd->object, 0.0f, OBJECT_TRANSFORM);
@@ -120,7 +120,7 @@ __device_inline void object_dir_transform(KernelGlobals *kg, ShaderData *sd, flo
 
 __device_inline float3 object_location(KernelGlobals *kg, ShaderData *sd)
 {
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        return make_float3(sd->ob_tfm.x.w, sd->ob_tfm.y.w, sd->ob_tfm.z.w);
 #else
        Transform tfm = object_fetch_transform(kg, sd->object, 0.0f, OBJECT_TRANSFORM);
index d606c3d634a4268d853df68c324695149f441197..817f254a5e50e25b2903a933e0ac73df664ea981 100644 (file)
@@ -343,7 +343,7 @@ __device float4 kernel_path_progressive(KernelGlobals *kg, RNG *rng, int sample,
                                light_ray.P = ray_offset(sd.P, sd.Ng);
                                light_ray.D = ao_D;
                                light_ray.t = kernel_data.background.ao_distance;
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                                light_ray.time = sd.time;
 #endif
 
@@ -368,7 +368,7 @@ __device float4 kernel_path_progressive(KernelGlobals *kg, RNG *rng, int sample,
                                BsdfEval L_light;
                                bool is_lamp;
 
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                                light_ray.time = sd.time;
 #endif
 
@@ -520,7 +520,7 @@ __device void kernel_path_indirect(KernelGlobals *kg, RNG *rng, int sample, Ray
                                light_ray.P = ray_offset(sd.P, sd.Ng);
                                light_ray.D = ao_D;
                                light_ray.t = kernel_data.background.ao_distance;
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                                light_ray.time = sd.time;
 #endif
 
@@ -545,7 +545,7 @@ __device void kernel_path_indirect(KernelGlobals *kg, RNG *rng, int sample, Ray
                                BsdfEval L_light;
                                bool is_lamp;
 
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                                light_ray.time = sd.time;
 #endif
 
@@ -728,7 +728,7 @@ __device float4 kernel_path_non_progressive(KernelGlobals *kg, RNG *rng, int sam
                                        light_ray.P = ray_offset(sd.P, sd.Ng);
                                        light_ray.D = ao_D;
                                        light_ray.t = kernel_data.background.ao_distance;
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                                        light_ray.time = sd.time;
 #endif
 
@@ -748,7 +748,7 @@ __device float4 kernel_path_non_progressive(KernelGlobals *kg, RNG *rng, int sam
                        BsdfEval L_light;
                        bool is_lamp;
 
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                        light_ray.time = sd.time;
 #endif
 
@@ -867,7 +867,7 @@ __device float4 kernel_path_non_progressive(KernelGlobals *kg, RNG *rng, int sam
                                bsdf_ray.dP = sd.dP;
                                bsdf_ray.dD = bsdf_domega_in;
 #endif
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
                                bsdf_ray.time = sd.time;
 #endif
 
@@ -925,7 +925,7 @@ __device void kernel_path_trace(KernelGlobals *kg,
        float lens_u = path_rng(kg, &rng, sample, PRNG_LENS_U);
        float lens_v = path_rng(kg, &rng, sample, PRNG_LENS_V);
 
-#ifdef __MOTION__
+#ifdef __CAMERA_MOTION__
        float time = path_rng(kg, &rng, sample, PRNG_TIME);
 #else
        float time = 0.0f;
index b57e27bc8ed7c5e9d6e69d52db82c7aed03c5e41..ee4460a8541c842edb4a16016b4a5479bf01f203 100644 (file)
@@ -68,7 +68,7 @@ __device_inline void shader_setup_from_ray(KernelGlobals *kg, ShaderData *sd,
 #endif
 
        /* matrices and time */
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        sd->ob_tfm = object_fetch_transform(kg, sd->object, ray->time, OBJECT_TRANSFORM);
        sd->ob_itfm = object_fetch_transform(kg, sd->object, ray->time, OBJECT_INVERSE_TRANSFORM);
 
@@ -171,7 +171,7 @@ __device void shader_setup_from_sample(KernelGlobals *kg, ShaderData *sd,
        }
 #endif
 
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        sd->time = time;
 
        sd->ob_tfm = object_fetch_transform(kg, sd->object, time, OBJECT_TRANSFORM);
@@ -275,7 +275,7 @@ __device_inline void shader_setup_from_background(KernelGlobals *kg, ShaderData
        sd->I = -sd->P;
        sd->shader = kernel_data.background.shader;
        sd->flag = kernel_tex_fetch(__shader_flag, (sd->shader & SHADER_MASK)*2);
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        sd->time = ray->time;
 #endif
        sd->ray_length = 0.0f;
index 48e271a9f3f56eeadb21204f10ea889bf4e69a21..b290d4c42d937be123ec1aef5dfbc241c3fac892 100644 (file)
@@ -108,7 +108,8 @@ CCL_NAMESPACE_BEGIN
 #define __PASSES__
 #define __BACKGROUND_MIS__
 #define __AO__
-//#define __MOTION__
+#define __CAMERA_MOTION__
+//#define __OBJECT_MOTION__
 #endif
 
 //#define __SOBOL_FULL_SCREEN__
@@ -129,7 +130,7 @@ enum PathTraceDimension {
        PRNG_FILTER_V = 1,
        PRNG_LENS_U = 2,
        PRNG_LENS_V = 3,
-#ifdef __MOTION__
+#ifdef __CAMERA_MOTION__
        PRNG_TIME = 4,
        PRNG_UNUSED = 5,
        PRNG_BASE_NUM = 6,
@@ -426,7 +427,7 @@ typedef struct ShaderData {
        /* length of the ray being shaded */
        float ray_length;
 
-#ifdef __MOTION__
+#ifdef __OBJECT_MOTION__
        /* object <-> world space transformations, cached to avoid
         * re-interpolating them constantly for shading */
        Transform ob_tfm;
index 3fd7a1b28e390a4d00688003557949419594572a..441f17d90e91af0a4f0d7921c3c7e9fc97ad987c 100644 (file)
@@ -193,6 +193,7 @@ void Camera::device_update(Device *device, DeviceScene *dscene, Scene *scene)
                        }
                }
        }
+#ifdef __CAMERA_MOTION__
        else if(need_motion == Scene::MOTION_BLUR) {
                /* todo: exact camera position will not be hit this way */
                if(use_motion) {
@@ -200,6 +201,7 @@ void Camera::device_update(Device *device, DeviceScene *dscene, Scene *scene)
                        kcam->have_motion = 1;
                }
        }
+#endif
 
        /* depth of field */
        kcam->aperturesize = aperturesize;
@@ -208,7 +210,11 @@ void Camera::device_update(Device *device, DeviceScene *dscene, Scene *scene)
        kcam->bladesrotation = bladesrotation;
 
        /* motion blur */
+#ifdef __CAMERA_MOTION__
        kcam->shuttertime = (need_motion == Scene::MOTION_BLUR) ? shuttertime: 0.0f;
+#else
+       kcam->shuttertime = 0.0f;
+#endif
 
        /* type */
        kcam->type = type;
index 7037e36f3138e4c9add5e37528b589cb010e7503..014b78dec2b4145624842bde770fe558fdcd2140 100644 (file)
@@ -722,7 +722,11 @@ void MeshManager::device_update(Device *device, DeviceScene *dscene, Scene *scen
        foreach(Shader *shader, scene->shaders)
                shader->need_update_attributes = false;
 
+#ifdef __OBJECT_MOTION__
        bool motion_blur = scene->need_motion() == Scene::MOTION_BLUR;
+#else
+       bool motion_blur = false;
+#endif
 
        foreach(Object *object, scene->objects)
                object->compute_bounds(motion_blur);
index d78a82d589af64ecd0206b83fbb2355c44345591..4a72dcc52f7876794dd13190cdd76a3eb2c4ac91 100644 (file)
@@ -220,6 +220,7 @@ void ObjectManager::device_update_transforms(Device *device, DeviceScene *dscene
                        memcpy(&objects[offset+8], &mtfm_pre, sizeof(float4)*4);
                        memcpy(&objects[offset+12], &mtfm_post, sizeof(float4)*4);
                }
+#ifdef __OBJECT_MOTION__
                else if(need_motion == Scene::MOTION_BLUR) {
                        if(ob->use_motion) {
                                /* decompose transformations for interpolation */
@@ -234,6 +235,7 @@ void ObjectManager::device_update_transforms(Device *device, DeviceScene *dscene
                                memcpy(&objects[offset+8], &no_motion, sizeof(float4));
                        }
                }
+#endif
 
                /* dupli object coords */
                objects[offset+16] = make_float4(ob->dupli_generated[0], ob->dupli_generated[1], ob->dupli_generated[2], 0.0f);
@@ -297,7 +299,11 @@ void ObjectManager::apply_static_transforms(Scene *scene, Progress& progress)
 
        /* counter mesh users */
        map<Mesh*, int> mesh_users;
+#ifdef __OBJECT_MOTION__
        bool motion_blur = scene->need_motion() == Scene::MOTION_BLUR;
+#else
+       bool motion_blur = false;
+#endif
 
        foreach(Object *object, scene->objects) {
                map<Mesh*, int>::iterator it = mesh_users.find(object->mesh);