Cycles: Make all #include statements relative to cycles source directory
[blender.git] / intern / cycles / render / camera.cpp
1 /*
2  * Copyright 2011-2013 Blender Foundation
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16
17 #include "render/camera.h"
18 #include "render/mesh.h"
19 #include "render/object.h"
20 #include "render/scene.h"
21 #include "render/tables.h"
22
23 #include "device/device.h"
24
25 #include "util/util_foreach.h"
26 #include "util/util_function.h"
27 #include "util/util_math_cdf.h"
28 #include "util/util_vector.h"
29
30 CCL_NAMESPACE_BEGIN
31
32 static float shutter_curve_eval(float x,
33                                 array<float>& shutter_curve)
34 {
35         if(shutter_curve.size() == 0) {
36                 return 1.0f;
37         }
38
39         x *= shutter_curve.size();
40         int index = (int)x;
41         float frac = x - index;
42         if(index < shutter_curve.size() - 1) {
43                 return lerp(shutter_curve[index], shutter_curve[index + 1], frac);
44         }
45         else {
46                 return shutter_curve[shutter_curve.size() - 1];
47         }
48 }
49
/* Declare the "camera" node type and register all of its sockets, i.e. the
 * parameters that host applications can set and that participate in
 * Node::equals() comparisons (see Camera::modified). */
NODE_DEFINE(Camera)
{
	NodeType* type = NodeType::add("camera", create);

	/* Shutter / motion blur behavior. */
	SOCKET_FLOAT(shuttertime, "Shutter Time", 1.0f);

	static NodeEnum motion_position_enum;
	motion_position_enum.insert("start", MOTION_POSITION_START);
	motion_position_enum.insert("center", MOTION_POSITION_CENTER);
	motion_position_enum.insert("end", MOTION_POSITION_END);
	SOCKET_ENUM(motion_position, "Motion Position", motion_position_enum, MOTION_POSITION_CENTER);

	static NodeEnum rolling_shutter_type_enum;
	rolling_shutter_type_enum.insert("none", ROLLING_SHUTTER_NONE);
	rolling_shutter_type_enum.insert("top", ROLLING_SHUTTER_TOP);
	SOCKET_ENUM(rolling_shutter_type, "Rolling Shutter Type", rolling_shutter_type_enum,  ROLLING_SHUTTER_NONE);
	SOCKET_FLOAT(rolling_shutter_duration, "Rolling Shutter Duration", 0.1f);

	/* Per-frame shutter openness samples; empty means fully open. */
	SOCKET_FLOAT_ARRAY(shutter_curve, "Shutter Curve", array<float>());

	/* Depth of field. */
	SOCKET_FLOAT(aperturesize, "Aperture Size", 0.0f);
	SOCKET_FLOAT(focaldistance, "Focal Distance", 10.0f);
	SOCKET_UINT(blades, "Blades", 0);
	SOCKET_FLOAT(bladesrotation, "Blades Rotation", 0.0f);

	/* Camera-to-world transform. */
	SOCKET_TRANSFORM(matrix, "Matrix", transform_identity());

	/* Anamorphic lens bokeh. */
	SOCKET_FLOAT(aperture_ratio, "Aperture Ratio", 1.0f);

	/* Projection type. */
	static NodeEnum type_enum;
	type_enum.insert("perspective", CAMERA_PERSPECTIVE);
	/* NOTE(review): "orthograph" looks like a typo for "orthographic", but
	 * renaming the enum entry would break existing files that reference it
	 * by name -- confirm before changing. */
	type_enum.insert("orthograph", CAMERA_ORTHOGRAPHIC);
	type_enum.insert("panorama", CAMERA_PANORAMA);
	SOCKET_ENUM(type, "Type", type_enum, CAMERA_PERSPECTIVE);

	/* Panorama projection sub-type and its parameters. */
	static NodeEnum panorama_type_enum;
	panorama_type_enum.insert("equirectangular", PANORAMA_EQUIRECTANGULAR);
	panorama_type_enum.insert("mirrorball", PANORAMA_MIRRORBALL);
	panorama_type_enum.insert("fisheye_equidistant", PANORAMA_FISHEYE_EQUIDISTANT);
	panorama_type_enum.insert("fisheye_equisolid", PANORAMA_FISHEYE_EQUISOLID);
	SOCKET_ENUM(panorama_type, "Panorama Type", panorama_type_enum, PANORAMA_EQUIRECTANGULAR);

	SOCKET_FLOAT(fisheye_fov, "Fisheye FOV", M_PI_F);
	SOCKET_FLOAT(fisheye_lens, "Fisheye Lens", 10.5f);
	SOCKET_FLOAT(latitude_min, "Latitude Min", -M_PI_2_F);
	SOCKET_FLOAT(latitude_max, "Latitude Max", M_PI_2_F);
	SOCKET_FLOAT(longitude_min, "Longitude Min", -M_PI_F);
	SOCKET_FLOAT(longitude_max, "Longitude Max", M_PI_F);
	/* Field of view, plus values at motion interval boundaries for
	 * perspective (zoom/fov) motion blur. */
	SOCKET_FLOAT(fov, "FOV", M_PI_4_F);
	SOCKET_FLOAT(fov_pre, "FOV Pre", M_PI_4_F);
	SOCKET_FLOAT(fov_post, "FOV Post", M_PI_4_F);

	/* Stereoscopic rendering. */
	static NodeEnum stereo_eye_enum;
	stereo_eye_enum.insert("none", STEREO_NONE);
	stereo_eye_enum.insert("left", STEREO_LEFT);
	stereo_eye_enum.insert("right", STEREO_RIGHT);
	SOCKET_ENUM(stereo_eye, "Stereo Eye", stereo_eye_enum, STEREO_NONE);

	SOCKET_FLOAT(interocular_distance, "Interocular Distance", 0.065f);
	SOCKET_FLOAT(convergence_distance, "Convergence Distance", 30.0f * 0.065f);

	SOCKET_BOOLEAN(use_pole_merge, "Use Pole Merge", false);
	SOCKET_FLOAT(pole_merge_angle_from, "Pole Merge Angle From",  60.0f * M_PI_F / 180.0f);
	SOCKET_FLOAT(pole_merge_angle_to, "Pole Merge Angle To", 75.0f * M_PI_F / 180.0f);

	/* Physical sensor size (meters). */
	SOCKET_FLOAT(sensorwidth, "Sensor Width", 0.036f);
	SOCKET_FLOAT(sensorheight, "Sensor Height", 0.024f);

	/* Clipping distances. */
	SOCKET_FLOAT(nearclip, "Near Clip", 1e-5f);
	SOCKET_FLOAT(farclip, "Far Clip", 1e5f);

	/* Viewplane and render-border rectangles. */
	SOCKET_FLOAT(viewplane.left, "Viewplane Left", 0);
	SOCKET_FLOAT(viewplane.right, "Viewplane Right", 0);
	SOCKET_FLOAT(viewplane.bottom, "Viewplane Bottom", 0);
	SOCKET_FLOAT(viewplane.top, "Viewplane Top", 0);

	SOCKET_FLOAT(border.left, "Border Left", 0);
	SOCKET_FLOAT(border.right, "Border Right", 0);
	SOCKET_FLOAT(border.bottom, "Border Bottom", 0);
	SOCKET_FLOAT(border.top, "Border Top", 0);

	return type;
}
133
134 Camera::Camera()
135 : Node(node_type)
136 {
137         shutter_table_offset = TABLE_OFFSET_INVALID;
138
139         width = 1024;
140         height = 512;
141         resolution = 1;
142
143         motion.pre = transform_identity();
144         motion.post = transform_identity();
145         use_motion = false;
146         use_perspective_motion = false;
147
148         shutter_curve.resize(RAMP_TABLE_SIZE);
149         for(int i = 0; i < shutter_curve.size(); ++i) {
150                 shutter_curve[i] = 1.0f;
151         }
152
153         compute_auto_viewplane();
154
155         screentoworld = transform_identity();
156         rastertoworld = transform_identity();
157         ndctoworld = transform_identity();
158         rastertocamera = transform_identity();
159         cameratoworld = transform_identity();
160         worldtoraster = transform_identity();
161
162         dx = make_float3(0.0f, 0.0f, 0.0f);
163         dy = make_float3(0.0f, 0.0f, 0.0f);
164
165         need_update = true;
166         need_device_update = true;
167         need_flags_update = true;
168         previous_need_motion = -1;
169 }
170
/* Nothing to free here directly; the shutter lookup table is released
 * through Camera::device_free(). */
Camera::~Camera()
{
}
174
175 void Camera::compute_auto_viewplane()
176 {
177         if(type == CAMERA_PANORAMA) {
178                 viewplane.left = 0.0f;
179                 viewplane.right = 1.0f;
180                 viewplane.bottom = 0.0f;
181                 viewplane.top = 1.0f;
182         }
183         else {
184                 float aspect = (float)width/(float)height;
185                 if(width >= height) {
186                         viewplane.left = -aspect;
187                         viewplane.right = aspect;
188                         viewplane.bottom = -1.0f;
189                         viewplane.top = 1.0f;
190                 }
191                 else {
192                         viewplane.left = -1.0f;
193                         viewplane.right = 1.0f;
194                         viewplane.bottom = -1.0f/aspect;
195                         viewplane.top = 1.0f/aspect;
196                 }
197         }
198 }
199
/* Recompute all derived transforms and pixel differentials from the socket
 * values. No-op unless tag_update() raised need_update. */
void Camera::update()
{
	if(!need_update)
		return;

	/* Full viewport to camera border in the viewport. */
	Transform fulltoborder = transform_from_viewplane(viewport_camera_border);
	Transform bordertofull = transform_inverse(fulltoborder);

	/* ndc to raster */
	Transform ndctoraster = transform_scale(width, height, 1.0f) * bordertofull;
	Transform full_ndctoraster = transform_scale(full_width, full_height, 1.0f) * bordertofull;

	/* raster to screen */
	Transform screentondc = fulltoborder * transform_from_viewplane(viewplane);

	Transform screentoraster = ndctoraster * screentondc;
	Transform rastertoscreen = transform_inverse(screentoraster);
	Transform full_screentoraster = full_ndctoraster * screentondc;
	Transform full_rastertoscreen = transform_inverse(full_screentoraster);

	/* screen to camera: projection depends on the camera type; panorama
	 * cameras do the projection in the kernel, hence identity here. */
	Transform cameratoscreen;
	if(type == CAMERA_PERSPECTIVE)
		cameratoscreen = transform_perspective(fov, nearclip, farclip);
	else if(type == CAMERA_ORTHOGRAPHIC)
		cameratoscreen = transform_orthographic(nearclip, farclip);
	else
		cameratoscreen = transform_identity();

	Transform screentocamera = transform_inverse(cameratoscreen);

	rastertocamera = screentocamera * rastertoscreen;
	Transform full_rastertocamera = screentocamera * full_rastertoscreen;
	cameratoraster = screentoraster * cameratoscreen;

	cameratoworld = matrix;
	screentoworld = cameratoworld * screentocamera;
	rastertoworld = cameratoworld * rastertocamera;
	ndctoworld = rastertoworld * ndctoraster;

	/* note we recompose matrices instead of taking inverses of the above, this
	 * is needed to avoid inverting near degenerate matrices that happen due to
	 * precision issues with large scenes */
	worldtocamera = transform_inverse(matrix);
	worldtoscreen = cameratoscreen * worldtocamera;
	worldtondc = screentondc * worldtoscreen;
	worldtoraster = ndctoraster * worldtondc;

	/* differentials: camera-space offset of a one-pixel raster step, both
	 * for the border region (dx/dy) and the full frame (full_dx/full_dy) */
	if(type == CAMERA_ORTHOGRAPHIC) {
		dx = transform_direction(&rastertocamera, make_float3(1, 0, 0));
		dy = transform_direction(&rastertocamera, make_float3(0, 1, 0));
		full_dx = transform_direction(&full_rastertocamera, make_float3(1, 0, 0));
		full_dy = transform_direction(&full_rastertocamera, make_float3(0, 1, 0));
	}
	else if(type == CAMERA_PERSPECTIVE) {
		/* Perspective projection is not affine, so take the difference of
		 * two projected points rather than transforming a direction. */
		dx = transform_perspective(&rastertocamera, make_float3(1, 0, 0)) -
		     transform_perspective(&rastertocamera, make_float3(0, 0, 0));
		dy = transform_perspective(&rastertocamera, make_float3(0, 1, 0)) -
		     transform_perspective(&rastertocamera, make_float3(0, 0, 0));
		full_dx = transform_perspective(&full_rastertocamera, make_float3(1, 0, 0)) -
		     transform_perspective(&full_rastertocamera, make_float3(0, 0, 0));
		full_dy = transform_perspective(&full_rastertocamera, make_float3(0, 1, 0)) -
		     transform_perspective(&full_rastertocamera, make_float3(0, 0, 0));
	}
	else {
		dx = make_float3(0.0f, 0.0f, 0.0f);
		dy = make_float3(0.0f, 0.0f, 0.0f);
	}

	/* Bring differentials into world space. */
	dx = transform_direction(&cameratoworld, dx);
	dy = transform_direction(&cameratoworld, dy);
	full_dx = transform_direction(&cameratoworld, full_dx);
	full_dy = transform_direction(&cameratoworld, full_dy);

	/* TODO(sergey): Support other types of camera. */
	if(type == CAMERA_PERSPECTIVE) {
		/* TODO(sergey): Move to an utility function and de-duplicate with
		 * calculation above.
		 */
		Transform screentocamera_pre =
		        transform_inverse(transform_perspective(fov_pre,
		                                                nearclip,
		                                                farclip));
		Transform screentocamera_post =
		        transform_inverse(transform_perspective(fov_post,
		                                                nearclip,
		                                                farclip));
		perspective_motion.pre = screentocamera_pre * rastertoscreen;
		perspective_motion.post = screentocamera_post * rastertoscreen;
	}

	need_update = false;
	need_device_update = true;
	need_flags_update = true;
}
297
/* Copy camera state into the device scene's kernel camera data, uploading
 * only when the camera (or the scene's motion model) actually changed. */
void Camera::device_update(Device *device, DeviceScene *dscene, Scene *scene)
{
	Scene::MotionType need_motion = scene->need_motion(device->info.advanced_shading);

	update();

	if(previous_need_motion != need_motion) {
		/* scene's motion model could have been changed since previous device
		 * camera update this could happen for example in case when one render
		 * layer has got motion pass and another not */
		need_device_update = true;
	}

	if(!need_device_update)
		return;

	KernelCamera *kcam = &dscene->data.cam;

	/* store matrices */
	kcam->screentoworld = screentoworld;
	kcam->rastertoworld = rastertoworld;
	kcam->rastertocamera = rastertocamera;
	kcam->cameratoworld = cameratoworld;
	kcam->worldtocamera = worldtocamera;
	kcam->worldtoscreen = worldtoscreen;
	kcam->worldtoraster = worldtoraster;
	kcam->worldtondc = worldtondc;

	/* camera motion */
	kcam->have_motion = 0;
	kcam->have_perspective_motion = 0;

	if(need_motion == Scene::MOTION_PASS) {
		/* TODO(sergey): Support perspective (zoom, fov) motion. */
		if(type == CAMERA_PANORAMA) {
			if(use_motion) {
				kcam->motion.pre = transform_inverse(motion.pre);
				kcam->motion.post = transform_inverse(motion.post);
			}
			else {
				/* No explicit motion: fall back to the static transform. */
				kcam->motion.pre = kcam->worldtocamera;
				kcam->motion.post = kcam->worldtocamera;
			}
		}
		else {
			if(use_motion) {
				kcam->motion.pre = cameratoraster * transform_inverse(motion.pre);
				kcam->motion.post = cameratoraster * transform_inverse(motion.post);
			}
			else {
				kcam->motion.pre = worldtoraster;
				kcam->motion.post = worldtoraster;
			}
		}
	}
#ifdef __CAMERA_MOTION__
	else if(need_motion == Scene::MOTION_BLUR) {
		if(use_motion) {
			/* Decompose pre/post transforms for interpolation in the kernel. */
			transform_motion_decompose((DecompMotionTransform*)&kcam->motion, &motion, &matrix);
			kcam->have_motion = 1;
		}
		if(use_perspective_motion) {
			kcam->perspective_motion = perspective_motion;
			kcam->have_perspective_motion = 1;
		}
	}
#endif

	/* depth of field */
	kcam->aperturesize = aperturesize;
	kcam->focaldistance = focaldistance;
	/* Fewer than 3 blades cannot form a polygon; 0 selects a circular aperture. */
	kcam->blades = (blades < 3)? 0.0f: blades;
	kcam->bladesrotation = bladesrotation;

	/* motion blur */
#ifdef __CAMERA_MOTION__
	/* A shutter time of -1 signals to the kernel that motion blur is off. */
	kcam->shuttertime = (need_motion == Scene::MOTION_BLUR) ? shuttertime: -1.0f;

	scene->lookup_tables->remove_table(&shutter_table_offset);
	if(need_motion == Scene::MOTION_BLUR) {
		/* Build the inverted CDF of the shutter curve so the kernel can map
		 * uniform samples to time values weighted by shutter openness. */
		vector<float> shutter_table;
		util_cdf_inverted(SHUTTER_TABLE_SIZE,
		                  0.0f,
		                  1.0f,
		                  function_bind(shutter_curve_eval, _1, shutter_curve),
		                  false,
		                  shutter_table);
		shutter_table_offset = scene->lookup_tables->add_table(dscene,
		                                                       shutter_table);
		kcam->shutter_table_offset = (int)shutter_table_offset;
	}
#else
	kcam->shuttertime = -1.0f;
#endif

	/* type */
	kcam->type = type;

	/* anamorphic lens bokeh */
	kcam->inv_aperture_ratio = 1.0f / aperture_ratio;

	/* panorama */
	kcam->panorama_type = panorama_type;
	kcam->fisheye_fov = fisheye_fov;
	kcam->fisheye_lens = fisheye_lens;
	/* Longitude/latitude spans and offsets packed into one float4 for the
	 * equirectangular mapping in the kernel. */
	kcam->equirectangular_range = make_float4(longitude_min - longitude_max, -longitude_min,
	                                          latitude_min -  latitude_max, -latitude_min + M_PI_2_F);

	/* Stereo eye offset: half the interocular distance to either side. */
	switch(stereo_eye) {
		case STEREO_LEFT:
			kcam->interocular_offset = -interocular_distance * 0.5f;
			break;
		case STEREO_RIGHT:
			kcam->interocular_offset = interocular_distance * 0.5f;
			break;
		case STEREO_NONE:
		default:
			kcam->interocular_offset = 0.0f;
			break;
	}

	kcam->convergence_distance = convergence_distance;
	if(use_pole_merge) {
		kcam->pole_merge_angle_from = pole_merge_angle_from;
		kcam->pole_merge_angle_to = pole_merge_angle_to;
	}
	else {
		/* -1 is the sentinel for pole merge being disabled. */
		kcam->pole_merge_angle_from = -1.0f;
		kcam->pole_merge_angle_to = -1.0f;
	}

	/* sensor size */
	kcam->sensorwidth = sensorwidth;
	kcam->sensorheight = sensorheight;

	/* render size */
	kcam->width = width;
	kcam->height = height;
	kcam->resolution = resolution;

	/* store differentials */
	kcam->dx = float3_to_float4(dx);
	kcam->dy = float3_to_float4(dy);

	/* clipping */
	kcam->nearclip = nearclip;
	kcam->cliplength = (farclip == FLT_MAX)? FLT_MAX: farclip - nearclip;

	/* Camera in volume: reset here, recomputed in device_update_volume(). */
	kcam->is_inside_volume = 0;

	/* Rolling shutter effect */
	kcam->rolling_shutter_type = rolling_shutter_type;
	kcam->rolling_shutter_duration = rolling_shutter_duration;

	previous_need_motion = need_motion;
}
455
456 void Camera::device_update_volume(Device * /*device*/,
457                                   DeviceScene *dscene,
458                                   Scene *scene)
459 {
460         if(!need_device_update && !need_flags_update) {
461                 return;
462         }
463         KernelCamera *kcam = &dscene->data.cam;
464         BoundBox viewplane_boundbox = viewplane_bounds_get();
465         for(size_t i = 0; i < scene->objects.size(); ++i) {
466                 Object *object = scene->objects[i];
467                 if(object->mesh->has_volume &&
468                    viewplane_boundbox.intersects(object->bounds))
469                 {
470                         /* TODO(sergey): Consider adding more grained check. */
471                         kcam->is_inside_volume = 1;
472                         break;
473                 }
474         }
475         need_device_update = false;
476         need_flags_update = false;
477 }
478
/* Release device-side resources: gives the shutter curve lookup table back
 * to the scene's lookup table pool. */
void Camera::device_free(Device * /*device*/,
                         DeviceScene * /*dscene*/,
                         Scene *scene)
{
	scene->lookup_tables->remove_table(&shutter_table_offset);
}
485
/* Return true when any socket value differs from the given camera.
 * Motion data is not a socket and is compared in motion_modified(). */
bool Camera::modified(const Camera& cam)
{
	return !Node::equals(cam);
}
490
491 bool Camera::motion_modified(const Camera& cam)
492 {
493         return !((motion == cam.motion) &&
494                  (use_motion == cam.use_motion) &&
495                  (use_perspective_motion == cam.use_perspective_motion));
496 }
497
/* Mark the camera so the next Camera::update() call recomputes transforms. */
void Camera::tag_update()
{
	need_update = true;
}
502
503 float3 Camera::transform_raster_to_world(float raster_x, float raster_y)
504 {
505         float3 D, P;
506         if(type == CAMERA_PERSPECTIVE) {
507                 D = transform_perspective(&rastertocamera,
508                                           make_float3(raster_x, raster_y, 0.0f));
509                 float3 Pclip = normalize(D);
510                 P = make_float3(0.0f, 0.0f, 0.0f);
511                 /* TODO(sergey): Aperture support? */
512                 P = transform_point(&cameratoworld, P);
513                 D = normalize(transform_direction(&cameratoworld, D));
514                 /* TODO(sergey): Clipping is conditional in kernel, and hence it could
515                  * be mistakes in here, currently leading to wrong camera-in-volume
516                  * detection.
517                  */
518                 P += nearclip * D / Pclip.z;
519         }
520         else if(type == CAMERA_ORTHOGRAPHIC) {
521                 D = make_float3(0.0f, 0.0f, 1.0f);
522                 /* TODO(sergey): Aperture support? */
523                 P = transform_perspective(&rastertocamera,
524                                           make_float3(raster_x, raster_y, 0.0f));
525                 P = transform_point(&cameratoworld, P);
526                 D = normalize(transform_direction(&cameratoworld, D));
527         }
528         else {
529                 assert(!"unsupported camera type");
530         }
531         return P;
532 }
533
534 BoundBox Camera::viewplane_bounds_get()
535 {
536         /* TODO(sergey): This is all rather stupid, but is there a way to perform
537          * checks we need in a more clear and smart fasion?
538          */
539         BoundBox bounds = BoundBox::empty;
540
541         if(type == CAMERA_PANORAMA) {
542                 if(use_spherical_stereo == false) {
543                         bounds.grow(make_float3(cameratoworld.x.w,
544                                                 cameratoworld.y.w,
545                                                 cameratoworld.z.w));
546                 }
547                 else {
548                         float half_eye_distance = interocular_distance * 0.5f;
549
550                         bounds.grow(make_float3(cameratoworld.x.w + half_eye_distance,
551                                                 cameratoworld.y.w,
552                                                 cameratoworld.z.w));
553
554                         bounds.grow(make_float3(cameratoworld.z.w,
555                                                 cameratoworld.y.w + half_eye_distance,
556                                                 cameratoworld.z.w));
557
558                         bounds.grow(make_float3(cameratoworld.x.w - half_eye_distance,
559                                                 cameratoworld.y.w,
560                                                 cameratoworld.z.w));
561
562                         bounds.grow(make_float3(cameratoworld.x.w,
563                                                 cameratoworld.y.w - half_eye_distance,
564                                                 cameratoworld.z.w));
565                 }
566         }
567         else {
568                 bounds.grow(transform_raster_to_world(0.0f, 0.0f));
569                 bounds.grow(transform_raster_to_world(0.0f, (float)height));
570                 bounds.grow(transform_raster_to_world((float)width, (float)height));
571                 bounds.grow(transform_raster_to_world((float)width, 0.0f));
572                 if(type == CAMERA_PERSPECTIVE) {
573                         /* Center point has the most distance in local Z axis,
574                          * use it to construct bounding box/
575                          */
576                         bounds.grow(transform_raster_to_world(0.5f*width, 0.5f*height));
577                 }
578         }
579         return bounds;
580 }
581
/* Approximate the world-space width of one raster pixel as seen at the
 * world-space position P. Only perspective and orthographic cameras are
 * supported; panorama falls back to 1.0. */
float Camera::world_to_raster_size(float3 P)
{
	if(type == CAMERA_ORTHOGRAPHIC) {
		/* Pixel footprint is constant across the image for orthographic
		 * projection, so P is not needed. */
		return min(len(full_dx), len(full_dy));
	}
	else if(type == CAMERA_PERSPECTIVE) {
		/* Calculate as if point is directly ahead of the camera. */
		float3 raster = make_float3(0.5f*width, 0.5f*height, 0.0f);
		float3 Pcamera = transform_perspective(&rastertocamera, raster);

		/* dDdx */
		float3 Ddiff = transform_direction(&cameratoworld, Pcamera);
		/* NOTE: local `dx` shadows the member differential of the same name;
		 * the smaller of the two full-frame differentials is used. */
		float3 dx = len_squared(full_dx) < len_squared(full_dy) ? full_dx : full_dy;
		float3 dDdx = normalize(Ddiff + dx) - normalize(Ddiff);

		/* dPdx: scale the direction differential by the distance to P and
		 * keep only the component perpendicular to the view direction. */
		float dist = len(transform_point(&worldtocamera, P));
		float3 D = normalize(Ddiff);
		return len(dist*dDdx - dot(dist*dDdx, D)*D);
	}
	else {
		// TODO(mai): implement for CAMERA_PANORAMA
		assert(!"pixel width calculation for panoramic projection not implemented yet");
	}

	return 1.0f;
}
609
610 CCL_NAMESPACE_END