doxygen: prevent GPL license block from being parsed as doxygen comment.
[blender.git] / source / gameengine / Ketsji / KX_Camera.cpp
1 /*
2  * $Id$
3  *
4  * ***** BEGIN GPL LICENSE BLOCK *****
5  *
6  * This program is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU General Public License
8  * as published by the Free Software Foundation; either version 2
9  * of the License, or (at your option) any later version.
10  *
11  * This program is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  * GNU General Public License for more details.
15  *
16  * You should have received a copy of the GNU General Public License
17  * along with this program; if not, write to the Free Software Foundation,
18  * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
19  *
20  * The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
21  * All rights reserved.
22  *
23  * The Original Code is: all of this file.
24  *
25  * Contributor(s): none yet.
26  *
27  * ***** END GPL LICENSE BLOCK *****
28  * Camera in the gameengine. Cameras are also used for views.
29  */
30  
31 #include "GL/glew.h"
32 #include "KX_Camera.h"
33 #include "KX_Scene.h"
34 #include "KX_PythonInit.h"
35 #include "KX_Python.h"
36 #include "KX_PyMath.h"
37 KX_Camera::KX_Camera(void* sgReplicationInfo,
38                                          SG_Callbacks callbacks,
39                                          const RAS_CameraData& camdata,
40                                          bool frustum_culling,
41                                          bool delete_node)
42                                         :
43                                         KX_GameObject(sgReplicationInfo,callbacks),
44                                         m_camdata(camdata),
45                                         m_dirty(true),
46                                         m_normalized(false),
47                                         m_frustum_culling(frustum_culling),
48                                         m_set_projection_matrix(false),
49                                         m_set_frustum_center(false),
50                                         m_delete_node(delete_node)
51 {
52         // setting a name would be nice...
53         m_name = "cam";
54         m_projection_matrix.setIdentity();
55         m_modelview_matrix.setIdentity();
56 }
57
58
59 KX_Camera::~KX_Camera()
60 {
61         if (m_delete_node && m_pSGNode)
62         {
63                 // for shadow camera, avoids memleak
64                 delete m_pSGNode;
65                 m_pSGNode = NULL;
66         }
67 }       
68
69
70 CValue* KX_Camera::GetReplica()
71 {
72         KX_Camera* replica = new KX_Camera(*this);
73         
74         // this will copy properties and so on...
75         replica->ProcessReplica();
76         
77         return replica;
78 }
79
80 void KX_Camera::ProcessReplica()
81 {
82         KX_GameObject::ProcessReplica();
83         // replicated cameras are always registered in the scene
84         m_delete_node = false;
85 }
86
87 MT_Transform KX_Camera::GetWorldToCamera() const
88 {
89         MT_Transform camtrans;
90         camtrans.invert(MT_Transform(NodeGetWorldPosition(), NodeGetWorldOrientation()));
91         
92         return camtrans;
93 }
94
95
96          
97 MT_Transform KX_Camera::GetCameraToWorld() const
98 {
99         return MT_Transform(NodeGetWorldPosition(), NodeGetWorldOrientation());
100 }
101
102
103
104 void KX_Camera::CorrectLookUp(MT_Scalar speed)
105 {
106 }
107
108
109
110 const MT_Point3 KX_Camera::GetCameraLocation() const
111 {
112         /* this is the camera location in cam coords... */
113         //return m_trans1.getOrigin();
114         //return MT_Point3(0,0,0);   <-----
115         /* .... I want it in world coords */
116         //MT_Transform trans;
117         //trans.setBasis(NodeGetWorldOrientation());
118         
119         return NodeGetWorldPosition();          
120 }
121
122
123
124 /* I want the camera orientation as well. */
125 const MT_Quaternion KX_Camera::GetCameraOrientation() const
126 {
127         return NodeGetWorldOrientation().getRotation();
128 }
129
130
131
132 /**
133 * Sets the projection matrix that is used by the rasterizer.
134 */
135 void KX_Camera::SetProjectionMatrix(const MT_Matrix4x4 & mat)
136 {
137         m_projection_matrix = mat;
138         m_dirty = true;
139         m_set_projection_matrix = true;
140         m_set_frustum_center = false;
141 }
142
143
144
145 /**
146 * Sets the modelview matrix that is used by the rasterizer.
147 */
148 void KX_Camera::SetModelviewMatrix(const MT_Matrix4x4 & mat)
149 {
150         m_modelview_matrix = mat;
151         m_dirty = true;
152         m_set_frustum_center = false;
153 }
154
155
156
157 /**
158 * Gets the projection matrix that is used by the rasterizer.
159 */
160 const MT_Matrix4x4& KX_Camera::GetProjectionMatrix() const
161 {
162         return m_projection_matrix;
163 }
164
165
166
167 /**
168 * Gets the modelview matrix that is used by the rasterizer.
169 */
170 const MT_Matrix4x4& KX_Camera::GetModelviewMatrix() const
171 {
172         return m_modelview_matrix;
173 }
174
175
176 bool KX_Camera::hasValidProjectionMatrix() const
177 {
178         return m_set_projection_matrix;
179 }
180
181 void KX_Camera::InvalidateProjectionMatrix(bool valid)
182 {
183         m_set_projection_matrix = valid;
184 }
185
186
187 /*
188 * These getters retrieve the clip data and the focal length
189 */
190 float KX_Camera::GetLens() const
191 {
192         return m_camdata.m_lens;
193 }
194
195 float KX_Camera::GetScale() const
196 {
197         return m_camdata.m_scale;
198 }
199
200
201
202 float KX_Camera::GetCameraNear() const
203 {
204         return m_camdata.m_clipstart;
205 }
206
207
208
209 float KX_Camera::GetCameraFar() const
210 {
211         return m_camdata.m_clipend;
212 }
213
214 float KX_Camera::GetFocalLength() const
215 {
216         return m_camdata.m_focallength;
217 }
218
219
220
221 RAS_CameraData* KX_Camera::GetCameraData()
222 {
223         return &m_camdata; 
224 }
225
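/**
* Extracts the six frustum clip planes from the combined projection * modelview matrix.
* Each plane is stored as (a, b, c, d) of a*x + b*y + c*z + d = 0 with the positive
* half-space facing the inside of the frustum; the planes are left unnormalized here.
*/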
226 void KX_Camera::ExtractClipPlanes()
227 {
228         if (!m_dirty)
229                 return;
230
231         MT_Matrix4x4 m = m_projection_matrix * m_modelview_matrix;
232         // Left clip plane
233         m_planes[0] = m[3] + m[0];
234         // Right clip plane
235         m_planes[1] = m[3] - m[0];
236         // Top clip plane
237         m_planes[2] = m[3] - m[1];
238         // Bottom clip plane
239         m_planes[3] = m[3] + m[1];
240         // Near clip plane
241         m_planes[4] = m[3] + m[2];
242         // Far clip plane
243         m_planes[5] = m[3] - m[2];
244         
245         m_dirty = false;
246         m_normalized = false;
247 }
248
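/**
* Scales each clip plane so its normal (a, b, c) has unit length. This is only needed
* when the plane equation value is used as a signed distance, as in SphereInsideFrustum();
* the sign-only tests in PointInsideFrustum() and BoxInsideFrustum() work with
* unnormalized planes.
*/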
249 void KX_Camera::NormalizeClipPlanes()
250 {
251         if (m_normalized)
252                 return;
253         
254         for (unsigned int p = 0; p < 6; p++)
255         {
256                 MT_Scalar factor = sqrt(m_planes[p][0]*m_planes[p][0] + m_planes[p][1]*m_planes[p][1] + m_planes[p][2]*m_planes[p][2]);
257                 if (!MT_fuzzyZero(factor))
258                         m_planes[p] /= factor;
259         }
260         
261         m_normalized = true;
262 }
263
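/**
* Computes a world-space bounding sphere of the view frustum, handling both perspective
* (possibly asymmetric) and orthographic projections. SphereInsideFrustum() uses it as a
* cheap first rejection test before the per-plane tests.
*/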
264 void KX_Camera::ExtractFrustumSphere()
265 {
266         if (m_set_frustum_center)
267                 return;
268
269     // compute the bounding sphere for the general case, not only for a symmetric frustum:
270     // the mirror code in ImageRender can use a very asymmetric frustum.
271     // We put the sphere center on the line that goes from the origin to the center of the far clipping plane.
272     // This is the optimal position if the frustum is symmetric or very asymmetric, and probably close
273     // to optimal for the general case. The sphere center position is computed so that the distances to
274     // the near and far extreme frustum points are equal.
275
276     // get the transformation matrix from normalized device coordinates to camera coordinates
277         MT_Matrix4x4 clip_camcs_matrix = m_projection_matrix;
278         clip_camcs_matrix.invert();
279
280         if (m_projection_matrix[3][3] == MT_Scalar(0.0)) 
281         {
282                 // perspective (frustum) projection
283                 // detect which corner of the far clipping plane is farthest from the origin
284                 MT_Vector4 nfar;    // far point in device normalized coordinate
285                 MT_Point3 farpoint; // most extreme far point in camera coordinate
286                 MT_Point3 nearpoint;// most extreme near point in camera coordinate
287                 MT_Point3 farcenter(0.,0.,0.);// center of the far clipping plane in camera coordinates
288                 MT_Scalar F=-1.0, N; // squared distance of the far and near points to the origin
289                 MT_Scalar f, n;     // distance of far and near point to z axis. f is always > 0 but n can be < 0
290                 MT_Scalar e, s;     // far and near clipping distance (<0)
291                 MT_Scalar c;        // slope of center line = distance of far clipping center to z axis / far clipping distance
292                 MT_Scalar z;        // projection of sphere center on z axis (<0)
293                 // tmp value
294                 MT_Vector4 npoint(1., 1., 1., 1.);
295                 MT_Vector4 hpoint;
296                 MT_Point3 point;
297                 MT_Scalar len;
298                 for (int i=0; i<4; i++)
299                 {
300                         hpoint = clip_camcs_matrix*npoint;
301                         point.setValue(hpoint[0]/hpoint[3], hpoint[1]/hpoint[3], hpoint[2]/hpoint[3]);
302                         len = point.dot(point);
303                         if (len > F)
304                         {
305                                 nfar = npoint;
306                                 farpoint = point;
307                                 F = len;
308                         }
309                         // rotate by 90 degrees about the z axis to walk through the 4 extreme points of the far clipping plane
310                         len = npoint[0];
311                         npoint[0] = -npoint[1];
312                         npoint[1] = len;
313                         farcenter += point;
314                 }
315                 // the far center is the average of the far clipping points
316                 farcenter *= 0.25;
317                 // the extreme near point is the opposite point on the near clipping plane
318                 nfar.setValue(-nfar[0], -nfar[1], -1., 1.);
319                 nfar = clip_camcs_matrix*nfar;
320                 nearpoint.setValue(nfar[0]/nfar[3], nfar[1]/nfar[3], nfar[2]/nfar[3]);
321                 // this is a frustum projection
322                 N = nearpoint.dot(nearpoint);
323                 e = farpoint[2];
324                 s = nearpoint[2];
325                 // projection on XY plane for distance to axis computation
326                 MT_Point2 farxy(farpoint[0], farpoint[1]);
327                 // f is forced positive by construction
328                 f = farxy.length();
329                 // get corresponding point on the near plane
330                 farxy *= s/e;
331                 // this formula preserves the sign of n
332                 n = f*s/e - MT_Point2(nearpoint[0]-farxy[0], nearpoint[1]-farxy[1]).length();
333                 c = MT_Point2(farcenter[0], farcenter[1]).length()/e;
334                 // the big formula; it simplifies to (F-N)/(2(e-s)) in the symmetric case
335                 z = (F-N)/(2.0*(e-s+c*(f-n)));
336                 m_frustum_center = MT_Point3(farcenter[0]*z/e, farcenter[1]*z/e, z);
337                 m_frustum_radius = m_frustum_center.distance(farpoint);
338         } 
339         else
340         {
341                 // orthographic projection
342                 // The most extreme points on the near and far plane. (normalized device coords)
343                 MT_Vector4 hnear(1., 1., 1., 1.), hfar(-1., -1., -1., 1.);
344                 
345                 // Transform to homogeneous camera local space
346                 hnear = clip_camcs_matrix*hnear;
347                 hfar = clip_camcs_matrix*hfar;
348                 
349                 // Transform to 3D camera local space.
350                 MT_Point3 nearpoint(hnear[0]/hnear[3], hnear[1]/hnear[3], hnear[2]/hnear[3]);
351                 MT_Point3 farpoint(hfar[0]/hfar[3], hfar[1]/hfar[3], hfar[2]/hfar[3]);
352                 
353                 // just use the midpoint of the near and far points
354                 m_frustum_center = (farpoint + nearpoint)*0.5;
355                 m_frustum_radius = m_frustum_center.distance(farpoint);
356         }
357         // Transform to world space.
358         m_frustum_center = GetCameraToWorld()(m_frustum_center);
359         m_frustum_radius /= fabs(NodeGetWorldScaling()[NodeGetWorldScaling().closestAxis()]);
360         
361         m_set_frustum_center = true;
362 }
363
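/**
* Tests a world-space point against the six clip planes. Only the sign of the plane
* equation matters here, so the planes do not need to be normalized.
*/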
364 bool KX_Camera::PointInsideFrustum(const MT_Point3& x)
365 {
366         ExtractClipPlanes();
367         
368         for( unsigned int i = 0; i < 6 ; i++ )
369         {
370                 if (m_planes[i][0]*x[0] + m_planes[i][1]*x[1] + m_planes[i][2]*x[2] + m_planes[i][3] < 0.)
371                         return false;
372         }
373         return true;
374 }
375
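/**
* Classifies a box, given as its 8 world-space corner points, against the frustum.
* The test is conservative: a box whose corners straddle several planes can be
* reported as INTERSECT even though it lies entirely outside the frustum.
*/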
376 int KX_Camera::BoxInsideFrustum(const MT_Point3 *box)
377 {
378         ExtractClipPlanes();
379         
380         unsigned int insideCount = 0;
381         // 6 view frustum planes
382         for( unsigned int p = 0; p < 6 ; p++ )
383         {
384                 unsigned int behindCount = 0;
385                 // 8 box vertices.
386                 for (unsigned int v = 0; v < 8 ; v++)
387                 {
388                         if (m_planes[p][0]*box[v][0] + m_planes[p][1]*box[v][1] + m_planes[p][2]*box[v][2] + m_planes[p][3] < 0.)
389                                 behindCount++;
390                 }
391                 
392                 // 8 points behind this plane
393                 if (behindCount == 8)
394                         return OUTSIDE;
395
396                 // Every box vertex is on the front side of this plane
397                 if (!behindCount)
398                         insideCount++;
399         }
400         
401         // All box vertices are on the front side of all frustum planes.
402         if (insideCount == 6)
403                 return INSIDE;
404         
405         return INTERSECT;
406 }
407
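/**
* Classifies a world-space sphere against the frustum: first a quick rejection test
* against the frustum bounding sphere, then signed-distance tests against the six
* normalized clip planes.
*/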
408 int KX_Camera::SphereInsideFrustum(const MT_Point3& center, const MT_Scalar &radius)
409 {
410         ExtractFrustumSphere();
411         if (center.distance2(m_frustum_center) > (radius + m_frustum_radius)*(radius + m_frustum_radius))
412                 return OUTSIDE;
413
414         unsigned int p;
415         ExtractClipPlanes();
416         NormalizeClipPlanes();
417                 
418         MT_Scalar distance;
419         int intersect = INSIDE;
420         // distance:  <-------- OUTSIDE -----|----- INTERSECT -----0----- INTERSECT -----|----- INSIDE -------->
421         //                                -radius                                      radius
422         for (p = 0; p < 6; p++)
423         {
424                 distance = m_planes[p][0]*center[0] + m_planes[p][1]*center[1] + m_planes[p][2]*center[2] + m_planes[p][3];
425                 if (fabs(distance) <= radius)
426                         intersect = INTERSECT;
427                 else if (distance < -radius)
428                         return OUTSIDE;
429         }
430         
431         return intersect;
432 }
433
434 bool KX_Camera::GetFrustumCulling() const
435 {
436         return m_frustum_culling;
437 }
438  
439 void KX_Camera::EnableViewport(bool viewport)
440 {
441         m_camdata.m_viewport = viewport;
442 }
443
444 void KX_Camera::SetViewport(int left, int bottom, int right, int top)
445 {
446         m_camdata.m_viewportleft = left;
447         m_camdata.m_viewportbottom = bottom;
448         m_camdata.m_viewportright = right;
449         m_camdata.m_viewporttop = top;
450 }
451
452 bool KX_Camera::GetViewport() const
453 {
454         return m_camdata.m_viewport;
455 }
456
457 int KX_Camera::GetViewportLeft() const
458 {
459         return m_camdata.m_viewportleft;
460 }
461
462 int KX_Camera::GetViewportBottom() const
463 {
464         return m_camdata.m_viewportbottom;
465 }
466
467 int KX_Camera::GetViewportRight() const
468 {
469         return m_camdata.m_viewportright;
470 }
471
472 int KX_Camera::GetViewportTop() const
473 {
474         return m_camdata.m_viewporttop;
475 }
476
477 #ifdef WITH_PYTHON
478 //----------------------------------------------------------------------------
479 //Python
480
481
482 PyMethodDef KX_Camera::Methods[] = {
483         KX_PYMETHODTABLE(KX_Camera, sphereInsideFrustum),
484         KX_PYMETHODTABLE_O(KX_Camera, boxInsideFrustum),
485         KX_PYMETHODTABLE_O(KX_Camera, pointInsideFrustum),
486         KX_PYMETHODTABLE_NOARGS(KX_Camera, getCameraToWorld),
487         KX_PYMETHODTABLE_NOARGS(KX_Camera, getWorldToCamera),
488         KX_PYMETHODTABLE(KX_Camera, setViewport),
489         KX_PYMETHODTABLE_NOARGS(KX_Camera, setOnTop),
490         KX_PYMETHODTABLE_O(KX_Camera, getScreenPosition),
491         KX_PYMETHODTABLE(KX_Camera, getScreenVect),
492         KX_PYMETHODTABLE(KX_Camera, getScreenRay),
493         {NULL,NULL} //Sentinel
494 };
495
496 PyAttributeDef KX_Camera::Attributes[] = {
497         
498         KX_PYATTRIBUTE_BOOL_RW("frustum_culling", KX_Camera, m_frustum_culling),
499         KX_PYATTRIBUTE_RW_FUNCTION("perspective", KX_Camera, pyattr_get_perspective, pyattr_set_perspective),
500         
501         KX_PYATTRIBUTE_RW_FUNCTION("lens",      KX_Camera,      pyattr_get_lens, pyattr_set_lens),
502         KX_PYATTRIBUTE_RW_FUNCTION("ortho_scale",       KX_Camera,      pyattr_get_ortho_scale, pyattr_set_ortho_scale),
503         KX_PYATTRIBUTE_RW_FUNCTION("near",      KX_Camera,      pyattr_get_near, pyattr_set_near),
504         KX_PYATTRIBUTE_RW_FUNCTION("far",       KX_Camera,      pyattr_get_far,  pyattr_set_far),
505         
506         KX_PYATTRIBUTE_RW_FUNCTION("useViewport",       KX_Camera,      pyattr_get_use_viewport,  pyattr_set_use_viewport),
507         
508         KX_PYATTRIBUTE_RW_FUNCTION("projection_matrix", KX_Camera,      pyattr_get_projection_matrix, pyattr_set_projection_matrix),
509         KX_PYATTRIBUTE_RO_FUNCTION("modelview_matrix",  KX_Camera,      pyattr_get_modelview_matrix),
510         KX_PYATTRIBUTE_RO_FUNCTION("camera_to_world",   KX_Camera,      pyattr_get_camera_to_world),
511         KX_PYATTRIBUTE_RO_FUNCTION("world_to_camera",   KX_Camera,      pyattr_get_world_to_camera),
512         
513         /* Grrr, functions for constants? */
514         KX_PYATTRIBUTE_RO_FUNCTION("INSIDE",    KX_Camera, pyattr_get_INSIDE),
515         KX_PYATTRIBUTE_RO_FUNCTION("OUTSIDE",   KX_Camera, pyattr_get_OUTSIDE),
516         KX_PYATTRIBUTE_RO_FUNCTION("INTERSECT", KX_Camera, pyattr_get_INTERSECT),
517         
518         { NULL }        //Sentinel
519 };
520
521 PyTypeObject KX_Camera::Type = {
522         PyVarObject_HEAD_INIT(NULL, 0)
523         "KX_Camera",
524         sizeof(PyObjectPlus_Proxy),
525         0,
526         py_base_dealloc,
527         0,
528         0,
529         0,
530         0,
531         py_base_repr,
532         0,
533         &KX_GameObject::Sequence,
534         &KX_GameObject::Mapping,
535         0,0,0,
536         NULL,
537         NULL,
538         0,
539         Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
540         0,0,0,0,0,0,0,
541         Methods,
542         0,
543         0,
544         &KX_GameObject::Type,
545         0,0,0,0,0,0,
546         py_base_new
547 };
548
549 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, sphereInsideFrustum,
550 "sphereInsideFrustum(center, radius) -> Integer\n"
551 "\treturns INSIDE, OUTSIDE or INTERSECT if the given sphere is\n"
552 "\tinside/outside/intersects this camera's viewing frustum.\n\n"
553 "\tcenter = the center of the sphere (in world coordinates.)\n"
554 "\tradius = the radius of the sphere\n\n"
555 "\tExample:\n"
556 "\timport bge.logic\n\n"
557 "\tco = bge.logic.getCurrentController()\n"
558 "\tcam = co.owner\n\n"
559 "\t# A sphere of radius 4.0 located at [x, y, z] = [1.0, 1.0, 1.0]\n"
560 "\tif (cam.sphereInsideFrustum([1.0, 1.0, 1.0], 4) != cam.OUTSIDE):\n"
561 "\t\t# Sphere is inside frustum !\n"
562 "\t\t# Do something useful !\n"
563 "\telse:\n"
564 "\t\t# Sphere is outside frustum\n"
565 )
566 {
567         PyObject *pycenter;
568         float radius;
569         if (PyArg_ParseTuple(args, "Of:sphereInsideFrustum", &pycenter, &radius))
570         {
571                 MT_Point3 center;
572                 if (PyVecTo(pycenter, center))
573                 {
574                         return PyLong_FromSsize_t(SphereInsideFrustum(center, radius)); /* new ref */
575                 }
576         }
577
578         PyErr_SetString(PyExc_TypeError, "camera.sphereInsideFrustum(center, radius): KX_Camera, expected arguments: (center, radius)");
579         
580         return NULL;
581 }
582
583 KX_PYMETHODDEF_DOC_O(KX_Camera, boxInsideFrustum,
584 "boxInsideFrustum(box) -> Integer\n"
585 "\treturns INSIDE, OUTSIDE or INTERSECT if the given box is\n"
586 "\tinside/outside/intersects this camera's viewing frustum.\n\n"
587 "\tbox = a list of the eight (8) corners of the box (in world coordinates.)\n\n"
588 "\tExample:\n"
589 "\timport bge.logic\n\n"
590 "\tco = bge.logic.getCurrentController()\n"
591 "\tcam = co.owner\n\n"
592 "\tbox = []\n"
593 "\tbox.append([-1.0, -1.0, -1.0])\n"
594 "\tbox.append([-1.0, -1.0,  1.0])\n"
595 "\tbox.append([-1.0,  1.0, -1.0])\n"
596 "\tbox.append([-1.0,  1.0,  1.0])\n"
597 "\tbox.append([ 1.0, -1.0, -1.0])\n"
598 "\tbox.append([ 1.0, -1.0,  1.0])\n"
599 "\tbox.append([ 1.0,  1.0, -1.0])\n"
600 "\tbox.append([ 1.0,  1.0,  1.0])\n\n"
601 "\tif (cam.boxInsideFrustum(box) != cam.OUTSIDE):\n"
602 "\t\t# Box is inside/intersects frustum !\n"
603 "\t\t# Do something useful !\n"
604 "\telse:\n"
605 "\t\t# Box is outside the frustum !\n"
606 )
607 {
608         unsigned int num_points = PySequence_Size(value);
609         if (num_points != 8)
610         {
611                 PyErr_Format(PyExc_TypeError, "camera.boxInsideFrustum(box): KX_Camera, expected eight (8) points, got %d", num_points);
612                 return NULL;
613         }
614         
615         MT_Point3 box[8];
616         for (unsigned int p = 0; p < 8 ; p++)
617         {
618                 PyObject *item = PySequence_GetItem(value, p); /* new ref */
619                 bool error = !PyVecTo(item, box[p]);
620                 Py_DECREF(item);
621                 if (error)
622                         return NULL;
623         }
624         
625         return PyLong_FromSsize_t(BoxInsideFrustum(box)); /* new ref */
626 }
627
628 KX_PYMETHODDEF_DOC_O(KX_Camera, pointInsideFrustum,
629 "pointInsideFrustum(point) -> Bool\n"
630 "\treturns 1 if the given point is inside this camera's viewing frustum.\n\n"
631 "\tpoint = The point to test (in world coordinates.)\n\n"
632 "\tExample:\n"
633 "\timport bge.logic\n\n"
634 "\tco = bge.logic.getCurrentController()\n"
635 "\tcam = co.owner\n\n"
636 "\t# Test point [0.0, 0.0, 0.0]\n"
637 "\tif (cam.pointInsideFrustum([0.0, 0.0, 0.0])):\n"
638 "\t\t# Point is inside frustum !\n"
639 "\t\t# Do something useful !\n"
640 "\telse:\n"
641 "\t\t# Point is outside the frustum !\n"
642 )
643 {
644         MT_Point3 point;
645         if (PyVecTo(value, point))
646         {
647                 return PyLong_FromSsize_t(PointInsideFrustum(point)); /* new ref */
648         }
649         
650         PyErr_SetString(PyExc_TypeError, "camera.pointInsideFrustum(point): KX_Camera, expected point argument.");
651         return NULL;
652 }
653
654 KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, getCameraToWorld,
655 "getCameraToWorld() -> Matrix4x4\n"
656 "\treturns the camera to world transformation matrix, as a list of four lists of four values.\n\n"
657 "\te.g. [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]\n"
658 )
659 {
660         return PyObjectFrom(GetCameraToWorld()); /* new ref */
661 }
662
663 KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, getWorldToCamera,
664 "getWorldToCamera() -> Matrix4x4\n"
665 "\treturns the world to camera transformation matrix, as a list of four lists of four values.\n\n"
666 "\te.g. [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]\n"
667 )
668 {
669         return PyObjectFrom(GetWorldToCamera()); /* new ref */
670 }
671
672 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, setViewport,
673 "setViewport(left, bottom, right, top)\n"
674 "Sets this camera's viewport\n")
675 {
676         int left, bottom, right, top;
677         if (!PyArg_ParseTuple(args,"iiii:setViewport",&left, &bottom, &right, &top))
678                 return NULL;
679         
680         SetViewport(left, bottom, right, top);
681         Py_RETURN_NONE;
682 }
683
684 KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, setOnTop,
685 "setOnTop()\n"
686 "Sets this camera's viewport on top\n")
687 {
688         class KX_Scene* scene = KX_GetActiveScene();
689         scene->SetCameraOnTop(this);
690         Py_RETURN_NONE;
691 }
692
693 PyObject* KX_Camera::pyattr_get_perspective(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
694 {
695         KX_Camera* self= static_cast<KX_Camera*>(self_v);
696         return PyBool_FromLong(self->m_camdata.m_perspective);
697 }
698
699 int KX_Camera::pyattr_set_perspective(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
700 {
701         KX_Camera* self= static_cast<KX_Camera*>(self_v);
702         int param = PyObject_IsTrue( value );
703         if (param == -1) {
704                 PyErr_SetString(PyExc_AttributeError, "camera.perspective = bool: KX_Camera, expected True/False or 0/1");
705                 return PY_SET_ATTR_FAIL;
706         }
707         
708         self->m_camdata.m_perspective= param;
709         self->InvalidateProjectionMatrix();
710         return PY_SET_ATTR_SUCCESS;
711 }
712
713 PyObject* KX_Camera::pyattr_get_lens(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
714 {
715         KX_Camera* self= static_cast<KX_Camera*>(self_v);
716         return PyFloat_FromDouble(self->m_camdata.m_lens);
717 }
718
719 int KX_Camera::pyattr_set_lens(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
720 {
721         KX_Camera* self= static_cast<KX_Camera*>(self_v);
722         float param = PyFloat_AsDouble(value);
723         if (param == -1) {
724                 PyErr_SetString(PyExc_AttributeError, "camera.lens = float: KX_Camera, expected a float greater than zero");
725                 return PY_SET_ATTR_FAIL;
726         }
727         
728         self->m_camdata.m_lens= param;
729         self->m_set_projection_matrix = false;
730         return PY_SET_ATTR_SUCCESS;
731 }
732
733 PyObject* KX_Camera::pyattr_get_ortho_scale(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
734 {
735         KX_Camera* self= static_cast<KX_Camera*>(self_v);
736         return PyFloat_FromDouble(self->m_camdata.m_scale);
737 }
738
739 int KX_Camera::pyattr_set_ortho_scale(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
740 {
741         KX_Camera* self= static_cast<KX_Camera*>(self_v);
742         float param = PyFloat_AsDouble(value);
743         if (param == -1) {
744                 PyErr_SetString(PyExc_AttributeError, "camera.ortho_scale = float: KX_Camera, expected a float greater than zero");
745                 return PY_SET_ATTR_FAIL;
746         }
747         
748         self->m_camdata.m_scale= param;
749         self->m_set_projection_matrix = false;
750         return PY_SET_ATTR_SUCCESS;
751 }
752
753 PyObject* KX_Camera::pyattr_get_near(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
754 {
755         KX_Camera* self= static_cast<KX_Camera*>(self_v);
756         return PyFloat_FromDouble(self->m_camdata.m_clipstart);
757 }
758
759 int KX_Camera::pyattr_set_near(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
760 {
761         KX_Camera* self= static_cast<KX_Camera*>(self_v);
762         float param = PyFloat_AsDouble(value);
763         if (param == -1) {
764                 PyErr_SetString(PyExc_AttributeError, "camera.near = float: KX_Camera, expected a float greater than zero");
765                 return PY_SET_ATTR_FAIL;
766         }
767         
768         self->m_camdata.m_clipstart= param;
769         self->m_set_projection_matrix = false;
770         return PY_SET_ATTR_SUCCESS;
771 }
772
773 PyObject* KX_Camera::pyattr_get_far(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
774 {
775         KX_Camera* self= static_cast<KX_Camera*>(self_v);
776         return PyFloat_FromDouble(self->m_camdata.m_clipend);
777 }
778
779 int KX_Camera::pyattr_set_far(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
780 {
781         KX_Camera* self= static_cast<KX_Camera*>(self_v);
782         float param = PyFloat_AsDouble(value);
783         if (param == -1) {
784                 PyErr_SetString(PyExc_AttributeError, "camera.far = float: KX_Camera, expected a float greater than zero");
785                 return PY_SET_ATTR_FAIL;
786         }
787         
788         self->m_camdata.m_clipend= param;
789         self->m_set_projection_matrix = false;
790         return PY_SET_ATTR_SUCCESS;
791 }
792
793
794 PyObject* KX_Camera::pyattr_get_use_viewport(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
795 {
796         KX_Camera* self= static_cast<KX_Camera*>(self_v);
797         return PyBool_FromLong(self->GetViewport());
798 }
799
800 int KX_Camera::pyattr_set_use_viewport(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
801 {
802         KX_Camera* self= static_cast<KX_Camera*>(self_v);
803         int param = PyObject_IsTrue( value );
804         if (param == -1) {
805                 PyErr_SetString(PyExc_AttributeError, "camera.useViewport = bool: KX_Camera, expected True or False");
806                 return PY_SET_ATTR_FAIL;
807         }
808         self->EnableViewport((bool)param);
809         return PY_SET_ATTR_SUCCESS;
810 }
811
812
813 PyObject* KX_Camera::pyattr_get_projection_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
814 {
815         KX_Camera* self= static_cast<KX_Camera*>(self_v);
816         return PyObjectFrom(self->GetProjectionMatrix()); 
817 }
818
819 int KX_Camera::pyattr_set_projection_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
820 {
821         KX_Camera* self= static_cast<KX_Camera*>(self_v);
822         MT_Matrix4x4 mat;
823         if (!PyMatTo(value, mat)) 
824                 return PY_SET_ATTR_FAIL;
825         
826         self->SetProjectionMatrix(mat);
827         return PY_SET_ATTR_SUCCESS;
828 }
829
830 PyObject* KX_Camera::pyattr_get_modelview_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
831 {
832         KX_Camera* self= static_cast<KX_Camera*>(self_v);
833         return PyObjectFrom(self->GetModelviewMatrix()); 
834 }
835
836 PyObject* KX_Camera::pyattr_get_camera_to_world(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
837 {
838         KX_Camera* self= static_cast<KX_Camera*>(self_v);
839         return PyObjectFrom(self->GetCameraToWorld());
840 }
841
842 PyObject* KX_Camera::pyattr_get_world_to_camera(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
843 {
844         KX_Camera* self= static_cast<KX_Camera*>(self_v);
845         return PyObjectFrom(self->GetWorldToCamera()); 
846 }
847
848
849 PyObject* KX_Camera::pyattr_get_INSIDE(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
850 {       return PyLong_FromSsize_t(INSIDE); }
851 PyObject* KX_Camera::pyattr_get_OUTSIDE(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
852 {       return PyLong_FromSsize_t(OUTSIDE); }
853 PyObject* KX_Camera::pyattr_get_INTERSECT(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
854 {       return PyLong_FromSsize_t(INTERSECT); }
855
856
857 bool ConvertPythonToCamera(PyObject * value, KX_Camera **object, bool py_none_ok, const char *error_prefix)
858 {
859         if (value==NULL) {
860                 PyErr_Format(PyExc_TypeError, "%s, python pointer NULL, should never happen", error_prefix);
861                 *object = NULL;
862                 return false;
863         }
864                 
865         if (value==Py_None) {
866                 *object = NULL;
867                 
868                 if (py_none_ok) {
869                         return true;
870                 } else {
871                         PyErr_Format(PyExc_TypeError, "%s, expected KX_Camera or a KX_Camera name, None is invalid", error_prefix);
872                         return false;
873                 }
874         }
875         
876         if (PyUnicode_Check(value)) {
877                 STR_String value_str = _PyUnicode_AsString(value);
878                 *object = KX_GetActiveScene()->FindCamera(value_str);
879                 
880                 if (*object) {
881                         return true;
882                 } else {
883                         PyErr_Format(PyExc_ValueError, "%s, requested name \"%s\" did not match any KX_Camera in this scene", error_prefix, _PyUnicode_AsString(value));
884                         return false;
885                 }
886         }
887         
888         if (PyObject_TypeCheck(value, &KX_Camera::Type)) {
889                 *object = static_cast<KX_Camera*>BGE_PROXY_REF(value);
890                 
891                 /* sets the error */
892                 if (*object==NULL) {
893                         PyErr_Format(PyExc_SystemError, "%s, " BGE_PROXY_ERROR_MSG, error_prefix);
894                         return false;
895                 }
896                 
897                 return true;
898         }
899         
900         *object = NULL;
901         
902         if (py_none_ok) {
903                 PyErr_Format(PyExc_TypeError, "%s, expected a KX_Camera, a string or None", error_prefix);
904         } else {
905                 PyErr_Format(PyExc_TypeError, "%s, expected a KX_Camera or a string", error_prefix);
906         }
907         
908         return false;
909 }
910
911 KX_PYMETHODDEF_DOC_O(KX_Camera, getScreenPosition,
912 "getScreenPosition()\n"
913 )
914
915 {
916         MT_Vector3 vect;
917         KX_GameObject *obj = NULL;
918
919         if (!PyVecTo(value, vect))
920         {
921                 if(ConvertPythonToGameObject(value, &obj, true, ""))
922                 {
923                         PyErr_Clear();
924                         vect = MT_Vector3(obj->NodeGetWorldPosition());
925                 }
926                 else
927                 {
928                         PyErr_SetString(PyExc_TypeError, "Error in getScreenPosition. Expected a Vector3 or a KX_GameObject or a string for a name of a KX_GameObject");
929                         return NULL;
930                 }
931         }
932
933         GLint viewport[4];
934         GLdouble win[3];
935         GLdouble modelmatrix[16];
936         GLdouble projmatrix[16];
937
938         MT_Matrix4x4 m_modelmatrix = this->GetModelviewMatrix();
939         MT_Matrix4x4 m_projmatrix = this->GetProjectionMatrix();
940
941         m_modelmatrix.getValue(modelmatrix);
942         m_projmatrix.getValue(projmatrix);
943
944         glGetIntegerv(GL_VIEWPORT, viewport);
945
946         gluProject(vect[0], vect[1], vect[2], modelmatrix, projmatrix, viewport, &win[0], &win[1], &win[2]);
947
948         vect[0] =  (win[0] - viewport[0]) / viewport[2];
949         vect[1] =  (win[1] - viewport[1]) / viewport[3];
950
951         vect[1] = 1.0 - vect[1]; //to follow Blender window coordinate system (Top-Down)
952
953         PyObject* ret = PyTuple_New(2);
954         if(ret){
955                 PyTuple_SET_ITEM(ret, 0, PyFloat_FromDouble(vect[0]));
956                 PyTuple_SET_ITEM(ret, 1, PyFloat_FromDouble(vect[1]));
957                 return ret;
958         }
959
960         return NULL;
961 }
962
963 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, getScreenVect,
964 "getScreenVect()\n"
965 )
966 {
967         double x,y;
968         if (!PyArg_ParseTuple(args,"dd:getScreenVect",&x,&y))
969                 return NULL;
970
971         y = 1.0 - y; //to follow Blender window coordinate system (Top-Down)
972
973         MT_Vector3 vect;
974         MT_Point3 campos, screenpos;
975
976         GLint viewport[4];
977         GLdouble win[3];
978         GLdouble modelmatrix[16];
979         GLdouble projmatrix[16];
980
981         MT_Matrix4x4 m_modelmatrix = this->GetModelviewMatrix();
982         MT_Matrix4x4 m_projmatrix = this->GetProjectionMatrix();
983
984         m_modelmatrix.getValue(modelmatrix);
985         m_projmatrix.getValue(projmatrix);
986
987         glGetIntegerv(GL_VIEWPORT, viewport);
988
989         vect[0] = x * viewport[2];
990         vect[1] = y * viewport[3];
991
992         vect[0] += viewport[0];
993         vect[1] += viewport[1];
994
995         glReadPixels(x, y, 1, 1, GL_DEPTH_COMPONENT, GL_FLOAT, &vect[2]);
996         gluUnProject(vect[0], vect[1], vect[2], modelmatrix, projmatrix, viewport, &win[0], &win[1], &win[2]);
997
998         campos = this->GetCameraLocation();
999         screenpos = MT_Point3(win[0], win[1], win[2]);
1000         vect = campos-screenpos;
1001
1002         vect.normalize();
1003         return PyObjectFrom(vect);
1004 }
1005
1006 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, getScreenRay,
1007 "getScreenRay()\n"
1008 )
1009 {
1010         MT_Vector3 vect;
1011         double x,y,dist;
1012         char *propName = NULL;
1013
1014         if (!PyArg_ParseTuple(args,"ddd|s:getScreenRay",&x,&y,&dist,&propName))
1015                 return NULL;
1016
1017         PyObject* argValue = PyTuple_New(2);
1018         if (argValue) {
1019                 PyTuple_SET_ITEM(argValue, 0, PyFloat_FromDouble(x));
1020                 PyTuple_SET_ITEM(argValue, 1, PyFloat_FromDouble(y));
1021         }
1022
1023         if(!PyVecTo(PygetScreenVect(argValue), vect))
1024         {
1025                 Py_DECREF(argValue);
1026                 PyErr_SetString(PyExc_TypeError,
1027                         "Error in getScreenRay. Invalid 2D coordinate. Expected a normalized 2D screen coordinate, a distance and an optional property argument");
1028                 return NULL;
1029         }
1030         Py_DECREF(argValue);
1031
1032         dist = -dist;
1033         vect += this->GetCameraLocation();
1034
1035         argValue = (propName?PyTuple_New(3):PyTuple_New(2));
1036         if (argValue) {
1037                 PyTuple_SET_ITEM(argValue, 0, PyObjectFrom(vect));
1038                 PyTuple_SET_ITEM(argValue, 1, PyFloat_FromDouble(dist));
1039                 if (propName)
1040                         PyTuple_SET_ITEM(argValue, 2, PyUnicode_FromString(propName));
1041
1042                 PyObject* ret= this->PyrayCastTo(argValue,NULL);
1043                 Py_DECREF(argValue);
1044                 return ret;
1045         }
1046
1047         return NULL;
1048 }
1049 #endif