1 /*
2  * ***** BEGIN GPL LICENSE BLOCK *****
3  *
4  * This program is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU General Public License
6  * as published by the Free Software Foundation; either version 2
7  * of the License, or (at your option) any later version.
8  *
9  * This program is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12  * GNU General Public License for more details.
13  *
14  * You should have received a copy of the GNU General Public License
15  * along with this program; if not, write to the Free Software Foundation,
16  * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17  *
18  * The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
19  * All rights reserved.
20  *
21  * The Original Code is: all of this file.
22  *
23  * Contributor(s): none yet.
24  *
25  * ***** END GPL LICENSE BLOCK *****
26  * Camera in the gameengine. Cameras are also used for views.
27  */
28
29 /** \file gameengine/Ketsji/KX_Camera.cpp
30  *  \ingroup ketsji
31  */
32
33  
34 #include "GL/glew.h"
35 #include "KX_Camera.h"
36 #include "KX_Scene.h"
37 #include "KX_PythonInit.h"
38 #include "KX_Python.h"
39 #include "KX_PyMath.h"
40 KX_Camera::KX_Camera(void* sgReplicationInfo,
41                      SG_Callbacks callbacks,
42                      const RAS_CameraData& camdata,
43                      bool frustum_culling,
44                      bool delete_node)
45     :
46       KX_GameObject(sgReplicationInfo,callbacks),
47       m_camdata(camdata),
48       m_dirty(true),
49       m_normalized(false),
50       m_frustum_culling(frustum_culling),
51       m_set_projection_matrix(false),
52       m_set_frustum_center(false),
53       m_delete_node(delete_node)
54 {
55         // setting a name would be nice...
56         m_name = "cam";
57         m_projection_matrix.setIdentity();
58         m_modelview_matrix.setIdentity();
59 }
60
61
62 KX_Camera::~KX_Camera()
63 {
64         if (m_delete_node && m_pSGNode)
65         {
66                 // for shadow cameras, avoids a memory leak
67                 delete m_pSGNode;
68                 m_pSGNode = NULL;
69         }
70 }
71
72
73 CValue* KX_Camera::GetReplica()
74 {
75         KX_Camera* replica = new KX_Camera(*this);
76         
77         // this will copy properties and so on...
78         replica->ProcessReplica();
79         
80         return replica;
81 }
82
83 void KX_Camera::ProcessReplica()
84 {
85         KX_GameObject::ProcessReplica();
86         // replicated cameras are always registered in the scene
87         m_delete_node = false;
88 }
89
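/**
 * Returns the inverse of the camera's world transform, i.e. the transform that brings
 * world-space points into camera space.
 */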
90 MT_Transform KX_Camera::GetWorldToCamera() const
91 {
92         MT_Transform camtrans;
93         camtrans.invert(MT_Transform(NodeGetWorldPosition(), NodeGetWorldOrientation()));
94         
95         return camtrans;
96 }
97
98
99          
100 MT_Transform KX_Camera::GetCameraToWorld() const
101 {
102         return MT_Transform(NodeGetWorldPosition(), NodeGetWorldOrientation());
103 }
104
105
106
107 void KX_Camera::CorrectLookUp(MT_Scalar speed)
108 {
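        /* intentionally a no-op: look-up correction is not implemented */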
109 }
110
111
112
113 const MT_Point3 KX_Camera::GetCameraLocation() const
114 {
115         /* this is the camera location in cam coords... */
116         //return m_trans1.getOrigin();
117         //return MT_Point3(0,0,0);   <-----
118         /* .... I want it in world coords */
119         //MT_Transform trans;
120         //trans.setBasis(NodeGetWorldOrientation());
121         
122         return NodeGetWorldPosition();
123 }
124
125
126
127 /* I want the camera orientation as well. */
128 const MT_Quaternion KX_Camera::GetCameraOrientation() const
129 {
130         return NodeGetWorldOrientation().getRotation();
131 }
132
133
134
135 /**
136  * Sets the projection matrix that is used by the rasterizer.
137  */
138 void KX_Camera::SetProjectionMatrix(const MT_Matrix4x4 & mat)
139 {
140         m_projection_matrix = mat;
141         m_dirty = true;
142         m_set_projection_matrix = true;
143         m_set_frustum_center = false;
144 }
145
146
147
148 /**
149  * Sets the modelview matrix that is used by the rasterizer.
150  */
151 void KX_Camera::SetModelviewMatrix(const MT_Matrix4x4 & mat)
152 {
153         m_modelview_matrix = mat;
154         m_dirty = true;
155         m_set_frustum_center = false;
156 }
157
158
159
160 /**
161  * Gets the projection matrix that is used by the rasterizer.
162  */
163 const MT_Matrix4x4& KX_Camera::GetProjectionMatrix() const
164 {
165         return m_projection_matrix;
166 }
167
168
169
170 /**
171  * Gets the modelview matrix that is used by the rasterizer.
172  */
173 const MT_Matrix4x4& KX_Camera::GetModelviewMatrix() const
174 {
175         return m_modelview_matrix;
176 }
177
178
179 bool KX_Camera::hasValidProjectionMatrix() const
180 {
181         return m_set_projection_matrix;
182 }
183
184 void KX_Camera::InvalidateProjectionMatrix(bool valid)
185 {
186         m_set_projection_matrix = valid;
187 }
188
189
190 /**
191  * These getters retrieve the clip data and the focal length
192  */
193 float KX_Camera::GetLens() const
194 {
195         return m_camdata.m_lens;
196 }
197
198 float KX_Camera::GetScale() const
199 {
200         return m_camdata.m_scale;
201 }
202
203 /**
204  * Gets the horizontal size of the sensor - for camera matching.
205  */
206 float KX_Camera::GetSensorWidth() const
207 {
208         return m_camdata.m_sensor_x;
209 }
210
211 /**
212  * Gets the vertical size of the sensor - for camera matching.
213  */
214 float KX_Camera::GetSensorHeight() const
215 {
216         return m_camdata.m_sensor_y;
217 }
218 /** Gets the sensor fit mode used when computing the FOV from the sensor dimensions */
219 short KX_Camera::GetSensorFit() const
220 {
221         return m_camdata.m_sensor_fit;
222 }
223
224 float KX_Camera::GetCameraNear() const
225 {
226         return m_camdata.m_clipstart;
227 }
228
229
230
231 float KX_Camera::GetCameraFar() const
232 {
233         return m_camdata.m_clipend;
234 }
235
236 float KX_Camera::GetFocalLength() const
237 {
238         return m_camdata.m_focallength;
239 }
240
241
242
243 RAS_CameraData* KX_Camera::GetCameraData()
244 {
245         return &m_camdata; 
246 }
247
248 void KX_Camera::ExtractClipPlanes()
249 {
250         if (!m_dirty)
251                 return;
252
253         MT_Matrix4x4 m = m_projection_matrix * m_modelview_matrix;
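        // Standard frustum-plane extraction: each plane is the fourth row of the combined
        // matrix plus or minus one of the other rows. The planes are stored unnormalized
        // and are only normalized on demand in NormalizeClipPlanes().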
254         // Left clip plane
255         m_planes[0] = m[3] + m[0];
256         // Right clip plane
257         m_planes[1] = m[3] - m[0];
258         // Top clip plane
259         m_planes[2] = m[3] - m[1];
260         // Bottom clip plane
261         m_planes[3] = m[3] + m[1];
262         // Near clip plane
263         m_planes[4] = m[3] + m[2];
264         // Far clip plane
265         m_planes[5] = m[3] - m[2];
266         
267         m_dirty = false;
268         m_normalized = false;
269 }
270
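/**
 * Scales the cached clip planes so that evaluating a plane at a point yields a true signed
 * distance. The point and box tests only need the sign, so normalization is deferred until
 * the sphere test needs real distances.
 */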
271 void KX_Camera::NormalizeClipPlanes()
272 {
273         if (m_normalized)
274                 return;
275         
276         for (unsigned int p = 0; p < 6; p++)
277         {
278                 MT_Scalar factor = sqrt(m_planes[p][0]*m_planes[p][0] + m_planes[p][1]*m_planes[p][1] + m_planes[p][2]*m_planes[p][2]);
279                 if (!MT_fuzzyZero(factor))
280                         m_planes[p] /= factor;
281         }
282         
283         m_normalized = true;
284 }
285
286 void KX_Camera::ExtractFrustumSphere()
287 {
288         if (m_set_frustum_center)
289                 return;
290
291         // compute the sphere for the general case and not only for symmetric frustums:
292         // the mirror code in ImageRender can use a very asymmetric frustum.
293         // We put the sphere center on the line that goes from the origin to the center of the far clipping plane.
294         // This is the optimal position if the frustum is symmetric or very asymmetric, and probably close
295         // to optimal in the general case. The sphere center is positioned so that its distances to
296         // the extreme near and far frustum points are equal.
297
298         // get the transformation matrix from normalized device coordinates to camera coordinates
299         MT_Matrix4x4 clip_camcs_matrix = m_projection_matrix;
300         clip_camcs_matrix.invert();
301
302         if (m_projection_matrix[3][3] == MT_Scalar(0.0))
303         {
304                 // perspective (frustum) projection
305                 // detect which corner of the far clipping plane is farthest from the origin
306                 MT_Vector4 nfar;    // far point in normalized device coordinates
307                 MT_Point3 farpoint; // most extreme far point in camera coordinates
308                 MT_Point3 nearpoint;// most extreme near point in camera coordinates
309                 MT_Point3 farcenter(0.0, 0.0, 0.0);// center of the far clipping plane in camera coordinates
310                 MT_Scalar F=-1.0, N; // squared distances of the far and near points to the origin
311                 MT_Scalar f, n;     // distances of the far and near points to the z axis; f is always > 0 but n can be < 0
312                 MT_Scalar e, s;     // far and near clipping distances (<0)
313                 MT_Scalar c;        // slope of the center line = distance of the far clipping center to the z axis / far clipping distance
314                 MT_Scalar z;        // projection of the sphere center on the z axis (<0)
315                 // tmp value
316                 MT_Vector4 npoint(1.0, 1.0, 1.0, 1.0);
317                 MT_Vector4 hpoint;
318                 MT_Point3 point;
319                 MT_Scalar len;
320                 for (int i=0; i<4; i++)
321                 {
322                         hpoint = clip_camcs_matrix*npoint;
323                         point.setValue(hpoint[0]/hpoint[3], hpoint[1]/hpoint[3], hpoint[2]/hpoint[3]);
324                         len = point.dot(point);
325                         if (len > F)
326                         {
327                                 nfar = npoint;
328                                 farpoint = point;
329                                 F = len;
330                         }
331                         // rotate by 90 degrees around the z axis to walk through the 4 extreme points of the far clipping plane
332                         len = npoint[0];
333                         npoint[0] = -npoint[1];
334                         npoint[1] = len;
335                         farcenter += point;
336                 }
337                 // the far center is the average of the far clipping points
338                 farcenter *= 0.25;
339                 // the extreme near point is the opposite point on the near clipping plane
340                 nfar.setValue(-nfar[0], -nfar[1], -1.0, 1.0);
341                 nfar = clip_camcs_matrix*nfar;
342                 nearpoint.setValue(nfar[0]/nfar[3], nfar[1]/nfar[3], nfar[2]/nfar[3]);
343                 // squared distance of the extreme near point to the origin
344                 N = nearpoint.dot(nearpoint);
345                 e = farpoint[2];
346                 s = nearpoint[2];
347                 // projection on XY plane for distance to axis computation
348                 MT_Point2 farxy(farpoint[0], farpoint[1]);
349                 // f is forced positive by construction
350                 f = farxy.length();
351                 // get corresponding point on the near plane
352                 farxy *= s/e;
353                 // this formula preserves the sign of n
354                 n = f*s/e - MT_Point2(nearpoint[0]-farxy[0], nearpoint[1]-farxy[1]).length();
355                 c = MT_Point2(farcenter[0], farcenter[1]).length()/e;
356                 // the general formula; it simplifies to (F-N)/(2(e-s)) in the symmetric case
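                // (Derivation sketch: put the center at C = (farcenter.xy*z/e, z) on the center line
                //  and solve |farpoint - C|^2 == |nearpoint - C|^2, approximating the XY dot products
                //  by c*z*f and c*z*n:
                //      F - 2*z*(c*f + e) == N - 2*z*(c*n + s)   =>   z = (F-N)/(2*(e-s+c*(f-n))))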
357                 z = (F-N)/(2.0*(e-s+c*(f-n)));
358                 m_frustum_center = MT_Point3(farcenter[0]*z/e, farcenter[1]*z/e, z);
359                 m_frustum_radius = m_frustum_center.distance(farpoint);
360         }
361         else
362         {
363                 // orthographic projection
364                 // The most extreme points on the near and far plane. (normalized device coords)
365                 MT_Vector4 hnear(1.0, 1.0, 1.0, 1.0), hfar(-1.0, -1.0, -1.0, 1.0);
366                 
367                 // Transform to homogeneous camera-local space
368                 hnear = clip_camcs_matrix*hnear;
369                 hfar = clip_camcs_matrix*hfar;
370                 
371                 // Transform to 3D camera-local space.
372                 MT_Point3 nearpoint(hnear[0]/hnear[3], hnear[1]/hnear[3], hnear[2]/hnear[3]);
373                 MT_Point3 farpoint(hfar[0]/hfar[3], hfar[1]/hfar[3], hfar[2]/hfar[3]);
374                 
375                 // just use the midpoint of the near and far extreme points
376                 m_frustum_center = (farpoint + nearpoint)*0.5;
377                 m_frustum_radius = m_frustum_center.distance(farpoint);
378         }
379         // Transform to world space.
380         m_frustum_center = GetCameraToWorld()(m_frustum_center);
381         m_frustum_radius /= fabs(NodeGetWorldScaling()[NodeGetWorldScaling().closestAxis()]);
382         
383         m_set_frustum_center = true;
384 }
385
386 bool KX_Camera::PointInsideFrustum(const MT_Point3& x)
387 {
388         ExtractClipPlanes();
389         
390         for ( unsigned int i = 0; i < 6 ; i++ )
391         {
392                 if (m_planes[i][0] * x[0] + m_planes[i][1] * x[1] + m_planes[i][2] * x[2] + m_planes[i][3] < 0.0)
393                         return false;
394         }
395         return true;
396 }
397
398 int KX_Camera::BoxInsideFrustum(const MT_Point3 *box)
399 {
400         ExtractClipPlanes();
401         
402         unsigned int insideCount = 0;
403         // 6 view frustum planes
404         for ( unsigned int p = 0; p < 6 ; p++ )
405         {
406                 unsigned int behindCount = 0;
407                 // 8 box vertices.
408                 for (unsigned int v = 0; v < 8 ; v++)
409                 {
410                         if (m_planes[p][0] * box[v][0] + m_planes[p][1] * box[v][1] + m_planes[p][2] * box[v][2] + m_planes[p][3] < 0.0)
411                                 behindCount++;
412                 }
413                 
414                 // 8 points behind this plane
415                 if (behindCount == 8)
416                         return OUTSIDE;
417
418                 // Every box vertex is on the front side of this plane
419                 if (!behindCount)
420                         insideCount++;
421         }
422         
423         // All box vertices are on the front side of all frustum planes.
424         if (insideCount == 6)
425                 return INSIDE;
426         
427         return INTERSECT;
428 }
429
430 int KX_Camera::SphereInsideFrustum(const MT_Point3& center, const MT_Scalar &radius)
431 {
432         ExtractFrustumSphere();
433         if (center.distance2(m_frustum_center) > (radius + m_frustum_radius)*(radius + m_frustum_radius))
434                 return OUTSIDE;
435
436         unsigned int p;
437         ExtractClipPlanes();
438         NormalizeClipPlanes();
439                 
440         MT_Scalar distance;
441         int intersect = INSIDE;
442         // distance:  <-------- OUTSIDE -----|----- INTERSECT -----0----- INTERSECT -----|----- INSIDE -------->
443         //                                -radius                                      radius
444         for (p = 0; p < 6; p++)
445         {
446                 distance = m_planes[p][0]*center[0] + m_planes[p][1]*center[1] + m_planes[p][2]*center[2] + m_planes[p][3];
447                 if (fabs(distance) <= radius)
448                         intersect = INTERSECT;
449                 else if (distance < -radius)
450                         return OUTSIDE;
451         }
452         
453         return intersect;
454 }
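
/*
 * Illustrative only, not part of the original file: a minimal sketch of how a culling
 * pass could combine the tests above, doing the cheap sphere test first and the tighter
 * box test only when needed. "CullableObject", "GetBoundingSphere" and "GetBoundingBox"
 * are hypothetical names used purely for this example.
 */
#if 0
static bool IsCulled(KX_Camera *cam, CullableObject *obj)
{
        MT_Point3 center;
        MT_Scalar radius;
        obj->GetBoundingSphere(center, radius);

        // coarse rejection: bounding sphere against the frustum
        if (cam->SphereInsideFrustum(center, radius) == KX_Camera::OUTSIDE)
                return true;

        // finer test: the 8 corners of the bounding box
        MT_Point3 box[8];
        obj->GetBoundingBox(box);
        return (cam->BoxInsideFrustum(box) == KX_Camera::OUTSIDE);
}
#endif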
455
456 bool KX_Camera::GetFrustumCulling() const
457 {
458         return m_frustum_culling;
459 }
460  
461 void KX_Camera::EnableViewport(bool viewport)
462 {
463         m_camdata.m_viewport = viewport;
464 }
465
466 void KX_Camera::SetViewport(int left, int bottom, int right, int top)
467 {
468         m_camdata.m_viewportleft = left;
469         m_camdata.m_viewportbottom = bottom;
470         m_camdata.m_viewportright = right;
471         m_camdata.m_viewporttop = top;
472 }
473
474 bool KX_Camera::GetViewport() const
475 {
476         return m_camdata.m_viewport;
477 }
478
479 int KX_Camera::GetViewportLeft() const
480 {
481         return m_camdata.m_viewportleft;
482 }
483
484 int KX_Camera::GetViewportBottom() const
485 {
486         return m_camdata.m_viewportbottom;
487 }
488
489 int KX_Camera::GetViewportRight() const
490 {
491         return m_camdata.m_viewportright;
492 }
493
494 int KX_Camera::GetViewportTop() const
495 {
496         return m_camdata.m_viewporttop;
497 }
498
499 #ifdef WITH_PYTHON
500 //----------------------------------------------------------------------------
501 //Python
502
503
504 PyMethodDef KX_Camera::Methods[] = {
505         KX_PYMETHODTABLE(KX_Camera, sphereInsideFrustum),
506         KX_PYMETHODTABLE_O(KX_Camera, boxInsideFrustum),
507         KX_PYMETHODTABLE_O(KX_Camera, pointInsideFrustum),
508         KX_PYMETHODTABLE_NOARGS(KX_Camera, getCameraToWorld),
509         KX_PYMETHODTABLE_NOARGS(KX_Camera, getWorldToCamera),
510         KX_PYMETHODTABLE(KX_Camera, setViewport),
511         KX_PYMETHODTABLE_NOARGS(KX_Camera, setOnTop),
512         KX_PYMETHODTABLE_O(KX_Camera, getScreenPosition),
513         KX_PYMETHODTABLE(KX_Camera, getScreenVect),
514         KX_PYMETHODTABLE(KX_Camera, getScreenRay),
515         {NULL,NULL} //Sentinel
516 };
517
518 PyAttributeDef KX_Camera::Attributes[] = {
519         
520         KX_PYATTRIBUTE_BOOL_RW("frustum_culling", KX_Camera, m_frustum_culling),
521         KX_PYATTRIBUTE_RW_FUNCTION("perspective", KX_Camera, pyattr_get_perspective, pyattr_set_perspective),
522         
523         KX_PYATTRIBUTE_RW_FUNCTION("lens",      KX_Camera,      pyattr_get_lens, pyattr_set_lens),
524         KX_PYATTRIBUTE_RW_FUNCTION("ortho_scale",       KX_Camera,      pyattr_get_ortho_scale, pyattr_set_ortho_scale),
525         KX_PYATTRIBUTE_RW_FUNCTION("near",      KX_Camera,      pyattr_get_near, pyattr_set_near),
526         KX_PYATTRIBUTE_RW_FUNCTION("far",       KX_Camera,      pyattr_get_far,  pyattr_set_far),
527         
528         KX_PYATTRIBUTE_RW_FUNCTION("useViewport",       KX_Camera,      pyattr_get_use_viewport,  pyattr_set_use_viewport),
529         
530         KX_PYATTRIBUTE_RW_FUNCTION("projection_matrix", KX_Camera,      pyattr_get_projection_matrix, pyattr_set_projection_matrix),
531         KX_PYATTRIBUTE_RO_FUNCTION("modelview_matrix",  KX_Camera,      pyattr_get_modelview_matrix),
532         KX_PYATTRIBUTE_RO_FUNCTION("camera_to_world",   KX_Camera,      pyattr_get_camera_to_world),
533         KX_PYATTRIBUTE_RO_FUNCTION("world_to_camera",   KX_Camera,      pyattr_get_world_to_camera),
534         
535         /* Grrr, functions for constants? */
536         KX_PYATTRIBUTE_RO_FUNCTION("INSIDE",    KX_Camera, pyattr_get_INSIDE),
537         KX_PYATTRIBUTE_RO_FUNCTION("OUTSIDE",   KX_Camera, pyattr_get_OUTSIDE),
538         KX_PYATTRIBUTE_RO_FUNCTION("INTERSECT", KX_Camera, pyattr_get_INTERSECT),
539         
540         { NULL }        //Sentinel
541 };
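
/*
 * For reference (illustrative snippet, mirroring the method docstrings below), these
 * attributes and methods are typically used from Python roughly like:
 *
 *   import bge
 *   cam = bge.logic.getCurrentScene().active_camera
 *   cam.lens = 35.0
 *   # obj is any KX_GameObject
 *   if cam.pointInsideFrustum(obj.worldPosition):
 *       ...  # the object's position is inside the view frustum
 */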
542
543 PyTypeObject KX_Camera::Type = {
544         PyVarObject_HEAD_INIT(NULL, 0)
545         "KX_Camera",
546         sizeof(PyObjectPlus_Proxy),
547         0,
548         py_base_dealloc,
549         0,
550         0,
551         0,
552         0,
553         py_base_repr,
554         0,
555         &KX_GameObject::Sequence,
556         &KX_GameObject::Mapping,
557         0,0,0,
558         NULL,
559         NULL,
560         0,
561         Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
562         0,0,0,0,0,0,0,
563         Methods,
564         0,
565         0,
566         &KX_GameObject::Type,
567         0,0,0,0,0,0,
568         py_base_new
569 };
570
571 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, sphereInsideFrustum,
572 "sphereInsideFrustum(center, radius) -> Integer\n"
573 "\treturns INSIDE, OUTSIDE or INTERSECT if the given sphere is\n"
574 "\tinside/outside/intersects this camera's viewing frustum.\n\n"
575 "\tcenter = the center of the sphere (in world coordinates.)\n"
576 "\tradius = the radius of the sphere\n\n"
577 "\tExample:\n"
578 "\timport bge.logic\n\n"
579 "\tco = bge.logic.getCurrentController()\n"
580 "\tcam = co.owner\n\n"
581 "\t# A sphere of radius 4.0 located at [x, y, z] = [1.0, 1.0, 1.0]\n"
582 "\tif (cam.sphereInsideFrustum([1.0, 1.0, 1.0], 4) != cam.OUTSIDE):\n"
583 "\t\t# Sphere is inside frustum !\n"
584 "\t\t# Do something useful !\n"
585 "\telse:\n"
586 "\t\t# Sphere is outside frustum\n"
587 )
588 {
589         PyObject *pycenter;
590         float radius;
591         if (PyArg_ParseTuple(args, "Of:sphereInsideFrustum", &pycenter, &radius))
592         {
593                 MT_Point3 center;
594                 if (PyVecTo(pycenter, center))
595                 {
596                         return PyLong_FromSsize_t(SphereInsideFrustum(center, radius)); /* new ref */
597                 }
598         }
599
600         PyErr_SetString(PyExc_TypeError, "camera.sphereInsideFrustum(center, radius): KX_Camera, expected arguments: (center, radius)");
601         
602         return NULL;
603 }
604
605 KX_PYMETHODDEF_DOC_O(KX_Camera, boxInsideFrustum,
606 "boxInsideFrustum(box) -> Integer\n"
607 "\treturns INSIDE, OUTSIDE or INTERSECT if the given box is\n"
608 "\tinside/outside/intersects this camera's viewing frustum.\n\n"
609 "\tbox = a list of the eight (8) corners of the box (in world coordinates.)\n\n"
610 "\tExample:\n"
611 "\timport bge.logic\n\n"
612 "\tco = bge.logic.getCurrentController()\n"
613 "\tcam = co.owner\n\n"
614 "\tbox = []\n"
615 "\tbox.append([-1.0, -1.0, -1.0])\n"
616 "\tbox.append([-1.0, -1.0,  1.0])\n"
617 "\tbox.append([-1.0,  1.0, -1.0])\n"
618 "\tbox.append([-1.0,  1.0,  1.0])\n"
619 "\tbox.append([ 1.0, -1.0, -1.0])\n"
620 "\tbox.append([ 1.0, -1.0,  1.0])\n"
621 "\tbox.append([ 1.0,  1.0, -1.0])\n"
622 "\tbox.append([ 1.0,  1.0,  1.0])\n\n"
623 "\tif (cam.boxInsideFrustum(box) != cam.OUTSIDE):\n"
624 "\t\t# Box is inside/intersects frustum !\n"
625 "\t\t# Do something useful !\n"
626 "\telse:\n"
627 "\t\t# Box is outside the frustum !\n"
628 )
629 {
630         unsigned int num_points = PySequence_Size(value);
631         if (num_points != 8)
632         {
633                 PyErr_Format(PyExc_TypeError, "camera.boxInsideFrustum(box): KX_Camera, expected eight (8) points, got %d", num_points);
634                 return NULL;
635         }
636         
637         MT_Point3 box[8];
638         for (unsigned int p = 0; p < 8 ; p++)
639         {
640                 PyObject *item = PySequence_GetItem(value, p); /* new ref */
641                 bool error = !PyVecTo(item, box[p]);
642                 Py_DECREF(item);
643                 if (error)
644                         return NULL;
645         }
646         
647         return PyLong_FromSsize_t(BoxInsideFrustum(box)); /* new ref */
648 }
649
650 KX_PYMETHODDEF_DOC_O(KX_Camera, pointInsideFrustum,
651 "pointInsideFrustum(point) -> Bool\n"
652 "\treturns 1 if the given point is inside this camera's viewing frustum.\n\n"
653 "\tpoint = The point to test (in world coordinates.)\n\n"
654 "\tExample:\n"
655 "\timport bge.logic\n\n"
656 "\tco = bge.logic.getCurrentController()\n"
657 "\tcam = co.owner\n\n"
658 "\t# Test point [0.0, 0.0, 0.0]\n"
659 "\tif (cam.pointInsideFrustum([0.0, 0.0, 0.0])):\n"
660 "\t\t# Point is inside frustum !\n"
661 "\t\t# Do something useful !\n"
662 "\telse:\n"
663 "\t\t# Point is outside the frustum !\n"
664 )
665 {
666         MT_Point3 point;
667         if (PyVecTo(value, point))
668         {
669                 return PyLong_FromSsize_t(PointInsideFrustum(point)); /* new ref */
670         }
671         
672         PyErr_SetString(PyExc_TypeError, "camera.pointInsideFrustum(point): KX_Camera, expected point argument.");
673         return NULL;
674 }
675
676 KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, getCameraToWorld,
677 "getCameraToWorld() -> Matrix4x4\n"
678 "\treturns the camera to world transformation matrix, as a list of four lists of four values.\n\n"
679 "\tie: [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])\n"
680 )
681 {
682         return PyObjectFrom(GetCameraToWorld()); /* new ref */
683 }
684
685 KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, getWorldToCamera,
686 "getWorldToCamera() -> Matrix4x4\n"
687 "\treturns the world to camera transformation matrix, as a list of four lists of four values.\n\n"
688 "\tie: [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])\n"
689 )
690 {
691         return PyObjectFrom(GetWorldToCamera()); /* new ref */
692 }
693
694 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, setViewport,
695 "setViewport(left, bottom, right, top)\n"
696 "Sets this camera's viewport\n")
697 {
698         int left, bottom, right, top;
699         if (!PyArg_ParseTuple(args,"iiii:setViewport",&left, &bottom, &right, &top))
700                 return NULL;
701         
702         SetViewport(left, bottom, right, top);
703         Py_RETURN_NONE;
704 }
705
706 KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, setOnTop,
707 "setOnTop()\n"
708 "Sets this camera's viewport on top\n")
709 {
710         class KX_Scene* scene = KX_GetActiveScene();
711         scene->SetCameraOnTop(this);
712         Py_RETURN_NONE;
713 }
714
715 PyObject *KX_Camera::pyattr_get_perspective(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
716 {
717         KX_Camera* self = static_cast<KX_Camera*>(self_v);
718         return PyBool_FromLong(self->m_camdata.m_perspective);
719 }
720
721 int KX_Camera::pyattr_set_perspective(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
722 {
723         KX_Camera* self = static_cast<KX_Camera*>(self_v);
724         int param = PyObject_IsTrue( value );
725         if (param == -1) {
726                 PyErr_SetString(PyExc_AttributeError, "camera.perspective = bool: KX_Camera, expected True/False or 0/1");
727                 return PY_SET_ATTR_FAIL;
728         }
729         
730         self->m_camdata.m_perspective= param;
731         self->InvalidateProjectionMatrix();
732         return PY_SET_ATTR_SUCCESS;
733 }
734
735 PyObject *KX_Camera::pyattr_get_lens(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
736 {
737         KX_Camera* self = static_cast<KX_Camera*>(self_v);
738         return PyFloat_FromDouble(self->m_camdata.m_lens);
739 }
740
741 int KX_Camera::pyattr_set_lens(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
742 {
743         KX_Camera* self = static_cast<KX_Camera*>(self_v);
744         float param = PyFloat_AsDouble(value);
745         if (param == -1) {
746                 PyErr_SetString(PyExc_AttributeError, "camera.lens = float: KX_Camera, expected a float greater than zero");
747                 return PY_SET_ATTR_FAIL;
748         }
749         
750         self->m_camdata.m_lens= param;
751         self->m_set_projection_matrix = false;
752         return PY_SET_ATTR_SUCCESS;
753 }
754
755 PyObject *KX_Camera::pyattr_get_ortho_scale(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
756 {
757         KX_Camera* self = static_cast<KX_Camera*>(self_v);
758         return PyFloat_FromDouble(self->m_camdata.m_scale);
759 }
760
761 int KX_Camera::pyattr_set_ortho_scale(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
762 {
763         KX_Camera* self = static_cast<KX_Camera*>(self_v);
764         float param = PyFloat_AsDouble(value);
765         if (param == -1) {
766                 PyErr_SetString(PyExc_AttributeError, "camera.ortho_scale = float: KX_Camera, expected a float greater than zero");
767                 return PY_SET_ATTR_FAIL;
768         }
769         
770         self->m_camdata.m_scale= param;
771         self->m_set_projection_matrix = false;
772         return PY_SET_ATTR_SUCCESS;
773 }
774
775 PyObject *KX_Camera::pyattr_get_near(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
776 {
777         KX_Camera* self = static_cast<KX_Camera*>(self_v);
778         return PyFloat_FromDouble(self->m_camdata.m_clipstart);
779 }
780
781 int KX_Camera::pyattr_set_near(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
782 {
783         KX_Camera* self = static_cast<KX_Camera*>(self_v);
784         float param = PyFloat_AsDouble(value);
785         if (param == -1) {
786                 PyErr_SetString(PyExc_AttributeError, "camera.near = float: KX_Camera, expected a float greater than zero");
787                 return PY_SET_ATTR_FAIL;
788         }
789         
790         self->m_camdata.m_clipstart= param;
791         self->m_set_projection_matrix = false;
792         return PY_SET_ATTR_SUCCESS;
793 }
794
795 PyObject *KX_Camera::pyattr_get_far(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
796 {
797         KX_Camera* self = static_cast<KX_Camera*>(self_v);
798         return PyFloat_FromDouble(self->m_camdata.m_clipend);
799 }
800
801 int KX_Camera::pyattr_set_far(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
802 {
803         KX_Camera* self = static_cast<KX_Camera*>(self_v);
804         float param = PyFloat_AsDouble(value);
805         if (param == -1) {
806                 PyErr_SetString(PyExc_AttributeError, "camera.far = float: KX_Camera, expected a float greater than zero");
807                 return PY_SET_ATTR_FAIL;
808         }
809         
810         self->m_camdata.m_clipend= param;
811         self->m_set_projection_matrix = false;
812         return PY_SET_ATTR_SUCCESS;
813 }
814
815
816 PyObject *KX_Camera::pyattr_get_use_viewport(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
817 {
818         KX_Camera* self = static_cast<KX_Camera*>(self_v);
819         return PyBool_FromLong(self->GetViewport());
820 }
821
822 int KX_Camera::pyattr_set_use_viewport(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
823 {
824         KX_Camera* self = static_cast<KX_Camera*>(self_v);
825         int param = PyObject_IsTrue( value );
826         if (param == -1) {
827                 PyErr_SetString(PyExc_AttributeError, "camera.useViewport = bool: KX_Camera, expected True or False");
828                 return PY_SET_ATTR_FAIL;
829         }
830         self->EnableViewport((bool)param);
831         return PY_SET_ATTR_SUCCESS;
832 }
833
834
835 PyObject *KX_Camera::pyattr_get_projection_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
836 {
837         KX_Camera* self = static_cast<KX_Camera*>(self_v);
838         return PyObjectFrom(self->GetProjectionMatrix()); 
839 }
840
841 int KX_Camera::pyattr_set_projection_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
842 {
843         KX_Camera* self = static_cast<KX_Camera*>(self_v);
844         MT_Matrix4x4 mat;
845         if (!PyMatTo(value, mat)) 
846                 return PY_SET_ATTR_FAIL;
847         
848         self->SetProjectionMatrix(mat);
849         return PY_SET_ATTR_SUCCESS;
850 }
851
852 PyObject *KX_Camera::pyattr_get_modelview_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
853 {
854         KX_Camera* self = static_cast<KX_Camera*>(self_v);
855         return PyObjectFrom(self->GetModelviewMatrix()); 
856 }
857
858 PyObject *KX_Camera::pyattr_get_camera_to_world(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
859 {
860         KX_Camera* self = static_cast<KX_Camera*>(self_v);
861         return PyObjectFrom(self->GetCameraToWorld());
862 }
863
864 PyObject *KX_Camera::pyattr_get_world_to_camera(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
865 {
866         KX_Camera* self = static_cast<KX_Camera*>(self_v);
867         return PyObjectFrom(self->GetWorldToCamera()); 
868 }
869
870
871 PyObject *KX_Camera::pyattr_get_INSIDE(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
872 {       return PyLong_FromSsize_t(INSIDE); }
873 PyObject *KX_Camera::pyattr_get_OUTSIDE(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
874 {       return PyLong_FromSsize_t(OUTSIDE); }
875 PyObject *KX_Camera::pyattr_get_INTERSECT(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
876 {       return PyLong_FromSsize_t(INTERSECT); }
877
878
879 bool ConvertPythonToCamera(PyObject *value, KX_Camera **object, bool py_none_ok, const char *error_prefix)
880 {
881         if (value==NULL) {
882                 PyErr_Format(PyExc_TypeError, "%s, python pointer NULL, should never happen", error_prefix);
883                 *object = NULL;
884                 return false;
885         }
886                 
887         if (value==Py_None) {
888                 *object = NULL;
889                 
890                 if (py_none_ok) {
891                         return true;
892                 } else {
893                         PyErr_Format(PyExc_TypeError, "%s, expected KX_Camera or a KX_Camera name, None is invalid", error_prefix);
894                         return false;
895                 }
896         }
897         
898         if (PyUnicode_Check(value)) {
899                 STR_String value_str = _PyUnicode_AsString(value);
900                 *object = KX_GetActiveScene()->FindCamera(value_str);
901                 
902                 if (*object) {
903                         return true;
904                 } else {
905                         PyErr_Format(PyExc_ValueError,
906                                      "%s, requested name \"%s\" did not match any KX_Camera in this scene",
907                                      error_prefix, _PyUnicode_AsString(value));
908                         return false;
909                 }
910         }
911         
912         if (PyObject_TypeCheck(value, &KX_Camera::Type)) {
913                 *object = static_cast<KX_Camera*>BGE_PROXY_REF(value);
914                 
915                 /* sets the error */
916                 if (*object==NULL) {
917                         PyErr_Format(PyExc_SystemError, "%s, " BGE_PROXY_ERROR_MSG, error_prefix);
918                         return false;
919                 }
920                 
921                 return true;
922         }
923         
924         *object = NULL;
925         
926         if (py_none_ok) {
927                 PyErr_Format(PyExc_TypeError, "%s, expected a KX_Camera, a string or None", error_prefix);
928         } else {
929                 PyErr_Format(PyExc_TypeError, "%s, expected a KX_Camera or a string", error_prefix);
930         }
931         
932         return false;
933 }
934
935 KX_PYMETHODDEF_DOC_O(KX_Camera, getScreenPosition,
936 "getScreenPosition(arg)\n\treturns the position of a 3D point, or of a game object, projected onto the screen\n\tas normalized screen coordinates [x, y].\n"
937 )
938
939 {
940         MT_Vector3 vect;
941         KX_GameObject *obj = NULL;
942
943         if (!PyVecTo(value, vect))
944         {
945                 PyErr_Clear();
946
947                 if (ConvertPythonToGameObject(value, &obj, true, ""))
948                 {
949                         PyErr_Clear();
950                         vect = MT_Vector3(obj->NodeGetWorldPosition());
951                 }
952                 else
953                 {
954                         PyErr_SetString(PyExc_TypeError, "Error in getScreenPosition. Expected a Vector3, a KX_GameObject, or the name of a KX_GameObject");
955                         return NULL;
956                 }
957         }
958
959         const GLint *viewport;
960         GLdouble win[3];
961         GLdouble modelmatrix[16];
962         GLdouble projmatrix[16];
963
964         MT_Matrix4x4 m_modelmatrix = this->GetModelviewMatrix();
965         MT_Matrix4x4 m_projmatrix = this->GetProjectionMatrix();
966
967         m_modelmatrix.getValue(modelmatrix);
968         m_projmatrix.getValue(projmatrix);
969
970         viewport = KX_GetActiveEngine()->GetCanvas()->GetViewPort();
971
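        // project the world-space point into window coordinates using the camera's
        // modelview/projection matrices and the canvas viewport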
972         gluProject(vect[0], vect[1], vect[2], modelmatrix, projmatrix, viewport, &win[0], &win[1], &win[2]);
973
974         vect[0] =  (win[0] - viewport[0]) / viewport[2];
975         vect[1] =  (win[1] - viewport[1]) / viewport[3];
976
977         vect[1] = 1.0 - vect[1]; //to follow Blender window coordinate system (Top-Down)
978
979         PyObject *ret = PyTuple_New(2);
980         if (ret) {
981                 PyTuple_SET_ITEM(ret, 0, PyFloat_FromDouble(vect[0]));
982                 PyTuple_SET_ITEM(ret, 1, PyFloat_FromDouble(vect[1]));
983                 return ret;
984         }
985
986         return NULL;
987 }
988
989 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, getScreenVect,
990 "getScreenVect(x, y)\n\treturns a normalized vector, in world coordinates, pointing from the point under the given\n\tnormalized screen coordinates [x, y] back towards the camera.\n"
991 )
992 {
993         double x,y;
994         if (!PyArg_ParseTuple(args,"dd:getScreenVect",&x,&y))
995                 return NULL;
996
997         y = 1.0 - y; //to follow Blender window coordinate system (Top-Down)
998
999         MT_Vector3 vect;
1000         MT_Point3 campos, screenpos;
1001
1002         const GLint *viewport;
1003         GLdouble win[3];
1004         GLdouble modelmatrix[16];
1005         GLdouble projmatrix[16];
1006
1007         MT_Matrix4x4 m_modelmatrix = this->GetModelviewMatrix();
1008         MT_Matrix4x4 m_projmatrix = this->GetProjectionMatrix();
1009
1010         m_modelmatrix.getValue(modelmatrix);
1011         m_projmatrix.getValue(projmatrix);
1012
1013         viewport = KX_GetActiveEngine()->GetCanvas()->GetViewPort();
1014
1015         vect[0] = x * viewport[2];
1016         vect[1] = y * viewport[3];
1017
1018         vect[0] += viewport[0];
1019         vect[1] += viewport[1];
1020
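        // read a depth value and unproject the screen point back into world space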
1021         glReadPixels(x, y, 1, 1, GL_DEPTH_COMPONENT, GL_FLOAT, &vect[2]);
1022         gluUnProject(vect[0], vect[1], vect[2], modelmatrix, projmatrix, viewport, &win[0], &win[1], &win[2]);
1023
1024         campos = this->GetCameraLocation();
1025         screenpos = MT_Point3(win[0], win[1], win[2]);
1026         vect = campos-screenpos;
1027
1028         vect.normalize();
1029         return PyObjectFrom(vect);
1030 }
1031
1032 KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, getScreenRay,
1033 "getScreenRay(x, y, dist, property)\n\tcasts a ray through the normalized screen coordinates [x, y] over the given distance and\n\treturns the first object hit (optionally filtered by a property name), or None.\n"
1034 )
1035 {
1036         MT_Vector3 vect;
1037         double x,y,dist;
1038         char *propName = NULL;
1039
1040         if (!PyArg_ParseTuple(args,"ddd|s:getScreenRay",&x,&y,&dist,&propName))
1041                 return NULL;
1042
1043         PyObject *argValue = PyTuple_New(2);
1044         PyTuple_SET_ITEM(argValue, 0, PyFloat_FromDouble(x));
1045         PyTuple_SET_ITEM(argValue, 1, PyFloat_FromDouble(y));
1046
1047         if (!PyVecTo(PygetScreenVect(argValue), vect))
1048         {
1049                 Py_DECREF(argValue);
1050                 PyErr_SetString(PyExc_TypeError,
1051                                 "Error in getScreenRay. Invalid 2D coordinate. "
1052                                 "Expected a normalized 2D screen coordinate, "
1053                                 "a distance and an optional property argument");
1054                 return NULL;
1055         }
1056         Py_DECREF(argValue);
1057
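        // getScreenVect() points from the screen point back towards the camera, so negate the
        // distance: rayCastTo() with a negative distance casts away from the target point,
        // i.e. from the camera out through the screen point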
1058         dist = -dist;
1059         vect += this->GetCameraLocation();
1060
1061         argValue = (propName?PyTuple_New(3):PyTuple_New(2));
1062         if (argValue) {
1063                 PyTuple_SET_ITEM(argValue, 0, PyObjectFrom(vect));
1064                 PyTuple_SET_ITEM(argValue, 1, PyFloat_FromDouble(dist));
1065                 if (propName)
1066                         PyTuple_SET_ITEM(argValue, 2, PyUnicode_FromString(propName));
1067
1068                 PyObject *ret= this->PyrayCastTo(argValue,NULL);
1069                 Py_DECREF(argValue);
1070                 return ret;
1071         }
1072
1073         return NULL;
1074 }
1075 #endif