/*
 * $Id$
 *
 * ***** BEGIN GPL LICENSE BLOCK *****
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
 * All rights reserved.
 *
 * The Original Code is: all of this file.
 *
 * Contributor(s): none yet.
 *
 * ***** END GPL LICENSE BLOCK *****
 * Camera in the gameengine. Cameras are also used for views.
 */

/** \file gameengine/Ketsji/KX_Camera.cpp
 *  \ingroup ketsji
 */

#include "GL/glew.h"
#include "KX_Camera.h"
#include "KX_Scene.h"
#include "KX_PythonInit.h"
#include "KX_Python.h"
#include "KX_PyMath.h"

KX_Camera::KX_Camera(void* sgReplicationInfo,
                     SG_Callbacks callbacks,
                     const RAS_CameraData& camdata,
                     bool frustum_culling,
                     bool delete_node)
    : KX_GameObject(sgReplicationInfo, callbacks),
      m_camdata(camdata),
      m_dirty(true),
      m_normalized(false),
      m_frustum_culling(frustum_culling),
      m_set_projection_matrix(false),
      m_set_frustum_center(false),
      m_delete_node(delete_node)
{
    // setting a name would be nice...
    m_name = "cam";
    m_projection_matrix.setIdentity();
    m_modelview_matrix.setIdentity();
}


KX_Camera::~KX_Camera()
{
    if (m_delete_node && m_pSGNode)
    {
        // for shadow camera, avoids memleak
        delete m_pSGNode;
        m_pSGNode = NULL;
    }
}


CValue* KX_Camera::GetReplica()
{
    KX_Camera* replica = new KX_Camera(*this);

    // this will copy properties and so on...
    replica->ProcessReplica();

    return replica;
}

void KX_Camera::ProcessReplica()
{
    KX_GameObject::ProcessReplica();
    // replicated cameras are always registered in the scene
    m_delete_node = false;
}

MT_Transform KX_Camera::GetWorldToCamera() const
{
    MT_Transform camtrans;
    camtrans.invert(MT_Transform(NodeGetWorldPosition(), NodeGetWorldOrientation()));

    return camtrans;
}


MT_Transform KX_Camera::GetCameraToWorld() const
{
    return MT_Transform(NodeGetWorldPosition(), NodeGetWorldOrientation());
}


void KX_Camera::CorrectLookUp(MT_Scalar speed)
{
}


const MT_Point3 KX_Camera::GetCameraLocation() const
{
    /* this is the camera location in cam coords... */
    //return m_trans1.getOrigin();
    //return MT_Point3(0,0,0);   <-----
    /* .... I want it in world coords */
    //MT_Transform trans;
    //trans.setBasis(NodeGetWorldOrientation());

    return NodeGetWorldPosition();
}


/* I want the camera orientation as well. */
const MT_Quaternion KX_Camera::GetCameraOrientation() const
{
    return NodeGetWorldOrientation().getRotation();
}


/**
 * Sets the projection matrix that is used by the rasterizer.
 */
void KX_Camera::SetProjectionMatrix(const MT_Matrix4x4 & mat)
{
    m_projection_matrix = mat;
    m_dirty = true;
    m_set_projection_matrix = true;
    m_set_frustum_center = false;
}


/**
 * Sets the modelview matrix that is used by the rasterizer.
 */
void KX_Camera::SetModelviewMatrix(const MT_Matrix4x4 & mat)
{
    m_modelview_matrix = mat;
    m_dirty = true;
    m_set_frustum_center = false;
}


/**
 * Gets the projection matrix that is used by the rasterizer.
 */
const MT_Matrix4x4& KX_Camera::GetProjectionMatrix() const
{
    return m_projection_matrix;
}


/**
 * Gets the modelview matrix that is used by the rasterizer.
 */
const MT_Matrix4x4& KX_Camera::GetModelviewMatrix() const
{
    return m_modelview_matrix;
}


bool KX_Camera::hasValidProjectionMatrix() const
{
    return m_set_projection_matrix;
}

void KX_Camera::InvalidateProjectionMatrix(bool valid)
{
    m_set_projection_matrix = valid;
}


/*
 * These getters retrieve the clip data and the focal length
 */
float KX_Camera::GetLens() const
{
    return m_camdata.m_lens;
}

float KX_Camera::GetScale() const
{
    return m_camdata.m_scale;
}

float KX_Camera::GetCameraNear() const
{
    return m_camdata.m_clipstart;
}

float KX_Camera::GetCameraFar() const
{
    return m_camdata.m_clipend;
}

float KX_Camera::GetFocalLength() const
{
    return m_camdata.m_focallength;
}

RAS_CameraData* KX_Camera::GetCameraData()
{
    return &m_camdata;
}

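/**
 * Extracts the six frustum planes from the combined projection * modelview
 * matrix by adding/subtracting rows of that matrix (the standard
 * Gribb/Hartmann style extraction). The plane equations are left
 * unnormalized here; NormalizeClipPlanes() rescales them when a true
 * distance is needed.
 */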
void KX_Camera::ExtractClipPlanes()
{
    if (!m_dirty)
        return;

    MT_Matrix4x4 m = m_projection_matrix * m_modelview_matrix;
    // Left clip plane
    m_planes[0] = m[3] + m[0];
    // Right clip plane
    m_planes[1] = m[3] - m[0];
    // Top clip plane
    m_planes[2] = m[3] - m[1];
    // Bottom clip plane
    m_planes[3] = m[3] + m[1];
    // Near clip plane
    m_planes[4] = m[3] + m[2];
    // Far clip plane
    m_planes[5] = m[3] - m[2];

    m_dirty = false;
    m_normalized = false;
}

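/**
 * Divides each plane equation by the length of its normal so that evaluating
 * a point against the plane yields a signed distance. The sign-only tests in
 * PointInsideFrustum() and BoxInsideFrustum() do not need this, but
 * SphereInsideFrustum() does.
 */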
void KX_Camera::NormalizeClipPlanes()
{
    if (m_normalized)
        return;

    for (unsigned int p = 0; p < 6; p++)
    {
        MT_Scalar factor = sqrt(m_planes[p][0]*m_planes[p][0] + m_planes[p][1]*m_planes[p][1] + m_planes[p][2]*m_planes[p][2]);
        if (!MT_fuzzyZero(factor))
            m_planes[p] /= factor;
    }

    m_normalized = true;
}

void KX_Camera::ExtractFrustumSphere()
{
    if (m_set_frustum_center)
        return;

    // Compute the sphere for the general case and not only the symmetric frustum:
    // the mirror code in ImageRender can use a very asymmetric frustum.
    // We put the sphere center on the line that goes from the origin to the center of the far clipping plane.
    // This is the optimal position if the frustum is symmetric or very asymmetric, and probably close
    // to optimal for the general case. The sphere center position is computed so that the distances to
    // the near and far extreme frustum points are equal.

    // get the transformation matrix from device coordinates to camera coordinates
    MT_Matrix4x4 clip_camcs_matrix = m_projection_matrix;
    clip_camcs_matrix.invert();

    if (m_projection_matrix[3][3] == MT_Scalar(0.0))
    {
        // perspective (frustum) projection
        // detect which corner of the far clipping plane is the farthest from the origin
        MT_Vector4 nfar;    // far point in normalized device coordinates
        MT_Point3 farpoint; // most extreme far point in camera coordinates
        MT_Point3 nearpoint;// most extreme near point in camera coordinates
        MT_Point3 farcenter(0.,0.,0.);// center of the far clipping plane in camera coordinates
        MT_Scalar F=-1.0, N; // square distance of far and near point to origin
        MT_Scalar f, n;     // distance of far and near point to z axis. f is always > 0 but n can be < 0
        MT_Scalar e, s;     // far and near clipping distance (<0)
        MT_Scalar c;        // slope of center line = distance of far clipping center to z axis / far clipping distance
        MT_Scalar z;        // projection of sphere center on z axis (<0)
        // tmp values
        MT_Vector4 npoint(1., 1., 1., 1.);
        MT_Vector4 hpoint;
        MT_Point3 point;
        MT_Scalar len;
        for (int i=0; i<4; i++)
        {
            hpoint = clip_camcs_matrix*npoint;
            point.setValue(hpoint[0]/hpoint[3], hpoint[1]/hpoint[3], hpoint[2]/hpoint[3]);
            len = point.dot(point);
            if (len > F)
            {
                nfar = npoint;
                farpoint = point;
                F = len;
            }
            // rotate by 90 degrees about the z axis to walk through the 4 extreme points of the far clipping plane
            len = npoint[0];
            npoint[0] = -npoint[1];
            npoint[1] = len;
            farcenter += point;
        }
        // the far center is the average of the far clipping points
        farcenter *= 0.25;
        // the extreme near point is the opposite point on the near clipping plane
        nfar.setValue(-nfar[0], -nfar[1], -1., 1.);
        nfar = clip_camcs_matrix*nfar;
        nearpoint.setValue(nfar[0]/nfar[3], nfar[1]/nfar[3], nfar[2]/nfar[3]);
        // this is a frustum projection
        N = nearpoint.dot(nearpoint);
        e = farpoint[2];
        s = nearpoint[2];
        // projection on the XY plane for the distance-to-axis computation
        MT_Point2 farxy(farpoint[0], farpoint[1]);
        // f is forced positive by construction
        f = farxy.length();
        // get the corresponding point on the near plane
        farxy *= s/e;
        // this formula preserves the sign of n
        n = f*s/e - MT_Point2(nearpoint[0]-farxy[0], nearpoint[1]-farxy[1]).length();
        c = MT_Point2(farcenter[0], farcenter[1]).length()/e;
        // the big formula; it simplifies to (F-N)/(2(e-s)) for the symmetric case
        z = (F-N)/(2.0*(e-s+c*(f-n)));
        m_frustum_center = MT_Point3(farcenter[0]*z/e, farcenter[1]*z/e, z);
        m_frustum_radius = m_frustum_center.distance(farpoint);
    }
    else
    {
        // orthographic projection
        // The most extreme points on the near and far plane (normalized device coordinates).
        MT_Vector4 hnear(1., 1., 1., 1.), hfar(-1., -1., -1., 1.);

        // Transform to homogeneous camera local space
        hnear = clip_camcs_matrix*hnear;
        hfar = clip_camcs_matrix*hfar;

        // Transform to 3D camera local space.
        MT_Point3 nearpoint(hnear[0]/hnear[3], hnear[1]/hnear[3], hnear[2]/hnear[3]);
        MT_Point3 farpoint(hfar[0]/hfar[3], hfar[1]/hfar[3], hfar[2]/hfar[3]);

        // just use the midpoint of the two
        m_frustum_center = (farpoint + nearpoint)*0.5;
        m_frustum_radius = m_frustum_center.distance(farpoint);
    }
    // Transform to world space.
    m_frustum_center = GetCameraToWorld()(m_frustum_center);
    m_frustum_radius /= fabs(NodeGetWorldScaling()[NodeGetWorldScaling().closestAxis()]);

    m_set_frustum_center = true;
}

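/**
 * A point is inside the frustum when it lies on the positive side of all six
 * clip planes; a single failing plane test rejects it. The planes do not
 * need to be normalized for this sign-only check.
 */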
bool KX_Camera::PointInsideFrustum(const MT_Point3& x)
{
    ExtractClipPlanes();

    for (unsigned int i = 0; i < 6; i++)
    {
        if (m_planes[i][0]*x[0] + m_planes[i][1]*x[1] + m_planes[i][2]*x[2] + m_planes[i][3] < 0.)
            return false;
    }
    return true;
}

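/**
 * Classifies a box, given by its 8 corner points in world coordinates,
 * against the frustum: OUTSIDE if every corner is behind some single plane,
 * INSIDE if no corner is behind any plane, INTERSECT otherwise. The
 * per-plane test is conservative: a box that lies outside the frustum but is
 * not entirely behind any one plane is still reported as INTERSECT.
 */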
int KX_Camera::BoxInsideFrustum(const MT_Point3 *box)
{
    ExtractClipPlanes();

    unsigned int insideCount = 0;
    // 6 view frustum planes
    for (unsigned int p = 0; p < 6; p++)
    {
        unsigned int behindCount = 0;
        // 8 box vertices.
        for (unsigned int v = 0; v < 8; v++)
        {
            if (m_planes[p][0]*box[v][0] + m_planes[p][1]*box[v][1] + m_planes[p][2]*box[v][2] + m_planes[p][3] < 0.)
                behindCount++;
        }

        // 8 points behind this plane
        if (behindCount == 8)
            return OUTSIDE;

        // Every box vertex is on the front side of this plane
        if (!behindCount)
            insideCount++;
    }

    // All box vertices are on the front side of all frustum planes.
    if (insideCount == 6)
        return INSIDE;

    return INTERSECT;
}

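/**
 * Two-phase sphere test: first a cheap rejection against the frustum's
 * bounding sphere (ExtractFrustumSphere), then a signed-distance comparison
 * of the sphere center against each normalized clip plane.
 */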
int KX_Camera::SphereInsideFrustum(const MT_Point3& center, const MT_Scalar &radius)
{
    ExtractFrustumSphere();
    if (center.distance2(m_frustum_center) > (radius + m_frustum_radius)*(radius + m_frustum_radius))
        return OUTSIDE;

    unsigned int p;
    ExtractClipPlanes();
    NormalizeClipPlanes();

    MT_Scalar distance;
    int intersect = INSIDE;
    // distance:  <-------- OUTSIDE -----|----- INTERSECT -----0----- INTERSECT -----|----- INSIDE -------->
    //                                -radius                                      radius
    for (p = 0; p < 6; p++)
    {
        distance = m_planes[p][0]*center[0] + m_planes[p][1]*center[1] + m_planes[p][2]*center[2] + m_planes[p][3];
        if (fabs(distance) <= radius)
            intersect = INTERSECT;
        else if (distance < -radius)
            return OUTSIDE;
    }

    return intersect;
}

bool KX_Camera::GetFrustumCulling() const
{
    return m_frustum_culling;
}

void KX_Camera::EnableViewport(bool viewport)
{
    m_camdata.m_viewport = viewport;
}

void KX_Camera::SetViewport(int left, int bottom, int right, int top)
{
    m_camdata.m_viewportleft = left;
    m_camdata.m_viewportbottom = bottom;
    m_camdata.m_viewportright = right;
    m_camdata.m_viewporttop = top;
}

bool KX_Camera::GetViewport() const
{
    return m_camdata.m_viewport;
}

int KX_Camera::GetViewportLeft() const
{
    return m_camdata.m_viewportleft;
}

int KX_Camera::GetViewportBottom() const
{
    return m_camdata.m_viewportbottom;
}

int KX_Camera::GetViewportRight() const
{
    return m_camdata.m_viewportright;
}

int KX_Camera::GetViewportTop() const
{
    return m_camdata.m_viewporttop;
}

#ifdef WITH_PYTHON
//----------------------------------------------------------------------------
// Python


PyMethodDef KX_Camera::Methods[] = {
    KX_PYMETHODTABLE(KX_Camera, sphereInsideFrustum),
    KX_PYMETHODTABLE_O(KX_Camera, boxInsideFrustum),
    KX_PYMETHODTABLE_O(KX_Camera, pointInsideFrustum),
    KX_PYMETHODTABLE_NOARGS(KX_Camera, getCameraToWorld),
    KX_PYMETHODTABLE_NOARGS(KX_Camera, getWorldToCamera),
    KX_PYMETHODTABLE(KX_Camera, setViewport),
    KX_PYMETHODTABLE_NOARGS(KX_Camera, setOnTop),
    KX_PYMETHODTABLE_O(KX_Camera, getScreenPosition),
    KX_PYMETHODTABLE(KX_Camera, getScreenVect),
    KX_PYMETHODTABLE(KX_Camera, getScreenRay),
    {NULL, NULL} // Sentinel
};

PyAttributeDef KX_Camera::Attributes[] = {
    KX_PYATTRIBUTE_BOOL_RW("frustum_culling", KX_Camera, m_frustum_culling),
    KX_PYATTRIBUTE_RW_FUNCTION("perspective", KX_Camera, pyattr_get_perspective, pyattr_set_perspective),

    KX_PYATTRIBUTE_RW_FUNCTION("lens", KX_Camera, pyattr_get_lens, pyattr_set_lens),
    KX_PYATTRIBUTE_RW_FUNCTION("ortho_scale", KX_Camera, pyattr_get_ortho_scale, pyattr_set_ortho_scale),
    KX_PYATTRIBUTE_RW_FUNCTION("near", KX_Camera, pyattr_get_near, pyattr_set_near),
    KX_PYATTRIBUTE_RW_FUNCTION("far", KX_Camera, pyattr_get_far, pyattr_set_far),

    KX_PYATTRIBUTE_RW_FUNCTION("useViewport", KX_Camera, pyattr_get_use_viewport, pyattr_set_use_viewport),

    KX_PYATTRIBUTE_RW_FUNCTION("projection_matrix", KX_Camera, pyattr_get_projection_matrix, pyattr_set_projection_matrix),
    KX_PYATTRIBUTE_RO_FUNCTION("modelview_matrix", KX_Camera, pyattr_get_modelview_matrix),
    KX_PYATTRIBUTE_RO_FUNCTION("camera_to_world", KX_Camera, pyattr_get_camera_to_world),
    KX_PYATTRIBUTE_RO_FUNCTION("world_to_camera", KX_Camera, pyattr_get_world_to_camera),

    /* Grrr, functions for constants? */
    KX_PYATTRIBUTE_RO_FUNCTION("INSIDE", KX_Camera, pyattr_get_INSIDE),
    KX_PYATTRIBUTE_RO_FUNCTION("OUTSIDE", KX_Camera, pyattr_get_OUTSIDE),
    KX_PYATTRIBUTE_RO_FUNCTION("INTERSECT", KX_Camera, pyattr_get_INTERSECT),

    {NULL} // Sentinel
};

PyTypeObject KX_Camera::Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "KX_Camera",
    sizeof(PyObjectPlus_Proxy),
    0,
    py_base_dealloc,
    0,
    0,
    0,
    0,
    py_base_repr,
    0,
    &KX_GameObject::Sequence,
    &KX_GameObject::Mapping,
    0, 0, 0,
    NULL,
    NULL,
    0,
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
    0, 0, 0, 0, 0, 0, 0,
    Methods,
    0,
    0,
    &KX_GameObject::Type,
    0, 0, 0, 0, 0, 0,
    py_base_new
};

KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, sphereInsideFrustum,
"sphereInsideFrustum(center, radius) -> Integer\n"
"\treturns INSIDE, OUTSIDE or INTERSECT if the given sphere is\n"
"\tinside/outside/intersects this camera's viewing frustum.\n\n"
"\tcenter = the center of the sphere (in world coordinates.)\n"
"\tradius = the radius of the sphere\n\n"
"\tExample:\n"
"\timport bge.logic\n\n"
"\tco = bge.logic.getCurrentController()\n"
"\tcam = co.owner\n\n"
"\t# A sphere of radius 4.0 located at [x, y, z] = [1.0, 1.0, 1.0]\n"
"\tif (cam.sphereInsideFrustum([1.0, 1.0, 1.0], 4) != cam.OUTSIDE):\n"
"\t\t# Sphere is inside frustum !\n"
"\t\t# Do something useful !\n"
"\telse:\n"
"\t\t# Sphere is outside frustum\n"
)
{
    PyObject *pycenter;
    float radius;
    if (PyArg_ParseTuple(args, "Of:sphereInsideFrustum", &pycenter, &radius))
    {
        MT_Point3 center;
        if (PyVecTo(pycenter, center))
        {
            return PyLong_FromSsize_t(SphereInsideFrustum(center, radius)); /* new ref */
        }
    }

    PyErr_SetString(PyExc_TypeError, "camera.sphereInsideFrustum(center, radius): KX_Camera, expected arguments: (center, radius)");

    return NULL;
}

KX_PYMETHODDEF_DOC_O(KX_Camera, boxInsideFrustum,
"boxInsideFrustum(box) -> Integer\n"
"\treturns INSIDE, OUTSIDE or INTERSECT if the given box is\n"
"\tinside/outside/intersects this camera's viewing frustum.\n\n"
"\tbox = a list of the eight (8) corners of the box (in world coordinates.)\n\n"
"\tExample:\n"
"\timport bge.logic\n\n"
"\tco = bge.logic.getCurrentController()\n"
"\tcam = co.owner\n\n"
"\tbox = []\n"
"\tbox.append([-1.0, -1.0, -1.0])\n"
"\tbox.append([-1.0, -1.0,  1.0])\n"
"\tbox.append([-1.0,  1.0, -1.0])\n"
"\tbox.append([-1.0,  1.0,  1.0])\n"
"\tbox.append([ 1.0, -1.0, -1.0])\n"
"\tbox.append([ 1.0, -1.0,  1.0])\n"
"\tbox.append([ 1.0,  1.0, -1.0])\n"
"\tbox.append([ 1.0,  1.0,  1.0])\n\n"
"\tif (cam.boxInsideFrustum(box) != cam.OUTSIDE):\n"
"\t\t# Box is inside/intersects frustum !\n"
"\t\t# Do something useful !\n"
"\telse:\n"
"\t\t# Box is outside the frustum !\n"
)
{
    unsigned int num_points = PySequence_Size(value);
    if (num_points != 8)
    {
        PyErr_Format(PyExc_TypeError, "camera.boxInsideFrustum(box): KX_Camera, expected eight (8) points, got %d", num_points);
        return NULL;
    }

    MT_Point3 box[8];
    for (unsigned int p = 0; p < 8; p++)
    {
        PyObject *item = PySequence_GetItem(value, p); /* new ref */
        bool error = !PyVecTo(item, box[p]);
        Py_DECREF(item);
        if (error)
            return NULL;
    }

    return PyLong_FromSsize_t(BoxInsideFrustum(box)); /* new ref */
}

KX_PYMETHODDEF_DOC_O(KX_Camera, pointInsideFrustum,
"pointInsideFrustum(point) -> Bool\n"
"\treturns 1 if the given point is inside this camera's viewing frustum.\n\n"
"\tpoint = The point to test (in world coordinates.)\n\n"
"\tExample:\n"
"\timport bge.logic\n\n"
"\tco = bge.logic.getCurrentController()\n"
"\tcam = co.owner\n\n"
"\t# Test point [0.0, 0.0, 0.0]\n"
"\tif (cam.pointInsideFrustum([0.0, 0.0, 0.0])):\n"
"\t\t# Point is inside frustum !\n"
"\t\t# Do something useful !\n"
"\telse:\n"
"\t\t# Point is outside the frustum !\n"
)
{
    MT_Point3 point;
    if (PyVecTo(value, point))
    {
        return PyLong_FromSsize_t(PointInsideFrustum(point)); /* new ref */
    }

    PyErr_SetString(PyExc_TypeError, "camera.pointInsideFrustum(point): KX_Camera, expected point argument.");
    return NULL;
}

KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, getCameraToWorld,
"getCameraToWorld() -> Matrix4x4\n"
"\treturns the camera to world transformation matrix, as a list of four lists of four values.\n\n"
"\tie: [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])\n"
)
{
    return PyObjectFrom(GetCameraToWorld()); /* new ref */
}

KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, getWorldToCamera,
"getWorldToCamera() -> Matrix4x4\n"
"\treturns the world to camera transformation matrix, as a list of four lists of four values.\n\n"
"\tie: [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])\n"
)
{
    return PyObjectFrom(GetWorldToCamera()); /* new ref */
}

KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, setViewport,
"setViewport(left, bottom, right, top)\n"
"Sets this camera's viewport\n")
{
    int left, bottom, right, top;
    if (!PyArg_ParseTuple(args, "iiii:setViewport", &left, &bottom, &right, &top))
        return NULL;

    SetViewport(left, bottom, right, top);
    Py_RETURN_NONE;
}

KX_PYMETHODDEF_DOC_NOARGS(KX_Camera, setOnTop,
"setOnTop()\n"
"Sets this camera's viewport on top\n")
{
    class KX_Scene* scene = KX_GetActiveScene();
    scene->SetCameraOnTop(this);
    Py_RETURN_NONE;
}

PyObject* KX_Camera::pyattr_get_perspective(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyBool_FromLong(self->m_camdata.m_perspective);
}

int KX_Camera::pyattr_set_perspective(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    int param = PyObject_IsTrue(value);
    if (param == -1) {
        PyErr_SetString(PyExc_AttributeError, "camera.perspective = bool: KX_Camera, expected True/False or 0/1");
        return PY_SET_ATTR_FAIL;
    }

    self->m_camdata.m_perspective = param;
    self->InvalidateProjectionMatrix();
    return PY_SET_ATTR_SUCCESS;
}

PyObject* KX_Camera::pyattr_get_lens(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyFloat_FromDouble(self->m_camdata.m_lens);
}

int KX_Camera::pyattr_set_lens(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    float param = PyFloat_AsDouble(value);
    if (param == -1) {
        PyErr_SetString(PyExc_AttributeError, "camera.lens = float: KX_Camera, expected a float greater than zero");
        return PY_SET_ATTR_FAIL;
    }

    self->m_camdata.m_lens = param;
    self->m_set_projection_matrix = false;
    return PY_SET_ATTR_SUCCESS;
}

PyObject* KX_Camera::pyattr_get_ortho_scale(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyFloat_FromDouble(self->m_camdata.m_scale);
}

int KX_Camera::pyattr_set_ortho_scale(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    float param = PyFloat_AsDouble(value);
    if (param == -1) {
        PyErr_SetString(PyExc_AttributeError, "camera.ortho_scale = float: KX_Camera, expected a float greater than zero");
        return PY_SET_ATTR_FAIL;
    }

    self->m_camdata.m_scale = param;
    self->m_set_projection_matrix = false;
    return PY_SET_ATTR_SUCCESS;
}

PyObject* KX_Camera::pyattr_get_near(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyFloat_FromDouble(self->m_camdata.m_clipstart);
}

int KX_Camera::pyattr_set_near(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    float param = PyFloat_AsDouble(value);
    if (param == -1) {
        PyErr_SetString(PyExc_AttributeError, "camera.near = float: KX_Camera, expected a float greater than zero");
        return PY_SET_ATTR_FAIL;
    }

    self->m_camdata.m_clipstart = param;
    self->m_set_projection_matrix = false;
    return PY_SET_ATTR_SUCCESS;
}

PyObject* KX_Camera::pyattr_get_far(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyFloat_FromDouble(self->m_camdata.m_clipend);
}

int KX_Camera::pyattr_set_far(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    float param = PyFloat_AsDouble(value);
    if (param == -1) {
        PyErr_SetString(PyExc_AttributeError, "camera.far = float: KX_Camera, expected a float greater than zero");
        return PY_SET_ATTR_FAIL;
    }

    self->m_camdata.m_clipend = param;
    self->m_set_projection_matrix = false;
    return PY_SET_ATTR_SUCCESS;
}


PyObject* KX_Camera::pyattr_get_use_viewport(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyBool_FromLong(self->GetViewport());
}

int KX_Camera::pyattr_set_use_viewport(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    int param = PyObject_IsTrue(value);
    if (param == -1) {
        PyErr_SetString(PyExc_AttributeError, "camera.useViewport = bool: KX_Camera, expected True or False");
        return PY_SET_ATTR_FAIL;
    }
    self->EnableViewport((bool)param);
    return PY_SET_ATTR_SUCCESS;
}


PyObject* KX_Camera::pyattr_get_projection_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyObjectFrom(self->GetProjectionMatrix());
}

int KX_Camera::pyattr_set_projection_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    MT_Matrix4x4 mat;
    if (!PyMatTo(value, mat))
        return PY_SET_ATTR_FAIL;

    self->SetProjectionMatrix(mat);
    return PY_SET_ATTR_SUCCESS;
}

PyObject* KX_Camera::pyattr_get_modelview_matrix(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyObjectFrom(self->GetModelviewMatrix());
}

PyObject* KX_Camera::pyattr_get_camera_to_world(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyObjectFrom(self->GetCameraToWorld());
}

PyObject* KX_Camera::pyattr_get_world_to_camera(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
    KX_Camera* self = static_cast<KX_Camera*>(self_v);
    return PyObjectFrom(self->GetWorldToCamera());
}


PyObject* KX_Camera::pyattr_get_INSIDE(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{ return PyLong_FromSsize_t(INSIDE); }
PyObject* KX_Camera::pyattr_get_OUTSIDE(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{ return PyLong_FromSsize_t(OUTSIDE); }
PyObject* KX_Camera::pyattr_get_INTERSECT(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{ return PyLong_FromSsize_t(INTERSECT); }


bool ConvertPythonToCamera(PyObject *value, KX_Camera **object, bool py_none_ok, const char *error_prefix)
{
    if (value == NULL) {
        PyErr_Format(PyExc_TypeError, "%s, python pointer NULL, should never happen", error_prefix);
        *object = NULL;
        return false;
    }

    if (value == Py_None) {
        *object = NULL;

        if (py_none_ok) {
            return true;
        } else {
            PyErr_Format(PyExc_TypeError, "%s, expected KX_Camera or a KX_Camera name, None is invalid", error_prefix);
            return false;
        }
    }

    if (PyUnicode_Check(value)) {
        STR_String value_str = _PyUnicode_AsString(value);
        *object = KX_GetActiveScene()->FindCamera(value_str);

        if (*object) {
            return true;
        } else {
            PyErr_Format(PyExc_ValueError, "%s, requested name \"%s\" did not match any KX_Camera in this scene", error_prefix, _PyUnicode_AsString(value));
            return false;
        }
    }

    if (PyObject_TypeCheck(value, &KX_Camera::Type)) {
        *object = static_cast<KX_Camera*>BGE_PROXY_REF(value);

        /* sets the error */
        if (*object == NULL) {
            PyErr_Format(PyExc_SystemError, "%s, " BGE_PROXY_ERROR_MSG, error_prefix);
            return false;
        }

        return true;
    }

    *object = NULL;

    if (py_none_ok) {
        PyErr_Format(PyExc_TypeError, "%s, expect a KX_Camera, a string or None", error_prefix);
    } else {
        PyErr_Format(PyExc_TypeError, "%s, expect a KX_Camera or a string", error_prefix);
    }

    return false;
}

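/* Projects a world-space point (or a game object's world position) with
 * gluProject and returns normalized (0..1) screen coordinates as a 2-tuple,
 * with Y flipped to match Blender's top-down window convention.
 *
 * Example (Python; assumes the current controller's owner is this camera):
 *   import bge
 *   cam = bge.logic.getCurrentController().owner
 *   x, y = cam.getScreenPosition([0.0, 0.0, 0.0])
 */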
KX_PYMETHODDEF_DOC_O(KX_Camera, getScreenPosition,
"getScreenPosition()\n"
)
{
    MT_Vector3 vect;
    KX_GameObject *obj = NULL;

    if (!PyVecTo(value, vect))
    {
        PyErr_Clear();

        if (ConvertPythonToGameObject(value, &obj, true, ""))
        {
            PyErr_Clear();
            vect = MT_Vector3(obj->NodeGetWorldPosition());
        }
        else
        {
            PyErr_SetString(PyExc_TypeError, "Error in getScreenPosition. Expected a Vector3 or a KX_GameObject or a string for a name of a KX_GameObject");
            return NULL;
        }
    }

    GLint viewport[4];
    GLdouble win[3];
    GLdouble modelmatrix[16];
    GLdouble projmatrix[16];

    MT_Matrix4x4 m_modelmatrix = this->GetModelviewMatrix();
    MT_Matrix4x4 m_projmatrix = this->GetProjectionMatrix();

    m_modelmatrix.getValue(modelmatrix);
    m_projmatrix.getValue(projmatrix);

    glGetIntegerv(GL_VIEWPORT, viewport);

    gluProject(vect[0], vect[1], vect[2], modelmatrix, projmatrix, viewport, &win[0], &win[1], &win[2]);

    vect[0] = (win[0] - viewport[0]) / viewport[2];
    vect[1] = (win[1] - viewport[1]) / viewport[3];

    vect[1] = 1.0 - vect[1]; // to follow Blender window coordinate system (Top-Down)

    PyObject* ret = PyTuple_New(2);
    if (ret) {
        PyTuple_SET_ITEM(ret, 0, PyFloat_FromDouble(vect[0]));
        PyTuple_SET_ITEM(ret, 1, PyFloat_FromDouble(vect[1]));
        return ret;
    }

    return NULL;
}

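/* Maps a normalized 2D screen coordinate back into world space via
 * gluUnProject (using a depth value read with glReadPixels) and returns the
 * normalized vector from that world point towards the camera position.
 *
 * Example (Python; assumes the current controller's owner is this camera):
 *   import bge
 *   cam = bge.logic.getCurrentController().owner
 *   direction = cam.getScreenVect(0.5, 0.5)  # vector under the screen center
 */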
KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, getScreenVect,
"getScreenVect()\n"
)
{
    double x, y;
    if (!PyArg_ParseTuple(args, "dd:getScreenVect", &x, &y))
        return NULL;

    y = 1.0 - y; // to follow Blender window coordinate system (Top-Down)

    MT_Vector3 vect;
    MT_Point3 campos, screenpos;

    GLint viewport[4];
    GLdouble win[3];
    GLdouble modelmatrix[16];
    GLdouble projmatrix[16];

    MT_Matrix4x4 m_modelmatrix = this->GetModelviewMatrix();
    MT_Matrix4x4 m_projmatrix = this->GetProjectionMatrix();

    m_modelmatrix.getValue(modelmatrix);
    m_projmatrix.getValue(projmatrix);

    glGetIntegerv(GL_VIEWPORT, viewport);

    vect[0] = x * viewport[2];
    vect[1] = y * viewport[3];

    vect[0] += viewport[0];
    vect[1] += viewport[1];

    glReadPixels(x, y, 1, 1, GL_DEPTH_COMPONENT, GL_FLOAT, &vect[2]);
    gluUnProject(vect[0], vect[1], vect[2], modelmatrix, projmatrix, viewport, &win[0], &win[1], &win[2]);

    campos = this->GetCameraLocation();
    screenpos = MT_Point3(win[0], win[1], win[2]);
    vect = campos - screenpos;

    vect.normalize();
    return PyObjectFrom(vect);
}

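/* Builds on getScreenVect(): converts the 2D screen coordinate to a world
 * direction, then delegates to rayCastTo() with the given distance and the
 * optional property filter, returning the first object hit or None.
 *
 * Example (Python; assumes the current controller's owner is this camera):
 *   import bge
 *   cam = bge.logic.getCurrentController().owner
 *   hit = cam.getScreenRay(0.5, 0.5, 100.0)  # ray through the screen center
 */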
KX_PYMETHODDEF_DOC_VARARGS(KX_Camera, getScreenRay,
"getScreenRay()\n"
)
{
    MT_Vector3 vect;
    double x, y, dist;
    char *propName = NULL;

    if (!PyArg_ParseTuple(args, "ddd|s:getScreenRay", &x, &y, &dist, &propName))
        return NULL;

    PyObject* argValue = PyTuple_New(2);
    if (argValue) {
        PyTuple_SET_ITEM(argValue, 0, PyFloat_FromDouble(x));
        PyTuple_SET_ITEM(argValue, 1, PyFloat_FromDouble(y));
    }

    if (!PyVecTo(PygetScreenVect(argValue), vect))
    {
        Py_DECREF(argValue);
        PyErr_SetString(PyExc_TypeError,
            "Error in getScreenRay. Invalid 2D coordinate. Expected a normalized 2D screen coordinate, a distance and an optional property argument");
        return NULL;
    }
    Py_DECREF(argValue);

    dist = -dist;
    vect += this->GetCameraLocation();

    argValue = (propName ? PyTuple_New(3) : PyTuple_New(2));
    if (argValue) {
        PyTuple_SET_ITEM(argValue, 0, PyObjectFrom(vect));
        PyTuple_SET_ITEM(argValue, 1, PyFloat_FromDouble(dist));
        if (propName)
            PyTuple_SET_ITEM(argValue, 2, PyUnicode_FromString(propName));

        PyObject* ret = this->PyrayCastTo(argValue, NULL);
        Py_DECREF(argValue);
        return ret;
    }

    return NULL;
}
#endif  // WITH_PYTHON