Some bug fixes and coding style changes suggested by ideasman_42.
author     Benjy Cook <benjycook@hotmail.com>
           Sat, 25 Jun 2011 23:50:50 +0000 (23:50 +0000)
committer  Benjy Cook <benjycook@hotmail.com>
           Sat, 25 Jun 2011 23:50:50 +0000 (23:50 +0000)
release/scripts/modules/mocap_tools.py
release/scripts/modules/retarget.py
release/scripts/startup/ui_mocap.py

index 739dc40b2727a968c1fe9cfb96afaaa897d83842..dfb8aaf0eeccf4298f4f1c590ed328e4fadf4754 100644 (file)
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
 from math import hypot, sqrt, isfinite
 import bpy
 import time
 from mathutils import Vector
 
+
 #Vector utility functions
 class NdVector:
     vec = []
-    
-    def __init__(self,vec):
+
+    def __init__(self, vec):
         self.vec = vec[:]
-        
+
     def __len__(self):
         return len(self.vec)
-    
-    def __mul__(self,otherMember):
-        if type(otherMember)==type(1) or type(otherMember)==type(1.0):
-            return NdVector([otherMember*x for x in self.vec])
+
+    def __mul__(self, otherMember):
+        if (isinstance(otherMember, int) or
+            isinstance(otherMember, float)):
+            return NdVector([otherMember * x for x in self.vec])
         else:
             a = self.vec
             b = otherMember.vec
             n = len(self)
-            return sum([a[i]*b[i] for i in range(n)])
-    
-    def __sub__(self,otherVec):
+            return sum([a[i] * b[i] for i in range(n)])
+
+    def __sub__(self, otherVec):
         a = self.vec
         b = otherVec.vec
         n = len(self)
-        return NdVector([a[i]-b[i] for i in range(n)])
-        
-    def __add__(self,otherVec):
+        return NdVector([a[i] - b[i] for i in range(n)])
+
+    def __add__(self, otherVec):
         a = self.vec
         b = otherVec.vec
         n = len(self)
-        return NdVector([a[i]+b[i] for i in range(n)])
-    
+        return NdVector([a[i] + b[i] for i in range(n)])
+
+    def __div__(self, scalar):
+        return NdVector([x / scalar for x in self.vec])
+
     def vecLength(self):
         return sqrt(self * self)
-        
+
     def vecLengthSq(self):
         return (self * self)
-        
-    def __getitem__(self,i):
+
+    def normalize(self):
+        len = self.length
+        self.vec = [x / len for x in self.vec]
+
+    def copy(self):
+        return NdVector(self.vec)
+
+    def __getitem__(self, i):
         return self.vec[i]
-    
+
+    def x(self):
+        return self.vec[0]
+
+    def y(self):
+        return self.vec[1]
+
     length = property(vecLength)
     lengthSq = property(vecLengthSq)
+    x = property(x)
+    y = property(y)
+
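
A quick usage sketch of the NdVector helper above (illustrative only, not part of the patch; assumes the module is importable from release/scripts/modules):

from mocap_tools import NdVector

v1 = NdVector((1.0, 2.0, 3.0, 0.0, 0.0))
v2 = NdVector((4.0, 5.0, 6.0, 0.0, 0.0))
dot = v1 * v2              # vector * vector gives the dot product (32.0)
scaled = v1 * 2.0          # vector * scalar scales component-wise
dist = (v2 - v1).length    # Euclidean length of the difference
unit = v1.copy()
unit.normalize()           # in-place, uses the length property
print(dot, scaled.vec, dist, unit.length)
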
 
 class dataPoint:
     index = 0
-    co = Vector((0,0,0,0)) # x,y1,y2,y3 coordinate of original point
-    u = 0 #position according to parametric view of original data, [0,1] range
-    temp = 0 #use this for anything
-
-    def __init__(self,index,co,u=0):
+    # x, y1, y2, y3, y4 coordinates of the original point
+    co = NdVector((0, 0, 0, 0, 0))
+    #position according to parametric view of original data, [0,1] range
+    u = 0
+    #use this for anything
+    temp = 0
+
+    def __init__(self, index, co, u=0):
         self.index = index
         self.co = co
         self.u = u
 
+
 def autoloop_anim():
     context = bpy.context
     obj = context.active_object
@@ -64,388 +109,407 @@ def autoloop_anim():
 
     data = []
     end = len(fcurves[0].keyframe_points)
-        
-    for i in range(1,end):
+
+    for i in range(1, end):
         vec = []
         for fcurve in fcurves:
             vec.append(fcurve.evaluate(i))
         data.append(NdVector(vec))
-    
-    def comp(a,b):
-        return a*b
-    
+
+    def comp(a, b):
+        return a * b
+
     N = len(data)
     Rxy = [0.0] * N
     for i in range(N):
-        for j in range(i,min(i+N,N)):
-            Rxy[i]+=comp(data[j],data[j-i]) 
+        for j in range(i, min(i + N, N)):
+            Rxy[i] += comp(data[j], data[j - i])
         for j in range(i):
-            Rxy[i]+=comp(data[j],data[j-i+N])
-        Rxy[i]/=float(N)
-    
+            Rxy[i] += comp(data[j], data[j - i + N])
+        Rxy[i] /= float(N)
+
     def bestLocalMaximum(Rxy):
-        Rxyd = [Rxy[i]-Rxy[i-1] for i in range(1,len(Rxy))]
+        Rxyd = [Rxy[i] - Rxy[i - 1] for i in range(1, len(Rxy))]
         maxs = []
-        for i in range(1,len(Rxyd)-1):
-            a = Rxyd[i-1]
+        for i in range(1, len(Rxyd) - 1):
+            a = Rxyd[i - 1]
             b = Rxyd[i]
-            print(a,b)
-            if (a>=0 and b<0) or (a<0 and b>=0): #sign change (zerocrossing) at point i, denoting max point (only)
-                maxs.append((i,max(Rxy[i],Rxy[i-1])))
-        return max(maxs,key=lambda x: x[1])[0]         
+            print(a, b)
+            #sign change (zerocrossing) at point i, denoting max point (only)
+            if (a >= 0 and b < 0) or (a < 0 and b >= 0):
+                maxs.append((i, max(Rxy[i], Rxy[i - 1])))
+        return max(maxs, key=lambda x: x[1])[0]
     flm = bestLocalMaximum(Rxy[0:int(len(Rxy))])
-    
+
     diff = []
-    
-    for i in range(len(data)-flm):
-        diff.append((data[i]-data[i+flm]).lengthSq)
-    
-    def lowerErrorSlice(diff,e):
-        bestSlice = (0,100000) #index, error at index
-        for i in range(e,len(diff)-e):
-            errorSlice = sum(diff[i-e:i+e+1])
-            if errorSlice<bestSlice[1]:
-                bestSlice = (i,errorSlice)
+
+    for i in range(len(data) - flm):
+        diff.append((data[i] - data[i + flm]).lengthSq)
+
+    def lowerErrorSlice(diff, e):
+        #index, error at index
+        bestSlice = (0, 100000)
+        for i in range(e, len(diff) - e):
+            errorSlice = sum(diff[i - e:i + e + 1])
+            if errorSlice < bestSlice[1]:
+                bestSlice = (i, errorSlice)
         return bestSlice[0]
-    
+
     margin = 2
-    
-    s = lowerErrorSlice(diff,margin)
-    
-    print(flm,s)
-    loop = data[s:s+flm+margin]
-    
-    #find *all* loops, s:s+flm, s+flm:s+2flm, etc... and interpolate between all
-    # to find "the perfect loop". Maybe before finding s? interp(i,i+flm,i+2flm)....
-    for i in range(1,margin+1):
-        w1 = sqrt(float(i)/margin)
-        loop[-i] = (loop[-i]*w1)+(loop[0]*(1-w1))
-
-    
+
+    s = lowerErrorSlice(diff, margin)
+
+    print(flm, s)
+    loop = data[s:s + flm + margin]
+
+    #find *all* loops, s:s+flm, s+flm:s+2flm, etc...
+    #and interpolate between all
+    # to find "the perfect loop".
+    #Maybe before finding s? interp(i,i+flm,i+2flm)....
+    for i in range(1, margin + 1):
+        w1 = sqrt(float(i) / margin)
+        loop[-i] = (loop[-i] * w1) + (loop[0] * (1 - w1))
+
     for curve in fcurves:
         pts = curve.keyframe_points
-        for i in range(len(pts)-1,-1,-1):
+        for i in range(len(pts) - 1, -1, -1):
             pts.remove(pts[i])
-    
-    for c,curve in enumerate(fcurves):
+
+    for c, curve in enumerate(fcurves):
         pts = curve.keyframe_points
         for i in range(len(loop)):
-            pts.insert(i+1,loop[i][c])
-            
-    context.scene.frame_end = flm+1
-    
-    
-    
-    
+            pts.insert(i + 1, loop[i][c])
+
+    context.scene.frame_end = flm + 1
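
The loop detection above is a circular autocorrelation (Rxy) followed by a search for its strongest local maximum. A standalone sketch of the same idea on a toy scalar signal (not part of the patch):

from math import sin, pi

signal = [sin(2 * pi * i / 25.0) for i in range(100)]   # period of 25 samples
N = len(signal)

# circular autocorrelation, equivalent to the two wrap-around loops above
Rxy = [sum(signal[j] * signal[(j - lag) % N] for j in range(N)) / N
       for lag in range(N)]

# lag of the strongest local maximum (lag 0 is excluded)
peaks = [(lag, Rxy[lag]) for lag in range(1, N - 1)
         if Rxy[lag - 1] < Rxy[lag] >= Rxy[lag + 1]]
best_lag = max(peaks, key=lambda p: p[1])[0]
print(best_lag)   # expected to land near 25
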
 
 
 def simplifyCurves(curveGroup, error, reparaError, maxIterations, group_mode):
 
-    def unitTangent(v,data_pts):
-        tang = Vector((0,0,0,0)) #
-        if v!=0:
+    def unitTangent(v, data_pts):
+        tang = NdVector((0, 0, 0, 0, 0))
+        if v != 0:
             #If it's not the first point, we can calculate a leftside tangent
-            tang+= data_pts[v].co-data_pts[v-1].co
-        if v!=len(data_pts)-1:
+            tang += data_pts[v].co - data_pts[v - 1].co
+        if v != len(data_pts) - 1:
             #If it's not the last point, we can calculate a rightside tangent
-            tang+= data_pts[v+1].co-data_pts[v].co
+            tang += data_pts[v + 1].co - data_pts[v].co
         tang.normalize()
         return tang
 
     #assign parametric u value for each point in original data
-    def chordLength(data_pts,s,e):
+    def chordLength(data_pts, s, e):
         totalLength = 0
-        for pt in data_pts[s:e+1]:
+        for pt in data_pts[s:e + 1]:
             i = pt.index
-            if i==s:
+            if i == s:
                 chordLength = 0
             else:
-                chordLength = (data_pts[i].co-data_pts[i-1].co).length
-            totalLength+= chordLength
+                chordLength = (data_pts[i].co - data_pts[i - 1].co).length
+            totalLength += chordLength
             pt.temp = totalLength
-        for pt in data_pts[s:e+1]:
-            if totalLength==0:
-                print(s,e)
-            pt.u = (pt.temp/totalLength)
-
-    # get binomial coefficient, this function/table is only called with args (3,0),(3,1),(3,2),(3,3),(2,0),(2,1),(2,2)!
-    binomDict = {(3,0): 1, (3,1): 3, (3,2): 3, (3,3): 1, (2,0): 1, (2,1): 2, (2,2): 1}
+        for pt in data_pts[s:e + 1]:
+            if totalLength == 0:
+                print(s, e)
+            pt.u = (pt.temp / totalLength)
+
+    # get binomial coefficient, this function/table is only called with args
+    # (3,0),(3,1),(3,2),(3,3),(2,0),(2,1),(2,2)!
+    binomDict = {(3, 0): 1,
+    (3, 1): 3,
+    (3, 2): 3,
+    (3, 3): 1,
+    (2, 0): 1,
+    (2, 1): 2,
+    (2, 2): 1}
     #value at pt t of a single bernstein Polynomial
 
-    def bernsteinPoly(n,i,t):
-        binomCoeff = binomDict[(n,i)]
-        return binomCoeff * pow(t,i) * pow(1-t,n-i)
-            
-    # fit a single cubic to data points in range [s(tart),e(nd)].  
-    def fitSingleCubic(data_pts,s,e):
+    def bernsteinPoly(n, i, t):
+        binomCoeff = binomDict[(n, i)]
+        return binomCoeff * pow(t, i) * pow(1 - t, n - i)
+
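The hard-coded binomDict above covers exactly the (n, i) pairs needed for quadratic and cubic Bernstein polynomials. A small self-check, outside the patch, which also shows why a Bezier point is a convex combination of its control points (the basis sums to 1):

from math import factorial

def binom(n, i):
    return factorial(n) // (factorial(i) * factorial(n - i))

binomDict = {(3, 0): 1, (3, 1): 3, (3, 2): 3, (3, 3): 1,
             (2, 0): 1, (2, 1): 2, (2, 2): 1}
assert all(binom(n, i) == v for (n, i), v in binomDict.items())

def bernstein(n, i, t):
    return binomDict[(n, i)] * pow(t, i) * pow(1 - t, n - i)

t = 0.3
assert abs(sum(bernstein(3, i, t) for i in range(4)) - 1.0) < 1e-12
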
+    # fit a single cubic to data points in range [s(tart),e(nd)].
+    def fitSingleCubic(data_pts, s, e):
 
         # A - matrix used for calculating C matrices for fitting
-        def A(i,j,s,e,t1,t2):
-            if j==1:
+        def A(i, j, s, e, t1, t2):
+            if j == 1:
                 t = t1
-            if j==2:
+            if j == 2:
                 t = t2
             u = data_pts[i].u
-            return t * bernsteinPoly(3,j,u)
-        
+            return t * bernsteinPoly(3, j, u)
+
         # X component, used for calculating X matrices for fitting
-        def xComponent(i,s,e):
+        def xComponent(i, s, e):
             di = data_pts[i].co
             u = data_pts[i].u
             v0 = data_pts[s].co
             v3 = data_pts[e].co
-            a = v0*bernsteinPoly(3,0,u)
-            b = v0*bernsteinPoly(3,1,u) #
-            c = v3*bernsteinPoly(3,2,u)
-            d = v3*bernsteinPoly(3,3,u)
-            return (di -(a+b+c+d))
-        
-        t1 = unitTangent(s,data_pts)
-        t2 = unitTangent(e,data_pts)    
-        c11 = sum([A(i,1,s,e,t1,t2)*A(i,1,s,e,t1,t2) for i in range(s,e+1)])
-        c12 = sum([A(i,1,s,e,t1,t2)*A(i,2,s,e,t1,t2) for i in range(s,e+1)])
+            a = v0 * bernsteinPoly(3, 0, u)
+            b = v0 * bernsteinPoly(3, 1, u)
+            c = v3 * bernsteinPoly(3, 2, u)
+            d = v3 * bernsteinPoly(3, 3, u)
+            return (di - (a + b + c + d))
+
+        t1 = unitTangent(s, data_pts)
+        t2 = unitTangent(e, data_pts)
+        c11 = sum([A(i, 1, s, e, t1, t2) * A(i, 1, s, e, t1, t2) for i in range(s, e + 1)])
+        c12 = sum([A(i, 1, s, e, t1, t2) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])
         c21 = c12
-        c22 = sum([A(i,2,s,e,t1,t2)*A(i,2,s,e,t1,t2) for i in range(s,e+1)])
-        
-        x1 = sum([xComponent(i,s,e)*A(i,1,s,e,t1,t2) for i in range(s,e+1)])
-        x2 = sum([xComponent(i,s,e)*A(i,2,s,e,t1,t2) for i in range(s,e+1)])
-        
+        c22 = sum([A(i, 2, s, e, t1, t2) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])
+
+        x1 = sum([xComponent(i, s, e) * A(i, 1, s, e, t1, t2) for i in range(s, e + 1)])
+        x2 = sum([xComponent(i, s, e) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])
+
         # calculate the determinants of the 3 matrices
         det_cc = c11 * c22 - c21 * c12
         det_cx = c11 * x2 - c12 * x1
         det_xc = x1 * c22 - x2 * c12
 
-        # if matrix is not homogenous, fudge the data a bit 
+        # if the matrix is singular, fudge the determinant a bit
         if det_cc == 0:
-            det_cc=0.01
+            det_cc = 0.01
 
         # alpha's are the correct offset for bezier handles
-        alpha0 = det_xc / det_cc #offset from right (first) point
-        alpha1 = det_cx / det_cc #offset from left (last) point
-       
+        alpha0 = det_xc / det_cc   # offset from right (first) point
+        alpha1 = det_cx / det_cc   # offset from left (last) point
+
         sRightHandle = data_pts[s].co.copy()
-        sTangent = t1*abs(alpha0)
-        sRightHandle+= sTangent #position of first pt's handle
+        sTangent = t1 * abs(alpha0)
+        sRightHandle += sTangent  # position of first pt's handle
         eLeftHandle = data_pts[e].co.copy()
-        eTangent = t2*abs(alpha1)
-        eLeftHandle+= eTangent #position of last pt's handle.
-        
-        #return a 4 member tuple representing the bezier
+        eTangent = t2 * abs(alpha1)
+        eLeftHandle += eTangent  # position of last pt's handle.
+
+        # return a 4 member tuple representing the bezier
         return (data_pts[s].co,
               sRightHandle,
               eLeftHandle,
               data_pts[e].co)
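
The det_cc / det_cx / det_xc expressions in fitSingleCubic above are Cramer's rule applied to the symmetric 2x2 normal equations in the two handle offsets. A toy numeric check (invented numbers, not from the patch):

c11, c12, c22 = 4.0, 1.5, 3.0
c21 = c12
x1, x2 = 2.0, 1.0
det_cc = c11 * c22 - c21 * c12
alpha0 = (x1 * c22 - x2 * c12) / det_cc   # same as det_xc / det_cc
alpha1 = (c11 * x2 - c12 * x1) / det_cc   # same as det_cx / det_cc
# both residuals of the 2x2 system should be ~0
assert abs(c11 * alpha0 + c12 * alpha1 - x1) < 1e-9
assert abs(c21 * alpha0 + c22 * alpha1 - x2) < 1e-9
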
 
     # convert 2 given data points into a cubic bezier.
-    # handles are offset along the tangent at a 3rd of the length between the points.          
-    def fitSingleCubic2Pts(data_pts,s,e):
-        alpha0 = alpha1 = (data_pts[s].co-data_pts[e].co).length / 3
+    # handles are offset along the tangent at
+    # a 3rd of the length between the points.
+    def fitSingleCubic2Pts(data_pts, s, e):
+        alpha0 = alpha1 = (data_pts[s].co - data_pts[e].co).length / 3
 
         sRightHandle = data_pts[s].co.copy()
-        sTangent = unitTangent(s,data_pts)*abs(alpha0)
-        sRightHandle+= sTangent #position of first pt's handle
+        sTangent = unitTangent(s, data_pts) * abs(alpha0)
+        sRightHandle += sTangent  # position of first pt's handle
         eLeftHandle = data_pts[e].co.copy()
-        eTangent = unitTangent(e,data_pts)*abs(alpha1)
-        eLeftHandle+= eTangent #position of last pt's handle.
-        
+        eTangent = unitTangent(e, data_pts) * abs(alpha1)
+        eLeftHandle += eTangent  # position of last pt's handle.
+
         #return a 4 member tuple representing the bezier
         return (data_pts[s].co,
           sRightHandle,
           eLeftHandle,
           data_pts[e].co)
-              
+
     #evaluate bezier, represented by a 4 member tuple (pts) at point t.
-    def bezierEval(pts,t):
-        sumVec = Vector((0,0,0,0))
+    def bezierEval(pts, t):
+        sumVec = NdVector((0, 0, 0, 0, 0))
         for i in range(4):
-            sumVec+=pts[i]*bernsteinPoly(3,i,t)
+            sumVec += pts[i] * bernsteinPoly(3, i, t)
         return sumVec
 
     #calculate the highest error between bezier and original data
     #returns the distance and the index of the point where max error occurs.
-    def maxErrorAmount(data_pts,bez,s,e):
+    def maxErrorAmount(data_pts, bez, s, e):
         maxError = 0
         maxErrorPt = s
-        if e-s<3: return 0, None
-        for pt in data_pts[s:e+1]:
-            bezVal = bezierEval(bez,pt.u) 
-            tmpError = (pt.co-bezVal).length/pt.co.length
+        if e - s < 3:
+            return 0, None
+        for pt in data_pts[s:e + 1]:
+            bezVal = bezierEval(bez, pt.u)
+            tmpError = (pt.co - bezVal).length / pt.co.length
             if tmpError >= maxError:
                 maxError = tmpError
                 maxErrorPt = pt.index
-        return maxError,maxErrorPt
-
+        return maxError, maxErrorPt
 
     #calculated bezier derivative at point t.
     #That is, tangent of point t.
-    def getBezDerivative(bez,t):
-        n = len(bez)-1
-        sumVec = Vector((0,0,0,0))
-        for i in range(n-1):
-            sumVec+=bernsteinPoly(n-1,i,t)*(bez[i+1]-bez[i])
+    def getBezDerivative(bez, t):
+        n = len(bez) - 1
+        sumVec = NdVector((0, 0, 0, 0, 0))
+        for i in range(n - 1):
+            sumVec += (bez[i + 1] - bez[i]) * bernsteinPoly(n - 1, i, t)
         return sumVec
-        
-        
+
     #use Newton-Raphson to find a better parameterization of datapoints,
-    #one that minimizes the distance (or error) between bezier and original data.
-    def newtonRaphson(data_pts,s,e,bez):
-        for pt in data_pts[s:e+1]:
-            if pt.index==s:
-                pt.u=0
-            elif pt.index==e:
-                pt.u=1
+    #one that minimizes the distance (or error)
+    # between bezier and original data.
+    def newtonRaphson(data_pts, s, e, bez):
+        for pt in data_pts[s:e + 1]:
+            if pt.index == s:
+                pt.u = 0
+            elif pt.index == e:
+                pt.u = 1
             else:
                 u = pt.u
-                qu = bezierEval(bez,pt.u)
-                qud = getBezDerivative(bez,u)
-                #we wish to minimize f(u), the squared distance between curve and data
-                fu = (qu-pt.co).length**2 
-                fud = (2*(qu.x-pt.co.x)*(qud.x))-(2*(qu.y-pt.co.y)*(qud.y))
-                if fud==0:
+                qu = bezierEval(bez, pt.u)
+                qud = getBezDerivative(bez, u)
+                #we wish to minimize f(u),
+                #the squared distance between curve and data
+                fu = (qu - pt.co).length ** 2
+                fud = (2 * (qu.x - pt.co.x) * (qud.x)) - (2 * (qu.y - pt.co.y) * (qud.y))
+                if fud == 0:
                     fu = 0
                     fud = 1
-                pt.u=pt.u-(fu/fud)
+                pt.u = pt.u - (fu / fud)
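
For context: Newton-Raphson reparameterization in curve fitting minimizes the squared distance f(u) between a data point and the curve with the update u -= f'(u) / f''(u). A scalar sketch of that general form (hypothetical helpers, not the exact expression used in the code above):

px, py = 0.5, 0.2           # data point

def q(u):                   # toy parametric curve (u, u^2)
    return (u, u * u)

def qd(u):                  # first derivative
    return (1.0, 2.0 * u)

def qdd(u):                 # second derivative
    return (0.0, 2.0)

u = 0.8                     # initial parameter guess
for _ in range(5):
    qx, qy = q(u)
    dx, dy = qx - px, qy - py
    d1x, d1y = qd(u)
    d2x, d2y = qdd(u)
    f1 = 2 * (dx * d1x + dy * d1y)                            # f'(u)
    f2 = 2 * (d1x * d1x + d1y * d1y + dx * d2x + dy * d2y)    # f''(u)
    if f2 == 0:
        break
    u -= f1 / f2
print(u)   # parameter of the closest point on the toy curve
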
 
     def createDataPts(curveGroup, group_mode):
         data_pts = []
         if group_mode:
+            print([x.data_path for x in curveGroup])
             for i in range(len(curveGroup[0].keyframe_points)):
                 x = curveGroup[0].keyframe_points[i].co.x
                 y1 = curveGroup[0].keyframe_points[i].co.y
                 y2 = curveGroup[1].keyframe_points[i].co.y
                 y3 = curveGroup[2].keyframe_points[i].co.y
-                data_pts.append(dataPoint(i,Vector((x,y1,y2,y3))))
+                y4 = 0
+                if len(curveGroup) == 4:
+                    y4 = curveGroup[3].keyframe_points[i].co.y
+                data_pts.append(dataPoint(i, NdVector((x, y1, y2, y3, y4))))
         else:
             for i in range(len(curveGroup.keyframe_points)):
                 x = curveGroup.keyframe_points[i].co.x
                 y1 = curveGroup.keyframe_points[i].co.y
                 y2 = 0
                 y3 = 0
-                data_pts.append(dataPoint(i,Vector((x,y1,y2,y3))))
+                y4 = 0
+                data_pts.append(dataPoint(i, NdVector((x, y1, y2, y3, y4))))
         return data_pts
 
-    def fitCubic(data_pts,s,e):
-
-        if e-s<3: # if there are less than 3 points, fit a single basic bezier
-            bez = fitSingleCubic2Pts(data_pts,s,e)
+    def fitCubic(data_pts, s, e):
+        # if there are less than 3 points, fit a single basic bezier
+        if e - s < 3:
+            bez = fitSingleCubic2Pts(data_pts, s, e)
         else:
-            #if there are more, parameterize the points and fit a single cubic bezier
-            chordLength(data_pts,s,e)
-            bez = fitSingleCubic(data_pts,s,e)
-        
+            #if there are more, parameterize the points
+            # and fit a single cubic bezier
+            chordLength(data_pts, s, e)
+            bez = fitSingleCubic(data_pts, s, e)
+
         #calculate max error and point where it occurs
-        maxError,maxErrorPt = maxErrorAmount(data_pts,bez,s,e)
+        maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)
         #if error is small enough, reparameterization might be enough
-        if maxError<reparaError and maxError>error:
+        if maxError < reparaError and maxError > error:
             for i in range(maxIterations):
-                newtonRaphson(data_pts,s,e,bez)
-                if e-s<3:
-                    bez = fitSingleCubic2Pts(data_pts,s,e)
+                newtonRaphson(data_pts, s, e, bez)
+                if e - s < 3:
+                    bez = fitSingleCubic2Pts(data_pts, s, e)
                 else:
-                    bez = fitSingleCubic(data_pts,s,e)
-       
+                    bez = fitSingleCubic(data_pts, s, e)
 
         #recalculate max error and point where it occurs
-        maxError,maxErrorPt = maxErrorAmount(data_pts,bez,s,e)
-        
+        maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)
+
         #repara wasn't enough, we need 2 beziers for this range.
         #Split the bezier at point of maximum error
-        if maxError>error:
-            fitCubic(data_pts,s,maxErrorPt)
-            fitCubic(data_pts,maxErrorPt,e)
+        if maxError > error:
+            fitCubic(data_pts, s, maxErrorPt)
+            fitCubic(data_pts, maxErrorPt, e)
         else:
             #error is small enough, return the beziers.
             beziers.append(bez)
             return
 
-    def createNewCurves(curveGroup,beziers,group_mode):
+    def createNewCurves(curveGroup, beziers, group_mode):
         #remove all existing data points
         if group_mode:
             for fcurve in curveGroup:
-                for i in range(len(fcurve.keyframe_points)-1,0,-1):
+                for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                     fcurve.keyframe_points.remove(fcurve.keyframe_points[i])
         else:
             fcurve = curveGroup
-            for i in range(len(fcurve.keyframe_points)-1,0,-1):
+            for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                 fcurve.keyframe_points.remove(fcurve.keyframe_points[i])
-        
+
         #insert the calculated beziers to blender data.\
-        if group_mode: 
+        if group_mode:
             for fullbez in beziers:
-                for i,fcurve in enumerate(curveGroup):
-                    bez = [Vector((vec[0],vec[i+1])) for vec in fullbez]
-                    newKey = fcurve.keyframe_points.insert(frame=bez[0].x,value=bez[0].y)
-                    newKey.handle_right = (bez[1].x,bez[1].y)
-                           
-                    newKey = fcurve.keyframe_points.insert(frame=bez[3].x,value=bez[3].y)
-                    newKey.handle_left= (bez[2].x,bez[2].y)
+                for i, fcurve in enumerate(curveGroup):
+                    bez = [Vector((vec[0], vec[i + 1])) for vec in fullbez]
+                    newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y)
+                    newKey.handle_right = (bez[1].x, bez[1].y)
+
+                    newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y)
+                    newKey.handle_left = (bez[2].x, bez[2].y)
         else:
             for bez in beziers:
                 for vec in bez:
                     vec.resize_2d()
-                newKey = fcurve.keyframe_points.insert(frame=bez[0].x,value=bez[0].y)
-                newKey.handle_right = (bez[1].x,bez[1].y)
-                        
-                newKey = fcurve.keyframe_points.insert(frame=bez[3].x,value=bez[3].y)
-                newKey.handle_left= (bez[2].x,bez[2].y)
-
-    #indices are detached from data point's frame (x) value and stored in the dataPoint object, represent a range
-
-    data_pts = createDataPts(curveGroup,group_mode)
-        
-    s = 0 #start
-    e = len(data_pts)-1 #end
-    
+                newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y)
+                newKey.handle_right = (bez[1].x, bez[1].y)
+
+                newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y)
+                newKey.handle_left = (bez[2].x, bez[2].y)
+
+    # indices are detached from data point's frame (x) value and
+    # stored in the dataPoint object, represent a range
+
+    data_pts = createDataPts(curveGroup, group_mode)
+
+    s = 0  # start
+    e = len(data_pts) - 1  # end
+
     beziers = []
 
-    #begin the recursive fitting algorithm.            
-    fitCubic(data_pts,s,e)
+    #begin the recursive fitting algorithm.
+    fitCubic(data_pts, s, e)
     #remove old Fcurves and insert the new ones
-    createNewCurves(curveGroup,beziers,group_mode)
-    
+    createNewCurves(curveGroup, beziers, group_mode)
+
 #Main function of simplification
 #sel_opt: either "sel" or "all" for which curves to affect
-#error: maximum error allowed, in fraction (20% = 0.0020), i.e. divide by 10000 from percentage wanted.
-#group_mode: boolean, to analyze each curve seperately or in groups, where group is all curves that effect the same property (e.g. a bone's x,y,z rotation)
+#error: maximum error allowed, in fraction (20% = 0.0020),
+#i.e. divide by 10000 from percentage wanted.
+#group_mode: boolean, to analyze each curve separately or in groups,
+#where a group is all curves that affect the same property
+#(e.g. a bone's x,y,z rotation)
+
 
 def fcurves_simplify(sel_opt="all", error=0.002, group_mode=True):
     # main vars
     context = bpy.context
     obj = context.active_object
     fcurves = obj.animation_data.action.fcurves
-    
-    if sel_opt=="sel":
+
+    if sel_opt == "sel":
         sel_fcurves = [fcurve for fcurve in fcurves if fcurve.select]
     else:
         sel_fcurves = fcurves[:]
 
     #Error threshold for Newton Raphson reparamatizing
-    reparaError = error*32
+    reparaError = error * 32
     maxIterations = 16
-    
+
     if group_mode:
         fcurveDict = {}
-        #this loop sorts all the fcurves into groups of 3, based on their RNA Data path, which corresponds to which property they effect
+        #this loop sorts all the fcurves into groups of 3 or 4,
+        #based on their RNA Data path, which corresponds to
+        #which property they affect
         for curve in sel_fcurves:
-            if curve.data_path in fcurveDict: #if this bone has been added, append the curve to its list
+            if curve.data_path in fcurveDict:  # if this bone has been added, append the curve to its list
                 fcurveDict[curve.data_path].append(curve)
             else:
-                fcurveDict[curve.data_path] = [curve] #new bone, add a new dict value with this first curve
+                fcurveDict[curve.data_path] = [curve]  # new bone, add a new dict value with this first curve
         fcurveGroups = fcurveDict.values()
     else:
         fcurveGroups = sel_fcurves
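
The data_path bucketing above is the usual group-by-key pattern; an equivalent sketch with collections.defaultdict (a design alternative, not what the patch uses), given the same sel_fcurves as in fcurves_simplify:

from collections import defaultdict

groups = defaultdict(list)
for curve in sel_fcurves:
    groups[curve.data_path].append(curve)
fcurveGroups = groups.values()   # each value holds the 3 or 4 curves of one property
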
-     
-    if error>0.00000:
+
+    if error > 0.00000:
         #simplify every selected curve.
         totalt = 0
-        for i,fcurveGroup in enumerate(fcurveGroups):
-            print("Processing curve "+str(i+1)+"/"+str(len(fcurveGroups)))
+        for i, fcurveGroup in enumerate(fcurveGroups):
+            print("Processing curve " + str(i + 1) + "/" + str(len(fcurveGroups)))
             t = time.clock()
-            simplifyCurves(fcurveGroup,error,reparaError,maxIterations,group_mode)
+            simplifyCurves(fcurveGroup, error, reparaError, maxIterations, group_mode)
             t = time.clock() - t
-            print(str(t)[:5]+" seconds to process last curve")
-            totalt+=t
-            print(str(totalt)[:5]+" seconds, total time elapsed")
-
-    return        
+            print(str(t)[:5] + " seconds to process last curve")
+            totalt += t
+            print(str(totalt)[:5] + " seconds, total time elapsed")
 
+    return
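
Hypothetical usage of the two entry points above from Blender's Python console, assuming an animated object is active (mocap_tools lives in release/scripts/modules, so it is importable):

import mocap_tools

# simplify every F-curve of the active object's action into beziers
mocap_tools.fcurves_simplify(sel_opt="all", error=0.002, group_mode=True)

# or find the best loop in sampled animation data
mocap_tools.autoloop_anim()
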
index 642ea5b6e31f794520c520ea20a377dd2d677548..8fe8c6937bb42d027b64a3a1a2f8b91aa800a6c9 100644 (file)
@@ -1,37 +1,57 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
 import bpy
 from mathutils import *
 from math import radians, acos
 
 #TODO: Only selected bones get retargeted.
-#      Selected Bones/chains get original pos empties, if ppl want IK instead of FK
-#      Some "magic" numbers - frame start and end, eulers of all orders instead of just quats keyframed
+#      Selected Bones/chains get original pos empties,
+#      if people want IK instead of FK
+#      Some "magic" numbers - frame start and end,
+#      eulers of all orders instead of just quats keyframed
 
 # dictionary of mapping
-# this is currently manuall input'ed, but will
+# this is currently manually entered, but will
 # be created from a more comfortable UI in the future
 
+
 def createDictionary(perf_arm):
     bonemap = {}
-    #perf: enduser
     for bone in perf_arm.bones:
         bonemap[bone.name] = bone.map
-                
     #root is the root of the enduser
     root = "root"
     # creation of a reverse map
     # multiple keys get mapped to list values
     bonemapr = {}
-    for key in bonemap.keys():
-        if not bonemap[key] in bonemapr:
-            if type(bonemap[key])==type((0,0)):
+    for key, value in bonemap.items():
+        if not value in bonemapr:
+            if isinstance(bonemap[key], tuple):
                 for key_x in bonemap[key]:
                     bonemapr[key_x] = [key]
             else:
                 bonemapr[bonemap[key]] = [key]
         else:
             bonemapr[bonemap[key]].append(key)
-    
-    return bonemap, bonemapr, root       
+    return bonemap, bonemapr, root
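
A toy illustration of the reverse map built above (bone names invented, setdefault used for brevity): tuple values fan out to several keys, and duplicate targets collect into lists.

bonemap = {"Hips": "root",
           "LeftUpLeg": "thigh.L",
           "LeftLeg": "thigh.L",                # two performer bones, one target
           "Spine": ("spine.01", "spine.02")}   # one performer bone, two targets

bonemapr = {}
for key, value in bonemap.items():
    targets = value if isinstance(value, tuple) else (value,)
    for target in targets:
        bonemapr.setdefault(target, []).append(key)

print(bonemapr["spine.01"])          # ['Spine']
print(sorted(bonemapr["thigh.L"]))   # ['LeftLeg', 'LeftUpLeg']
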
 # list of empties created to keep track of "original"
 # position data
 # in final product, these locations can be stored as custom props
@@ -43,118 +63,120 @@ def createDictionary(perf_arm):
 # and bone roll is identical to the performer
 # its purpose is to copy over the rotations
 # easily while concentrating on the hierarchy changes
-def createIntermediate(performer_obj,endu_name,bonemap, bonemapr,root,s_frame,e_frame,scene):
-    
+
+
+def createIntermediate(performer_obj, enduser_obj, bonemap, bonemapr, root, s_frame, e_frame, scene):
     #creates and keyframes an empty with its location
     #the original position of the tail bone
     #useful for storing the important data in the original motion
     #i.e. using this empty to IK the chain to that pos / DEBUG
-    def locOfOriginal(inter_bone,perf_bone):
-        pass
-#        if not perf_bone.name+"Org" in bpy.data.objects:
-#            bpy.ops.object.add()
-#            empty = bpy.context.active_object
-#            empty.name = perf_bone.name+"Org"
-#            empty.empty_draw_size = 0.01
-#        empty = bpy.data.objects[perf_bone.name+"Org"]
-#        offset = perf_bone.vector
-#        if inter_bone.length == 0 or perf_bone.length == 0:
-#            scaling = 1
-#        else:
-#            scaling = perf_bone.length / inter_bone.length
-#        offset/=scaling
-#        empty.location = inter_bone.head + offset
-#        empty.keyframe_insert("location")
-    
+    def locOfOriginal(inter_bone, perf_bone):
+        if not perf_bone.name + "Org" in bpy.data.objects:
+            bpy.ops.object.add()
+            empty = bpy.context.active_object
+            empty.name = perf_bone.name + "Org"
+            empty.empty_draw_size = 0.01
+        empty = bpy.data.objects[perf_bone.name + "Org"]
+        offset = perf_bone.vector
+        if inter_bone.length == 0 or perf_bone.length == 0:
+            scaling = 1
+        else:
+            scaling = perf_bone.length / inter_bone.length
+        offset /= scaling
+        empty.location = inter_bone.head + offset
+        empty.keyframe_insert("location")
+
     #Simple 1to1 retarget of a bone
-    def singleBoneRetarget(inter_bone,perf_bone):
-            perf_world_rotation = perf_bone.matrix * performer_obj.matrix_world         
+    def singleBoneRetarget(inter_bone, perf_bone):
+            perf_world_rotation = perf_bone.matrix * performer_obj.matrix_world
             inter_world_base_rotation = inter_bone.bone.matrix_local * inter_obj.matrix_world
             inter_world_base_inv = Matrix(inter_world_base_rotation)
             inter_world_base_inv.invert()
             return (inter_world_base_inv.to_3x3() * perf_world_rotation.to_3x3()).to_4x4()
-        
-    #uses 1to1 and interpolation/averaging to match many to 1 retarget    
-    def manyPerfToSingleInterRetarget(inter_bone,performer_bones_s):
-        retarget_matrices = [singleBoneRetarget(inter_bone,perf_bone) for perf_bone in performer_bones_s]
+
+    #uses 1to1 and interpolation/averaging to match many to 1 retarget
+    def manyPerfToSingleInterRetarget(inter_bone, performer_bones_s):
+        retarget_matrices = [singleBoneRetarget(inter_bone, perf_bone) for perf_bone in performer_bones_s]
         lerp_matrix = Matrix()
-        for i in range(len(retarget_matrices)-1):
+        for i in range(len(retarget_matrices) - 1):
             first_mat = retarget_matrices[i]
-            next_mat = retarget_matrices[i+1]
-            lerp_matrix = first_mat.lerp(next_mat,0.5)
+            next_mat = retarget_matrices[i + 1]
+            lerp_matrix = first_mat.lerp(next_mat, 0.5)
         return lerp_matrix
-    
-    #determines the type of hierachy change needed and calls the 
-    #right function        
+
+    #determines the type of hierarchy change needed and calls the
+    #right function
     def retargetPerfToInter(inter_bone):
-        if inter_bone.name in bonemapr.keys():
+        if inter_bone.name in bonemapr:
             perf_bone_name = bonemapr[inter_bone.name]
             #is it a 1 to many?
-            if type(bonemap[perf_bone_name[0]])==type((0,0)):
+            if isinstance(bonemap[perf_bone_name[0]], tuple):
                 perf_bone = performer_bones[perf_bone_name[0]]
                 if inter_bone.name == bonemap[perf_bone_name[0]][0]:
-                    locOfOriginal(inter_bone,perf_bone)
+                    locOfOriginal(inter_bone, perf_bone)
             else:
                 # then its either a many to 1 or 1 to 1
-                
+
                 if len(perf_bone_name) > 1:
                     performer_bones_s = [performer_bones[name] for name in perf_bone_name]
                     #we need to map several performance bone to a single
                     for perf_bone in performer_bones_s:
-                        locOfOriginal(inter_bone,perf_bone)
-                    inter_bone.matrix_basis = manyPerfToSingleInterRetarget(inter_bone,performer_bones_s)
+                        locOfOriginal(inter_bone, perf_bone)
+                    inter_bone.matrix_basis = manyPerfToSingleInterRetarget(inter_bone, performer_bones_s)
                 else:
                     perf_bone = performer_bones[perf_bone_name[0]]
-                    locOfOriginal(inter_bone,perf_bone)
-                    inter_bone.matrix_basis = singleBoneRetarget(inter_bone,perf_bone)
-                    
+                    locOfOriginal(inter_bone, perf_bone)
+                    inter_bone.matrix_basis = singleBoneRetarget(inter_bone, perf_bone)
+
         inter_bone.keyframe_insert("rotation_quaternion")
         for child in inter_bone.children:
             retargetPerfToInter(child)
-            
-    #creates the intermediate armature object        
-    bpy.ops.object.select_name(name=endu_name,extend=False)
-    bpy.ops.object.duplicate(linked=False)
-    bpy.context.active_object.name = "intermediate"
-    inter_obj = bpy.context.active_object
+
+    #creates the intermediate armature object
+    inter_obj = enduser_obj.copy()
+    inter_obj.data = inter_obj.data.copy()  # duplicate data
+    bpy.context.scene.objects.link(inter_obj)
+    inter_obj.name = "intermediate"
+    bpy.context.scene.objects.active = inter_obj
     bpy.ops.object.mode_set(mode='EDIT')
-    #resets roll 
+    #resets roll
     bpy.ops.armature.calculate_roll(type='Z')
     bpy.ops.object.mode_set(mode="OBJECT")
-    inter_arm = bpy.data.armatures[endu_name+".001"]
-    inter_arm.name = "inter_arm"
+    inter_obj.data.name = "inter_arm"
+    inter_arm = inter_obj.data
     performer_bones = performer_obj.pose.bones
-    inter_bones =  inter_obj.pose.bones
-    
+    inter_bones = inter_obj.pose.bones
     #clears inheritance
     for inter_bone in inter_bones:
         inter_bone.bone.use_inherit_rotation = False
-        
-    for t in range(s_frame,e_frame):
+
+    for t in range(s_frame, e_frame):
         scene.frame_set(t)
         inter_bone = inter_bones[root]
         retargetPerfToInter(inter_bone)
-        
-    return inter_obj,inter_arm
+
+    return inter_obj, inter_arm
 
 # this procedure copies the rotations over from the intermediate
 # armature to the end user one.
-# As the hierarchies are 1 to 1, this is a simple matter of 
+# As the hierarchies are 1 to 1, this is a simple matter of
 # copying the rotation, while keeping in mind bone roll, parenting, etc.
 # TODO: Control Bones: If a certain bone is constrained in a way
 #       that its rotation is determined by another (a control bone)
 #       We should determine the right pos of the control bone.
-#       Scale: ? Should work but needs testing.          
-def retargetEnduser(inter_obj, enduser_obj,root,s_frame,e_frame,scene):
-    inter_bones =  inter_obj.pose.bones
+#       Scale: ? Should work but needs testing.
+
+
+def retargetEnduser(inter_obj, enduser_obj, root, s_frame, e_frame, scene):
+    inter_bones = inter_obj.pose.bones
     end_bones = enduser_obj.pose.bones
-    
+
     def bakeTransform(end_bone):
         src_bone = inter_bones[end_bone.name]
         trg_bone = end_bone
         bake_matrix = src_bone.matrix
         rest_matrix = trg_bone.bone.matrix_local
-        
+
         if trg_bone.parent and trg_bone.bone.use_inherit_rotation:
             parent_mat = src_bone.parent.matrix
             parent_rest = trg_bone.parent.bone.matrix_local
@@ -164,17 +186,17 @@ def retargetEnduser(inter_obj, enduser_obj,root,s_frame,e_frame,scene):
             parent_mat_inv.invert()
             bake_matrix = parent_mat_inv * bake_matrix
             rest_matrix = parent_rest_inv * rest_matrix
-            
+
         rest_matrix_inv = rest_matrix.copy()
         rest_matrix_inv.invert()
         bake_matrix = rest_matrix_inv * bake_matrix
         trg_bone.matrix_basis = bake_matrix
         end_bone.keyframe_insert("rotation_quaternion")
-        
+
         for bone in end_bone.children:
             bakeTransform(bone)
-        
-    for t in range(s_frame,e_frame):
+
+    for t in range(s_frame, e_frame):
         scene.frame_set(t)
         end_bone = end_bones[root]
         bakeTransform(end_bone)
@@ -182,98 +204,104 @@ def retargetEnduser(inter_obj, enduser_obj,root,s_frame,e_frame,scene):
 #receives the performer feet bones as a variable
 # by "feet" I mean those bones that have plants
 # (they don't move, despite root moving) somewhere in the animation.
-def copyTranslation(performer_obj,enduser_obj,perfFeet,bonemap,bonemapr,root,s_frame,e_frame,scene):
+
+
+def copyTranslation(performer_obj, enduser_obj, perfFeet, bonemap, bonemapr, root, s_frame, e_frame, scene):
     endFeet = [bonemap[perfBone] for perfBone in perfFeet]
     perfRoot = bonemapr[root][0]
-    locDictKeys = perfFeet+endFeet+[perfRoot]
+    locDictKeys = perfFeet + endFeet + [perfRoot]
     perf_bones = performer_obj.pose.bones
     end_bones = enduser_obj.pose.bones
-    
+
     def tailLoc(bone):
-        return bone.center+(bone.vector/2)
-    
+        return bone.center + (bone.vector / 2)
+
     #Step 1 - we create a dict that contains these keys:
     #(Performer) Hips, Feet
     #(End user) Feet
     # where the values are their world position on each (1,120) frame
-    
+
     locDict = {}
     for key in locDictKeys:
-        locDict[key] = []    
-    
-    for t in range(scene.frame_start,scene.frame_end):
+        locDict[key] = []
+
+    for t in range(scene.frame_start, scene.frame_end):
         scene.frame_set(t)
         for bone in perfFeet:
             locDict[bone].append(tailLoc(perf_bones[bone]))
         locDict[perfRoot].append(tailLoc(perf_bones[perfRoot]))
         for bone in endFeet:
             locDict[bone].append(tailLoc(end_bones[bone]))
-            
+
     # now we take our locDict and analyze it.
-    # we need to derive all chains 
-    
+    # we need to derive all chains
+
     locDeriv = {}
     for key in locDictKeys:
         locDeriv[key] = []
-    
+
     for key in locDict.keys():
         graph = locDict[key]
-        for t in range(len(graph)-1):
+        for t in range(len(graph) - 1):
             x = graph[t]
-            xh = graph[t+1]
-            locDeriv[key].append(xh-x)
-            
+            xh = graph[t + 1]
+            locDeriv[key].append(xh - x)
+
     # now find the plant frames, where perfFeet don't move much
-    
+
     linearAvg = []
-    
+
     for key in perfFeet:
-        for i in range(len(locDeriv[key])-1):
+        for i in range(len(locDeriv[key]) - 1):
             v = locDeriv[key][i]
             hipV = locDeriv[perfRoot][i]
             endV = locDeriv[bonemap[key]][i]
-            if (v.length<0.1):
+            if (v.length < 0.1):
                 #this is a plant frame.
                 #lets see what the original hip delta is, and the corresponding
                 #end bone's delta
-                if endV.length!=0:
-                    linearAvg.append(hipV.length/endV.length)
+                if endV.length != 0:
+                    linearAvg.append(hipV.length / endV.length)
     if linearAvg:
-        avg = sum(linearAvg)/len(linearAvg)
-        print("retargeted root motion should be "+ str(1/avg)+ " of original")
-        
+        avg = sum(linearAvg) / len(linearAvg)
+        print("retargeted root motion should be " + str(1 / avg) + " of original")
+
         bpy.ops.object.add()
         stride_bone = bpy.context.active_object
         stride_bone.name = "stride_bone"
-        bpy.ops.object.select_name(name=stride_bone.name,extend=False)
-        bpy.ops.object.select_name(name=enduser_obj.name,extend=True)
+        bpy.ops.object.select_name(name=stride_bone.name, extend=False)
+        bpy.ops.object.select_name(name=enduser_obj.name, extend=True)
         bpy.ops.object.mode_set(mode='POSE')
         bpy.ops.pose.select_all(action='DESELECT')
         root_bone = end_bones[root]
         root_bone.bone.select = True
         bpy.ops.pose.constraint_add_with_targets(type='CHILD_OF')
-        for t in range(s_frame,e_frame):
-            scene.frame_set(t)     
-            newTranslation = (tailLoc(perf_bones[perfRoot])/avg)
+        for t in range(s_frame, e_frame):
+            scene.frame_set(t)
+            newTranslation = (tailLoc(perf_bones[perfRoot]) / avg)
             stride_bone.location = newTranslation
             stride_bone.keyframe_insert("location")
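
The plant-frame heuristic above, in isolation with toy per-frame displacement magnitudes (invented numbers): frames where the performer's foot barely moves vote on the root-motion scale.

foot_deltas = [0.02, 0.01, 0.50, 0.03]   # performer foot; small means planted
hip_deltas = [0.20, 0.22, 0.80, 0.18]    # performer root
end_deltas = [0.10, 0.11, 0.40, 0.09]    # end-user root

ratios = [hip / end for foot, hip, end in zip(foot_deltas, hip_deltas, end_deltas)
          if foot < 0.1 and end != 0]
avg = sum(ratios) / len(ratios)
print("retargeted root motion should be", 1 / avg, "of original")   # 0.5 here
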
-            
 
 
 def totalRetarget():
-    perf_name = bpy.context.scene.performer
-    endu_name = bpy.context.scene.enduser
-    performer_obj = bpy.data.objects[perf_name]
-    enduser_obj = bpy.data.objects[endu_name]
-    end_arm = bpy.data.armatures[endu_name] 
-    perf_arm = bpy.data.armatures[perf_name] 
+    enduser_obj = bpy.context.active_object
+    performer_obj = [obj for obj in bpy.context.selected_objects if obj != enduser_obj]
+    if enduser_obj is None or len(performer_obj) != 1:
+        print("Need active and selected armatures")
+    else:
+        performer_obj = performer_obj[0]
+    perf_arm = performer_obj.data
+    end_arm = enduser_obj.data
     scene = bpy.context.scene
     s_frame = scene.frame_start
     e_frame = scene.frame_end
-    bonemap, bonemapr, root = createDictionary(perf_arm)    
-    inter_obj, inter_arm = createIntermediate(performer_obj,endu_name,bonemap, bonemapr,root,s_frame,e_frame,scene)
-    retargetEnduser(inter_obj, enduser_obj,root,s_frame,e_frame,scene)
-    copyTranslation(performer_obj,enduser_obj,["RightFoot","LeftFoot"],bonemap,bonemapr,root,s_frame,e_frame,scene)
+    bonemap, bonemapr, root = createDictionary(perf_arm)
+    inter_obj, inter_arm = createIntermediate(performer_obj, enduser_obj, bonemap, bonemapr, root, s_frame, e_frame, scene)
+    retargetEnduser(inter_obj, enduser_obj, root, s_frame, e_frame, scene)
+    copyTranslation(performer_obj, enduser_obj, ["RightFoot", "LeftFoot"], bonemap, bonemapr, root, s_frame, e_frame, scene)
     bpy.ops.object.mode_set(mode='OBJECT')
-    bpy.ops.object.select_name(name=inter_obj.name,extend=False)
+    bpy.ops.object.select_name(name=inter_obj.name, extend=False)
     bpy.ops.object.delete()
+
+if __name__ == "__main__":
+    totalRetarget()
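
Hypothetical console usage, matching the selection convention checked above: select the performer rig, then shift-select the end-user rig so it is the active object, and run:

import retarget
retarget.totalRetarget()
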
index 5f59c3683936c4ebbd2f1953e2470cb44c6b88e6..8454a99d4d5c4550b82e21a616b59d46d6962fb9 100644 (file)
@@ -1,3 +1,23 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
 import bpy
 import time
 
@@ -6,19 +26,18 @@ from bpy import *
 from mathutils import Vector
 from math import isfinite
 
-bpy.types.Scene.performer = bpy.props.StringProperty()
-bpy.types.Scene.enduser = bpy.props.StringProperty()
 
 bpy.types.Bone.map = bpy.props.StringProperty()
 import retarget
 import mocap_tools
 
+
 class MocapPanel(bpy.types.Panel):
     bl_label = "Mocap tools"
     bl_space_type = "PROPERTIES"
     bl_region_type = "WINDOW"
     bl_context = "object"
+
     def draw(self, context):
         self.layout.label("Preprocessing")
         row = self.layout.row(align=True)
@@ -32,66 +51,75 @@ class MocapPanel(bpy.types.Panel):
         row3 = self.layout.row(align=True)
         column1 = row3.column(align=True)
         column1.label("Performer Rig")
-        column1.prop_search(context.scene, "performer",  context.scene, "objects")
         column2 = row3.column(align=True)
         column2.label("Enduser Rig")
-        column2.prop_search(context.scene, "enduser",  context.scene, "objects")
         self.layout.label("Hierarchy mapping")
-        if context.scene.performer in bpy.data.armatures and context.scene.enduser in bpy.data.armatures:
-            perf = bpy.data.armatures[context.scene.performer]
-            enduser_arm = bpy.data.armatures[context.scene.enduser]
-            for bone in perf.bones:
-                row = self.layout.row(align=True)
-                row.label(bone.name)
-                row.prop_search(bone, "map", enduser_arm, "bones")
-            self.layout.operator("mocap.retarget", text='RETARGET!')
-        
-           
-        
-        
+        enduser_obj = bpy.context.active_object
+        performer_obj = [obj for obj in bpy.context.selected_objects if obj != enduser_obj]
+        if enduser_obj is None or len(performer_obj) != 1:
+            self.layout.label("Select performer rig and target rig (as active)")
+        else:
+            performer_obj = performer_obj[0]
+            if performer_obj.data.name in bpy.data.armatures and enduser_obj.data.name in bpy.data.armatures:
+                perf = performer_obj.data
+                enduser_arm = enduser_obj.data
+                for bone in perf.bones:
+                    row = self.layout.row(align=True)
+                    row.label(bone.name)
+                    row.prop_search(bone, "map", enduser_arm, "bones")
+                self.layout.operator("mocap.retarget", text='RETARGET!')
+
+
 class OBJECT_OT_RetargetButton(bpy.types.Operator):
     bl_idname = "mocap.retarget"
     bl_label = "Retargets active action from Performer to Enduser"
+
     def execute(self, context):
         retarget.totalRetarget()
         return {"FINISHED"}
-    
+
+
 class OBJECT_OT_ConvertSamplesButton(bpy.types.Operator):
     bl_idname = "mocap.samples"
     bl_label = "Converts samples / simplifies keyframes to beziers"
+
     def execute(self, context):
         mocap_tools.fcurves_simplify()
         return {"FINISHED"}
-    
+
+
 class OBJECT_OT_LooperButton(bpy.types.Operator):
     bl_idname = "mocap.looper"
     bl_label = "loops animation / sampled mocap data"
+
     def execute(self, context):
         mocap_tools.autoloop_anim()
         return {"FINISHED"}
-    
+
+
 class OBJECT_OT_DenoiseButton(bpy.types.Operator):
     bl_idname = "mocap.denoise"
     bl_label = "Denoises sampled mocap data "
+
     def execute(self, context):
         return {"FINISHED"}
 
+
 class OBJECT_OT_LimitDOFButton(bpy.types.Operator):
     bl_idname = "mocap.limitdof"
     bl_label = "Analyzes animations Max/Min DOF and adds hard/soft constraints"
+
     def execute(self, context):
         return {"FINISHED"}
 
+
 def register():
-   bpy.utils.register_module(__name__)
+    bpy.utils.register_module(__name__)
+
+
 def unregister():
     bpy.utils.unregister_module(__name__)
-    
-if __name__=="__main__":
-    register()   
+
+
+if __name__ == "__main__":
+    register()
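
Once register() has run, the panel's buttons map to operators that can also be invoked directly (hypothetical console usage; each call needs the same selection/active-object context the panel expects):

import bpy

bpy.ops.mocap.samples()    # simplify sampled keyframes to beziers
bpy.ops.mocap.looper()     # loop the sampled animation
bpy.ops.mocap.retarget()   # retarget the performer action to the end-user rig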