# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

from math import hypot, sqrt, isfinite, radians, pi

import bpy
import time
from mathutils import Vector, Matrix


# Vector utility class - a Python implementation of n-sized vectors.
# mathutils vectors have a maximum size of 4, and the simplification and
# cross correlation code needs at least 5 components per sample.
class NdVector:

    def __init__(self, vec):
        self.vec = vec[:]

    def __len__(self):
        return len(self.vec)

    def __mul__(self, otherMember):
        if (isinstance(otherMember, int) or
                isinstance(otherMember, float)):
            # vector * scalar scales componentwise
            return NdVector([otherMember * x for x in self.vec])
        else:
            # vector * vector is the dot product
            a = self.vec
            b = otherMember.vec
            n = len(self)
            return sum([a[i] * b[i] for i in range(n)])

    def __sub__(self, otherVec):
        a = self.vec
        b = otherVec.vec
        n = len(self)
        return NdVector([a[i] - b[i] for i in range(n)])

    def __add__(self, otherVec):
        a = self.vec
        b = otherVec.vec
        n = len(self)
        return NdVector([a[i] + b[i] for i in range(n)])

    def __truediv__(self, scalar):  # was __div__, which Python 3 never calls
        return NdVector([x / scalar for x in self.vec])

    def vecLength(self):
        return sqrt(self * self)

    def vecLengthSq(self):
        return (self * self)

    def normalize(self):
        length = self.length  # don't shadow the len() builtin
        self.vec = [x / length for x in self.vec]

    def copy(self):
        return NdVector(self.vec)

    def __getitem__(self, i):
        return self.vec[i]

    def x(self):
        return self.vec[0]

    def y(self):
        return self.vec[1]

    def resize_2d(self):
        return Vector((self.x, self.y))

    length = property(vecLength)
    lengthSq = property(vecLengthSq)
    x = property(x)
    y = property(y)
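

# Editor's illustration (not part of the original addon): a quick sanity
# check of the NdVector operations above. Never called by the addon; safe
# to delete.
def _ndvector_example():
    v = NdVector((3, 4))
    assert v * v == 25              # vector * vector is a dot product
    assert v.length == 5.0          # sqrt(3*3 + 4*4)
    assert (v * 2).vec == [6, 8]    # vector * scalar scales componentwise
    assert (v - NdVector((1, 1))).vec == [2, 3]
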
# A data point for the simplification algorithm: an n-d coordinate plus
# its parametric position and a scratch value.
class dataPoint:
    index = 0
    # x, y1, y2, y3 (, y4) coordinates of the original point
    co = NdVector((0, 0, 0, 0, 0))
    # position according to parametric view of original data, [0,1] range
    u = 0
    # scratch storage for intermediate values
    temp = 0

    def __init__(self, index, co, u=0):
        self.index = index
        self.co = co
        self.u = u

# Cross correlation of two fcurve sets (auto correlation when both sets
# are the same): http://en.wikipedia.org/wiki/Cross_correlation
# IN:  curvesA, curvesB - collections/lists of fcurves to analyze
#      margin - size of the neighborhood searched for the best start frame
# OUT: length of the best match, its start frame, and the sampled data of curvesA
def crossCorrelationMatch(curvesA, curvesB, margin):
    dataA = []
    dataB = []
    end = len(curvesA[0].keyframe_points)

    # transfer all fcurve data on each frame to a single NdVector
    for i in range(1, end):
        vec = []
        for fcurve in curvesA:
            vec.append(fcurve.evaluate(i))
        dataA.append(NdVector(vec))
        vec = []
        for fcurve in curvesB:
            vec.append(fcurve.evaluate(i))
        dataB.append(NdVector(vec))

    # comparator for the cross correlation: the dot product of the vectors
    def comp(a, b):
        return a * b

    # create Rxy, which holds the cross correlation data
    N = len(dataA)
    Rxy = [0.0] * N
    for i in range(N):
        for j in range(i, min(i + N, N)):
            Rxy[i] += comp(dataA[j], dataB[j - i])
        for j in range(i):
            Rxy[i] += comp(dataA[j], dataB[j - i + N])
        Rxy[i] /= float(N)

    # find the local maxima in the cross correlation data
    # via the numerical derivative
    def bestLocalMaximum(Rxy):
        Rxyd = [Rxy[i] - Rxy[i - 1] for i in range(1, len(Rxy))]
        maxs = []
        for i in range(1, len(Rxyd) - 1):
            a = Rxyd[i - 1]
            b = Rxyd[i]
            # sign change (zero crossing) at point i, denoting a max point
            if (a >= 0 and b < 0) or (a < 0 and b >= 0):
                maxs.append((i, max(Rxy[i], Rxy[i - 1])))
        return [x[0] for x in maxs]
        #~ return max(maxs, key=lambda x: x[1])[0]

    # candidate offsets of the match; in auto correlation this is the
    # length of the loop
    flms = bestLocalMaximum(Rxy)
    ss = []

    # for every local maximum, find the best start frame
    for flm in flms:
        diff = []
        for i in range(len(dataA) - flm):
            diff.append((dataA[i] - dataB[i + flm]).lengthSq)

        def lowerErrorSlice(diff, e):
            # (index, error at index, offset); large initial error sentinel
            bestSlice = (0, 100000, flm)
            for i in range(e, len(diff) - e):
                errorSlice = sum(diff[i - e:i + e + 1])
                if errorSlice < bestSlice[1]:
                    bestSlice = (i, errorSlice, flm)
            return bestSlice

        s = lowerErrorSlice(diff, margin)
        ss.append(s)

    # pick the candidate with the lowest error
    ss.sort(key=lambda x: x[1])
    return ss[0][2], ss[0][0], dataA
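

# Editor's illustration (not part of the original addon): typical use of
# crossCorrelationMatch, mirroring autoloop_anim() and guess_anim_stitch()
# below. 'action' stands for a hypothetical bpy.types.Action whose fcurves
# are selected:
#
#   curves = [c for c in action.fcurves if c.select]
#   length, start, data = crossCorrelationMatch(curves, curves, 10)
#   # auto correlation: 'length' is the best loop length, 'start' the best
#   # start frame, 'data' the per-frame NdVector samples of the curves
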
# Uses auto correlation (cross correlation of a curve set with itself) to
# find the best candidate for a cyclic animation, and bakes the loop.
def autoloop_anim():
    context = bpy.context
    obj = context.active_object
    fcurves = [x for x in obj.animation_data.action.fcurves if x.select]

    margin = 10

    flm, s, data = crossCorrelationMatch(fcurves, fcurves, margin)
    loop = data[s:s + flm + margin]

    #TODO: find *all* loops, s:s+flm, s+flm:s+2flm, etc...
    # and interpolate between them all to find "the perfect loop".
    # Maybe before finding s? interp(i, i+flm, i+2flm)...

    # blend the seam's neighborhood with a square root falloff
    # to ensure a good loop
    for i in range(1, margin + 1):
        w1 = sqrt(float(i) / margin)
        loop[-i] = (loop[-i] * w1) + (loop[0] * (1 - w1))

    # remove the existing keyframes
    for curve in fcurves:
        pts = curve.keyframe_points
        for i in range(len(pts) - 1, -1, -1):
            pts.remove(pts[i])

    # insert the blended loop
    for c, curve in enumerate(fcurves):
        pts = curve.keyframe_points
        for i in range(len(loop)):
            pts.insert(i + 1, loop[i][c])

    context.scene.frame_end = flm + 1
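

# Editor's note: a worked example of the seam blend above, assuming
# margin = 10. The weight w1 = sqrt(i / margin) rises steeply at first,
# so the final frames lean quickly toward the loop's start value:
#   i = 1:  w1 ~= 0.32  ->  loop[-1] = 0.32 * loop[-1] + 0.68 * loop[0]
#   i = 5:  w1 ~= 0.71
#   i = 10: w1 = 1.0    ->  loop[-10] is left unchanged
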
# Performs the bezier simplification on a given group of fcurves.
def simplifyCurves(curveGroup, error, reparaError, maxIterations, group_mode):

    # calculate the unit tangent at point v
    def unitTangent(v, data_pts):
        tang = NdVector((0, 0, 0, 0, 0))
        if v != 0:
            # not the first point, so we can calculate a leftside tangent
            tang += data_pts[v].co - data_pts[v - 1].co
        if v != len(data_pts) - 1:
            # not the last point, so we can calculate a rightside tangent
            tang += data_pts[v + 1].co - data_pts[v].co
        tang.normalize()
        return tang

    # assign a parametric u value to each point in the original data,
    # via relative chord length
    def chordLength(data_pts, s, e):
        totalLength = 0
        for pt in data_pts[s:e + 1]:
            i = pt.index
            if i == s:
                chordLength = 0
            else:
                chordLength = (data_pts[i].co - data_pts[i - 1].co).length
            totalLength += chordLength
            pt.temp = totalLength
        for pt in data_pts[s:e + 1]:
            if totalLength == 0:
                print(s, e)
            pt.u = (pt.temp / totalLength)

    # binomial coefficient lookup table; it is only accessed with args
    # (3,0), (3,1), (3,2), (3,3), (2,0), (2,1), (2,2)!
    binomDict = {(3, 0): 1,
                 (3, 1): 3,
                 (3, 2): 3,
                 (3, 3): 1,
                 (2, 0): 1,
                 (2, 1): 2,
                 (2, 2): 1}

    # value at point t of a single Bernstein polynomial
    def bernsteinPoly(n, i, t):
        binomCoeff = binomDict[(n, i)]
        return binomCoeff * pow(t, i) * pow(1 - t, n - i)
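
    # Editor's note: a worked example of the Bernstein basis above:
    # bernsteinPoly(3, 1, 0.5) = 3 * 0.5**1 * (1 - 0.5)**2 = 0.375.
    # The four cubic polynomials sum to 1 for any t in [0, 1], which is
    # what makes them usable as blending weights in bezierEval below.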
    # fit a single cubic to the data points in range [s(tart), e(nd)]
    def fitSingleCubic(data_pts, s, e):

        # A - matrix used for calculating the C matrices for fitting
        def A(i, j, s, e, t1, t2):
            if j == 1:
                t = t1
            if j == 2:
                t = t2
            u = data_pts[i].u
            return t * bernsteinPoly(3, j, u)

        # X component, used for calculating the X matrices for fitting
        def xComponent(i, s, e):
            di = data_pts[i].co
            u = data_pts[i].u
            v0 = data_pts[s].co
            v3 = data_pts[e].co
            a = v0 * bernsteinPoly(3, 0, u)
            b = v0 * bernsteinPoly(3, 1, u)
            c = v3 * bernsteinPoly(3, 2, u)
            d = v3 * bernsteinPoly(3, 3, u)
            return (di - (a + b + c + d))

        t1 = unitTangent(s, data_pts)
        t2 = unitTangent(e, data_pts)
        c11 = sum([A(i, 1, s, e, t1, t2) * A(i, 1, s, e, t1, t2) for i in range(s, e + 1)])
        c12 = sum([A(i, 1, s, e, t1, t2) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])
        c21 = c12
        c22 = sum([A(i, 2, s, e, t1, t2) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])

        x1 = sum([xComponent(i, s, e) * A(i, 1, s, e, t1, t2) for i in range(s, e + 1)])
        x2 = sum([xComponent(i, s, e) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])

        # calculate the determinants of the 3 matrices
        det_cc = c11 * c22 - c21 * c12
        det_cx = c11 * x2 - c12 * x1
        det_xc = x1 * c22 - x2 * c12

        # if the matrix is singular, fudge the data a bit
        # to avoid division by zero
        if det_cc == 0:
            det_cc = 0.01

        # the alphas are the offsets for the bezier handles
        alpha0 = det_xc / det_cc  # offset of the first point's right handle
        alpha1 = det_cx / det_cc  # offset of the last point's left handle

        sRightHandle = data_pts[s].co.copy()
        sTangent = t1 * abs(alpha0)
        sRightHandle += sTangent  # position of first pt's handle
        eLeftHandle = data_pts[e].co.copy()
        eTangent = t2 * abs(alpha1)
        eLeftHandle += eTangent  # position of last pt's handle

        # return a 4 member tuple representing the bezier
        return (data_pts[s].co,
                sRightHandle,
                eLeftHandle,
                data_pts[e].co)
    # convert 2 given data points into a cubic bezier;
    # handles are offset along the tangent at
    # a third of the length between the points
    def fitSingleCubic2Pts(data_pts, s, e):
        alpha0 = alpha1 = (data_pts[s].co - data_pts[e].co).length / 3

        sRightHandle = data_pts[s].co.copy()
        sTangent = unitTangent(s, data_pts) * abs(alpha0)
        sRightHandle += sTangent  # position of first pt's handle
        eLeftHandle = data_pts[e].co.copy()
        eTangent = unitTangent(e, data_pts) * abs(alpha1)
        eLeftHandle += eTangent  # position of last pt's handle

        # return a 4 member tuple representing the bezier
        return (data_pts[s].co,
                sRightHandle,
                eLeftHandle,
                data_pts[e].co)
    # evaluate the bezier, represented by a 4 member tuple (pts), at point t
    def bezierEval(pts, t):
        sumVec = NdVector((0, 0, 0, 0, 0))
        for i in range(4):
            sumVec += pts[i] * bernsteinPoly(3, i, t)
        return sumVec
    # calculate the highest error between the bezier and the original data;
    # returns the error and the index of the point where it occurs
    def maxErrorAmount(data_pts, bez, s, e):
        maxError = 0
        maxErrorPt = s
        if e - s < 3:
            return 0, None
        for pt in data_pts[s:e + 1]:
            bezVal = bezierEval(bez, pt.u)
            normalize_error = pt.co.length
            if normalize_error == 0:
                normalize_error = 1
            tmpError = (pt.co - bezVal).length / normalize_error
            if tmpError >= maxError:
                maxError = tmpError
                maxErrorPt = pt.index
        return maxError, maxErrorPt
    # calculate the bezier derivative at point t,
    # i.e. the tangent at point t
    def getBezDerivative(bez, t):
        n = len(bez) - 1
        sumVec = NdVector((0, 0, 0, 0, 0))
        for i in range(n - 1):
            sumVec += (bez[i + 1] - bez[i]) * bernsteinPoly(n - 1, i, t)
        return sumVec
    # use Newton-Raphson to find a better parameterization of the data
    # points, one that minimizes the distance (or error)
    # between the bezier and the original data
    def newtonRaphson(data_pts, s, e, bez):
        for pt in data_pts[s:e + 1]:
            if pt.index == s:
                pt.u = 0
            elif pt.index == e:
                pt.u = 1
            else:
                u = pt.u
                qu = bezierEval(bez, pt.u)
                qud = getBezDerivative(bez, u)
                # we wish to minimize f(u),
                # the squared distance between curve and data
                fu = (qu - pt.co).length ** 2
                fud = (2 * (qu.x - pt.co.x) * (qud.x)) - (2 * (qu.y - pt.co.y) * (qud.y))
                if fud == 0:
                    fu = 0
                    fud = 1
                pt.u = pt.u - (fu / fud)
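
    # Editor's note: the update above is one Newton-Raphson step,
    # u_next = u - f(u) / f'(u), where f is the squared distance between
    # the curve Q(u) and the data point P, approximated in x and y only:
    #   f(u)  ~ |Q(u) - P|^2
    #   f'(u) ~ 2 * (Qx - Px) * Qx' - 2 * (Qy - Py) * Qy'
    # (The minus sign on the y term follows the original code.)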
    # create data_pts, a list of dataPoint objects: each gets an index i
    # and an NdVector of the group's values on that frame
    def createDataPts(curveGroup, group_mode):
        data_pts = []
        if group_mode:
            print([x.data_path for x in curveGroup])
            for i in range(len(curveGroup[0].keyframe_points)):
                x = curveGroup[0].keyframe_points[i].co.x
                y1 = curveGroup[0].keyframe_points[i].co.y
                y2 = curveGroup[1].keyframe_points[i].co.y
                y3 = curveGroup[2].keyframe_points[i].co.y
                y4 = 0
                if len(curveGroup) == 4:
                    y4 = curveGroup[3].keyframe_points[i].co.y
                data_pts.append(dataPoint(i, NdVector((x, y1, y2, y3, y4))))
        else:
            for i in range(len(curveGroup.keyframe_points)):
                x = curveGroup.keyframe_points[i].co.x
                y1 = curveGroup.keyframe_points[i].co.y
                y2 = 0
                y3 = 0
                y4 = 0
                data_pts.append(dataPoint(i, NdVector((x, y1, y2, y3, y4))))
        return data_pts
    # recursively fit cubic beziers to the data points between s and e
    def fitCubic(data_pts, s, e):
        # if there are less than 3 points, fit a single basic bezier
        if e - s < 3:
            bez = fitSingleCubic2Pts(data_pts, s, e)
        else:
            # if there are more, parameterize the points
            # and fit a single cubic bezier
            chordLength(data_pts, s, e)
            bez = fitSingleCubic(data_pts, s, e)

        # calculate the max error and the point where it occurs
        maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)
        # if the error is only moderately high,
        # reparameterization might be enough
        if error < maxError < reparaError:
            for i in range(maxIterations):
                newtonRaphson(data_pts, s, e, bez)
                if e - s < 3:
                    bez = fitSingleCubic2Pts(data_pts, s, e)
                else:
                    bez = fitSingleCubic(data_pts, s, e)

        # recalculate the max error and the point where it occurs
        maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)

        # reparameterization wasn't enough, we need 2 beziers for this
        # range: split the bezier at the point of maximum error
        if maxError > error:
            fitCubic(data_pts, s, maxErrorPt)
            fitCubic(data_pts, maxErrorPt, e)
            return

        # error is small enough, keep the bezier
        beziers.append(bez)
        return
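
    # Editor's note: fitCubic is a classic divide-and-conquer fit, in the
    # spirit of Schneider's "An Algorithm for Automatically Fitting
    # Digitized Curves": try one cubic for [s, e]; if the worst error is
    # moderate, reparameterize and retry; if it is still too high, split
    # at the worst point and recurse on [s, maxErrorPt] and [maxErrorPt, e].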
    # delete the sampled points and insert the fitted beziers
    def createNewCurves(curveGroup, beziers, group_mode):
        # remove all existing data points
        if group_mode:
            for fcurve in curveGroup:
                for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                    fcurve.keyframe_points.remove(fcurve.keyframe_points[i])
        else:
            fcurve = curveGroup
            for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                fcurve.keyframe_points.remove(fcurve.keyframe_points[i])

        # insert the calculated beziers into blender data
        if group_mode:
            for fullbez in beziers:
                for i, fcurve in enumerate(curveGroup):
                    bez = [Vector((vec[0], vec[i + 1])) for vec in fullbez]
                    newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y)
                    newKey.handle_right = (bez[1].x, bez[1].y)

                    newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y)
                    newKey.handle_left = (bez[2].x, bez[2].y)
        else:
            for bez in beziers:
                # keep only frame and value (was a discarded resize_2d() call)
                bez = [vec.resize_2d() for vec in bez]
                newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y)
                newKey.handle_right = (bez[1].x, bez[1].y)

                newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y)
                newKey.handle_left = (bez[2].x, bez[2].y)
    # indices are detached from the data point's frame (x) value and
    # stored in the dataPoint object; they represent a range

    data_pts = createDataPts(curveGroup, group_mode)

    s = 0  # start
    e = len(data_pts) - 1  # end

    beziers = []

    # begin the recursive fitting algorithm
    fitCubic(data_pts, s, e)

    # remove the old fcurve points and insert the new ones
    createNewCurves(curveGroup, beziers, group_mode)

# Main function of the simplification, called by the operator.
# sel_opt: either "sel" or "all", for which curves to affect
# error: maximum error allowed, as a fraction (default 0.002;
#        divide the desired percentage value by 10000, so 20 -> 0.0020)
# group_mode: boolean; analyze each curve separately or in groups,
#        where a group is all curves that affect the same property
#        (e.g. a bone's x, y, z rotation)
def fcurves_simplify(context, obj, sel_opt="all", error=0.002, group_mode=True):
    # main vars
    fcurves = obj.animation_data.action.fcurves

    if sel_opt == "sel":
        sel_fcurves = [fcurve for fcurve in fcurves if fcurve.select]
    else:
        sel_fcurves = fcurves[:]

    # error threshold for the Newton-Raphson reparameterization
    reparaError = error * 32
    maxIterations = 16

    if group_mode:
        fcurveDict = {}
        # this loop sorts all the fcurves into groups of 3 or 4,
        # based on their RNA data path, which corresponds to
        # the property they affect
        for curve in sel_fcurves:
            if curve.data_path in fcurveDict:  # property already seen, append the curve to its group
                fcurveDict[curve.data_path].append(curve)
            else:
                fcurveDict[curve.data_path] = [curve]  # new property, start a group with this first curve
        fcurveGroups = fcurveDict.values()
    else:
        fcurveGroups = sel_fcurves

    if error > 0.00000:
        # simplify every selected curve
        totalt = 0
        for i, fcurveGroup in enumerate(fcurveGroups):
            print("Processing curve " + str(i + 1) + "/" + str(len(fcurveGroups)))
            t = time.time()  # time.clock() was removed in Python 3.8
            simplifyCurves(fcurveGroup, error, reparaError, maxIterations, group_mode)
            t = time.time() - t
            print(str(t)[:5] + " seconds to process last curve")
            totalt += t
            print(str(totalt)[:5] + " seconds, total time elapsed")
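

# Editor's illustration (not part of the original addon): calling the
# simplifier on the active object from Blender's Python console:
#
#   import bpy
#   obj = bpy.context.active_object
#   fcurves_simplify(bpy.context, obj, sel_opt="all", error=0.002,
#                    group_mode=True)
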
# Implementation of a non-linear median filter with variable kernel size.
# Double pass - one marks spikes, the other smooths them.
# Expects sampled keyframes on every frame.
def denoise_median():
    context = bpy.context
    obj = context.active_object
    fcurves = obj.animation_data.action.fcurves
    medKernel = 1  # actual kernel is medKernel * 2 + 1, since this is an offset
    flagKernel = 4
    highThres = (flagKernel * 2) - 1
    lowThres = 0
    for fcurve in fcurves:
        orgPts = fcurve.keyframe_points[:]
        flaggedFrames = []
        # mark frames that are spikes by sorting a large kernel
        for i in range(flagKernel, len(fcurve.keyframe_points) - flagKernel):
            center = orgPts[i]
            neighborhood = orgPts[i - flagKernel: i + flagKernel]
            neighborhood.sort(key=lambda pt: pt.co[1])
            weight = neighborhood.index(center)
            if weight >= highThres or weight <= lowThres:
                flaggedFrames.append((i, center))
        # clean marked frames with a simple median filter that averages
        # all frames in the kernel equally, except the center,
        # which has no weight
        for i, pt in flaggedFrames:
            neighborhood = [neighpt.co[1] for neighpt in orgPts[i - medKernel: i + medKernel + 1] if neighpt != pt]
            newValue = sum(neighborhood) / len(neighborhood)
            pt.co[1] = newValue
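

# Editor's note: a worked example of the spike test above. With
# flagKernel = 4 the sorted neighborhood holds 8 keyframes, so the center
# is flagged only if it sorts to an extreme position (index 0 or >= 7),
# i.e. it is the minimum or maximum of its neighborhood. The repair pass
# then replaces it with the mean of its immediate neighbors, e.g.
# (prev + next) / 2 for medKernel = 1.
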
# Receives an armature and rotates all bones by 90 degrees along the X
# axis. This fixes the common axis issue BVH files have when importing.
# IN: arm_data (bpy.types.Armature)
def rotate_fix_armature(arm_data):
    global_matrix = Matrix.Rotation(radians(90), 4, "X")
    bpy.ops.object.mode_set(mode='EDIT', toggle=False)
    # disconnect all bones for ease of global rotation
    connectedBones = []
    for bone in arm_data.edit_bones:
        if bone.use_connect:
            connectedBones.append(bone.name)
            bone.use_connect = False

    # rotate all the bones around their center
    for bone in arm_data.edit_bones:
        bone.transform(global_matrix)

    # reconnect the bones
    for bone in connectedBones:
        arm_data.edit_bones[bone].use_connect = True
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)

# Roughly scales the performer armature to match the enduser armature.
# IN: performer_obj, enduser_obj - Blender objects whose data is an armature
def scale_fix_armature(performer_obj, enduser_obj):
    perf_bones = performer_obj.data.bones
    end_bones = enduser_obj.data.bones

    def calculateBoundingRadius(bones):
        center = Vector()
        for bone in bones:
            center += bone.head_local
        center /= len(bones)
        radius = 0
        for bone in bones:
            dist = (bone.head_local - center).length
            if dist > radius:
                radius = dist
        return radius

    perf_rad = calculateBoundingRadius(performer_obj.data.bones)
    end_rad = calculateBoundingRadius(enduser_obj.data.bones)
    #end_avg = enduser_obj.dimensions
    factor = end_rad / perf_rad * 1.2
    performer_obj.scale *= factor

# Guess mapping: given a performer and an enduser armature, attempts to
# guess the hierarchy mapping between their bones.
def guessMapping(performer_obj, enduser_obj):
    perf_bones = performer_obj.data.bones
    end_bones = enduser_obj.data.bones

    root = perf_bones[0]

    def findBoneSide(bone):
        if "Left" in bone:
            return "Left", bone.replace("Left", "").lower().replace(".", "")
        if "Right" in bone:
            return "Right", bone.replace("Right", "").lower().replace(".", "")
        if "L" in bone:
            return "Left", bone.replace("L", "").lower().replace(".", "")
        if "R" in bone:
            return "Right", bone.replace("R", "").lower().replace(".", "")
        return "", bone

    # nameMatch receives two bone names; it returns 2 if they are
    # essentially the same, 1 if they match but for opposite sides
    # (R vs. L), and 0 if there is no match at all
    def nameMatch(bone_a, bone_b):
        side_a, noside_a = findBoneSide(bone_a)
        side_b, noside_b = findBoneSide(bone_b)
        if side_a == side_b:
            if noside_a in noside_b or noside_b in noside_a:
                return 2
        else:
            if noside_a in noside_b or noside_b in noside_a:
                return 1
        return 0

    # breadth-first search over the enduser hierarchy for a name match
    def guessSingleMapping(perf_bone):
        possible_bones = [end_bones[0]]

        while possible_bones:
            for end_bone in possible_bones:
                match = nameMatch(perf_bone.name, end_bone.name)
                if match == 2 and not perf_bone.map:
                    perf_bone.map = end_bone.name
            newPossibleBones = []
            for end_bone in possible_bones:
                newPossibleBones += list(end_bone.children)
            possible_bones = newPossibleBones

        for child in perf_bone.children:
            guessSingleMapping(child)

    guessSingleMapping(root)
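

# Editor's illustration (not part of the original addon): how nameMatch
# behaves on typical rig names:
#   nameMatch("LeftArm", "Left.arm")  -> 2  (same side, same base name)
#   nameMatch("LeftArm", "RightArm")  -> 1  (same base name, mirrored side)
#   nameMatch("LeftArm", "Spine")     -> 0  (no match)
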
# Creates limit rotation constraints on the enduser armature based on the
# performer's range of motion (the max/min of its fcurves).
# IN: context and two blender objects which are armatures
# OUT: creates the limit constraints
def limit_dof(context, performer_obj, enduser_obj):
    limitDict = {}
    perf_bones = [bone for bone in performer_obj.pose.bones if bone.bone.map]
    c_frame = context.scene.frame_current
    for bone in perf_bones:
        # [min_x, min_y, min_z, max_x, max_y, max_z] sentinels
        limitDict[bone.bone.map] = [1000, 1000, 1000, -1000, -1000, -1000]
    for t in range(context.scene.frame_start, context.scene.frame_end):
        context.scene.frame_set(t)
        for bone in perf_bones:
            end_bone = enduser_obj.pose.bones[bone.bone.map]
            bake_matrix = bone.matrix
            rest_matrix = end_bone.bone.matrix_local

            if end_bone.parent and end_bone.bone.use_inherit_rotation:
                srcParent = bone.parent
                parent_mat = srcParent.matrix
                parent_rest = end_bone.parent.bone.matrix_local
                parent_rest_inv = parent_rest.inverted()
                parent_mat_inv = parent_mat.inverted()
                bake_matrix = parent_mat_inv * bake_matrix
                rest_matrix = parent_rest_inv * rest_matrix

            rest_matrix_inv = rest_matrix.inverted()
            bake_matrix = rest_matrix_inv * bake_matrix

            mat = bake_matrix
            euler = mat.to_euler()
            limitDict[bone.bone.map][0] = min(limitDict[bone.bone.map][0], euler.x)
            limitDict[bone.bone.map][1] = min(limitDict[bone.bone.map][1], euler.y)
            limitDict[bone.bone.map][2] = min(limitDict[bone.bone.map][2], euler.z)
            limitDict[bone.bone.map][3] = max(limitDict[bone.bone.map][3], euler.x)
            limitDict[bone.bone.map][4] = max(limitDict[bone.bone.map][4], euler.y)
            limitDict[bone.bone.map][5] = max(limitDict[bone.bone.map][5], euler.z)
    # remove any constraints this function created on a previous run
    for bone in enduser_obj.pose.bones:
        existingConstraint = [constraint for constraint in bone.constraints if constraint.name == "DOF Limitation"]
        if existingConstraint:
            bone.constraints.remove(existingConstraint[0])
    end_bones = [bone for bone in enduser_obj.pose.bones if bone.name in limitDict.keys()]
    for bone in end_bones:
        #~ if not bone.is_in_ik_chain:
        newCons = bone.constraints.new("LIMIT_ROTATION")
        newCons.name = "DOF Limitation"
        newCons.owner_space = "LOCAL"
        newCons.min_x, newCons.min_y, newCons.min_z, newCons.max_x, newCons.max_y, newCons.max_z = limitDict[bone.name]
        newCons.use_limit_x = True
        newCons.use_limit_y = True
        newCons.use_limit_z = True
        #~ else:
        #~ bone.ik_min_x, bone.ik_min_y, bone.ik_min_z, bone.ik_max_x, bone.ik_max_y, bone.ik_max_z = limitDict[bone.name]
        #~ bone.use_ik_limit_x = True
        #~ bone.use_ik_limit_y = True
        #~ bone.use_ik_limit_z = True
        #~ bone.ik_stiffness_x = 1 / ((limitDict[bone.name][3] - limitDict[bone.name][0]) / (2 * pi))
        #~ bone.ik_stiffness_y = 1 / ((limitDict[bone.name][4] - limitDict[bone.name][1]) / (2 * pi))
        #~ bone.ik_stiffness_z = 1 / ((limitDict[bone.name][5] - limitDict[bone.name][2]) / (2 * pi))

    context.scene.frame_set(c_frame)
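

# Editor's note on the math in limit_dof above: for each frame the
# performer's pose matrix is brought into the enduser bone's local rest
# space via
#   local = rest_matrix.inverted() * parent_matrix.inverted() * pose_matrix
# (the parent term only applies when rotation is inherited); the euler
# angles of that local matrix are then min/max-ed over the frame range to
# build the per-channel limit ranges.
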
def limit_dof_toggle_off(context, enduser_obj):
    for bone in enduser_obj.pose.bones:
        existingConstraint = [constraint for constraint in bone.constraints if constraint.name == "DOF Limitation"]
        if existingConstraint:
            bone.constraints.remove(existingConstraint[0])

# Sets up a FOLLOW_PATH constraint on the stride object and
# reparameterizes the path's eval_time to match the baked y motion.
def path_editing(context, stride_obj, path):
    y_fcurve = [fcurve for fcurve in stride_obj.animation_data.action.fcurves if fcurve.data_path == "location"][1]
    s, e = context.scene.frame_start, context.scene.frame_end  # y_fcurve.range()
    s = int(s)
    e = int(e)
    y_s = y_fcurve.evaluate(s)
    y_e = y_fcurve.evaluate(e)
    direction = (y_e - y_s) / abs(y_e - y_s)
    existing_cons = [constraint for constraint in stride_obj.constraints if constraint.type == "FOLLOW_PATH"]
    for cons in existing_cons:
        stride_obj.constraints.remove(cons)
    path_cons = stride_obj.constraints.new("FOLLOW_PATH")
    if direction < 0:
        path_cons.forward_axis = "TRACK_NEGATIVE_Y"
    else:
        path_cons.forward_axis = "FORWARD_Y"
    path_cons.target = path
    path_cons.use_curve_follow = True
    path.data.path_duration = e - s
    try:
        path.data.animation_data.action.fcurves
    except AttributeError:
        path.data.keyframe_insert("eval_time", frame=0)
    eval_time_fcurve = [fcurve for fcurve in path.data.animation_data.action.fcurves if fcurve.data_path == "eval_time"]
    eval_time_fcurve = eval_time_fcurve[0]
    totalLength = 0
    parameterization = {}
    print("evaluating curve")
    for t in range(s, e - 1):
        if t == s:
            chordLength = 0
        else:
            chordLength = (y_fcurve.evaluate(t) - y_fcurve.evaluate(t + 1))
        totalLength += chordLength
        parameterization[t] = totalLength
    for t in range(s + 1, e - 1):
        if totalLength == 0:
            print("no forward motion")
        parameterization[t] /= totalLength
        parameterization[t] *= e - s
    parameterization[e] = e - s
    for t in parameterization.keys():
        eval_time_fcurve.keyframe_points.insert(frame=t, value=parameterization[t])
    print("finished path editing")
# Stitches two retargeted animations together via NLA tracks, blending
# between them at the user-chosen blend frame.
def anim_stitch(context, enduser_obj):
    stitch_settings = enduser_obj.data.stitch_settings
    action_1 = stitch_settings.first_action
    action_2 = stitch_settings.second_action
    if stitch_settings.stick_bone != "":
        selected_bone = enduser_obj.pose.bones[stitch_settings.stick_bone]
    else:
        selected_bone = enduser_obj.pose.bones[0]
    scene = context.scene
    TrackNamesA = enduser_obj.data.mocapNLATracks[action_1]
    TrackNamesB = enduser_obj.data.mocapNLATracks[action_2]
    enduser_obj.data.active_mocap = action_1
    anim_data = enduser_obj.animation_data
    # add tracks for action 2
    mocapAction = bpy.data.actions[TrackNamesB.base_track]
    mocapTrack = anim_data.nla_tracks.new()
    mocapTrack.name = TrackNamesB.base_track
    mocapStrip = mocapTrack.strips.new(TrackNamesB.base_track, stitch_settings.blend_frame, mocapAction)
    mocapStrip.extrapolation = "HOLD_FORWARD"
    mocapStrip.blend_in = stitch_settings.blend_amount
    mocapStrip.action_frame_start += stitch_settings.second_offset
    mocapStrip.action_frame_end += stitch_settings.second_offset
    constraintTrack = anim_data.nla_tracks.new()
    constraintTrack.name = TrackNamesB.auto_fix_track
    constraintAction = bpy.data.actions[TrackNamesB.auto_fix_track]
    constraintStrip = constraintTrack.strips.new(TrackNamesB.auto_fix_track, stitch_settings.blend_frame, constraintAction)
    constraintStrip.extrapolation = "HOLD_FORWARD"
    constraintStrip.blend_in = stitch_settings.blend_amount
    userTrack = anim_data.nla_tracks.new()
    userTrack.name = TrackNamesB.manual_fix_track
    userAction = bpy.data.actions[TrackNamesB.manual_fix_track]
    userStrip = userTrack.strips.new(TrackNamesB.manual_fix_track, stitch_settings.blend_frame, userAction)
    userStrip.extrapolation = "HOLD_FORWARD"
    userStrip.blend_in = stitch_settings.blend_amount
    # if the enduser object is parented to a stride bone,
    # its animation needs to be stitched as well
    if enduser_obj.parent:
        if enduser_obj.parent.name == "stride_bone":
            stride_bone = enduser_obj.parent
            stride_anim_data = stride_bone.animation_data
            stride_anim_data.use_nla = True
            stride_anim_data.action = None
            for track in stride_anim_data.nla_tracks:
                stride_anim_data.nla_tracks.remove(track)
            actionATrack = stride_anim_data.nla_tracks.new()
            actionATrack.name = TrackNamesA.stride_action
            actionAStrip = actionATrack.strips.new(TrackNamesA.stride_action, 0, bpy.data.actions[TrackNamesA.stride_action])
            actionAStrip.extrapolation = "NOTHING"
            actionBTrack = stride_anim_data.nla_tracks.new()
            actionBTrack.name = TrackNamesB.stride_action
            actionBStrip = actionBTrack.strips.new(TrackNamesB.stride_action, stitch_settings.blend_frame, bpy.data.actions[TrackNamesB.stride_action])
            actionBStrip.action_frame_start += stitch_settings.second_offset
            actionBStrip.action_frame_end += stitch_settings.second_offset
            actionBStrip.blend_in = stitch_settings.blend_amount
            actionBStrip.extrapolation = "NOTHING"
            # we need to change the stride_bone's action to add the offset
            scene.frame_set(stitch_settings.blend_frame - 1)
            desired_pos = (selected_bone.matrix.to_translation() * enduser_obj.matrix_world)
            scene.frame_set(stitch_settings.blend_frame)
            actual_pos = (selected_bone.matrix.to_translation() * enduser_obj.matrix_world)
            offset = actual_pos - desired_pos

            for i, fcurve in enumerate([fcurve for fcurve in bpy.data.actions[TrackNamesB.stride_action].fcurves if fcurve.data_path == "location"]):
                print(offset[i], i, fcurve.array_index)
                for pt in fcurve.keyframe_points:
                    pt.co.y -= offset[i]
                    pt.handle_left.y -= offset[i]
                    pt.handle_right.y -= offset[i]

# Guesses the blend frame and offset for stitching two animations,
# using the cross correlation of their base tracks.
def guess_anim_stitch(context, enduser_obj):
    stitch_settings = enduser_obj.data.stitch_settings
    action_1 = stitch_settings.first_action
    action_2 = stitch_settings.second_action
    TrackNamesA = enduser_obj.data.mocapNLATracks[action_1]
    TrackNamesB = enduser_obj.data.mocapNLATracks[action_2]
    mocapA = bpy.data.actions[TrackNamesA.base_track]
    mocapB = bpy.data.actions[TrackNamesB.base_track]
    curvesA = mocapA.fcurves
    curvesB = mocapB.fcurves
    flm, s, data = crossCorrelationMatch(curvesA, curvesB, 10)
    print("Guessed the following for start and offset: ", s, flm)
    enduser_obj.data.stitch_settings.blend_frame = flm
    enduser_obj.data.stitch_settings.second_offset = s
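

# Editor's illustration (not part of the original addon): guess_anim_stitch
# feeds crossCorrelationMatch with the two base tracks, so after running it
# the populated stitch settings can be used directly:
#
#   guess_anim_stitch(bpy.context, enduser_obj)
#   anim_stitch(bpy.context, enduser_obj)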