author     Brad Payne (Vir Linden) <vir@lindenlab.com>    2016-05-21 08:15:28 -0400
committer  Brad Payne (Vir Linden) <vir@lindenlab.com>    2016-05-21 08:15:28 -0400
commit     f47ceb47bbcfd3466813d1653992e1ea418964b5 (patch)
tree       f3b1c26e835c8ddf40587783095a8b8954717d7b /scripts
parent     c2ef3b4c7186dbbd95b16520f281b7d58364fb52 (diff)
parent     a46fdc10c76ebbf76b4976f0cbb36b649fe4fdd2 (diff)
merged jelly-doll viewer-release into bento
Diffstat (limited to 'scripts')
-rw-r--r--  scripts/content_tools/anim_tool.py   595
-rw-r--r--  scripts/content_tools/dae_tool.py    119
-rw-r--r--  scripts/content_tools/skel_tool.py   427
3 files changed, 1141 insertions, 0 deletions
diff --git a/scripts/content_tools/anim_tool.py b/scripts/content_tools/anim_tool.py
new file mode 100644
index 0000000000..0cb1e1022e
--- /dev/null
+++ b/scripts/content_tools/anim_tool.py
@@ -0,0 +1,595 @@
+#!runpy.sh
+
+"""\
+
+This module contains tools for manipulating the .anim files supported
+for Second Life animation upload. Note that this format is unrelated
+to any non-Second Life formats of the same name.
+
+$LicenseInfo:firstyear=2016&license=viewerlgpl$
+Second Life Viewer Source Code
+Copyright (C) 2016, Linden Research, Inc.
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation;
+version 2.1 of the License only.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
+$/LicenseInfo$
+"""
+
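+# A minimal command-line sketch (the .anim file names here are illustrative,
+# and avatar_skeleton.xml / avatar_lad.xml are assumed to be present in the
+# working directory): dump an animation to stdout, then write a copy with a
+# small pelvis hop keyframed onto mPelvis.
+#
+#   python anim_tool.py input.anim --dump -
+#   python anim_tool.py input.anim output.anim --joints mPelvis --pos "0 0 0" "0 0 0.1" "0 0 0"
+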
+import sys
+import struct
+import StringIO
+import math
+import argparse
+import random
+from lxml import etree
+
+U16MAX = 65535
+OOU16MAX = 1.0/float(U16MAX)
+
+LL_MAX_PELVIS_OFFSET = 5.0
+
+class FilePacker(object):
+ def __init__(self):
+ self.data = StringIO.StringIO()
+ self.offset = 0
+
+ def write(self,filename):
+ f = open(filename,"wb")
+ f.write(self.data.getvalue())
+ f.close()
+
+ def pack(self,fmt,*args):
+ buf = struct.pack(fmt, *args)
+ self.offset += struct.calcsize(fmt)
+ self.data.write(buf)
+
+    def pack_string(self,str,size=0):
+        buf = str + "\000"
+        if size and (len(buf) < size):
+            buf += "\000" * (size-len(buf))
+        self.offset += len(buf)
+        self.data.write(buf)
+
+class FileUnpacker(object):
+ def __init__(self, filename):
+ f = open(filename,"rb")
+ self.data = f.read()
+ self.offset = 0
+
+ def unpack(self,fmt):
+ result = struct.unpack_from(fmt, self.data, self.offset)
+ self.offset += struct.calcsize(fmt)
+ return result
+
+ def unpack_string(self, size=0):
+ result = ""
+ i = 0
+ while (self.data[self.offset+i] != "\000"):
+ result += self.data[self.offset+i]
+ i += 1
+ i += 1
+ if size:
+ # fixed-size field for the string
+ i = size
+ self.offset += i
+ return result
+
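+# A small round-trip sketch of the two helpers above (the temp file name is
+# an assumption): pack a few fields, write them out, then read them back.
+#
+#   fp = FilePacker()
+#   fp.pack("<HHhf", 1, 0, 0, 10.0)
+#   fp.pack_string("hello", 16)
+#   fp.write("/tmp/packer_test.bin")
+#   fup = FileUnpacker("/tmp/packer_test.bin")
+#   print fup.unpack("<HHhf")      # (1, 0, 0, 10.0)
+#   print fup.unpack_string(16)    # "hello"
+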
+# translated from the C++ version in lldefs.h
+def llclamp(a, minval, maxval):
+ if a<minval:
+ return minval
+ if a>maxval:
+ return maxval
+ return a
+
+# translated from the C++ version in llquantize.h
+def F32_to_U16(val, lower, upper):
+    val = llclamp(val, lower, upper)
+    # make sure that the value is positive and normalized to <0, 1>
+    val -= lower
+    val /= (upper - lower)
+
+ # return the U16
+ return int(math.floor(val*U16MAX))
+
+# translated from the C++ version in llquantize.h
+def U16_to_F32(ival, lower, upper):
+    if ival < 0 or ival > U16MAX:
+        raise Exception("U16 out of range: %d" % ival)
+    val = ival*OOU16MAX
+    delta = (upper - lower)
+    val *= delta
+    val += lower
+
+    max_error = delta*OOU16MAX
+
+    # make sure that zeroes come through as zero
+    if abs(val) < max_error:
+        val = 0.0
+    return val
+
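+# Quantization example: for a 10-second animation, a key time of 2.5s maps to
+# F32_to_U16(2.5, 0.0, 10.0) == 16383, and U16_to_F32(16383, 0.0, 10.0) gives
+# back roughly 2.49989, i.e. the round-trip error is bounded by
+# (upper - lower) / U16MAX.
+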
+class BadFormat(Exception):
+ pass
+
+class RotKey(object):
+ def __init__(self):
+ pass
+
+ def unpack(self, anim, fup):
+ (self.time_short, ) = fup.unpack("<H")
+ self.time = U16_to_F32(self.time_short, 0.0, anim.duration)
+ (x,y,z) = fup.unpack("<HHH")
+ self.rotation = [U16_to_F32(i, -1.0, 1.0) for i in (x,y,z)]
+
+ def dump(self, f):
+ print >>f, " rot_key: t",self.time,"st",self.time_short,"rot",",".join([str(f) for f in self.rotation])
+
+ def pack(self, anim, fp):
+ if not hasattr(self,"time_short"):
+ self.time_short = F32_to_U16(self.time, 0.0, anim.duration)
+ fp.pack("<H",self.time_short)
+ (x,y,z) = [F32_to_U16(v, -1.0, 1.0) for v in self.rotation]
+ fp.pack("<HHH",x,y,z)
+
+class PosKey(object):
+ def __init__(self):
+ pass
+
+ def unpack(self, anim, fup):
+ (self.time_short, ) = fup.unpack("<H")
+ self.time = U16_to_F32(self.time_short, 0.0, anim.duration)
+ (x,y,z) = fup.unpack("<HHH")
+ self.position = [U16_to_F32(i, -LL_MAX_PELVIS_OFFSET, LL_MAX_PELVIS_OFFSET) for i in (x,y,z)]
+
+ def dump(self, f):
+ print >>f, " pos_key: t",self.time,"pos ",",".join([str(f) for f in self.position])
+
+ def pack(self, anim, fp):
+ if not hasattr(self,"time_short"):
+ self.time_short = F32_to_U16(self.time, 0.0, anim.duration)
+ fp.pack("<H",self.time_short)
+ (x,y,z) = [F32_to_U16(v, -LL_MAX_PELVIS_OFFSET, LL_MAX_PELVIS_OFFSET) for v in self.position]
+ fp.pack("<HHH",x,y,z)
+
+class Constraint(object):
+ def __init__(self):
+ pass
+
+ def unpack(self, anim, fup):
+ (self.chain_length, self.constraint_type) = fup.unpack("<BB")
+ self.source_volume = fup.unpack_string(16)
+ self.source_offset = fup.unpack("<fff")
+ self.target_volume = fup.unpack_string(16)
+ self.target_offset = fup.unpack("<fff")
+ self.target_dir = fup.unpack("<fff")
+        (self.ease_in_start, self.ease_in_stop, self.ease_out_start, self.ease_out_stop) = fup.unpack("<ffff")
+
+ def pack(self, anim, fp):
+ fp.pack("<BB", self.chain_length, self.constraint_type)
+ fp.pack_string(self.source_volume, 16)
+ fp.pack("<fff", *self.source_offset)
+ fp.pack_string(self.target_volume, 16)
+ fp.pack("<fff", *self.target_offset)
+ fp.pack("<fff", *self.target_dir)
+ fp.pack("<ffff", self.ease_in_start, self.ease_in_stop, self.ease_out_start, self.ease_out_stop)
+
+ def dump(self, f):
+ print >>f, " constraint:"
+ print >>f, " chain_length",self.chain_length
+ print >>f, " constraint_type",self.constraint_type
+ print >>f, " source_volume",self.source_volume
+ print >>f, " source_offset",self.source_offset
+ print >>f, " target_volume",self.target_volume
+ print >>f, " target_offset",self.target_offset
+ print >>f, " target_dir",self.target_dir
+ print >>f, " ease_in_start",self.ease_in_start
+ print >>f, " ease_in_stop",self.ease_in_stop
+ print >>f, " ease_out_start",self.ease_out_start
+ print >>f, " ease_out_stop",self.ease_out_stop
+
+class Constraints(object):
+ def __init__(self):
+ pass
+
+ def unpack(self, anim, fup):
+ (self.num_constraints, ) = fup.unpack("<i")
+ self.constraints = []
+ for i in xrange(self.num_constraints):
+ constraint = Constraint()
+ constraint.unpack(anim, fup)
+ self.constraints.append(constraint)
+
+ def pack(self, anim, fp):
+ fp.pack("<i",self.num_constraints)
+ for c in self.constraints:
+ c.pack(anim,fp)
+
+ def dump(self, f):
+ print >>f, "constraints:",self.num_constraints
+ for c in self.constraints:
+ c.dump(f)
+
+class PositionCurve(object):
+ def __init__(self):
+ self.num_pos_keys = 0
+ self.keys = []
+
+ def is_static(self):
+ if self.keys:
+ k0 = self.keys[0]
+ for k in self.keys:
+ if k.position != k0.position:
+ return False
+ return True
+
+ def unpack(self, anim, fup):
+ (self.num_pos_keys, ) = fup.unpack("<i")
+ self.keys = []
+ for k in xrange(0,self.num_pos_keys):
+ pos_key = PosKey()
+ pos_key.unpack(anim, fup)
+ self.keys.append(pos_key)
+
+ def pack(self, anim, fp):
+ fp.pack("<i",self.num_pos_keys)
+ for k in self.keys:
+ k.pack(anim, fp)
+
+ def dump(self, f):
+ print >>f, " position_curve:"
+ print >>f, " num_pos_keys", self.num_pos_keys
+ for k in xrange(0,self.num_pos_keys):
+ self.keys[k].dump(f)
+
+class RotationCurve(object):
+ def __init__(self):
+ self.num_rot_keys = 0
+ self.keys = []
+
+ def is_static(self):
+ if self.keys:
+ k0 = self.keys[0]
+ for k in self.keys:
+ if k.rotation != k0.rotation:
+ return False
+ return True
+
+ def unpack(self, anim, fup):
+ (self.num_rot_keys, ) = fup.unpack("<i")
+ self.keys = []
+ for k in xrange(0,self.num_rot_keys):
+ rot_key = RotKey()
+ rot_key.unpack(anim, fup)
+ self.keys.append(rot_key)
+
+ def pack(self, anim, fp):
+ fp.pack("<i",self.num_rot_keys)
+ for k in self.keys:
+ k.pack(anim, fp)
+
+ def dump(self, f):
+ print >>f, " rotation_curve:"
+ print >>f, " num_rot_keys", self.num_rot_keys
+ for k in xrange(0,self.num_rot_keys):
+ self.keys[k].dump(f)
+
+class JointInfo(object):
+ def __init__(self):
+ pass
+
+ def unpack(self, anim, fup):
+ self.joint_name = fup.unpack_string()
+ (self.joint_priority, ) = fup.unpack("<i")
+ self.rotation_curve = RotationCurve()
+ self.rotation_curve.unpack(anim, fup)
+ self.position_curve = PositionCurve()
+ self.position_curve.unpack(anim, fup)
+
+ def pack(self, anim, fp):
+ fp.pack_string(self.joint_name)
+ fp.pack("<i", self.joint_priority)
+ self.rotation_curve.pack(anim, fp)
+ self.position_curve.pack(anim, fp)
+
+ def dump(self, f):
+ print >>f, "joint:"
+ print >>f, " joint_name:",self.joint_name
+ print >>f, " joint_priority:",self.joint_priority
+ self.rotation_curve.dump(f)
+ self.position_curve.dump(f)
+
+class Anim(object):
+ def __init__(self, filename=None):
+ if filename:
+ self.read(filename)
+
+ def read(self, filename):
+ fup = FileUnpacker(filename)
+ self.unpack(fup)
+
+ # various validity checks could be added - see LLKeyframeMotion::deserialize()
+ def unpack(self,fup):
+ (self.version, self.sub_version, self.base_priority, self.duration) = fup.unpack("@HHhf")
+
+ if self.version == 0 and self.sub_version == 1:
+ self.old_version = True
+ raise BadFormat("old version not supported")
+ elif self.version == 1 and self.sub_version == 0:
+ self.old_version = False
+ else:
+ raise BadFormat("Bad combination of version, sub_version: %d %d" % (self.version, self.sub_version))
+
+ self.emote_name = fup.unpack_string()
+
+ (self.loop_in_point, self.loop_out_point, self.loop, self.ease_in_duration, self.ease_out_duration, self.hand_pose, self.num_joints) = fup.unpack("@ffiffII")
+
+ self.joints = []
+ for j in xrange(0,self.num_joints):
+ joint_info = JointInfo()
+ joint_info.unpack(self, fup)
+ self.joints.append(joint_info)
+ print "unpacked joint",joint_info.joint_name
+ self.constraints = Constraints()
+ self.constraints.unpack(self, fup)
+ self.data = fup.data
+
+    def pack(self, fp):
+        fp.pack("@HHhf", self.version, self.sub_version, self.base_priority, self.duration)
+        fp.pack_string(self.emote_name, 0)
+        fp.pack("@ffiffII", self.loop_in_point, self.loop_out_point, self.loop, self.ease_in_duration, self.ease_out_duration, self.hand_pose, self.num_joints)
+        for j in self.joints:
+            j.pack(self, fp)
+        self.constraints.pack(self, fp)
+
+ def dump(self, filename="-"):
+ if filename=="-":
+ f = sys.stdout
+ else:
+ f = open(filename,"w")
+ print >>f, "versions: ", self.version, self.sub_version
+ print >>f, "base_priority: ", self.base_priority
+ print >>f, "duration: ", self.duration
+ print >>f, "emote_name: ", self.emote_name
+ print >>f, "loop_in_point: ", self.loop_in_point
+ print >>f, "loop_out_point: ", self.loop_out_point
+ print >>f, "loop: ", self.loop
+ print >>f, "ease_in_duration: ", self.ease_in_duration
+ print >>f, "ease_out_duration: ", self.ease_out_duration
+ print >>f, "hand_pose", self.hand_pose
+ print >>f, "num_joints", self.num_joints
+ for j in self.joints:
+ j.dump(f)
+ self.constraints.dump(f)
+
+ def write(self, filename):
+ fp = FilePacker()
+ self.pack(fp)
+ fp.write(filename)
+
+ def write_src_data(self, filename):
+ print "write file",filename
+ f = open(filename,"wb")
+ f.write(self.data)
+ f.close()
+
+ def find_joint(self, name):
+ joints = [j for j in self.joints if j.joint_name == name]
+ if joints:
+ return joints[0]
+ else:
+ return None
+
+ def add_joint(self, name, priority):
+ if not self.find_joint(name):
+ j = JointInfo()
+ j.joint_name = name
+ j.joint_priority = priority
+ j.rotation_curve = RotationCurve()
+ j.position_curve = PositionCurve()
+ self.joints.append(j)
+ self.num_joints = len(self.joints)
+
+    def delete_joint(self, name):
+        j = self.find_joint(name)
+        if j:
+            self.joints.remove(j)
+            self.num_joints = len(self.joints)
+
+ def summary(self):
+ nj = len(self.joints)
+ nz = len([j for j in self.joints if j.joint_priority > 0])
+ nstatic = len([j for j in self.joints if j.rotation_curve.is_static() and j.position_curve.is_static()])
+ print "summary: %d joints, non-zero priority %d, static %d" % (nj, nz, nstatic)
+
+ def add_pos(self, joint_names, positions):
+ js = [joint for joint in self.joints if joint.joint_name in joint_names]
+ for j in js:
+ if args.verbose:
+ print "adding positions",j.joint_name,positions
+ j.joint_priority = 4
+ j.position_curve.num_pos_keys = len(positions)
+ j.position_curve.keys = []
+ for i,pos in enumerate(positions):
+ key = PosKey()
+ key.time = self.duration * i / (len(positions) - 1)
+ key.time_short = F32_to_U16(key.time, 0.0, self.duration)
+ key.position = pos
+ j.position_curve.keys.append(key)
+
+ def add_rot(self, joint_names, rotations):
+ js = [joint for joint in self.joints if joint.joint_name in joint_names]
+ for j in js:
+ print "adding rotations",j.joint_name
+ j.joint_priority = 4
+ j.rotation_curve.num_rot_keys = len(rotations)
+ j.rotation_curve.keys = []
+ for i,pos in enumerate(rotations):
+ key = RotKey()
+ key.time = self.duration * i / (len(rotations) - 1)
+ key.time_short = F32_to_U16(key.time, 0.0, self.duration)
+ key.rotation = pos
+ j.rotation_curve.keys.append(key)
+
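+# A minimal sketch of using the Anim class from another script (file and
+# joint names are illustrative):
+#
+#   anim = Anim("input.anim")
+#   anim.summary()
+#   anim.add_joint("mNeck", 0)
+#   anim.add_rot(["mNeck"], [(0.0, 0.0, 0.0), (0.0, 0.0, 0.1)])
+#   anim.write("output.anim")
+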
+def twistify(anim, joint_names, rot1, rot2):
+ js = [joint for joint in anim.joints if joint.joint_name in joint_names]
+ for j in js:
+ print "twisting",j.joint_name
+ print j.rotation_curve.num_rot_keys
+ j.joint_priority = 4
+ j.rotation_curve.num_rot_keys = 2
+ j.rotation_curve.keys = []
+ key1 = RotKey()
+ key1.time_short = 0
+ key1.time = U16_to_F32(key1.time_short, 0.0, anim.duration)
+ key1.rotation = rot1
+ key2 = RotKey()
+ key2.time_short = U16MAX
+ key2.time = U16_to_F32(key2.time_short, 0.0, anim.duration)
+ key2.rotation = rot2
+ j.rotation_curve.keys.append(key1)
+ j.rotation_curve.keys.append(key2)
+
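+# For example, twistify(anim, ["mSpine2"], (0,0,0), (0,0,0.5)) would replace
+# the rotation curve on mSpine2 with just two keys, one at time 0 holding rot1
+# and one at the full duration holding rot2 (joint name is illustrative).
+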
+def float_triple(arg):
+ vals = arg.split()
+ if len(vals)==3:
+ return [float(x) for x in vals]
+ else:
+ raise Exception("arg %s does not resolve to a float triple" % arg)
+
+def get_joint_by_name(tree,name):
+ if tree is None:
+ return None
+ matches = [elt for elt in tree.getroot().iter() if \
+ elt.get("name")==name and elt.tag in ["bone", "collision_volume", "attachment_point"]]
+ if len(matches)==1:
+ return matches[0]
+ elif len(matches)>1:
+ print "multiple matches for name",name
+ return None
+ else:
+ return None
+
+def get_elt_pos(elt):
+ if elt.get("pos"):
+ return float_triple(elt.get("pos"))
+ elif elt.get("position"):
+ return float_triple(elt.get("position"))
+ else:
+ return (0.0, 0.0, 0.0)
+
+def resolve_joints(names, skel_tree, lad_tree):
+ print "resolve joints, no_hud is",args.no_hud
+ if skel_tree and lad_tree:
+ all_elts = [elt for elt in skel_tree.getroot().iter()]
+ all_elts.extend([elt for elt in lad_tree.getroot().iter()])
+ matches = []
+ for elt in all_elts:
+ if elt.get("name") is None:
+ continue
+ print elt.get("name"),"hud",elt.get("hud")
+ if args.no_hud and elt.get("hud"):
+ print "skipping hud joint", elt.get("name")
+ continue
+ if elt.get("name") in names or elt.tag in names:
+ matches.append(elt.get("name"))
+ return list(set(matches))
+ else:
+ return names
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(description="process SL animations")
+ parser.add_argument("--verbose", help="verbose flag", action="store_true")
+ parser.add_argument("--dump", help="dump to specified file")
+ parser.add_argument("--rot", help="specify sequence of rotations", type=float_triple, nargs="+")
+ parser.add_argument("--rand_pos", help="request random positions", action="store_true")
+ parser.add_argument("--reset_pos", help="request original positions", action="store_true")
+ parser.add_argument("--pos", help="specify sequence of positions", type=float_triple, nargs="+")
+ parser.add_argument("--delete_joints", help="specify joints to be deleted", nargs="+")
+ parser.add_argument("--joints", help="specify joints to be added or modified", nargs="+")
+ parser.add_argument("--summary", help="print summary of the output animation", action="store_true")
+ parser.add_argument("--skel", help="name of the avatar_skeleton file", default="avatar_skeleton.xml")
+ parser.add_argument("--lad", help="name of the avatar_lad file", default="avatar_lad.xml")
+ parser.add_argument("--set_version", nargs=2, type=int, help="set version and sub-version to specified values")
+ parser.add_argument("--no_hud", help="omit hud joints from list of attachments", action="store_true")
+ parser.add_argument("infilename", help="name of a .anim file to input")
+ parser.add_argument("outfilename", nargs="?", help="name of a .anim file to output")
+ args = parser.parse_args()
+
+ print "anim_tool.py: " + " ".join(sys.argv)
+ print "dump is", args.dump
+ print "infilename",args.infilename,"outfilename",args.outfilename
+ print "rot",args.rot
+ print "pos",args.pos
+ print "joints",args.joints
+
+ try:
+ anim = Anim(args.infilename)
+ skel_tree = None
+ lad_tree = None
+ joints = []
+ if args.skel:
+ skel_tree = etree.parse(args.skel)
+ if skel_tree is None:
+ print "failed to parse",args.skel
+ exit(1)
+ if args.lad:
+ lad_tree = etree.parse(args.lad)
+ if lad_tree is None:
+ print "failed to parse",args.lad
+ exit(1)
+ if args.joints:
+ joints = resolve_joints(args.joints, skel_tree, lad_tree)
+ if args.verbose:
+ print "joints resolved to",joints
+ for name in joints:
+ anim.add_joint(name,0)
+ if args.delete_joints:
+ for name in args.delete_joints:
+ anim.delete_joint(name)
+ if joints and args.rot:
+ anim.add_rot(joints, args.rot)
+ if joints and args.pos:
+ anim.add_pos(joints, args.pos)
+ if joints and args.rand_pos:
+ for joint in joints:
+ pos_array = list(tuple(random.uniform(-1,1) for i in xrange(3)) for j in xrange(2))
+ pos_array.append(pos_array[0])
+ anim.add_pos([joint], pos_array)
+ if joints and args.reset_pos:
+ for joint in joints:
+ elt = get_joint_by_name(skel_tree,joint)
+ if elt is None:
+ elt = get_joint_by_name(lad_tree,joint)
+ if elt is not None:
+ pos_array = []
+ pos_array.append(get_elt_pos(elt))
+ pos_array.append(pos_array[0])
+ anim.add_pos([joint], pos_array)
+ else:
+ print "no elt or no pos data for",joint
+ if args.set_version:
+ anim.version = args.set_version[0]
+ anim.sub_version = args.set_version[1]
+ if args.dump:
+ anim.dump(args.dump)
+ if args.summary:
+ anim.summary()
+ if args.outfilename:
+ anim.write(args.outfilename)
+ except:
+ raise
+
diff --git a/scripts/content_tools/dae_tool.py b/scripts/content_tools/dae_tool.py
new file mode 100644
index 0000000000..823f69cb85
--- /dev/null
+++ b/scripts/content_tools/dae_tool.py
@@ -0,0 +1,119 @@
+#!runpy.sh
+
+"""\
+
+This module contains tools for manipulating COLLADA (.dae) files.
+
+$LicenseInfo:firstyear=2016&license=viewerlgpl$
+Second Life Viewer Source Code
+Copyright (C) 2016, Linden Research, Inc.
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation;
+version 2.1 of the License only.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
+$/LicenseInfo$
+"""
+
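+# A minimal command-line sketch (file and joint names are illustrative):
+# print a scene summary, then write a copy with the listed joints tweaked
+# to lock their offsets.
+#
+#   python dae_tool.py avatar.dae --summary
+#   python dae_tool.py avatar.dae avatar_locked.dae --lock_offsets mPelvis mChest
+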
+import argparse
+import random
+
+# Need to pip install numpy and pycollada
+import numpy as np
+from collada import *
+from lxml import etree
+
+def mesh_summary(mesh):
+ print "scenes",mesh.scenes
+ for scene in mesh.scenes:
+ print "scene",scene
+ for node in scene.nodes:
+ print "node",node
+
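+# In a collada <matrix> element the sixteen floats are a row-major 4x4
+# transform, so entries 3, 7 and 11 hold the x/y/z translation. The two
+# helpers below edit only those entries: mesh_lock_offsets adds a tiny
+# epsilon to lock the joint offsets, mesh_random_offsets adds a visible
+# random offset for testing.
+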
+def mesh_lock_offsets(tree, joints):
+ print "mesh_lock_offsets",tree,joints
+ for joint_node in tree.iter():
+ if "node" not in joint_node.tag:
+ continue
+ if joint_node.get("type") != "JOINT":
+ continue
+ if joint_node.get("name") in joints or "bone" in joints:
+ for matrix_node in list(joint_node):
+ if "matrix" in matrix_node.tag:
+ floats = [float(x) for x in matrix_node.text.split()]
+ if len(floats) == 16:
+ floats[3] += 0.0001
+ floats[7] += 0.0001
+ floats[11] += 0.0001
+ matrix_node.text = " ".join([str(f) for f in floats])
+ print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats
+
+
+def mesh_random_offsets(tree, joints):
+ print "mesh_random_offsets",tree,joints
+ for joint_node in tree.iter():
+ if "node" not in joint_node.tag:
+ continue
+ if joint_node.get("type") != "JOINT":
+ continue
+ if not joint_node.get("name"):
+ continue
+ if joint_node.get("name") in joints or "bone" in joints:
+ for matrix_node in list(joint_node):
+ if "matrix" in matrix_node.tag:
+ floats = [float(x) for x in matrix_node.text.split()]
+ print "randomizing",floats
+ if len(floats) == 16:
+ floats[3] += random.uniform(-1.0,1.0)
+ floats[7] += random.uniform(-1.0,1.0)
+ floats[11] += random.uniform(-1.0,1.0)
+ matrix_node.text = " ".join([str(f) for f in floats])
+ print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="process SL animations")
+ parser.add_argument("--verbose", action="store_true",help="verbose flag")
+ parser.add_argument("infilename", help="name of a collada (dae) file to input")
+ parser.add_argument("outfilename", nargs="?", help="name of a collada (dae) file to output", default = None)
+ parser.add_argument("--lock_offsets", nargs="+", help="tweak position of listed joints to lock their offsets")
+ parser.add_argument("--random_offsets", nargs="+", help="random offset position for listed joints")
+ parser.add_argument("--summary", action="store_true", help="print summary info about input file")
+ args = parser.parse_args()
+
+ mesh = None
+ tree = None
+
+ if args.infilename:
+ print "reading",args.infilename
+ mesh = Collada(args.infilename)
+ tree = etree.parse(args.infilename)
+
+ if args.summary:
+ print "summarizing",args.infilename
+ mesh_summary(mesh)
+
+ if args.lock_offsets:
+ print "locking offsets for",args.lock_offsets
+ mesh_lock_offsets(tree, args.lock_offsets)
+
+ if args.random_offsets:
+ print "adding random offsets for",args.random_offsets
+ mesh_random_offsets(tree, args.random_offsets)
+
+ if args.outfilename:
+ print "writing",args.outfilename
+ f = open(args.outfilename,"w")
+    print >>f, etree.tostring(tree, pretty_print=True) # need update to get short_empty_elements=True
+
diff --git a/scripts/content_tools/skel_tool.py b/scripts/content_tools/skel_tool.py
new file mode 100644
index 0000000000..5d4ea0e059
--- /dev/null
+++ b/scripts/content_tools/skel_tool.py
@@ -0,0 +1,427 @@
+#!runpy.sh
+
+"""\
+
+This module contains tools for manipulating and validating the avatar skeleton file.
+
+$LicenseInfo:firstyear=2016&license=viewerlgpl$
+Second Life Viewer Source Code
+Copyright (C) 2016, Linden Research, Inc.
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation;
+version 2.1 of the License only.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
+$/LicenseInfo$
+"""
+
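+# A minimal command-line sketch (file names are illustrative): validate a
+# candidate skeleton against the previous one, fix what can be fixed, and
+# write the result.
+#
+#   python skel_tool.py avatar_skeleton.xml avatar_skeleton_out.xml --ogfile avatar_skeleton_orig.xml --validate --fix
+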
+import argparse
+
+from lxml import etree
+
+def get_joint_names(tree):
+ joints = [element.get('name') for element in tree.getroot().iter() if element.tag in ['bone','collision_volume']]
+ print "joints:",joints
+ return joints
+
+def get_aliases(tree):
+ aliases = {}
+ alroot = tree.getroot()
+ for element in alroot.iter():
+ for key in element.keys():
+ if key == 'aliases':
+ name = element.get('name')
+ val = element.get('aliases')
+ aliases[name] = val
+ return aliases
+
+def fix_name(element):
+ pass
+
+def enforce_precision_rules(element):
+ pass
+
+def float_tuple(str):
+ try:
+ return [float(e) for e in str.split(" ")]
+ except:
+ return (0,0,0)
+
+def check_symmetry(name, field, vec1, vec2):
+ if vec1[0] != vec2[0]:
+ print name,field,"x match fail"
+ if vec1[1] != -vec2[1]:
+ print name,field,"y mirror image fail"
+ if vec1[2] != vec2[2]:
+ print name,field,"z match fail"
+
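+# Symmetry convention checked here: a Right/Left joint pair should have
+# identical x and z components and mirrored (negated) y components in the
+# checked field.
+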
+def enforce_symmetry(tree, element, field, fix=False):
+    name = element.get("name")
+    if not name:
+        return
+    if "Right" in name:
+        left_name = name.replace("Right","Left")
+        left_element = get_element_by_name(tree, left_name)
+        if left_element is None:
+            print "no Left counterpart found for",name
+            return
+        pos = element.get(field)
+        left_pos = left_element.get(field)
+        pos_tuple = float_tuple(pos)
+        left_pos_tuple = float_tuple(left_pos)
+        check_symmetry(name,field,pos_tuple,left_pos_tuple)
+
+def get_element_by_name(tree,name):
+ if tree is None:
+ return None
+ matches = [elt for elt in tree.getroot().iter() if elt.get("name")==name]
+ if len(matches)==1:
+ return matches[0]
+ elif len(matches)>1:
+ print "multiple matches for name",name
+ return None
+ else:
+ return None
+
+def list_skel_tree(tree):
+ for element in tree.getroot().iter():
+ if element.tag == "bone":
+ print element.get("name"),"-",element.get("support")
+
+def validate_child_order(tree, ogtree, fix=False):
+ unfixable = 0
+
+ #print "validate_child_order am failing for NO RAISIN!"
+ #unfixable += 1
+
+ tofix = set()
+ for element in tree.getroot().iter():
+ if element.tag != "bone":
+ continue
+ og_element = get_element_by_name(ogtree,element.get("name"))
+ if og_element is not None:
+ for echild,ochild in zip(list(element),list(og_element)):
+ if echild.get("name") != ochild.get("name"):
+ print "Child ordering error, parent",element.get("name"),echild.get("name"),"vs",ochild.get("name")
+ if fix:
+ tofix.add(element.get("name"))
+ children = {}
+ for name in tofix:
+ print "FIX",name
+ element = get_element_by_name(tree,name)
+ og_element = get_element_by_name(ogtree,name)
+ children = []
+ # add children matching the original joints first, in the same order
+ for og_elt in list(og_element):
+ elt = get_element_by_name(tree,og_elt.get("name"))
+ if elt is not None:
+ children.append(elt)
+ print "b:",elt.get("name")
+ else:
+ print "b missing:",og_elt.get("name")
+ # then add children that are not present in the original joints
+ for elt in list(element):
+ og_elt = get_element_by_name(ogtree,elt.get("name"))
+ if og_elt is None:
+ children.append(elt)
+ print "e:",elt.get("name")
+ # if we've done this right, we have a rearranged list of the same length
+ if len(children)!=len(element):
+ print "children",[e.get("name") for e in children]
+ print "element",[e.get("name") for e in element]
+ print "children changes for",name,", cannot reconcile"
+ else:
+ element[:] = children
+
+ return unfixable
+
+# Checklist for the final file, started from SL-276:
+# - new "end" attribute on all bones
+# - new "connected" attribute on all bones
+# - new "support" tag on all bones and CVs
+# - aliases where appropriate for backward compatibility. rFoot and lFoot associated with mAnkle bones (not mFoot bones)
+# - correct counts of bones and collision volumes in header
+# - check all comments
+# - old fields of old bones and CVs should be identical to their previous values.
+# - old bones and CVs should retain their previous ordering under their parent, with new joints going later in any given child list
+# - corresponding right and left joints should be mirror symmetric.
+# - childless elements should be in short form (<bone /> instead of <bone></bone>)
+# - digits of precision should be consistent (again, except for old joints)
+def validate_skel_tree(tree, ogtree, reftree, fix=False):
+ print "validate_skel_tree"
+ (num_bones,num_cvs) = (0,0)
+ unfixable = 0
+ defaults = {"connected": "false",
+ "group": "Face"
+ }
+ for element in tree.getroot().iter():
+ og_element = get_element_by_name(ogtree,element.get("name"))
+ ref_element = get_element_by_name(reftree,element.get("name"))
+ # Preserve values from og_file:
+ for f in ["pos","rot","scale","pivot"]:
+ if og_element is not None and og_element.get(f) and (str(element.get(f)) != str(og_element.get(f))):
+ print element.get("name"),"field",f,"has changed:",og_element.get(f),"!=",element.get(f)
+ if fix:
+ element.set(f, og_element.get(f))
+
+ # Pick up any other fields that we can from ogtree and reftree
+ fields = []
+ if element.tag in ["bone","collision_volume"]:
+ fields = ["support","group"]
+ if element.tag == 'bone':
+ fields.extend(["end","connected"])
+ for f in fields:
+ if not element.get(f):
+ print element.get("name"),"missing required field",f
+ if fix:
+ if og_element is not None and og_element.get(f):
+ print "fix from ogtree"
+ element.set(f,og_element.get(f))
+ elif ref_element is not None and ref_element.get(f):
+ print "fix from reftree"
+ element.set(f,ref_element.get(f))
+ else:
+ if f in defaults:
+ print "fix by using default value",f,"=",defaults[f]
+ element.set(f,defaults[f])
+ elif f == "support":
+ if og_element is not None:
+ element.set(f,"base")
+ else:
+ element.set(f,"extended")
+ else:
+ print "unfixable:",element.get("name"),"no value for field",f
+ unfixable += 1
+
+ fix_name(element)
+ enforce_precision_rules(element)
+ for field in ["pos","pivot"]:
+ enforce_symmetry(tree, element, field, fix)
+
+ if element.tag == "linden_skeleton":
+ num_bones = int(element.get("num_bones"))
+ num_cvs = int(element.get("num_collision_volumes"))
+ all_bones = [e for e in tree.getroot().iter() if e.tag=="bone"]
+ all_cvs = [e for e in tree.getroot().iter() if e.tag=="collision_volume"]
+ if num_bones != len(all_bones):
+ print "wrong bone count, expected",len(all_bones),"got",num_bones
+ if fix:
+ element.set("num_bones", str(len(all_bones)))
+ if num_cvs != len(all_cvs):
+ print "wrong cv count, expected",len(all_cvs),"got",num_cvs
+ if fix:
+ element.set("num_collision_volumes", str(len(all_cvs)))
+
+ print "skipping child order code"
+ #unfixable += validate_child_order(tree, ogtree, fix)
+
+ if fix and (unfixable > 0):
+ print "BAD FILE:", unfixable,"errs could not be fixed"
+
+
+# Check contents of avatar_lad file relative to a specified skeleton
+def validate_lad_tree(ladtree,skeltree,orig_ladtree):
+ print "validate_lad_tree"
+ bone_names = [elt.get("name") for elt in skeltree.iter("bone")]
+ bone_names.append("mScreen")
+ bone_names.append("mRoot")
+ cv_names = [elt.get("name") for elt in skeltree.iter("collision_volume")]
+ #print "bones\n ","\n ".join(sorted(bone_names))
+ #print "cvs\n ","\n ".join(sorted(cv_names))
+ for att in ladtree.iter("attachment_point"):
+ att_name = att.get("name")
+ #print "attachment",att_name
+ joint_name = att.get("joint")
+ if not joint_name in bone_names:
+ print "att",att_name,"linked to invalid joint",joint_name
+ for skel_param in ladtree.iter("param_skeleton"):
+ skel_param_id = skel_param.get("id")
+ skel_param_name = skel_param.get("name")
+ #if not skel_param_name and not skel_param_id:
+ # print "strange skel_param"
+ # print etree.tostring(skel_param)
+ # for k,v in skel_param.attrib.iteritems():
+ # print k,"->",v
+ #print "skel_param",skel_param_name
+ for bone in skel_param.iter("bone"):
+ bone_name = bone.get("name")
+ if not bone_name in bone_names:
+ print "skel param references invalid bone",bone_name
+ print etree.tostring(bone)
+ drivers = {}
+ for driven_param in ladtree.iter("driven"):
+ driver = driven_param.getparent().getparent()
+ driven_id = driven_param.get("id")
+ driver_id = driver.get("id")
+ actual_param = next(param for param in ladtree.iter("param") if param.get("id")==driven_id)
+ if not driven_id in drivers:
+ drivers[driven_id] = set()
+ drivers[driven_id].add(driver_id)
+ if (actual_param.get("value_min") != driver.get("value_min") or \
+ actual_param.get("value_max") != driver.get("value_max")):
+ if args.verbose:
+ print "MISMATCH min max:",driver.get("id"),"drives",driven_param.get("id"),"min",driver.get("value_min"),actual_param.get("value_min"),"max",driver.get("value_max"),actual_param.get("value_max")
+
+ for driven_id in drivers:
+ dset = drivers[driven_id]
+ if len(dset) != 1:
+ print "driven_id",driven_id,"has multiple drivers",dset
+ else:
+ if args.verbose:
+ print "driven_id",driven_id,"has one driver",dset
+ if orig_ladtree:
+ # make sure expected message format is unchanged
+ orig_message_params_by_id = dict((int(param.get("id")),param) for param in orig_ladtree.iter("param") if param.get("group") in ["0","3"])
+ orig_message_ids = sorted(orig_message_params_by_id.keys())
+ #print "orig_message_ids",orig_message_ids
+ message_params_by_id = dict((int(param.get("id")),param) for param in ladtree.iter("param") if param.get("group") in ["0","3"])
+ message_ids = sorted(message_params_by_id.keys())
+ #print "message_ids",message_ids
+ if (set(message_ids) != set(orig_message_ids)):
+ print "mismatch in message ids!"
+ print "added",set(message_ids) - set(orig_message_ids)
+ print "removed",set(orig_message_ids) - set(message_ids)
+ else:
+ print "message ids OK"
+
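+# Removing a joint splices its children into its parent at the same position,
+# so for a chain like mChest -> mNeck -> mHead, removing mNeck reparents
+# mHead directly under mChest (joint names are illustrative).
+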
+def remove_joint_by_name(tree, name):
+ print "remove joint:",name
+ elt = get_element_by_name(tree,name)
+ while elt is not None:
+ children = list(elt)
+ parent = elt.getparent()
+ print "graft",[e.get("name") for e in children],"into",parent.get("name")
+ print "remove",elt.get("name")
+ #parent_children = list(parent)
+ loc = parent.index(elt)
+ parent[loc:loc+1] = children
+ elt[:] = []
+ print "parent now:",[e.get("name") for e in list(parent)]
+ elt = get_element_by_name(tree,name)
+
+def compare_skel_trees(atree,btree):
+ diffs = {}
+ realdiffs = {}
+ a_missing = set()
+ b_missing = set()
+ a_names = set(e.get("name") for e in atree.getroot().iter() if e.get("name"))
+ b_names = set(e.get("name") for e in btree.getroot().iter() if e.get("name"))
+ print "a_names\n ",str("\n ").join(sorted(list(a_names)))
+ print
+ print "b_names\n ","\n ".join(sorted(list(b_names)))
+ all_names = set.union(a_names,b_names)
+ for name in all_names:
+ if not name:
+ continue
+ a_element = get_element_by_name(atree,name)
+ b_element = get_element_by_name(btree,name)
+ if a_element is None or b_element is None:
+ print "something not found for",name,a_element,b_element
+ if a_element is not None and b_element is not None:
+ all_attrib = set.union(set(a_element.attrib.keys()),set(b_element.attrib.keys()))
+ print name,all_attrib
+ for att in all_attrib:
+ if a_element.get(att) != b_element.get(att):
+ if not att in diffs:
+ diffs[att] = set()
+ diffs[att].add(name)
+ print "tuples",name,att,float_tuple(a_element.get(att)),float_tuple(b_element.get(att))
+ if float_tuple(a_element.get(att)) != float_tuple(b_element.get(att)):
+ print "diff in",name,att
+ if not att in realdiffs:
+ realdiffs[att] = set()
+ realdiffs[att].add(name)
+ for att in diffs:
+ print "Differences in",att
+ for name in sorted(diffs[att]):
+ print " ",name
+ for att in realdiffs:
+ print "Real differences in",att
+        for name in sorted(realdiffs[att]):
+ print " ",name
+ a_missing = b_names.difference(a_names)
+ b_missing = a_names.difference(b_names)
+ if len(a_missing) or len(b_missing):
+ print "Missing from comparison"
+ for name in a_missing:
+ print " ",name
+ print "Missing from infile"
+ for name in b_missing:
+ print " ",name
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(description="process SL avatar_skeleton/avatar_lad files")
+ parser.add_argument("--verbose", action="store_true",help="verbose flag")
+ parser.add_argument("--ogfile", help="specify file containing base bones")
+ parser.add_argument("--ref_file", help="specify another file containing replacements for missing fields")
+ parser.add_argument("--lad_file", help="specify avatar_lad file to check")
+ parser.add_argument("--orig_lad_file", help="specify avatar_lad file to compare to")
+ parser.add_argument("--aliases", help="specify file containing bone aliases")
+ parser.add_argument("--validate", action="store_true", help="check specified input file for validity")
+ parser.add_argument("--fix", action="store_true", help="try to correct errors")
+ parser.add_argument("--remove", nargs="+", help="remove specified joints")
+ parser.add_argument("--list", action="store_true", help="list joint names")
+ parser.add_argument("--compare", help="alternate skeleton file to compare")
+ parser.add_argument("infilename", help="name of a skel .xml file to input")
+ parser.add_argument("outfilename", nargs="?", help="name of a skel .xml file to output")
+ args = parser.parse_args()
+
+ tree = etree.parse(args.infilename)
+
+ aliases = {}
+ if args.aliases:
+ altree = etree.parse(args.aliases)
+ aliases = get_aliases(altree)
+
+ # Parse input files
+ ogtree = None
+ reftree = None
+ ladtree = None
+ orig_ladtree = None
+
+ if args.ogfile:
+ ogtree = etree.parse(args.ogfile)
+
+ if args.ref_file:
+ reftree = etree.parse(args.ref_file)
+
+ if args.lad_file:
+ ladtree = etree.parse(args.lad_file)
+
+ if args.orig_lad_file:
+ orig_ladtree = etree.parse(args.orig_lad_file)
+
+ if args.remove:
+ for name in args.remove:
+ remove_joint_by_name(tree,name)
+
+ # Do processing
+ if args.validate and ogtree:
+ validate_skel_tree(tree, ogtree, reftree)
+
+ if args.validate and ladtree:
+ validate_lad_tree(ladtree, tree, orig_ladtree)
+
+ if args.fix and ogtree:
+ validate_skel_tree(tree, ogtree, reftree, True)
+
+ if args.list and tree:
+ list_skel_tree(tree)
+
+ if args.compare and tree:
+ compare_tree = etree.parse(args.compare)
+ compare_skel_trees(compare_tree,tree)
+
+ if args.outfilename:
+ f = open(args.outfilename,"w")
+    print >>f, etree.tostring(tree, pretty_print=True) # need update to get short_empty_elements=True
+