| author | Andrey Lihatskiy <alihatskiy@productengine.com> | 2021-12-16 00:58:44 +0200 | 
|---|---|---|
| committer | Andrey Lihatskiy <alihatskiy@productengine.com> | 2021-12-16 00:58:44 +0200 | 
| commit | 83b4ea59fc8793ccbfb6b40ffff111de14ebd4d3 (patch) | |
| tree | eab6471412d7094cb265f4a292e910f187560a26 /scripts | |
| parent | aa309c2eef0f3d033ddf8a5096ea1143221a4c71 (diff) | |
| parent | 0b95b9d008a0878b5d57262e529cef61fb29ea24 (diff) | |
Merge branch 'SL-15742' into DRTVWR-527-maint
Diffstat (limited to 'scripts')
| -rw-r--r-- | scripts/code_tools/modified_strings.py | 12 |
| -rw-r--r-- | scripts/content_tools/anim_tool.py | 130 |
| -rw-r--r-- | scripts/content_tools/arche_tool.py | 22 |
| -rw-r--r-- | scripts/content_tools/dae_tool.py | 30 |
| -rw-r--r-- | scripts/content_tools/skel_tool.py | 160 |
| -rwxr-xr-x | scripts/md5check.py | 12 |
| -rw-r--r-- | scripts/metrics/viewer_asset_logs.py | 10 |
| -rwxr-xr-x | scripts/metrics/viewerstats.py | 54 |
| -rwxr-xr-x | scripts/packages-formatter.py | 23 |
| -rwxr-xr-x | scripts/setup-path.py | 2 |
| -rwxr-xr-x | scripts/template_verifier.py | 73 |
11 files changed, 267 insertions, 261 deletions
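Most of the changes in this merge are a mechanical Python 2 → Python 3 migration: `print` statements become `print()` calls (with `file=` for stream redirection), `xrange` becomes `range`, `unicode` becomes `str`, and `except Exception, e` becomes `except Exception as e`. A minimal sketch of the recurring before/after pattern follows; it is illustrative only, not code from the commit, and the names used are placeholders.

```python
# Python 2 idioms replaced throughout this diff:
#   print >>f, "value", x      (print statement with ">>f" redirection)
#   for i in xrange(n): ...    (xrange)
#   except IOError, e: ...     (old exception syntax)

# Python 3 equivalents, in the style applied below:
def dump_values(values, f):
    # print() is a function; file=f replaces the ">>f" redirection
    print("num_values", len(values), file=f)
    for i in range(len(values)):          # xrange -> range
        print("  value %d:" % i, values[i], file=f)

try:
    with open("out.txt", "w") as f:
        dump_values([1.0, 2.0], f)
except IOError as e:                      # "except X, e" -> "except X as e"
    print("write failed:", e)
```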
| diff --git a/scripts/code_tools/modified_strings.py b/scripts/code_tools/modified_strings.py index 6a763b6ec5..e7a9d239dc 100644 --- a/scripts/code_tools/modified_strings.py +++ b/scripts/code_tools/modified_strings.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3  """\  This script scans the SL codebase for translation-related strings. @@ -25,7 +25,7 @@ Linden Research, Inc., 945 Battery Street, San Francisco, CA  94111  USA  $/LicenseInfo$  """ -from __future__ import print_function +  import xml.etree.ElementTree as ET  import argparse @@ -75,10 +75,10 @@ translate_attribs = [  ]  def codify_for_print(val): -    if isinstance(val, unicode): +    if isinstance(val, str):          return val.encode("utf-8")      else: -        return unicode(val, 'utf-8').encode("utf-8") +        return str(val, 'utf-8').encode("utf-8")  # Returns a dict of { name => xml_node }  def read_xml_elements(blob): @@ -186,7 +186,7 @@ def make_translation_table(mod_tree, base_tree, lang, args):          transl_dict = read_xml_elements(transl_blob)          rows = 0 -        for name in mod_dict.keys(): +        for name in list(mod_dict.keys()):              if not name in base_dict or mod_dict[name].text != base_dict[name].text or (args.missing and not name in transl_dict):                  elt = mod_dict[name]                  val = elt.text @@ -307,7 +307,7 @@ def save_translation_file(per_lang_data, aux_data, outfile):          print("Added", num_translations, "rows for language", lang)      # Reference info, not for translation -    for aux, data in aux_data.items(): +    for aux, data in list(aux_data.items()):          df = pd.DataFrame(data, columns = ["Key", "Value"])           df.to_excel(writer, index=False, sheet_name=aux)          worksheet = writer.sheets[aux] diff --git a/scripts/content_tools/anim_tool.py b/scripts/content_tools/anim_tool.py index 3aef8cd5ab..e7b86a88fa 100644 --- a/scripts/content_tools/anim_tool.py +++ b/scripts/content_tools/anim_tool.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python3  """\  @file   anim_tool.py  @author Brad Payne, Nat Goodspeed @@ -39,7 +39,7 @@ $/LicenseInfo$  import math  import os  import random -from cStringIO import StringIO +from io import StringIO  import struct  import sys  from xml.etree import ElementTree @@ -179,7 +179,7 @@ class RotKey(object):          return this      def dump(self, f): -        print >>f, "    rot_key: t %.3f" % self.time,"st",self.time_short,"rot",",".join("%.3f" % f for f in self.rotation) +        print("    rot_key: t %.3f" % self.time,"st",self.time_short,"rot",",".join("%.3f" % f for f in self.rotation), file=f)      def pack(self, fp):          fp.pack("<H",self.time_short) @@ -215,7 +215,7 @@ class PosKey(object):          return this      def dump(self, f): -        print >>f, "    pos_key: t %.3f" % self.time,"pos ",",".join("%.3f" % f for f in self.position) +        print("    pos_key: t %.3f" % self.time,"pos ",",".join("%.3f" % f for f in self.position), file=f)      def pack(self, fp):          fp.pack("<H",self.time_short) @@ -247,18 +247,18 @@ class Constraint(object):                  self.ease_out_start, self.ease_out_stop)      def dump(self, f): -        print >>f, "  constraint:" -        print >>f, "    chain_length",self.chain_length -        print >>f, "    constraint_type",self.constraint_type -        print >>f, "    source_volume",self.source_volume -        print >>f, "    source_offset",self.source_offset -        print >>f, "    target_volume",self.target_volume -     
   print >>f, "    target_offset",self.target_offset -        print >>f, "    target_dir",self.target_dir -        print >>f, "    ease_in_start",self.ease_in_start -        print >>f, "    ease_in_stop",self.ease_in_stop -        print >>f, "    ease_out_start",self.ease_out_start -        print >>f, "    ease_out_stop",self.ease_out_stop +        print("  constraint:", file=f) +        print("    chain_length",self.chain_length, file=f) +        print("    constraint_type",self.constraint_type, file=f) +        print("    source_volume",self.source_volume, file=f) +        print("    source_offset",self.source_offset, file=f) +        print("    target_volume",self.target_volume, file=f) +        print("    target_offset",self.target_offset, file=f) +        print("    target_dir",self.target_dir, file=f) +        print("    ease_in_start",self.ease_in_start, file=f) +        print("    ease_in_stop",self.ease_in_stop, file=f) +        print("    ease_out_start",self.ease_out_start, file=f) +        print("    ease_out_stop",self.ease_out_stop, file=f)  class Constraints(object):      @staticmethod @@ -266,7 +266,7 @@ class Constraints(object):          this = Constraints()          (num_constraints, ) = fup.unpack("<i")          this.constraints = [Constraint.unpack(duration, fup) -                            for i in xrange(num_constraints)] +                            for i in range(num_constraints)]          return this      def pack(self, fp): @@ -275,7 +275,7 @@ class Constraints(object):              c.pack(fp)      def dump(self, f): -        print >>f, "constraints:",len(self.constraints) +        print("constraints:",len(self.constraints), file=f)          for c in self.constraints:              c.dump(f) @@ -296,7 +296,7 @@ class PositionCurve(object):          this = PositionCurve()          (num_pos_keys, ) = fup.unpack("<i")          this.keys = [PosKey.unpack(duration, fup) -                     for k in xrange(num_pos_keys)] +                     for k in range(num_pos_keys)]          return this      def pack(self, fp): @@ -305,8 +305,8 @@ class PositionCurve(object):              k.pack(fp)      def dump(self, f): -        print >>f, "  position_curve:" -        print >>f, "    num_pos_keys", len(self.keys) +        print("  position_curve:", file=f) +        print("    num_pos_keys", len(self.keys), file=f)          for k in self.keys:              k.dump(f) @@ -327,7 +327,7 @@ class RotationCurve(object):          this = RotationCurve()          (num_rot_keys, ) = fup.unpack("<i")          this.keys = [RotKey.unpack(duration, fup) -                     for k in xrange(num_rot_keys)] +                     for k in range(num_rot_keys)]          return this      def pack(self, fp): @@ -336,8 +336,8 @@ class RotationCurve(object):              k.pack(fp)      def dump(self, f): -        print >>f, "  rotation_curve:" -        print >>f, "    num_rot_keys", len(self.keys) +        print("  rotation_curve:", file=f) +        print("    num_rot_keys", len(self.keys), file=f)          for k in self.keys:              k.dump(f) @@ -364,9 +364,9 @@ class JointInfo(object):          self.position_curve.pack(fp)      def dump(self, f): -        print >>f, "joint:" -        print >>f, "  joint_name:",self.joint_name -        print >>f, "  joint_priority:",self.joint_priority +        print("joint:", file=f) +        print("  joint_name:",self.joint_name, file=f) +        print("  joint_priority:",self.joint_priority, file=f)          self.rotation_curve.dump(f)          
self.position_curve.dump(f) @@ -440,10 +440,10 @@ class Anim(object):              fup.unpack("@ffiffII")          self.joints = [JointInfo.unpack(self.duration, fup) -                       for j in xrange(num_joints)] +                       for j in range(num_joints)]          if self.verbose:              for joint_info in self.joints: -                print "unpacked joint",joint_info.joint_name +                print("unpacked joint",joint_info.joint_name)          self.constraints = Constraints.unpack(self.duration, fup)          self.buffer = fup.buffer @@ -461,17 +461,17 @@ class Anim(object):              f = sys.stdout          else:              f = open(filename,"w") -        print >>f, "versions: ", self.version, self.sub_version -        print >>f, "base_priority: ", self.base_priority -        print >>f, "duration: ", self.duration -        print >>f, "emote_name: ", self.emote_name -        print >>f, "loop_in_point: ", self.loop_in_point -        print >>f, "loop_out_point: ", self.loop_out_point -        print >>f, "loop: ", self.loop -        print >>f, "ease_in_duration: ", self.ease_in_duration -        print >>f, "ease_out_duration: ", self.ease_out_duration -        print >>f, "hand_pose", self.hand_pose -        print >>f, "num_joints", len(self.joints) +        print("versions: ", self.version, self.sub_version, file=f) +        print("base_priority: ", self.base_priority, file=f) +        print("duration: ", self.duration, file=f) +        print("emote_name: ", self.emote_name, file=f) +        print("loop_in_point: ", self.loop_in_point, file=f) +        print("loop_out_point: ", self.loop_out_point, file=f) +        print("loop: ", self.loop, file=f) +        print("ease_in_duration: ", self.ease_in_duration, file=f) +        print("ease_out_duration: ", self.ease_out_duration, file=f) +        print("hand_pose", self.hand_pose, file=f) +        print("num_joints", len(self.joints), file=f)          for j in self.joints:              j.dump(f)          self.constraints.dump(f) @@ -482,7 +482,7 @@ class Anim(object):          fp.write(filename)      def write_src_data(self, filename): -        print "write file",filename +        print("write file",filename)          with open(filename,"wb") as f:              f.write(self.buffer) @@ -501,11 +501,11 @@ class Anim(object):          j = self.find_joint(name)          if j:              if self.verbose: -                print "removing joint", name +                print("removing joint", name)              self.joints.remove(j)          else:              if self.verbose: -                print "joint not found to remove", name +                print("joint not found to remove", name)      def summary(self):          nj = len(self.joints) @@ -513,13 +513,13 @@ class Anim(object):          nstatic = len([j for j in self.joints                         if j.rotation_curve.is_static()                         and j.position_curve.is_static()]) -        print "summary: %d joints, non-zero priority %d, static %d" % (nj, nz, nstatic) +        print("summary: %d joints, non-zero priority %d, static %d" % (nj, nz, nstatic))      def add_pos(self, joint_names, positions):          js = [joint for joint in self.joints if joint.joint_name in joint_names]          for j in js:              if self.verbose: -                print "adding positions",j.joint_name,positions +                print("adding positions",j.joint_name,positions)              j.joint_priority = 4              j.position_curve.keys = [PosKey(self.duration * 
i / (len(positions) - 1),                                              self.duration, @@ -529,7 +529,7 @@ class Anim(object):      def add_rot(self, joint_names, rotations):          js = [joint for joint in self.joints if joint.joint_name in joint_names]          for j in js: -            print "adding rotations",j.joint_name +            print("adding rotations",j.joint_name)              j.joint_priority = 4              j.rotation_curve.keys = [RotKey(self.duration * i / (len(rotations) - 1),                                              self.duration, @@ -539,8 +539,8 @@ class Anim(object):  def twistify(anim, joint_names, rot1, rot2):      js = [joint for joint in anim.joints if joint.joint_name in joint_names]      for j in js: -        print "twisting",j.joint_name -        print len(j.rotation_curve.keys) +        print("twisting",j.joint_name) +        print(len(j.rotation_curve.keys))          j.joint_priority = 4          # Set the joint(s) to rot1 at time 0, rot2 at the full duration.          j.rotation_curve.keys = [ @@ -563,7 +563,7 @@ def get_joint_by_name(tree,name):      if len(matches)==1:          return matches[0]      elif len(matches)>1: -        print "multiple matches for name",name +        print("multiple matches for name",name)          return None      else:          return None @@ -577,7 +577,7 @@ def get_elt_pos(elt):          return (0.0, 0.0, 0.0)  def resolve_joints(names, skel_tree, lad_tree, no_hud=False): -    print "resolve joints, no_hud is",no_hud +    print("resolve joints, no_hud is",no_hud)      if skel_tree and lad_tree:          all_elts = [elt for elt in skel_tree.getroot().iter()]          all_elts.extend([elt for elt in lad_tree.getroot().iter()]) @@ -641,12 +641,12 @@ def main(*argv):      parser.add_argument("outfilename", nargs="?", help="name of a .anim file to output")      args = parser.parse_args(argv) -    print "anim_tool.py: " + " ".join(argv) -    print "dump is", args.dump -    print "infilename",args.infilename,"outfilename",args.outfilename -    print "rot",args.rot -    print "pos",args.pos -    print "joints",args.joints +    print("anim_tool.py: " + " ".join(argv)) +    print("dump is", args.dump) +    print("infilename",args.infilename,"outfilename",args.outfilename) +    print("rot",args.rot) +    print("pos",args.pos) +    print("joints",args.joints)      anim = Anim(args.infilename, args.verbose)      skel_tree = None @@ -663,7 +663,7 @@ def main(*argv):      if args.joints:          joints = resolve_joints(args.joints, skel_tree, lad_tree, args.no_hud)          if args.verbose: -            print "joints resolved to",joints +            print("joints resolved to",joints)          for name in joints:              anim.add_joint(name,0)      if args.delete_joints: @@ -677,8 +677,8 @@ def main(*argv):          # pick a random sequence of positions for each joint specified          for joint in joints:              # generate a list of rand_pos triples -            pos_array = [tuple(random.uniform(-1,1) for i in xrange(3)) -                         for j in xrange(args.rand_pos)] +            pos_array = [tuple(random.uniform(-1,1) for i in range(3)) +                         for j in range(args.rand_pos)]              # close the loop by cycling back to the first entry              pos_array.append(pos_array[0])              anim.add_pos([joint], pos_array) @@ -688,26 +688,26 @@ def main(*argv):              if elt is not None:                  anim.add_pos([joint], 2*[get_elt_pos(elt)])              else: -                
print "no elt or no pos data for",joint +                print("no elt or no pos data for",joint)      if args.set_version:          anim.version, anim.sub_version = args.set_version      if args.base_priority is not None: -        print "set base priority",args.base_priority +        print("set base priority",args.base_priority)          anim.base_priority = args.base_priority      # --joint_priority sets priority for ALL joints, not just the explicitly-      # specified ones      if args.joint_priority is not None: -        print "set joint priority",args.joint_priority +        print("set joint priority",args.joint_priority)          for joint in anim.joints:              joint.joint_priority = args.joint_priority      if args.duration is not None: -        print "set duration",args.duration +        print("set duration",args.duration)          anim.duration = args.duration      if args.loop_in is not None: -        print "set loop_in",args.loop_in +        print("set loop_in",args.loop_in)          anim.loop_in_point = args.loop_in      if args.loop_out is not None: -        print "set loop_out",args.loop_out +        print("set loop_out",args.loop_out)          anim.loop_out_point = args.loop_out      if args.dump:          anim.dump("-") diff --git a/scripts/content_tools/arche_tool.py b/scripts/content_tools/arche_tool.py index f99d7be39a..677af62d2f 100644 --- a/scripts/content_tools/arche_tool.py +++ b/scripts/content_tools/arche_tool.py @@ -1,4 +1,4 @@ -#!runpy.sh +#!/usr/bin/env python3  """\ @@ -42,23 +42,23 @@ def node_key(e):  def compare_matched_nodes(key,items,summary):      tags = list(set([e.tag for e in items]))      if len(tags) != 1: -        print "different tag types for key",key +        print("different tag types for key",key)          summary.setdefault("tag_mismatch",0)          summary["tag_mismatch"] += 1          return -    all_attrib = list(set(chain.from_iterable([e.attrib.keys() for e in items]))) +    all_attrib = list(set(chain.from_iterable([list(e.attrib.keys()) for e in items])))      #print key,"all_attrib",all_attrib      for attr in all_attrib:          vals = [e.get(attr) for e in items]          #print "key",key,"attr",attr,"vals",vals          if len(set(vals)) != 1: -            print key,"- attr",attr,"multiple values",vals +            print(key,"- attr",attr,"multiple values",vals)              summary.setdefault("attr",{})              summary["attr"].setdefault(attr,0)              summary["attr"][attr] += 1  def compare_trees(file_trees): -    print "compare_trees" +    print("compare_trees")      summary = {}      all_keys = list(set([node_key(e) for tree in file_trees for e in tree.getroot().iter() if node_key(e)]))      #print "keys",all_keys @@ -70,14 +70,14 @@ def compare_trees(file_trees):          items = []          for nodes in tree_nodes:              if not key in nodes: -                print "file",i,"missing item for key",key +                print("file",i,"missing item for key",key)                  summary.setdefault("missing",0)                  summary["missing"] += 1              else:                  items.append(nodes[key])          compare_matched_nodes(key,items,summary) -    print "Summary:" -    print summary +    print("Summary:") +    print(summary)  def dump_appearance_params(tree):      vals = [] @@ -88,7 +88,7 @@ def dump_appearance_params(tree):                  vals.append("{" + e.get("id") + "," +e.get("u8") + "}")                  #print e.get("id"), e.get("name"), e.get("group"), e.get("u8")      if 
len(vals)==253: -        print ", ".join(vals) +        print(", ".join(vals))  if __name__ == "__main__": @@ -101,9 +101,9 @@ if __name__ == "__main__":      args = parser.parse_args() -    print "files",args.files +    print("files",args.files)      file_trees = [etree.parse(filename) for filename in args.files] -    print args +    print(args)      if args.compare:          compare_trees(file_trees)      if args.appearance_params: diff --git a/scripts/content_tools/dae_tool.py b/scripts/content_tools/dae_tool.py index 823f69cb85..2454fafa46 100644 --- a/scripts/content_tools/dae_tool.py +++ b/scripts/content_tools/dae_tool.py @@ -1,4 +1,4 @@ -#!runpy.sh +#!/usr/bin/env python3  """\ @@ -35,14 +35,14 @@ from collada import *  from lxml import etree  def mesh_summary(mesh): -    print "scenes",mesh.scenes +    print("scenes",mesh.scenes)      for scene in mesh.scenes: -        print "scene",scene +        print("scene",scene)          for node in scene.nodes: -            print "node",node +            print("node",node)  def mesh_lock_offsets(tree, joints): -    print "mesh_lock_offsets",tree,joints +    print("mesh_lock_offsets",tree,joints)      for joint_node in tree.iter():          if "node" not in joint_node.tag:              continue @@ -57,11 +57,11 @@ def mesh_lock_offsets(tree, joints):                          floats[7] += 0.0001                          floats[11] += 0.0001                          matrix_node.text = " ".join([str(f) for f in floats]) -                        print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats +                        print(joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats)  def mesh_random_offsets(tree, joints): -    print "mesh_random_offsets",tree,joints +    print("mesh_random_offsets",tree,joints)      for joint_node in tree.iter():          if "node" not in joint_node.tag:              continue @@ -73,13 +73,13 @@ def mesh_random_offsets(tree, joints):              for matrix_node in list(joint_node):                  if "matrix" in matrix_node.tag:                      floats = [float(x) for x in matrix_node.text.split()] -                    print "randomizing",floats +                    print("randomizing",floats)                      if len(floats) == 16:                          floats[3] += random.uniform(-1.0,1.0)                          floats[7] += random.uniform(-1.0,1.0)                          floats[11] += random.uniform(-1.0,1.0)                          matrix_node.text = " ".join([str(f) for f in floats]) -                        print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats +                        print(joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats)  if __name__ == "__main__": @@ -96,24 +96,24 @@ if __name__ == "__main__":      tree = None      if args.infilename: -        print "reading",args.infilename +        print("reading",args.infilename)          mesh = Collada(args.infilename)          tree = etree.parse(args.infilename)      if args.summary: -        print "summarizing",args.infilename +        print("summarizing",args.infilename)          mesh_summary(mesh)      if args.lock_offsets: -        print "locking offsets for",args.lock_offsets +        print("locking offsets for",args.lock_offsets)          mesh_lock_offsets(tree, args.lock_offsets)      if args.random_offsets: -        print "adding random offsets for",args.random_offsets +        print("adding 
random offsets for",args.random_offsets)          mesh_random_offsets(tree, args.random_offsets)      if args.outfilename: -        print "writing",args.outfilename +        print("writing",args.outfilename)          f = open(args.outfilename,"w") -        print >>f, etree.tostring(tree, pretty_print=True) #need update to get: , short_empty_elements=True) +        print(etree.tostring(tree, pretty_print=True), file=f) #need update to get: , short_empty_elements=True) diff --git a/scripts/content_tools/skel_tool.py b/scripts/content_tools/skel_tool.py index 26f63326f1..449ecd6a6c 100644 --- a/scripts/content_tools/skel_tool.py +++ b/scripts/content_tools/skel_tool.py @@ -1,4 +1,4 @@ -#!runpy.sh +#!/usr/bin/env python3  """\ @@ -32,14 +32,14 @@ from lxml import etree  def get_joint_names(tree):      joints = [element.get('name') for element in tree.getroot().iter() if element.tag in ['bone','collision_volume']] -    print "joints:",joints +    print("joints:",joints)      return joints  def get_aliases(tree):      aliases = {}      alroot = tree.getroot()      for element in alroot.iter(): -        for key in element.keys(): +        for key in list(element.keys()):              if key == 'aliases':                  name = element.get('name')                  val = element.get('aliases') @@ -58,19 +58,19 @@ def float_tuple(str, n=3):          if len(result)==n:              return result          else: -            print "tuple length wrong:", str,"gave",result,"wanted len",n,"got len",len(result) +            print("tuple length wrong:", str,"gave",result,"wanted len",n,"got len",len(result))              raise Exception()      except: -        print "convert failed for:",str +        print("convert failed for:",str)          raise  def check_symmetry(name, field, vec1, vec2):      if vec1[0] != vec2[0]: -        print name,field,"x match fail" +        print(name,field,"x match fail")      if vec1[1] != -vec2[1]: -        print name,field,"y mirror image fail" +        print(name,field,"y mirror image fail")      if vec1[2] != vec2[2]: -        print name,field,"z match fail" +        print(name,field,"z match fail")  def enforce_symmetry(tree, element, field, fix=False):      name = element.get("name") @@ -92,7 +92,7 @@ def get_element_by_name(tree,name):      if len(matches)==1:          return matches[0]      elif len(matches)>1: -        print "multiple matches for name",name +        print("multiple matches for name",name)          return None      else:          return None @@ -100,7 +100,7 @@ def get_element_by_name(tree,name):  def list_skel_tree(tree):      for element in tree.getroot().iter():          if element.tag == "bone": -            print element.get("name"),"-",element.get("support") +            print(element.get("name"),"-",element.get("support"))  def validate_child_order(tree, ogtree, fix=False):      unfixable = 0 @@ -116,12 +116,12 @@ def validate_child_order(tree, ogtree, fix=False):          if og_element is not None:              for echild,ochild in zip(list(element),list(og_element)):                  if echild.get("name") != ochild.get("name"): -                    print "Child ordering error, parent",element.get("name"),echild.get("name"),"vs",ochild.get("name") +                    print("Child ordering error, parent",element.get("name"),echild.get("name"),"vs",ochild.get("name"))                      if fix:                          tofix.add(element.get("name"))      children = {}      for name in tofix: -        print "FIX",name +        
print("FIX",name)          element = get_element_by_name(tree,name)          og_element = get_element_by_name(ogtree,name)          children = [] @@ -130,20 +130,20 @@ def validate_child_order(tree, ogtree, fix=False):              elt = get_element_by_name(tree,og_elt.get("name"))              if elt is not None:                  children.append(elt) -                print "b:",elt.get("name") +                print("b:",elt.get("name"))              else: -                print "b missing:",og_elt.get("name") +                print("b missing:",og_elt.get("name"))          # then add children that are not present in the original joints          for elt in list(element):              og_elt = get_element_by_name(ogtree,elt.get("name"))              if og_elt is None:                  children.append(elt) -                print "e:",elt.get("name") +                print("e:",elt.get("name"))          # if we've done this right, we have a rearranged list of the same length          if len(children)!=len(element): -            print "children",[e.get("name") for e in children] -            print "element",[e.get("name") for e in element] -            print "children changes for",name,", cannot reconcile" +            print("children",[e.get("name") for e in children]) +            print("element",[e.get("name") for e in element]) +            print("children changes for",name,", cannot reconcile")          else:              element[:] = children @@ -163,7 +163,7 @@ def validate_child_order(tree, ogtree, fix=False):  # - digits of precision should be consistent (again, except for old joints)  # - new bones should have pos, pivot the same  def validate_skel_tree(tree, ogtree, reftree, fix=False): -    print "validate_skel_tree" +    print("validate_skel_tree")      (num_bones,num_cvs) = (0,0)      unfixable = 0      defaults = {"connected": "false",  @@ -175,7 +175,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):          # Preserve values from og_file:          for f in ["pos","rot","scale","pivot"]:              if og_element is not None and og_element.get(f) and (str(element.get(f)) != str(og_element.get(f))): -                print element.get("name"),"field",f,"has changed:",og_element.get(f),"!=",element.get(f) +                print(element.get("name"),"field",f,"has changed:",og_element.get(f),"!=",element.get(f))                  if fix:                      element.set(f, og_element.get(f)) @@ -187,17 +187,17 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):              fields.extend(["end","connected"])          for f in fields:              if not element.get(f): -                print element.get("name"),"missing required field",f +                print(element.get("name"),"missing required field",f)                  if fix:                      if og_element is not None and og_element.get(f): -                        print "fix from ogtree" +                        print("fix from ogtree")                          element.set(f,og_element.get(f))                      elif ref_element is not None and ref_element.get(f): -                        print "fix from reftree" +                        print("fix from reftree")                          element.set(f,ref_element.get(f))                      else:                          if f in defaults: -                            print "fix by using default value",f,"=",defaults[f] +                            print("fix by using default value",f,"=",defaults[f])                              
element.set(f,defaults[f])                          elif f == "support":                              if og_element is not None: @@ -205,7 +205,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):                              else:                                  element.set(f,"extended")                          else: -                            print "unfixable:",element.get("name"),"no value for field",f +                            print("unfixable:",element.get("name"),"no value for field",f)                              unfixable += 1          fix_name(element) @@ -214,7 +214,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):              enforce_symmetry(tree, element, field, fix)          if element.get("support")=="extended":              if element.get("pos") != element.get("pivot"): -                print "extended joint",element.get("name"),"has mismatched pos, pivot" +                print("extended joint",element.get("name"),"has mismatched pos, pivot")          if element.tag == "linden_skeleton": @@ -223,19 +223,19 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):              all_bones = [e for e in tree.getroot().iter() if e.tag=="bone"]              all_cvs = [e for e in tree.getroot().iter() if e.tag=="collision_volume"]              if num_bones != len(all_bones): -                print "wrong bone count, expected",len(all_bones),"got",num_bones +                print("wrong bone count, expected",len(all_bones),"got",num_bones)                  if fix:                      element.set("num_bones", str(len(all_bones)))              if num_cvs != len(all_cvs): -                print "wrong cv count, expected",len(all_cvs),"got",num_cvs +                print("wrong cv count, expected",len(all_cvs),"got",num_cvs)                  if fix:                      element.set("num_collision_volumes", str(len(all_cvs))) -    print "skipping child order code" +    print("skipping child order code")      #unfixable += validate_child_order(tree, ogtree, fix)      if fix and (unfixable > 0): -        print "BAD FILE:", unfixable,"errs could not be fixed" +        print("BAD FILE:", unfixable,"errs could not be fixed")  def slider_info(ladtree,skeltree): @@ -243,37 +243,37 @@ def slider_info(ladtree,skeltree):          for skel_param in param.iter("param_skeleton"):              bones = [b for b in skel_param.iter("bone")]          if bones: -            print "param",param.get("name"),"id",param.get("id") +            print("param",param.get("name"),"id",param.get("id"))              value_min = float(param.get("value_min"))              value_max = float(param.get("value_max"))              neutral = 100.0 * (0.0-value_min)/(value_max-value_min) -            print "  neutral",neutral +            print("  neutral",neutral)              for b in bones:                  scale = float_tuple(b.get("scale","0 0 0"))                  offset = float_tuple(b.get("offset","0 0 0")) -                print "  bone", b.get("name"), "scale", scale, "offset", offset +                print("  bone", b.get("name"), "scale", scale, "offset", offset)                  scale_min = [value_min * s for s in scale]                  scale_max = [value_max * s for s in scale]                  offset_min = [value_min * t for t in offset]                  offset_max = [value_max * t for t in offset]                  if (scale_min != scale_max): -                    print "    Scale MinX", scale_min[0] -                    print "    Scale MinY", scale_min[1] -                
    print "    Scale MinZ", scale_min[2] -                    print "    Scale MaxX", scale_max[0] -                    print "    Scale MaxY", scale_max[1] -                    print "    Scale MaxZ", scale_max[2] +                    print("    Scale MinX", scale_min[0]) +                    print("    Scale MinY", scale_min[1]) +                    print("    Scale MinZ", scale_min[2]) +                    print("    Scale MaxX", scale_max[0]) +                    print("    Scale MaxY", scale_max[1]) +                    print("    Scale MaxZ", scale_max[2])                  if (offset_min != offset_max): -                    print "    Offset MinX", offset_min[0] -                    print "    Offset MinY", offset_min[1] -                    print "    Offset MinZ", offset_min[2] -                    print "    Offset MaxX", offset_max[0] -                    print "    Offset MaxY", offset_max[1] -                    print "    Offset MaxZ", offset_max[2] +                    print("    Offset MinX", offset_min[0]) +                    print("    Offset MinY", offset_min[1]) +                    print("    Offset MinZ", offset_min[2]) +                    print("    Offset MaxX", offset_max[0]) +                    print("    Offset MaxY", offset_max[1]) +                    print("    Offset MaxZ", offset_max[2])  # Check contents of avatar_lad file relative to a specified skeleton  def validate_lad_tree(ladtree,skeltree,orig_ladtree): -    print "validate_lad_tree" +    print("validate_lad_tree")      bone_names = [elt.get("name") for elt in skeltree.iter("bone")]      bone_names.append("mScreen")      bone_names.append("mRoot") @@ -285,7 +285,7 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):          #print "attachment",att_name          joint_name = att.get("joint")          if not joint_name in bone_names: -            print "att",att_name,"linked to invalid joint",joint_name +            print("att",att_name,"linked to invalid joint",joint_name)      for skel_param in ladtree.iter("param_skeleton"):          skel_param_id = skel_param.get("id")          skel_param_name = skel_param.get("name") @@ -297,13 +297,13 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):          for bone in skel_param.iter("bone"):              bone_name = bone.get("name")              if not bone_name in bone_names: -                print "skel param references invalid bone",bone_name -                print etree.tostring(bone) +                print("skel param references invalid bone",bone_name) +                print(etree.tostring(bone))              bone_scale = float_tuple(bone.get("scale","0 0 0"))              bone_offset = float_tuple(bone.get("offset","0 0 0"))              param = bone.getparent().getparent()              if bone_scale==(0, 0, 0) and bone_offset==(0, 0, 0): -                print "no-op bone",bone_name,"in param",param.get("id","-1") +                print("no-op bone",bone_name,"in param",param.get("id","-1"))              # check symmetry of sliders              if "Right" in bone.get("name"):                  left_name = bone_name.replace("Right","Left") @@ -312,12 +312,12 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):                      if b.get("name")==left_name:                          left_bone = b                  if left_bone is None: -                    print "left_bone not found",left_name,"in",param.get("id","-1") +                    print("left_bone not found",left_name,"in",param.get("id","-1"))                  else:            
          left_scale = float_tuple(left_bone.get("scale","0 0 0"))                      left_offset = float_tuple(left_bone.get("offset","0 0 0"))                      if left_scale != bone_scale: -                        print "scale mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1") +                        print("scale mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1"))                      param_id = int(param.get("id","-1"))                      if param_id in [661]: # shear                          expected_offset = tuple([bone_offset[0],bone_offset[1],-bone_offset[2]]) @@ -326,7 +326,7 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):                      else:                          expected_offset = tuple([bone_offset[0],-bone_offset[1],bone_offset[2]])                      if left_offset != expected_offset: -                        print "offset mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1") +                        print("offset mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1"))      drivers = {}      for driven_param in ladtree.iter("driven"): @@ -340,15 +340,15 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):              if (actual_param.get("value_min") != driver.get("value_min") or \                  actual_param.get("value_max") != driver.get("value_max")):                  if args.verbose: -                    print "MISMATCH min max:",driver.get("id"),"drives",driven_param.get("id"),"min",driver.get("value_min"),actual_param.get("value_min"),"max",driver.get("value_max"),actual_param.get("value_max") +                    print("MISMATCH min max:",driver.get("id"),"drives",driven_param.get("id"),"min",driver.get("value_min"),actual_param.get("value_min"),"max",driver.get("value_max"),actual_param.get("value_max"))      for driven_id in drivers:          dset = drivers[driven_id]          if len(dset) != 1: -            print "driven_id",driven_id,"has multiple drivers",dset +            print("driven_id",driven_id,"has multiple drivers",dset)          else:              if args.verbose: -                print "driven_id",driven_id,"has one driver",dset +                print("driven_id",driven_id,"has one driver",dset)      if orig_ladtree:          # make sure expected message format is unchanged          orig_message_params_by_id = dict((int(param.get("id")),param) for param in orig_ladtree.iter("param") if param.get("group") in ["0","3"]) @@ -358,25 +358,25 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):          message_ids = sorted(message_params_by_id.keys())          #print "message_ids",message_ids          if (set(message_ids) != set(orig_message_ids)): -            print "mismatch in message ids!" 
-            print "added",set(message_ids) - set(orig_message_ids) -            print "removed",set(orig_message_ids) - set(message_ids) +            print("mismatch in message ids!") +            print("added",set(message_ids) - set(orig_message_ids)) +            print("removed",set(orig_message_ids) - set(message_ids))          else: -            print "message ids OK" +            print("message ids OK")  def remove_joint_by_name(tree, name): -    print "remove joint:",name +    print("remove joint:",name)      elt = get_element_by_name(tree,name)      while elt is not None:          children = list(elt)          parent = elt.getparent() -        print "graft",[e.get("name") for e in children],"into",parent.get("name") -        print "remove",elt.get("name") +        print("graft",[e.get("name") for e in children],"into",parent.get("name")) +        print("remove",elt.get("name"))          #parent_children = list(parent)          loc = parent.index(elt)          parent[loc:loc+1] = children          elt[:] = [] -        print "parent now:",[e.get("name") for e in list(parent)] +        print("parent now:",[e.get("name") for e in list(parent)])          elt = get_element_by_name(tree,name)  def compare_skel_trees(atree,btree): @@ -386,9 +386,9 @@ def compare_skel_trees(atree,btree):      b_missing = set()      a_names = set(e.get("name") for e in atree.getroot().iter() if e.get("name"))      b_names = set(e.get("name") for e in btree.getroot().iter() if e.get("name")) -    print "a_names\n  ",str("\n  ").join(sorted(list(a_names))) -    print -    print "b_names\n  ","\n  ".join(sorted(list(b_names))) +    print("a_names\n  ",str("\n  ").join(sorted(list(a_names)))) +    print() +    print("b_names\n  ","\n  ".join(sorted(list(b_names))))      all_names = set.union(a_names,b_names)      for name in all_names:          if not name: @@ -396,38 +396,38 @@ def compare_skel_trees(atree,btree):          a_element = get_element_by_name(atree,name)          b_element = get_element_by_name(btree,name)          if a_element is None or b_element is None: -            print "something not found for",name,a_element,b_element +            print("something not found for",name,a_element,b_element)          if a_element is not None and b_element is not None:              all_attrib = set.union(set(a_element.attrib.keys()),set(b_element.attrib.keys())) -            print name,all_attrib +            print(name,all_attrib)              for att in all_attrib:                  if a_element.get(att) != b_element.get(att):                      if not att in diffs:                          diffs[att] = set()                      diffs[att].add(name) -                print "tuples",name,att,float_tuple(a_element.get(att)),float_tuple(b_element.get(att)) +                print("tuples",name,att,float_tuple(a_element.get(att)),float_tuple(b_element.get(att)))                  if float_tuple(a_element.get(att)) != float_tuple(b_element.get(att)): -                    print "diff in",name,att +                    print("diff in",name,att)                      if not att in realdiffs:                          realdiffs[att] = set()                      realdiffs[att].add(name)      for att in diffs: -        print "Differences in",att +        print("Differences in",att)          for name in sorted(diffs[att]): -            print "  ",name +            print("  ",name)      for att in realdiffs: -        print "Real differences in",att +        print("Real differences in",att)          for name in 
sorted(diffs[att]): -            print "  ",name +            print("  ",name)      a_missing = b_names.difference(a_names)      b_missing = a_names.difference(b_names)      if len(a_missing) or len(b_missing): -        print "Missing from comparison" +        print("Missing from comparison")          for name in a_missing: -            print "  ",name -        print "Missing from infile" +            print("  ",name) +        print("Missing from infile")          for name in b_missing: -            print "  ",name +            print("  ",name)  if __name__ == "__main__": @@ -499,5 +499,5 @@ if __name__ == "__main__":      if args.outfilename:          f = open(args.outfilename,"w") -        print >>f, etree.tostring(tree, pretty_print=True) #need update to get: , short_empty_elements=True) +        print(etree.tostring(tree, pretty_print=True), file=f) #need update to get: , short_empty_elements=True) diff --git a/scripts/md5check.py b/scripts/md5check.py index 1a54a2844c..20ebfa6656 100755 --- a/scripts/md5check.py +++ b/scripts/md5check.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3  """\  @file md5check.py  @brief Replacement for message template compatibility verifier. @@ -29,14 +29,14 @@ import sys  import hashlib  if len(sys.argv) != 3: -    print """Usage: %s --create|<hash-digest> <file> +    print("""Usage: %s --create|<hash-digest> <file>  Creates an md5sum hash digest of the specified file content  and compares it with the given hash digest.  If --create is used instead of a hash digest, it will simply  print out the hash digest of specified file content. -""" % sys.argv[0] +""" % sys.argv[0])      sys.exit(1)  if sys.argv[2] == '-': @@ -48,9 +48,9 @@ else:  hexdigest = hashlib.md5(fh.read()).hexdigest()  if sys.argv[1] == '--create': -    print hexdigest +    print(hexdigest)  elif hexdigest == sys.argv[1]: -    print "md5sum check passed:", filename +    print("md5sum check passed:", filename)  else: -    print "md5sum check FAILED:", filename +    print("md5sum check FAILED:", filename)      sys.exit(1) diff --git a/scripts/metrics/viewer_asset_logs.py b/scripts/metrics/viewer_asset_logs.py index e48286f696..0365936188 100644 --- a/scripts/metrics/viewer_asset_logs.py +++ b/scripts/metrics/viewer_asset_logs.py @@ -40,7 +40,7 @@ def get_metrics_record(infiles):          context = iter(context)          # get the root element -        event, root = context.next() +        event, root = next(context)          try:              for event, elem in context:                  if event == "end" and elem.tag == "llsd": @@ -48,7 +48,7 @@ def get_metrics_record(infiles):                      sd = llsd.parse_xml(xmlstr)                      yield sd          except etree.XMLSyntaxError: -            print "Fell off end of document" +            print("Fell off end of document")          f.close() @@ -56,7 +56,7 @@ def update_stats(stats,rec):      for region in rec["regions"]:          region_key = (region["grid_x"],region["grid_y"])          #print "region",region_key -        for field, val in region.iteritems(): +        for field, val in region.items():              if field in ["duration","grid_x","grid_y"]:                  continue              if field == "fps": @@ -96,7 +96,7 @@ if __name__ == "__main__":      for key in sorted(stats.keys()):          val = stats[key]          if val["count"] > 0: -            print key,"count",val["count"],"mean_time",val["sum"]/val["count"],"mean_bytes",val["sum_bytes"]/val["count"],"net 
bytes/sec",val["sum_bytes"]/val["sum"],"enqueued",val["enqueued"],"dequeued",val["dequeued"] +            print(key,"count",val["count"],"mean_time",val["sum"]/val["count"],"mean_bytes",val["sum_bytes"]/val["count"],"net bytes/sec",val["sum_bytes"]/val["sum"],"enqueued",val["enqueued"],"dequeued",val["dequeued"])          else: -            print key,"count",val["count"],"enqueued",val["enqueued"],"dequeued",val["dequeued"] +            print(key,"count",val["count"],"enqueued",val["enqueued"],"dequeued",val["dequeued"]) diff --git a/scripts/metrics/viewerstats.py b/scripts/metrics/viewerstats.py index f7be3d967e..7e19539e15 100755 --- a/scripts/metrics/viewerstats.py +++ b/scripts/metrics/viewerstats.py @@ -54,11 +54,11 @@ def show_stats_by_key(recs,indices,settings_sd = None):                      v = tuple(v)                  per_key_cnt[k][v] += 1          except Exception as e: -            print "err", e -            print "d", d, "k", k, "v", v +            print("err", e) +            print("d", d, "k", k, "v", v)              raise      mc = cnt.most_common() -    print "=========================" +    print("=========================")      keyprefix = ""      if len(indices)>0:          keyprefix = ".".join(indices) + "." @@ -67,32 +67,32 @@ def show_stats_by_key(recs,indices,settings_sd = None):          bigc = m[1]          unset_cnt = len(recs) - bigc          kmc = per_key_cnt[k].most_common(5) -        print i, keyprefix+str(k), bigc +        print(i, keyprefix+str(k), bigc)          if settings_sd is not None and k in settings_sd and "Value" in settings_sd[k]: -            print "    ", "default",settings_sd[k]["Value"],"count",unset_cnt +            print("    ", "default",settings_sd[k]["Value"],"count",unset_cnt)          for v in kmc: -            print "    ", "value",v[0],"count",v[1] +            print("    ", "value",v[0],"count",v[1])      if settings_sd is not None: -        print "Total keys in settings", len(settings_sd.keys()) +        print("Total keys in settings", len(settings_sd.keys()))          unused_keys = list(set(settings_sd.keys()) - set(cnt.keys()))          unused_keys_non_str = [k for k in unused_keys if settings_sd[k]["Type"] != "String"]          unused_keys_str = [k for k in unused_keys if settings_sd[k]["Type"] == "String"]          # Things that no one in the sample has set to a non-default value. Possible candidates for removal. -        print "\nUnused_keys_non_str", len(unused_keys_non_str) -        print   "======================" -        print "\n".join(sorted(unused_keys_non_str)) +        print("\nUnused_keys_non_str", len(unused_keys_non_str)) +        print(  "======================") +        print("\n".join(sorted(unused_keys_non_str)))          # Strings are not currently logged, so we have no info on usage. -        print "\nString keys (usage unknown)", len(unused_keys_str) -        print   "======================" -        print "\n".join(sorted(unused_keys_str)) +        print("\nString keys (usage unknown)", len(unused_keys_str)) +        print(  "======================") +        print("\n".join(sorted(unused_keys_str)))          # Things that someone has set but that aren't recognized settings.          
unrec_keys = list(set(cnt.keys()) - set(settings_sd.keys())) -        print "\nUnrecognized keys", len(unrec_keys) -        print   "======================" -        print "\n".join(sorted(unrec_keys)) +        print("\nUnrecognized keys", len(unrec_keys)) +        print(  "======================") +        print("\n".join(sorted(unrec_keys)))          result = (settings_sd.keys(), unused_keys_str, unused_keys_non_str, unrec_keys)      return result @@ -138,7 +138,7 @@ def get_used_strings(root_dir):      for dir_name, sub_dir_list, file_list in os.walk(root_dir):          for fname in file_list:              if fname in ["settings.xml", "settings.xml.edit", "settings_per_account.xml"]: -                print "skip", fname +                print("skip", fname)                  continue              (base,ext) = os.path.splitext(fname)              #if ext not in [".cpp", ".hpp", ".h", ".xml"]: @@ -155,8 +155,8 @@ def get_used_strings(root_dir):                      for m in ms:                          #print "used_str",m                          used_str.add(m) -    print "skipped extensions", skipped_ext -    print "got used_str", len(used_str) +    print("skipped extensions", skipped_ext) +    print("got used_str", len(used_str))      return used_str @@ -171,7 +171,7 @@ if __name__ == "__main__":      args = parser.parse_args()      for fname in args.infiles: -        print "process", fname +        print("process", fname)          df = pd.read_csv(fname,sep='\t')          #print "DF", df.describe()          jstrs = df['RAW_LOG:BODY'] @@ -182,12 +182,12 @@ if __name__ == "__main__":          show_stats_by_key(recs,[])          show_stats_by_key(recs,["agent"])          if args.preferences: -            print "\nSETTINGS.XML" +            print("\nSETTINGS.XML")              settings_sd = parse_settings_xml("settings.xml")              #for skey,svals in settings_sd.items():               #    print skey, "=>", svals              (all_str,_,_,_) = show_stats_by_key(recs,["preferences","settings"],settings_sd) -            print +            print()              #print "\nSETTINGS_PER_ACCOUNT.XML"              #settings_pa_sd = parse_settings_xml("settings_per_account.xml") @@ -201,19 +201,19 @@ if __name__ == "__main__":                  unref_strings = all_str_set-used_strings_set                  # Some settings names are generated by appending to a prefix. Need to look for this case.                  
prefix_used = set() -                print "checking unref_strings", len(unref_strings) +                print("checking unref_strings", len(unref_strings))                  for u in unref_strings:                      for k in range(6,len(u)):                          prefix = u[0:k]                          if prefix in all_str_set and prefix in used_strings_set:                              prefix_used.add(u)                              #print "PREFIX_USED",u,prefix -                print "PREFIX_USED", len(prefix_used), ",".join(list(prefix_used)) -                print +                print("PREFIX_USED", len(prefix_used), ",".join(list(prefix_used))) +                print()                  unref_strings = unref_strings - prefix_used -                print "\nUNREF_IN_CODE " + str(len(unref_strings)) + "\n" -                print "\n".join(list(unref_strings)) +                print("\nUNREF_IN_CODE " + str(len(unref_strings)) + "\n") +                print("\n".join(list(unref_strings)))                  settings_str = read_raw_settings_xml("settings.xml")                  # Do this via direct string munging to generate minimal changeset                  settings_edited = remove_settings(settings_str,unref_strings) diff --git a/scripts/packages-formatter.py b/scripts/packages-formatter.py index b1eef3c721..ff7c892577 100755 --- a/scripts/packages-formatter.py +++ b/scripts/packages-formatter.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3  """\  This module formats the package version and copyright information for the  viewer and its dependent packages. @@ -37,6 +37,9 @@ parser.add_argument('version', help='viewer version number')  args = parser.parse_args()  _autobuild=os.getenv('AUTOBUILD', 'autobuild') +_autobuild_env=os.environ.copy() +# Coerce stdout encoding to utf-8 as cygwin's will be detected as cp1252 otherwise. +_autobuild_env["PYTHONIOENCODING"] = "utf-8"  pkg_line=re.compile('^([\w-]+):\s+(.*)$') @@ -50,7 +53,7 @@ def autobuild(*args):      try:          child = subprocess.Popen(command,                                   stdin=None, stdout=subprocess.PIPE, -                                 universal_newlines=True) +                                 universal_newlines=True, env=_autobuild_env)      except OSError as err:          if err.errno != errno.ENOENT:              # Don't attempt to interpret anything but ENOENT @@ -110,20 +113,20 @@ for key, rawdata in ("versions", versions), ("copyrights", copyrights):                  break  # Now that we've run through all of both outputs -- are there duplicates? 
-if any(pkgs for pkgs in dups.values()): -    for key, pkgs in dups.items(): +if any(pkgs for pkgs in list(dups.values())): +    for key, pkgs in list(dups.items()):          if pkgs: -            print >>sys.stderr, "Duplicate %s for %s" % (key, ", ".join(pkgs)) +            print("Duplicate %s for %s" % (key, ", ".join(pkgs)), file=sys.stderr)      sys.exit(1) -print "%s %s" % (args.channel, args.version) -print viewer_copyright +print("%s %s" % (args.channel, args.version)) +print(viewer_copyright)  version = list(info['versions'].items())  version.sort()  for pkg, pkg_version in version: -    print ': '.join([pkg, pkg_version]) +    print(': '.join([pkg, pkg_version]))      try: -        print info['copyrights'][pkg] +        print(info['copyrights'][pkg])      except KeyError:          sys.exit("No copyright for %s" % pkg) -    print +    print() diff --git a/scripts/setup-path.py b/scripts/setup-path.py index ce83d815bf..427d119520 100755 --- a/scripts/setup-path.py +++ b/scripts/setup-path.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3  """\  @file setup-path.py  @brief Get the python library directory in the path, so we don't have diff --git a/scripts/template_verifier.py b/scripts/template_verifier.py index 358931b13e..0f5135fae6 100755 --- a/scripts/template_verifier.py +++ b/scripts/template_verifier.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3  """\  @file template_verifier.py  @brief Message template compatibility verifier. @@ -58,14 +58,14 @@ def add_indra_lib_path():                  sys.path.insert(0, dir)              break      else: -        print >>sys.stderr, "This script is not inside a valid installation." +        print("This script is not inside a valid installation.", file=sys.stderr)          sys.exit(1)  add_indra_lib_path()  import optparse  import os -import urllib +import urllib.request, urllib.parse, urllib.error  import hashlib  from indra.ipc import compatibility @@ -90,7 +90,7 @@ def getstatusoutput(command):  def die(msg): -    print >>sys.stderr, msg +    print(msg, file=sys.stderr)      sys.exit(1)  MESSAGE_TEMPLATE = 'message_template.msg' @@ -106,7 +106,7 @@ def retry(times, function, *args, **kwargs):      for i in range(times):          try:              return function(*args, **kwargs) -        except Exception, e: +        except Exception as e:              if i == times - 1:                  raise e  # we retried all the times we could @@ -138,10 +138,14 @@ def fetch(url):      if url.startswith('file://'):          # just open the file directly because urllib is dumb about these things          file_name = url[len('file://'):] -        return open(file_name).read() +        with open(file_name, 'rb') as f: +            return f.read()      else: -        # *FIX: this doesn't throw an exception for a 404, and oddly enough the sl.com 404 page actually gets parsed successfully -        return ''.join(urllib.urlopen(url).readlines())    +        with urllib.request.urlopen(url) as res: +            body = res.read() +            if res.status > 299: +                sys.exit("ERROR: Unable to download %s. 
HTTP status %d.\n%s" % (url, res.status, body.decode("utf-8"))) +            return body  def cache_master(master_url):      """Using the url for the master, updates the local cache, and returns an url to the local cache.""" @@ -153,23 +157,22 @@ def cache_master(master_url):          and time.time() - os.path.getmtime(master_cache) < MAX_MASTER_AGE):          return master_cache_url  # our cache is fresh      # new master doesn't exist or isn't fresh -    print "Refreshing master cache from %s" % master_url +    print("Refreshing master cache from %s" % master_url)      def get_and_test_master():          new_master_contents = fetch(master_url) -        llmessage.parseTemplateString(new_master_contents) +        llmessage.parseTemplateString(new_master_contents.decode("utf-8"))          return new_master_contents      try:          new_master_contents = retry(3, get_and_test_master) -    except IOError, e: +    except IOError as e:          # the refresh failed, so we should just soldier on -        print "WARNING: unable to download new master, probably due to network error.  Your message template compatibility may be suspect." -        print "Cause: %s" % e +        print("WARNING: unable to download new master, probably due to network error.  Your message template compatibility may be suspect.") +        print("Cause: %s" % e)          return master_cache_url      try:          tmpname = '%s.%d' % (master_cache, os.getpid()) -        mc = open(tmpname, 'wb') -        mc.write(new_master_contents) -        mc.close() +        with open(tmpname, "wb") as mc: +            mc.write(new_master_contents)          try:              os.rename(tmpname, master_cache)          except OSError: @@ -180,9 +183,9 @@ def cache_master(master_url):              # a single day.              os.unlink(master_cache)              os.rename(tmpname, master_cache) -    except IOError, e: -        print "WARNING: Unable to write master message template to %s, proceeding without cache." % master_cache -        print "Cause: %s" % e +    except IOError as e: +        print("WARNING: Unable to write master message template to %s, proceeding without cache." 
% master_cache) +        print("Cause: %s" % e)          return master_url      return master_cache_url @@ -246,16 +249,16 @@ http://wiki.secondlife.com/wiki/Template_verifier.py      # both current and master supplied in positional params      if len(args) == 2:          master_filename, current_filename = args -        print "master:", master_filename -        print "current:", current_filename +        print("master:", master_filename) +        print("current:", current_filename)          master_url = 'file://%s' % master_filename          current_url = 'file://%s' % current_filename      # only current supplied in positional param      elif len(args) == 1:          master_url = None          current_filename = args[0] -        print "master:", options.master_url  -        print "current:", current_filename +        print("master:", options.master_url)  +        print("current:", current_filename)          current_url = 'file://%s' % current_filename      # nothing specified, use defaults for everything      elif len(args) == 0: @@ -269,8 +272,8 @@ http://wiki.secondlife.com/wiki/Template_verifier.py      if current_url is None:          current_filename = local_template_filename() -        print "master:", options.master_url -        print "current:", current_filename +        print("master:", options.master_url) +        print("current:", current_filename)          current_url = 'file://%s' % current_filename      # retrieve the contents of the local template @@ -281,42 +284,42 @@ http://wiki.secondlife.com/wiki/Template_verifier.py          sha_url = "%s.sha1" % current_url          current_sha = fetch(sha_url)          if hexdigest == current_sha: -            print "Message template SHA_1 has not changed." +            print("Message template SHA_1 has not changed.")              sys.exit(0)      # and check for syntax -    current_parsed = llmessage.parseTemplateString(current) +    current_parsed = llmessage.parseTemplateString(current.decode("utf-8"))      if options.cache_master:          # optionally return a url to a locally-cached master so we don't hit the network all the time          master_url = cache_master(master_url)      def parse_master_url(): -        master = fetch(master_url) +        master = fetch(master_url).decode("utf-8")          return llmessage.parseTemplateString(master)      try:          master_parsed = retry(3, parse_master_url) -    except (IOError, tokenstream.ParseError), e: +    except (IOError, tokenstream.ParseError) as e:          if options.mode == 'production':              raise e          else: -            print "WARNING: problems retrieving the master from %s."  % master_url -            print "Syntax-checking the local template ONLY, no compatibility check is being run." -            print "Cause: %s\n\n" % e +            print("WARNING: problems retrieving the master from %s."  
% master_url) +            print("Syntax-checking the local template ONLY, no compatibility check is being run.") +            print("Cause: %s\n\n" % e)              return 0      acceptable, compat = compare(          master_parsed, current_parsed, options.mode)      def explain(header, compat): -        print header +        print(header)          # indent compatibility explanation -        print '\n\t'.join(compat.explain().split('\n')) +        print('\n\t'.join(compat.explain().split('\n')))      if acceptable:          explain("--- PASS ---", compat)          if options.force_verification == False: -            print "Updating sha1 to %s" % hexdigest +            print("Updating sha1 to %s" % hexdigest)              sha_filename = "%s.sha1" % current_filename              sha_file = open(sha_filename, 'w')              sha_file.write(hexdigest) | 
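The `template_verifier.py` hunks above also rework `fetch()` for Python 3: it now returns bytes (from `urllib.request` or a binary file read), and callers decode with `.decode("utf-8")` before passing text to the template parser. A rough standalone sketch of that pattern, with error handling simplified relative to the commit:

```python
import urllib.request

def fetch(url):
    # file:// URLs are read directly; everything else goes through urllib.request
    if url.startswith('file://'):
        with open(url[len('file://'):], 'rb') as f:
            return f.read()
    with urllib.request.urlopen(url) as res:
        body = res.read()
        if res.status > 299:
            raise IOError("HTTP %d fetching %s" % (res.status, url))
        return body

# Callers decode the bytes before parsing text, e.g.:
# template_text = fetch(master_url).decode("utf-8")
```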
