[gpsd-dev] [PATCH] Fixes devtools/*.py for Python 3.


From: Fred Wright
Subject: [gpsd-dev] [PATCH] Fixes devtools/*.py for Python 3.
Date: Sun, 10 Apr 2016 01:02:05 -0700

This applies the 2to3 fixes as well as integer division fixes to the
four programs in devtools.  Most of these transformations are similar
to ones used elsewhere, with the notable exception of the
reraise_with_traceback() function in ais.py.
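
For reference, the two recurring patterns are floor division and a
version-dispatched re-raise helper.  The following is a minimal standalone
sketch, not the exact code in the patch: the helper name and the
sys.version_info check are illustrative, and in this sketch the Python 2
three-argument raise is wrapped in exec() so the file also byte-compiles
under Python 3.

    from __future__ import division
    import sys

    if sys.version_info[0] >= 3:
        def reraise(exc_type, exc_value, exc_traceback):
            # Python 3: attach the saved traceback to a fresh exception.
            raise exc_type(exc_value).with_traceback(exc_traceback)
    else:
        # The three-argument raise is a SyntaxError to the Python 3
        # compiler, so this sketch hides it inside an exec() string.
        exec("def reraise(exc_type, exc_value, exc_traceback):\n"
             "    raise exc_type, exc_value, exc_traceback\n")

    # With true division imported, "/" on two ints yields a float, so
    # byte-offset arithmetic has to use the floor operator "//".
    assert (15 + 7) // 8 == 2

ais.py keys the dispatch off a feature test for
BaseException.with_traceback instead, as shown in the diff below.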

TESTED:
Ran identify_failing_build_options.py with both Python 2 and Python 3
up through length 1, with identical results.  Did *not* test the other
three programs.  Someone more familiar with their usage should do
that.
---
 devtools/ais.py                            |  62 +++++++-----
 devtools/identify_failing_build_options.py |  12 ++-
 devtools/tablegen.py                       | 156 ++++++++++++++++-------------
 devtools/test_json_validity.py             |   7 +-
 4 files changed, 139 insertions(+), 98 deletions(-)

diff --git a/devtools/ais.py b/devtools/ais.py
index e9eb62b..790e760 100755
--- a/devtools/ais.py
+++ b/devtools/ais.py
@@ -5,6 +5,21 @@
 # This file is Copyright (c) 2010 by the GPSD project
 # BSD terms apply: see the file COPYING in the distribution root for details.
 #
+# This code runs compatibly under Python 2 and 3.x for x >= 2.
+# Preserve this property!
+from __future__ import absolute_import, print_function, division
+
+try:
+    BaseException.with_traceback
+
+    def reraise_with_traceback(exc_type, exc_value, exc_traceback):
+        raise exc_type(exc_value).with_traceback(exc_traceback)
+
+except AttributeError:
+
+    def reraise_with_traceback(exc_type, exc_value, exc_traceback):
+        raise exc_type, exc_value, exc_traceback
+
 # This decoder works by defining a declarative pseudolanguage in which
 # to describe the process of extracting packed bitfields from an AIS
 # message, a set of tables which contain instructions in the pseudolanguage,
@@ -826,7 +841,7 @@ type24b = (
              validator=lambda n: n >= 0 and n <= 99,
              formatter=ship_type_legends),
     bitfield("vendorid",     42, 'string',   None, "Vendor ID"),
-    dispatch("mmsi", {0:type24b1, 1:type24b2}, lambda m: 1 if `m`[:2]=='98' 
else 0),
+    dispatch("mmsi", {0:type24b1, 1:type24b2}, lambda m: 1 if 
repr(m)[:2]=='98' else 0),
     )
 
 type24 = (
@@ -943,7 +958,7 @@ class BitVector:
     def extend_to(self, length):
         "Extend vector to given bitlength."
         if length > self.bitlen:
-            self.bits.extend([0]*((length - self.bitlen +7 )/8))
+            self.bits.extend([0] * ((length - self.bitlen + 7 ) // 8))
             self.bitlen = length
     def from_sixbit(self, data, pad=0):
         "Initialize bit vector from AIVDM-style six-bit armoring."
@@ -954,13 +969,14 @@ class BitVector:
                 ch -= 8
             for i in (5, 4, 3, 2, 1, 0):
                 if (ch >> i) & 0x01:
-                    self.bits[self.bitlen/8] |= (1 << (7 - self.bitlen % 8))
+                    self.bits[self.bitlen // 8] |= (1 << (7 - self.bitlen % 8))
                 self.bitlen += 1
         self.bitlen -= pad
     def ubits(self, start, width):
         "Extract a (zero-origin) bitfield from the buffer as an unsigned int."
         fld = 0
-        for i in range(start/BITS_PER_BYTE, (start + width + BITS_PER_BYTE - 1) / BITS_PER_BYTE):
+        for i in range(start // BITS_PER_BYTE,
+                       (start + width + BITS_PER_BYTE - 1) // BITS_PER_BYTE):
             fld <<= BITS_PER_BYTE
             fld |= self.bits[i]
         end = (start + width) % BITS_PER_BYTE
@@ -978,7 +994,7 @@ class BitVector:
         return self.bitlen
     def __repr__(self):
         "Used for dumping binary data."
-        return str(self.bitlen) + ":" + "".join(map(lambda d: "%02x" % d, self.bits[:(self.bitlen + 7)/8]))
+        return str(self.bitlen) + ":" + "".join(["%02x" % d for d in self.bits[:(self.bitlen + 7) // 8]])
 
 import sys, exceptions, re
 
@@ -1014,7 +1030,7 @@ def aivdm_unpack(lc, data, offset, values, instructions):
                 # The try/catch error here is in case we run off the end
                 # of a variable-length string field, as in messages 12 and 14
                 try:
-                    for i in range(inst.width/6):
+                    for i in range(inst.width // 6):
                         newchar = "@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^- 
!\"#$%&'()*+,-./0123456789:;<=>?"[data.ubits(offset + 6*i, 6)]
                         if newchar == '@':
                             break
@@ -1025,7 +1041,7 @@ def aivdm_unpack(lc, data, offset, values, instructions):
                 value = value.replace("@", " ").rstrip()
             elif inst.type == 'raw':
                 # Note: Doesn't rely on the length.
-                value = BitVector(data.bits[offset/8:], len(data)-offset)
+                value = BitVector(data.bits[offset // 8:], len(data)-offset)
             values[inst.name] = value
             if inst.validator and not inst.validator(value):
                 raise AISUnpackingException(lc, inst.name, value)
@@ -1086,7 +1102,7 @@ def packet_scanner(source):
                 raise AISUnpackingException(lc, "checksum", crc)
         if csum != crc:
             if skiperr:
-                sys.stderr.write("%d: bad checksum %s, expecting %s: %s\n" % 
(lc, `crc`, csum, line.strip()))
+                sys.stderr.write("%d: bad checksum %s, expecting %s: %s\n" % 
(lc, repr(crc), csum, line.strip()))
                 well_formed = False
             else:
                 raise AISUnpackingException(lc, "checksum", crc)
@@ -1126,8 +1142,8 @@ def parse_ais_messages(source, scaled=False, skiperr=False, verbose=0):
             # Collect some field groups into ISO8601 format
             for (offset, template, label, legend, formatter) in field_groups:
                 segment = cooked[offset:offset+len(template)]
-                if map(lambda x: x[0], segment) == template:
-                    group = formatter(*map(lambda x: x[1], segment))
+                if [x[0] for x in segment] == template:
+                    group = formatter(*[x[1] for x in segment])
                     group = (label, group, 'string', legend, None)
                     cooked = cooked[:offset]+[group]+cooked[offset+len(template):]
             # Apply the postprocessor stage
@@ -1168,9 +1184,9 @@ def parse_ais_messages(source, scaled=False, skiperr=False, verbose=0):
             raise KeyboardInterrupt
         except GeneratorExit:
             raise GeneratorExit
-        except AISUnpackingException, e:
+        except AISUnpackingException as e:
             if skiperr:
-                sys.stderr.write("%s: %s\n" % (`e`, raw.strip().split()))
+                sys.stderr.write("%s: %s\n" % (repr(e), raw.strip().split()))
                 continue
             else:
                 raise
@@ -1180,7 +1196,7 @@ def parse_ais_messages(source, scaled=False, skiperr=False, verbose=0):
             if skiperr:
                 continue
             else:
-                raise exc_type, exc_value, exc_traceback
+                reraise_with_traceback(exc_type, exc_value, exc_traceback)
 
 # The rest is just sequencing and report generation.
 
@@ -1189,9 +1205,9 @@ if __name__ == "__main__":
 
     try:
         (options, arguments) = getopt.getopt(sys.argv[1:], "cdhjmqst:vx")
-    except getopt.GetoptError, msg:
-        print "ais.py: " + str(msg)
-        raise SystemExit, 1
+    except getopt.GetoptError as msg:
+        print("ais.py: " + str(msg))
+        raise SystemExit(1)
 
     dsv = False
     dump = False
@@ -1220,7 +1236,7 @@ if __name__ == "__main__":
         elif switch == '-s':      # Report AIS in scaled form
             scaled = True
         elif switch == '-t':      # Filter for a comma-separated list of types
-            types = map(int, val.split(","))
+            types = list(map(int, val.split(",")))
         elif switch == '-v':      # Dump raw packet before JSON or DSV.
             verbose += 1
         elif switch == '-x':      # Skip decoding errors
@@ -1242,9 +1258,9 @@ if __name__ == "__main__":
                             return '"' + str(x) + '"'
                         else:
                             return str(x)
-                    print "{" + ",".join(map(lambda x: '"' + x[0].name + '":' 
+ quotify(x[1]), parsed)) + "}"
+                    print("{" + ",".join(['"' + x[0].name + '":' + 
quotify(x[1]) for x in parsed]) + "}")
                 elif dsv:
-                    print "|".join(map(lambda x: str(x[1]), parsed))
+                    print("|".join([str(x[1]) for x in parsed]))
                 elif histogram:
                     key = "%02d" % msgtype
                     frequencies[key] = frequencies.get(key, 0) + 1
@@ -1260,14 +1276,14 @@ if __name__ == "__main__":
                         frequencies[key] = frequencies.get(key, 0) + 1
                 elif dump:
                     for (inst, value) in parsed:
-                        print "%-25s: %s" % (inst.legend, value)
-                    print "%%"
+                        print("%-25s: %s" % (inst.legend, value))
+                    print("%%")
             sys.stdout.flush()
         if histogram:
-            keys = frequencies.keys()
+            keys = list(frequencies.keys())
             keys.sort()
             for msgtype in keys:
-                print "%-33s\t%d" % (msgtype, frequencies[msgtype])
+                print("%-33s\t%d" % (msgtype, frequencies[msgtype]))
     except KeyboardInterrupt:
         pass
 # End
diff --git a/devtools/identify_failing_build_options.py b/devtools/identify_failing_build_options.py
index 99dedc8..d2440fa 100755
--- a/devtools/identify_failing_build_options.py
+++ b/devtools/identify_failing_build_options.py
@@ -1,5 +1,9 @@
 #!/usr/bin/env python
 
+# This code runs compatibly under Python 2 and 3.x for x >= 2.
+# Preserve this property!
+from __future__ import absolute_import, print_function, division
+
 import os
 
 always_on = [
@@ -95,7 +99,7 @@ def main(starting_number_of_options=0):
         if subprocess.call(command, stdout=dev_null) == 0:
             return True
         failed_configurations.append(command)
-        print command
+        print(command)
         with open('failed_%s_configs.txt' % phase, 'a') as failed_configs:
             failed_configs.write(' '.join(command) + '\n')
         return False
@@ -104,10 +108,10 @@ def main(starting_number_of_options=0):
     static_params += [key + '=off' for key in always_off]
 
     for i in range(starting_number_of_options, len(knobs)):
-        print 'Testing at length {}'.format(i)
+        print('Testing at length {}'.format(i))
 
         for row in itertools.combinations(knobs, i):
-            print row
+            print(row)
             params = static_params + [key + '=on' for key in row]
 
             # print {'on_params': row, 'scons_params': params}
@@ -134,4 +138,4 @@ def main(starting_number_of_options=0):
 if __name__ == '__main__':
     failed = main(0)
     for row in failed:
-        print ' '.join(row)
+        print(' '.join(row))
diff --git a/devtools/tablegen.py b/devtools/tablegen.py
index baf9bcb..432cd23 100755
--- a/devtools/tablegen.py
+++ b/devtools/tablegen.py
@@ -58,17 +58,21 @@
 #    preceding table.
 #
 # TO-DO: generate code for ais.py.
+#
+# This code runs compatibly under Python 2 and 3.x for x >= 2.
+# Preserve this property!
+from __future__ import absolute_import, print_function, division
 
 import sys, getopt
 
 def correct_table(wfp):
     # Writes the corrected table.
-    print >>sys.stderr, "Total bits:", base
+    print("Total bits:", base, file=sys.stderr)
     for (i, t) in enumerate(table):
         if offsets[i].strip():
-            print >>wfp, "|" + offsets[i] + t[owidth+1:].rstrip()
+            print("|" + offsets[i] + t[owidth+1:].rstrip(), file=wfp)
         else:
-            print >>wfp, t.rstrip()
+            print(t.rstrip(), file=wfp)
 
 def make_driver_code(wfp):
     # Writes calls to bit-extraction macros.
@@ -82,7 +86,7 @@ def make_driver_code(wfp):
     indent = base
     for (i, t) in enumerate(table):
         if '|' in t:
-            fields = map(lambda s: s.strip(), t.split('|'))
+            fields = [s.strip() for s in t.split('|')]
             width = fields[2]
             name = fields[4]
             ftype = fields[5]
@@ -95,21 +99,24 @@ def make_driver_code(wfp):
             if not record:
                 continue
             if ftype == 'x':
-                print >>wfp,"\t/* skip %s bit%s */" % (width, ["", 
"s"][width>'1'])
+                print("\t/* skip %s bit%s */" % (width, ["", "s"][width>'1']),
+                      file=wfp)
                 continue
             if ftype[0] == 'a':
                 arrayname = name
                 explicit = ftype[1] == '^'
-                print >>wfp, '#define ARRAY_BASE %s' % offsets[i].strip()
-                print >>wfp, '#define ELEMENT_SIZE %s' % trailing
+                print('#define ARRAY_BASE %s' % offsets[i].strip(), file=wfp)
+                print('#define ELEMENT_SIZE %s' % trailing, file=wfp)
                 if explicit:
                     lengthfield = last
-                    print >>wfp, indent + "for (i = 0; i < %s; i++) {" % 
lengthfield 
+                    print(indent + "for (i = 0; i < %s; i++) {" % lengthfield,
+                          file=wfp) 
                 else:
                     lengthfield = "n" + arrayname
-                    print >>wfp, indent + "for (i = 0; ARRAY_BASE + 
(ELEMENT_SIZE*i) < bitlen; i++) {" 
+                    print(indent + "for (i = 0; ARRAY_BASE + (ELEMENT_SIZE*i) 
< bitlen; i++) {", file=wfp) 
                 indent += step
-                print >>wfp, indent + "int a = ARRAY_BASE + (ELEMENT_SIZE*i);" 
+                print(indent + "int a = ARRAY_BASE + (ELEMENT_SIZE*i);",
+                      file=wfp) 
                 continue
             offset = offsets[i].split('-')[0]
             if arrayname:
@@ -118,22 +125,24 @@ def make_driver_code(wfp):
             else:
                 target = "%s.%s" % (structname, name)
             if ftype[0].lower() in ('u', 'i', 'e'):
-                print >>wfp, indent + "%s\t= %sBITS(%s, %s);" % \
-                      (target, {'u':'U', 'e':'U', 'i':'S'}[ftype[0].lower()], offset, width)
+                print(indent + "%s\t= %sBITS(%s, %s);" % \
+                      (target, {'u':'U', 'e':'U', 'i':'S'}[ftype[0].lower()], offset, width), file=wfp)
             elif ftype == 't':
-                print >>wfp, indent + "UCHARS(%s, %s);" % (offset, target)
+                print(indent + "UCHARS(%s, %s);" % (offset, target), file=wfp)
             elif ftype == 'b':
-                print >>wfp, indent + "%s\t= (bool)UBITS(%s, 1);" % (target, 
offset)
+                print(indent + "%s\t= (bool)UBITS(%s, 1);" % (target, offset),
+                      file=wfp)
             else:
-                print >>wfp, indent + "/* %s bits of type %s */" % 
(width,ftype)
+                print(indent + "/* %s bits of type %s */" % (width,ftype),
+                      file=wfp)
             last = name
     if arrayname:
         indent = base
-        print >>wfp, indent + "}"
+        print(indent + "}", file=wfp)
         if not explicit:
-            print >>wfp, indent + "%s.%s = ind;" % (structname, lengthfield)
-        print >>wfp, "#undef ARRAY_BASE" 
-        print >>wfp, "#undef ELEMENT_SIZE" 
+            print(indent + "%s.%s = ind;" % (structname, lengthfield), 
file=wfp)
+        print("#undef ARRAY_BASE", file=wfp) 
+        print("#undef ELEMENT_SIZE", file=wfp) 
 
 def make_structure(wfp):
     # Write a structure definition correponding to the table.
@@ -144,11 +153,11 @@ def make_structure(wfp):
     inwards = step
     arrayname = None
     def tabify(n):
-        return ('\t' * (n / 8)) + (" " * (n % 8)) 
-    print >>wfp, tabify(baseindent) + "struct {"
+        return ('\t' * (n // 8)) + (" " * (n % 8)) 
+    print(tabify(baseindent) + "struct {", file=wfp)
     for (i, t) in enumerate(table):
         if '|' in t:
-            fields = map(lambda s: s.strip(), t.split('|'))
+            fields = [s.strip() for s in t.split('|')]
             width = fields[2]
             description = fields[3].strip()
             name = fields[4]
@@ -168,12 +177,14 @@ def make_structure(wfp):
                     ftype = ftype[1:]
                 else:
                     lengthfield = "n%s" % arrayname
-                    print >>wfp, tabify(baseindent + inwards) + "signed int 
%s;" % lengthfield
+                    print(tabify(baseindent + inwards)
+                          + "signed int %s;" % lengthfield, file=wfp)
                 if arrayname.endswith("s"):
                     typename = arrayname[:-1]
                 else:
                     typename = arrayname
-                print >>wfp, tabify(baseindent + inwards) + "struct %s_t {" % 
typename
+                print(tabify(baseindent + inwards) + "struct %s_t {" % 
typename,
+                      file=wfp)
                 inwards += step
                 arraydim = ftype[1:]
                 continue
@@ -184,20 +195,21 @@ def make_structure(wfp):
             elif ftype == 'b':
                 decl = "bool %s;\t/* %s */" % (name, description)
             elif ftype == 't':
-                stl = int(width)/6
+                stl = int(width) // 6
                 decl = "char %s[%d+1];\t/* %s */" % (name, stl, description)
             else:
                 decl = "/* %s bits of type %s */" % (width, ftype)
-            print >>wfp, tabify(baseindent + inwards) + decl
+            print(tabify(baseindent + inwards) + decl, file=wfp)
         last = name
     if arrayname:
         inwards -= step
-        print >>wfp, tabify(baseindent + inwards) + "} %s[%s];" % (arrayname, 
arraydim)
+        print(tabify(baseindent + inwards) + "} %s[%s];"
+              % (arrayname, arraydim), file=wfp)
     if "->" in structname:
         typename = structname.split("->")[1]
     if "." in typename:
         structname = structname.split(".")[1]    
-    print >>wfp, tabify(baseindent) + "} %s;" % typename
+    print(tabify(baseindent) + "} %s;" % typename, file=wfp)
 
 def make_json_dumper(wfp):
     # Write the skeleton of a JSON dump corresponding to the table.
@@ -230,7 +242,7 @@ def make_json_dumper(wfp):
     vocabularies = [x[0] for x in subtables]
     for (i, t) in enumerate(table):
         if '|' in t:
-            fields = map(lambda s: s.strip(), t.split('|'))
+            fields = [s.strip() for s in t.split('|')]
             name = fields[4]
             ftype = fields[5]
             if after == name:
@@ -272,7 +284,7 @@ def make_json_dumper(wfp):
                                fmt+r'\"%s\"', "JSON_BOOL(%s)",
                                None, None))
             elif ftype[0] == 'd':
-                print >>sys.stderr, "Cannot generate code for data members"
+                print("Cannot generate code for data members", file=sys.stderr)
                 sys.exit(1)
             elif ftype[0] == 'U':
                 tuples.append((name,
@@ -290,21 +302,22 @@ def make_json_dumper(wfp):
                     lengthfield = "n" + name
                 tuples.append((name, None, None, None, lengthfield))
             else:
-                print >>sys.stderr, "Unknown type code", ftype
+                print("Unknown type code", ftype, file=sys.stderr)
                 sys.exit(1)
         last = name
     startspan = 0
     def scaled(i):
         return tuples[i][3] is not None
     def tslice(e, i):
-        return map(lambda x: x[i], tuples[startspan:e+1])
+        return [x[i] for x in tuples[startspan:e+1]]
     base = " " * 8
     step = " " * 4
     inarray = None
     header = "(void)snprintf(buf + strlen(buf), buflen - strlen(buf),"
     for (i, (var, uf, uv, sf, sv)) in enumerate(tuples):
         if uf == None:
-            print >>wfp, base + "for (i = 0; i < %s.%s; i++) {" % (structname, 
sv)
+            print(base + "for (i = 0; i < %s.%s; i++) {" % (structname, sv),
+                  file=wfp)
             inarray = var
             base = " " * 12
             startspan = i+1
@@ -321,12 +334,13 @@ def make_json_dumper(wfp):
             endit = None
         if endit:
             if not scaled(i):
-                print >>wfp, base + header
+                print(base + header, file=wfp)
                 if inarray:
                     prefix = '{"'
                 else:
                     prefix = '"'
-                print >>wfp, base + step + prefix +','.join(tslice(i,1)) + 
endit
+                print(base + step + prefix +','.join(tslice(i,1)) + endit,
+                      file=wfp)
                 for (j, t) in enumerate(tuples[startspan:i+1]):
                     if inarray:
                         ref = structname + "." + inarray + "[i]." + t[0]
@@ -338,9 +352,10 @@ def make_json_dumper(wfp):
                     else:
                         wfp.write(",\n")
             else:
-                print >>wfp, base + "if (scaled)"
-                print >>wfp, base + step + header
-                print >>wfp, base + step*2 + '"'+','.join(tslice(i,3)) + endit
+                print(base + "if (scaled)", file=wfp)
+                print(base + step + header, file=wfp)
+                print(base + step*2 + '"'+','.join(tslice(i,3)) + endit,
+                      file=wfp)
                 for (j, t) in enumerate(tuples[startspan:i+1]):
                     if inarray:
                         ref = structname + "." + inarray + "[i]." + t[0]
@@ -351,9 +366,10 @@ def make_json_dumper(wfp):
                         wfp.write(");\n")
                     else:
                         wfp.write(",\n")
-                print >>wfp, base + "else"
-                print >>wfp, base + step + header
-                print >>wfp, base + step*2 + '"'+','.join(tslice(i,1)) + endit
+                print(base + "else", file=wfp)
+                print(base + step + header, file=wfp)
+                print(base + step*2 + '"'+','.join(tslice(i,1)) + endit,
+                      file=wfp)
                 for (j, t) in enumerate(tuples[startspan:i+1]):
                     if inarray:
                         ref = structname + "." + inarray + "[i]." + t[0]
@@ -368,10 +384,11 @@ def make_json_dumper(wfp):
     # If we were looking at a trailing array, close scope 
     if inarray:
         base = " " * 8
-        print >>wfp, base + "}"
-        print >>wfp, base + "if (buf[strlen(buf)-1] == ',')"
-        print >>wfp, base + step + r"buf[strlen(buf)-1] = '\0';"
-        print >>wfp, base + "(void)strlcat(buf, \"]}\", buflen - strlen(buf));"
+        print(base + "}", file=wfp)
+        print(base + "if (buf[strlen(buf)-1] == ',')", file=wfp)
+        print(base + step + r"buf[strlen(buf)-1] = '\0';", file=wfp)
+        print(base + "(void)strlcat(buf, \"]}\", buflen - strlen(buf));",
+              file=wfp)
 
 def make_json_generator(wfp):
     # Write a stanza for jsongen.py.in describing how to generate a
@@ -380,16 +397,16 @@ def make_json_generator(wfp):
     extra = ""
     arrayname = None
     record = after is None
-    print >>wfp, '''\
+    print('''\
     {
     "initname" : "__INITIALIZER__",
     "headers": ("AIS_HEADER",),
     "structname": "%s",
     "fieldmap":(
-        # fieldname    type        default''' % (structname,)
+        # fieldname    type        default''' % (structname,), file=wfp)
     for (i, t) in enumerate(table):
         if '|' in t:
-            fields = map(lambda s: s.strip(), t.split('|'))
+            fields = [s.strip() for s in t.split('|')]
             name = fields[4]
             ftype = fields[5]
             if after == name:
@@ -414,9 +431,10 @@ def make_json_generator(wfp):
                 else:
                     lengthfield = "n" + arrayname
                 extra = " " * 8
-                print >>wfp, "        ('%s',%s 'array', (" % \
-                      (arrayname, " "*(10-len(arrayname)))
-                print >>wfp, "            ('%s_t', '%s', (" % (typename, 
lengthfield)
+                print("        ('%s',%s 'array', (" % \
+                      (arrayname, " "*(10-len(arrayname))), file=wfp)
+                print("            ('%s_t', '%s', (" % (typename, lengthfield),
+                      file=wfp)
             else:
                 # Depends on the assumption that the read code
                 # always sees unscaled JSON.
@@ -447,27 +465,27 @@ def make_json_generator(wfp):
                     "second": "'60'",
                     }
                 default = namedefaults.get(name) or typedefault
-                print >>wfp, extra + "        ('%s',%s '%s',%s %s)," % (name,
+                print(extra + "        ('%s',%s '%s',%s %s)," % (name,
                                                      " "*(10-len(name)),
                                                      readtype,
                                                      " "*(8-len(readtype)),
-                                                     default)
+                                                     default), file=wfp)
                 if ftype[0] == 'e':
-                    print >>wfp, extra + "        ('%s_text',%s'ignore',   
None)," % \
-                          (name, " "*(6-len(name)))
+                    print(extra + "        ('%s_text',%s'ignore',   None)," % \
+                          (name, " "*(6-len(name))), file=wfp)
 
             last = name
     if arrayname:
-        print >>wfp, "                    )))),"
-    print >>wfp, "        ),"
-    print >>wfp, "    },"
+        print("                    )))),", file=wfp)
+    print("        ),", file=wfp)
+    print("    },", file=wfp)
 
 if __name__ == '__main__':
     try:
         (options, arguments) = getopt.getopt(sys.argv[1:], "a:tc:s:d:S:E:r:o:")
-    except getopt.GetoptError, msg:
-        print "tablecheck.py: " + str(msg)
-        raise SystemExit, 1
+    except getopt.GetoptError as msg:
+        print("tablecheck.py: " + str(msg))
+        raise SystemExit(1)
     generate = maketable = makestruct = makedump = readgen = all = False
     after = before = None
     filestem = "tablegen"
@@ -497,7 +515,7 @@ if __name__ == '__main__':
             filestem = val
 
     if not generate and not maketable and not makestruct and not makedump and not readgen and not all:
-        print >>sys.stderr, "tablecheck.py: no mode selected"
+        print("tablecheck.py: no mode selected", file=sys.stderr)
         sys.exit(1)
 
     # First, read in the table.
@@ -555,10 +573,10 @@ if __name__ == '__main__':
                 subtable_content.append([f.strip() for f in line[1:].strip().split("|")])
             continue
     if state == 0:
-        print >>sys.stderr, "Can't find named table."
+        print("Can't find named table.", file=sys.stderr)
         sys.exit(1)        
     elif state < 3:
-        print >>sys.stderr, "Ill-formed table (in state %d)." % state
+        print("Ill-formed table (in state %d)." % state, file=sys.stderr)
         sys.exit(1)
     table = table[1:]
     ranges = ranges[1:]
@@ -581,7 +599,7 @@ if __name__ == '__main__':
     corrections = False
     for w in widths:
         if w is None:
-            offsets.append(`base`)
+            offsets.append(repr(base))
             base = 0
         elif w == '':
             offsets.append('')
@@ -589,13 +607,13 @@ if __name__ == '__main__':
             w = int(w)
             offsets.append("%d-%d" % (base, base + w - 1))
             base += w
-    if filter(lambda p: p[0] != p[1], zip(ranges, offsets)):
+    if [p for p in zip(ranges, offsets) if p[0] != p[1]]:
         corrections = True
-        print "Offset corrections:"
+        print("Offset corrections:")
         for (old, new) in zip(ranges, offsets):
             if old != new:
-                print >>sys.stderr, old, "->", new 
-    owidth = max(*map(len, offsets)) 
+                print(old, "->", new, file=sys.stderr) 
+    owidth = max(*list(map(len, offsets))) 
     for (i, off) in enumerate(offsets):
         offsets[i] += " " * (owidth - len(offsets[i]))
 
diff --git a/devtools/test_json_validity.py b/devtools/test_json_validity.py
index 7423c55..0f414cc 100755
--- a/devtools/test_json_validity.py
+++ b/devtools/test_json_validity.py
@@ -5,6 +5,9 @@
 # If it fails, it will print the offending line and an error message.
 # The goal is to check that GPSD outputs valid JSON.
 #
+# This code runs compatibly under Python 2 and 3.x for x >= 2.
+# Preserve this property!
+from __future__ import absolute_import, print_function, division
 
 import json, sys
 
@@ -17,7 +20,7 @@ for line in sys.stdin.readlines():
         item = json.loads(line)
     except ValueError as e:
         success = False
-        print "%d: %s" % (lc, line.strip())
-        print "%d: %s" % (lc, e)
+        print("%d: %s" % (lc, line.strip()))
+        print("%d: %s" % (lc, e))
 
 exit(0 if success else 1)
-- 
2.8.1



