[gnue] r8383 - in trunk/gnue-common/src/formatting/masks: . tests


From: jamest
Subject: [gnue] r8383 - in trunk/gnue-common/src/formatting/masks: . tests
Date: Wed, 5 Apr 2006 20:08:13 -0500 (CDT)

Author: jamest
Date: 2006-04-05 20:08:13 -0500 (Wed, 05 Apr 2006)
New Revision: 8383

Modified:
   trunk/gnue-common/src/formatting/masks/InputMask.py
   trunk/gnue-common/src/formatting/masks/MaskParser.py
   trunk/gnue-common/src/formatting/masks/tests/mask_tokenizer.py
Log:
mask tokenizer now takes a text string instead of a StringIO handle
PEP 8 clean-up work
miscellaneous typos fixed
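
For context, a minimal before/after sketch of the caller-side change follows; the
import path mirrors the updated test module, while the mask string, variable
names, and comments are illustrative rather than taken from the tree:

    from gnue.common.formatting.masks.MaskParser import InputMaskParser

    mask_text = "##0"   # illustrative mask; '#' and '0' map to NumberToken

    # Before r8383, callers wrapped the mask in a file-like object and passed
    # explicit numeric/date hints, e.g.
    #   InputMaskParser(StringIO.StringIO(mask_text), 'inline', numeric, date)
    #
    # After r8383, the raw string is passed directly; the parser wraps it in
    # StringIO.StringIO() internally and derives the mask type from the token
    # counts it builds while scanning.
    parser = InputMaskParser(mask_text, 'inline')
    tokens = parser.get_tokens()    # renamed from getTokens()
    kind = parser.get_type()        # renamed from getType()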


Modified: trunk/gnue-common/src/formatting/masks/InputMask.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/InputMask.py 2006-04-05 23:23:14 UTC (rev 8382)
+++ trunk/gnue-common/src/formatting/masks/InputMask.py 2006-04-06 01:08:13 UTC (rev 8383)
@@ -335,7 +335,7 @@
     # Generate a list of parser tokens that define the input mask
     # -------------------------------------------------------------------------
     #
-    parser = MaskParser.InputMaskParser(StringIO(mask),'inline', numeric, date)
+    parser = MaskParser.InputMaskParser(mask,'inline')
     
     self.pp = pprint.PrettyPrinter(indent=4)
     self.isnumeric = numeric

Modified: trunk/gnue-common/src/formatting/masks/MaskParser.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/MaskParser.py        2006-04-05 23:23:14 UTC (rev 8382)
+++ trunk/gnue-common/src/formatting/masks/MaskParser.py        2006-04-06 01:08:13 UTC (rev 8383)
@@ -34,11 +34,22 @@
 """
 __revision__ = "$Id$"
 
-from gnue.common.external.plex import Scanner, Lexicon, Errors, \
-                                      Str, Begin, State, AnyChar, Rep1, Any
 import string
-from Errors import MaskDefinitionError
+import StringIO
 
+from gnue.common.external.plex import \
+  Scanner, Lexicon, Errors, \
+  Str, Begin, State, AnyChar, Rep1, Any
+                                      
+from gnue.common.formatting.masks.Errors import MaskDefinitionError
+
+# =============================================================================
+# Support token classes
+# =============================================================================
+# 
+# The following classes all represent tokens returned by the mask tokenizer
+# 
+
 class BaseToken:
     """
     Basic parser token class.
@@ -103,7 +114,9 @@
     depending on contents of [].
     """
     def __init__(self, token, *args):
-        # TODO: Expand the set
+        
+        Token.__init__(self, token, *args)
+        
         # Are we all-numeric?
         self.numeric = token.isdigit()
         self.token = token
@@ -154,23 +167,20 @@
     def __init__(self, count):
         self.count = count
 
-##
-##
-##
+
 # =============================================================================
 # Input mask parser
 # =============================================================================
 class InputMaskParser(Scanner):
     """
-    Custom plex scanner used to contstruct the TODO: put name here
-    from an input mask passed in during initialization.
+    Custom plex scanner used to contstruct a token list which represents
+    an input mask.  This token list is used by the input mask to define
+    valid input for each position of the input.
 
     Takes a file handle containing an input mask and creates a
     list of Tokens which define the input mask
     """
-
-
-    def getType(self):
+    def get_type(self):
         """
         Returns the apparent type of this mask.
 
@@ -179,7 +189,7 @@
         """
         return type
 
-    def getTokens(self):
+    def get_tokens(self):
         """
         Returns a list of the tokens after parsing the input mask.
 
@@ -188,13 +198,13 @@
         """
         return self.tokens[:]
 
-    # ===========================================================================
+    # =========================================================================
     # Private stuff
-    # ===========================================================================
+    # =========================================================================
 
-    # ---------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     # Lexicon action functions
-    # ---------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     def _check_single(self, text):
         """
         Function to add single instance tokens to the input mask.
@@ -203,7 +213,8 @@
         in an input mask.
         """
         if text in self.__singles:
-            raise Errors.UnrecognizedInput(self, 'Mask can only have one "%s" token' %text)
+            raise Errors.UnrecognizedInput(self, \
+            'Mask can only have one "%s" token' %text)
         self.__singles.append(text)
         if text == '!':
             self.produce (RightToLeft(text))
@@ -216,21 +227,18 @@
         """
         A text literal that should appear as is in the mask
         """
-        print "literal %s" % text
         self.produce(Literal(text))
 
     def _literal_2nd(self, text):
         """
         Closes the literal string
         """
-        print "literal 2nd %s" % text
-        return self.literal(text[1:])
+        return self._literal(text[1:])
 
     def _escape(self, text):
         """
         An escaped character such as \$ to display a $
         """
-        print "escape %s" % text
         self.begin('')
         self.produce(Literal(text))
 
@@ -274,7 +282,7 @@
         Used when an escaped set character \[ or \] is found
         in the list of valid characters to be added to the set
         """
-        return self.add_set(text[1:])
+        return self._add_set(text[1:])
 
     def _end_set(self, text):
         """
@@ -289,34 +297,35 @@
         self.begin('')
         self.produce(TokenSet(self._set))
 
-    # ===========================================================================
+    # =========================================================================
     # Lexicon defintions
-    # ===========================================================================
+    # =========================================================================
     #
-    # ---------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     # Base Lexicon definition
-    # ---------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     # This lexicon is the base used by all masks
     #
 
     _lexicon = [
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # Default state definitions
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         (Str('\\'),          Begin('escape')),   # found \, set state to escape
                                                  #
         (Str("'"),           Begin('quoted')),   # found ', set state to quoted
                                                  #
         (Str('"'),           Begin('quoted2')),  # found ", set state to qoute2
                                                  #
-        (Str('{'),           Begin('repeater')), # found {, set state to repeater
+        (Str('{'),           Begin('repeater')), # found {, set state to 
+                                                 # repeater
                                                  #
         (Str('['),           _begin_set),        # found [, execute _begin_set
                                                  # the function will set state
                                                  # to set when executed
                                                  #
-        (Str(' '),           Literal),           # found a space
-                                                 # reutrn a literal char instance
+        (Str(' '),           Literal),           # found a space, return a 
+                                                 # literal char instance
                                                  #
         (Any('+.,'),         _check_single),     # these characters can appear
                                                  # only once in an input mask
@@ -328,14 +337,16 @@
                                                  # return a date token instance
                                                  #
         (Any('#0'),          NumberToken),       # found a number character
-                                                 # return a number token instance
+                                                 # return a number token 
+                                                 # instance
                                                  #
         (Any('<>'),          CaseModifier),      # found a case modifier
-                                                 # return case modifier instance
+                                                 # return case modifier 
+                                                 # instance
 
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # Escape State
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # The escape state is entered whenever a backslash is encountered while
         # in the default state.  It's purpose is to allow the placement of what
         # would normally be reserved characters into the input mask
@@ -347,9 +358,9 @@
                                        # the state back to default
           ]),
 
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # Quoted state
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # The quoted state is entered whenever a single quote is encountered
         # thile in the default state.  It's purpose is to allow quoted strings
         # inside the input mask to sent through as their literal value
@@ -360,9 +371,9 @@
             (AnyChar,            _literal)      # Process as literal character
           ]),
 
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # quote2 state
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # This works the exact same way as the quoted state but is used
         # when a double quote is encountered.  ' and " get seperate states
         # so that one type can always enclose the other
@@ -375,9 +386,9 @@
             (AnyChar,            _literal)      # Process as literal character
           ]),
 
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # repeater state
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # The repeater state is entered whenever a { is encountered
         # while in the default state.  This state allows an input
         # mask to include a number inside of {} to cause the previous
@@ -386,17 +397,19 @@
         # Example : A{5} is the same as AAAAA
         #
         State('repeater',  [
-            (Str('}'),                 Begin('')),# found }, set state to default
-            (Rep1(Any(string.digits)), _repeater) # grab all digits inside the {}
-                                                  # execute _repeater, the
-                                                  # function will recreate a
-                                                  # repeater instance containing
-                                                  # the obtained number
+            (Str('}'),                 Begin('')),# found }, set state to 
+                                                  # default
+            (Rep1(Any(string.digits)), _repeater) # grab all digits inside 
+                                                  # the {} execute _repeater,
+                                                  # the function will recreate
+                                                  # a repeater instance 
+                                                  # containing the obtained 
+                                                  # number
           ]),
 
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # Set state
-        # -----------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # The set state is entered whenever a [ is encountered while in the
         # default state.  This provides basic regex set support where any
         # character inside the [] is matched.
@@ -410,9 +423,9 @@
           ]),
     ]
 
-    # ---------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     # Additional lexicon definitions for input masks
-    # ---------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     _extra_lexicon = [
           (Any('!'),        _check_single),
     ]
@@ -431,8 +444,9 @@
             # If the incoming token is a repeater then replace
             # the repeater with the appropriate number of the
             # previous token.
-            for i in range(0, token.count-1):
+            for unused in range(0, token.count-1):
                 self.__process(self.__last)
+                
         elif isinstance(token, CaseModifier):
             # If then incomming token is a case modifier
             # then add the modifier token to the list of
@@ -455,15 +469,15 @@
         # TODO: Should this be storing modifiers and the like? It is.
         self.__last = token
 
-    def __init__(self, mask, name, numeric=False, date=False):
+    def __init__(self, mask_text, name):
         """
         Input mask scanner constructor.
 
         The input mask scanner will create a list of class instances
         that describe the input mask.
 
-        @type mask: input stream
-        @param mask: The text to be used as the mask
+        @type text: string
+        @param text: The text to be used as the mask
         @type name: string
         @param name: The name of the input mask(TODO: ?)
         @type numeric: boolean
@@ -476,17 +490,18 @@
         self.__last = None  # The last token generated from the input mask
         self.__modify = []
 
-
-        # -------------------------------------------------------------------------
+        mask = StringIO.StringIO(mask_text)
+        
+        # ---------------------------------------------------------------------
         # Read the input mask and convert into instances of Token classes
-        # -------------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         try:
             Scanner.__init__(self,
                              Lexicon(self._lexicon + self._extra_lexicon),
                              mask, name)
 
             while True:
-                token, extra = self.read()
+                token, unused = self.read()
                 if token is None:
                     break
 
@@ -499,9 +514,9 @@
         if self.__modify:
             print "WARNING: Modifier found at end of mask."
 
-        # -------------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # Build a count of the various token types created during parsing
-        # -------------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         #
         num_markers   = 0 # Number of numeric token instances found
         date_markers  = 0 # Number of date token instances found
@@ -528,21 +543,25 @@
 
         self.rtl_pos = rtl_pos
 
-        # -------------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # Check for errors and mixed marker types
-        # -------------------------------------------------------------------------
-        if not (num_markers or date_markers or text_markers):
-            raise MaskDefinitionError, 'Mask has no character tokens'
+        # ---------------------------------------------------------------------
+        #
+        # TODO: I'm not sure we should block mixed input types
+        #
+        #if not (num_markers or date_markers or text_markers):
+            #raise MaskDefinitionError, 'Mask has no character tokens'
 
-        if numeric and (date_markers or text_markers):
-            raise MaskDefinitionError, 'Numeric mask has non-numeric tokens'
+        #if (num_markers) and (date_markers or text_markers):
+            #raise MaskDefinitionError, \
+            #'Numeric mask %s has non-numeric tokens' % mask_text
 
-        if date and (num_markers or text_markers):
-            raise MaskDefinitionError, 'Date/Time mask has non-date tokens'
+        #if (date_markers) and (num_markers or text_markers):
+            #raise MaskDefinitionError, 'Date/Time mask has non-date tokens'
 
-        # -------------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # Set the type of parser based upon the marker counts
-        # -------------------------------------------------------------------------
+        # ---------------------------------------------------------------------
         # If any two of these are non-zero, then the mask is a text mask,
         # not date or numeric.
         #
@@ -551,4 +570,4 @@
         elif num_markers:
             self.type = 'numeric'
         else:
-            self.type = 'date'
+            self.type = 'date'
\ No newline at end of file

Modified: trunk/gnue-common/src/formatting/masks/tests/mask_tokenizer.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/tests/mask_tokenizer.py      2006-04-05 23:23:14 UTC (rev 8382)
+++ trunk/gnue-common/src/formatting/masks/tests/mask_tokenizer.py      2006-04-06 01:08:13 UTC (rev 8383)
@@ -1,4 +1,6 @@
-import locale, unittest, StringIO
+import locale
+import unittest
+
 locale.setlocale(locale.LC_ALL,'')
 
 from gnue.common.formatting.masks.MaskParser \
@@ -29,7 +31,7 @@
 
     for test in testInputs:
       maskText, result = test
-      mask = InputMaskParser(StringIO.StringIO(maskText), 'bogus')
+      mask = InputMaskParser(maskText, 'bogus')
       self._verifyTokens(maskText, mask.tokens, result)
 
   def testDateMasks(self):
@@ -43,7 +45,7 @@
 
     for test in testInputs:
       maskText, result = test
-      mask = InputMaskParser(StringIO.StringIO(maskText), 'bogus')
+      mask = InputMaskParser(maskText, 'bogus')
       self._verifyTokens(maskText, mask.tokens, result)
 
   def testNumericMasks(self):
@@ -58,7 +60,7 @@
 
     for test in testInputs:
       maskText, result = test
-      mask = InputMaskParser(StringIO.StringIO(maskText), 'bogus')
+      mask = InputMaskParser(maskText, 'bogus')
       self._verifyTokens(maskText, mask.tokens, result)
 
 def suite():




