[gnue] r7266 - in trunk/gnue-common/src: . formatting/masks
From: jamest
Subject: [gnue] r7266 - in trunk/gnue-common/src: . formatting/masks
Date: Sat, 26 Mar 2005 20:35:19 -0600 (CST)
Author: jamest
Date: 2005-03-26 20:35:17 -0600 (Sat, 26 Mar 2005)
New Revision: 7266
Removed:
trunk/gnue-common/src/FormatMasks/
Modified:
trunk/gnue-common/src/formatting/masks/InputMask.py
trunk/gnue-common/src/formatting/masks/MaskParser.py
trunk/gnue-common/src/formatting/masks/Tokens.py
trunk/gnue-common/src/formatting/masks/test.py
Log:
started documenting the plex based input mask system
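
For reference, a minimal usage sketch of the mask system this commit starts
documenting, pieced together from the InputMask constructor and the
_parseInput() call exercised in test.py further down. The import path is an
assumption based on the gnue-common/src layout, and the sample input value is
illustrative only:

    from gnue.common.formatting.masks.InputMask import InputMask

    # Build a date-style mask; the mask string is the one used in test.py
    # later in this commit.
    mask = InputMask('"Date:" M/D/y')

    # Parse a complete input string and show the formatted display text
    # together with the resulting cursor position.
    display, cursor = mask._parseInput(newtext='1/2/05')
    print "Output: %s (cursor at %s)" % (display, cursor)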
Modified: trunk/gnue-common/src/formatting/masks/InputMask.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/InputMask.py 2005-03-26 21:37:40 UTC (rev 7265)
+++ trunk/gnue-common/src/formatting/masks/InputMask.py 2005-03-27 02:35:17 UTC (rev 7266)
@@ -29,6 +29,7 @@
# NOTES:
#
+import pprint
from gnue.common.external.plex import *
from cStringIO import StringIO
@@ -56,6 +57,9 @@
def begin(self, value=None):
"""
Resets the mask processor.
+
+ @type value: string
+ @param value: Not used
"""
self.text = ""
self.cursor = 0
@@ -66,7 +70,7 @@
def add(self, text, replace=0):
"""
- Insert text character
+ Insert text character
"""
disp = ""
emptyDisplay = self.emptyDisplay
@@ -74,8 +78,8 @@
tokens = self.tokens
last_token = len(tokens) - 1
i = 0
- print
- print "Cursor=%s" % self.cursor
+## print
+## print "Cursor=%s" % self.cursor
while i <= last_token:
if isinstance(self.tokens[i], Tokens.LitTok):
if len(disp) < self.cursor:
@@ -85,12 +89,12 @@
disp += inputted[i]
i += 1
- print "DISP=%s" % disp
+## print "DISP=%s" % disp
newtxt = disp[:self.cursor] + text + disp[self.cursor + replace:]
- print "NEW=%s" % newtxt
- print newtxt
+## print "NEW=%s" % newtxt
+## print newtxt
self._parseInput(newtxt)
- print self.cursor + len(text) - replace
+## print self.cursor + len(text) - replace
if text:
return self._adjustCursor(self.cursor + len(text) - replace)
else:
@@ -111,106 +115,6 @@
if cursor != cursor2:
return self.add("", 1)
-
- def move(self, pos):
- """
- Move the cursor to a new position.
- Usually results from a mouse click.
- pos is a physical cursor position; the
- internal code calculates the logical
- position and, hence, the actual new
- physical position.
- """
- return self._adjustCursor(pos)
-
-
- def moveLeft(self):
- """
- Move the cursor left one character. The
- internal code calculates the logical
- position and, hence, the actual new
- physical position.
- """
- return self._adjustCursor(self.cursor-1, True)
-
- def moveRight(self):
- """
- Move the cursor right one character. The
- internal code calculates the logical
- position and, hence, the actual new
- physical position.
- """
- return self._adjustCursor(self.cursor+1)
-
- def moveHome(self):
- """
- Move the cursor to the beginning of
- the text. The internal code calculates
- the logical position and, hence, the
- actual new physical position.
- """
- return self._adjustCursor(0)
-
- def moveEnd(self):
- """
- Move the cursor to the end of the
- text. The internal code calculates
- the logical position and, hence,
- the actual new physical position.
- """
- return self._adjustCursor(len(self.display))
-
-
- ##
- ##
- ##
- def _adjustCursor(self, pos, left=False):
- """
- Moves the cursor to a new position.
- """
-
- if pos < 0:
- pos = 0
-
- print "Pos=%s" % pos
- print "Adjusting cursor to %s" % pos
-
- rpos = 0
- token_at = 0
- tokens = self.tokens
- last_token = len(tokens) - 1
- while rpos < pos and token_at < last_token:
- rpos += len(self.actualDisplay[token_at])
- token_at += 1
-
- if rpos > pos:
- # This can happen if a token is partially complete
- token_at -= 1
- elif rpos + len(self.inputted[token_at]) < pos:
- # This can happen at the end of the string
- pos = rpos + len(self.inputted[token_at])
-
- print "Token at %s, pos=%s, rpos=%s" % (token_at, pos, rpos)
-
- if left:
- while token_at > 0 and isinstance(self.tokens[token_at],Tokens.LitTok):
- pos -= len(self.emptyDisplay[token_at])
- token_at -= 1
- else:
- while token_at < last_token and \
- isinstance(self.tokens[token_at],Tokens.LitTok):
- pos += len(self.emptyDisplay[token_at])
- token_at += 1
-
-
- print "Deciding on %s" % pos
- self.cursor = pos
- return (self.display, pos)
-
-
- ##
- ##
- ##
def _parseInput(self, newtext=""):
"""
Parses an input string into its components
@@ -232,12 +136,12 @@
try:
while True:
parsed, extra = scanner.read()
- print parsed, extra
+## print parsed, extra
if parsed is None:
last_state = self.eof_nextstate[0]
break
else:
- print "Parsed", parsed
+## print "Parsed", parsed
state, char = parsed
mstate = state[0]
inputted_states[mstate].append(state)
@@ -302,16 +206,113 @@
return (self.display, self.cursor)
+ # ---------------------------------------------------------------------------
+ # Cursor positioning functions
+ # ---------------------------------------------------------------------------
+ def move(self, pos):
+ """
+ Move the cursor to a new position.
+ Usually results from a mouse click.
+ pos is a physical cursor position; the
+ internal code calculates the logical
+ position and, hence, the actual new
+ physical position.
+ """
+ return self._adjustCursor(pos)
+
+ def moveLeft(self):
+ """
+ Move the cursor left one character. The
+ internal code calculates the logical
+ position and, hence, the actual new
+ physical position.
+ """
+ return self._adjustCursor(self.cursor-1, True)
+ def moveRight(self):
+ """
+ Move the cursor right one character. The
+ internal code calculates the logical
+ position and, hence, the actual new
+ physical position.
+ """
+ return self._adjustCursor(self.cursor+1)
- ####################################################################
- #
+ def moveHome(self):
+ """
+ Move the cursor to the beginning of
+ the text. The internal code calculates
+ the logical position and, hence, the
+ actual new physical position.
+ """
+ return self._adjustCursor(0)
+
+ def moveEnd(self):
+ """
+ Move the cursor to the end of the
+ text. The internal code calculates
+ the logical position and, hence,
+ the actual new physical position.
+ """
+ return self._adjustCursor(len(self.display))
+
+
+ def _adjustCursor(self, pos, left=False):
+ """
+ Moves the cursor to a new position.
+ """
+
+ if pos < 0:
+ pos = 0
+
+## print "Pos=%s" % pos
+## print "Adjusting cursor to %s" % pos
+
+ rpos = 0
+ token_at = 0
+ tokens = self.tokens
+ last_token = len(tokens) - 1
+ while rpos < pos and token_at < last_token:
+ rpos += len(self.actualDisplay[token_at])
+ token_at += 1
+
+ if rpos > pos:
+ # This can happen if a token is partially complete
+ token_at -= 1
+ elif rpos + len(self.inputted[token_at]) < pos:
+ # This can happen at the end of the string
+ pos = rpos + len(self.inputted[token_at])
+
+## print "Token at %s, pos=%s, rpos=%s" % (token_at, pos, rpos)
+
+ if left:
+ while token_at > 0 and isinstance(self.tokens[token_at],Tokens.LitTok):
+ pos -= len(self.emptyDisplay[token_at])
+ token_at -= 1
+ else:
+ while token_at < last_token and \
+ isinstance(self.tokens[token_at],Tokens.LitTok):
+ pos += len(self.emptyDisplay[token_at])
+ token_at += 1
+
+
+## print "Deciding on %s" % pos
+ self.cursor = pos
+ return (self.display, pos)
+
+
+
+
+ # ===========================================================================
# Internal lexicon init crap
- #
+ # ===========================================================================
def __init__(self, mask, numeric=False, date=False):
+ # -------------------------------------------------------------------------
+ # Generate a list of parser tokens that define the input mask
+ # -------------------------------------------------------------------------
parser = MaskParser.InputMaskParser(StringIO(mask),'inline', numeric, date)
-
+ self.pp = pprint.PrettyPrinter(indent=4)
self.isnumeric = numeric
self.isdate = date
self.tokens = tokens = []
@@ -319,11 +320,15 @@
# List of all tokens. Note that all {#}
# expansions have already happened.
ptokens = parser.tokens
-
+ print "1st token list"
+ for item in ptokens:
+ print item.token,
+ print
# If non-zero, position of the right-to-left token
rtl_pos = self.rtl_pos = parser.rtl_pos
- # text, numeric, or date
+ # Set the type of input mask based upon
+ # the parser: text, numeric, or date
self.type = parser.type
validationRule = None
@@ -333,13 +338,12 @@
# value.
self.emptyDisplay = []
- #
- # Process each returned parser token
- # and convert it into a mask token.
- #
+ # -------------------------------------------------------------------------
+ # Convert the parser token list into an input mask token list
+ # -------------------------------------------------------------------------
i = 0
while i < len(ptokens):
- ptoken=ptokens[i]
+ ptoken=ptokens[i]
if isinstance(ptoken ,MaskParser.Literal):
chars = ""
# Merge consecutive literals into one rule
@@ -378,13 +382,13 @@
self.emptyDisplay.append(self.placeholder*token.maxchars)
i += 1
-
- #
+ print self.tokens
+ # -------------------------------------------------------------------------
# Next, we will build the actual lexicon. We start
# at the end of the mask and work backwards, as
# any optional mask tokens will need to reference the
# next token's initial grammar elements.
- #
+ # -------------------------------------------------------------------------
i = len(tokens)
lexicon = [
# The first rule will always be Bol (to init stuff)
@@ -397,12 +401,12 @@
leadin = []
while i > 0:
+ # Iterate backward thru the tokens in the input mask
i -= 1
token = tokens[i]
if not token.optional:
leadin = []
-
j = 0
for ruleset in token.paths:
ks = 0
@@ -422,9 +426,12 @@
next_state = (i, j, ks+1)
else:
next_state = (i+1,0,0)
+ print "tokenFound(%s,%s,%s,%s)" % (0,0,(i,j,ks),next_state)
rule = (path,
lambda p, t, c=self._tokenFound, st=(i, j, ks), ns=next_state:
c(p, t, st, ns))
+
+ print "Rule", rule
if k == 0:
leadin.append(rule)
@@ -432,17 +439,19 @@
if possibly_completed:
lexi += last_leadin
if j or ks:
+ print "jks"
+ self.pp.pprint(lexi)
lexicon.append((State((i, j, ks), lexi)))
ks += 1
-
j += 1
-
+ print "lexicon"
+ self.pp.pprint(leadin[:])
lexicon.append(State((i,0,0), leadin[:]))
last_leadin = leadin
-## Tokens.printLexiconTree(lexicon)
+## Tokens.printLexiconTree(lexicon)
# Create a consolidated validation rule so we
# can test if inputted string is "complete".
@@ -456,7 +465,16 @@
def _tokenFound(self, parser, text, curstate, nextstate):
"""
- Called when an input character is found.
+ Function called when an input character matches a token.
+ It is defined as the action for every pattern in the
+ lexicon.
+
+ I believe this function returns the current state that
+ matched as well as the text. It then sets the plex
+ scanner to the next valid state.
+
+ My current thinking is the input mask system changes state
+ based upon every character input into the system.
"""
parser.produce((curstate,text))
parser.begin(nextstate)
@@ -468,4 +486,4 @@
Internal class used to return an EOF to our input loop.
"""
def __init__(self, state):
- self.state = state
\ No newline at end of file
+ self.state = state
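
A note on the lexicon construction above: each rule pairs a Plex pattern with
a lambda whose default arguments freeze the (state, next_state) pair for that
particular pattern, which is what lets _tokenFound() report which mask
position matched and then switch the scanner to the next state. A
self-contained sketch of that default-argument idiom (no Plex involved; all
names here are illustrative, not part of the module):

    def token_found(text, curstate, nextstate):
        # InputMask._tokenFound does the equivalent via
        # parser.produce((curstate, text)) and parser.begin(nextstate).
        print "matched %r in state %s, moving to %s" % (text, curstate, nextstate)

    rules = []
    for i in range(3):
        # Binding st and ns as default arguments captures the value of i at
        # definition time; a plain closure over i would only ever see its
        # final value once the loop has finished.
        rules.append(lambda t, c=token_found, st=(i, 0, 0), ns=(i + 1, 0, 0):
                     c(t, st, ns))

    for rule in rules:
        rule("x")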
Modified: trunk/gnue-common/src/formatting/masks/MaskParser.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/MaskParser.py 2005-03-26 21:37:40 UTC (rev 7265)
+++ trunk/gnue-common/src/formatting/masks/MaskParser.py 2005-03-27 02:35:17 UTC (rev 7266)
@@ -24,10 +24,11 @@
# DESCRIPTION:
"""
Parse mask definitions into tokens that the Mask class can use.
+
+Tokens are ......
"""
# NOTES:
#
-
from gnue.common.external.plex import *
import string
from Errors import *
@@ -37,7 +38,8 @@
class BaseToken:
"""
Basic parser class. Not used directly,
- but inherited by Literal, Token, etc.
+ but inherited by the other defined tokens:
+ Literal, Token, etc.
"""
numeric=False
date=False
@@ -51,9 +53,16 @@
else:
self.token = t1
+# -----------------------------------------------------------------------------
+# Standard token classes
+# -----------------------------------------------------------------------------
class Token(BaseToken):
"""
- Base token. (as opposed to literal, etc)
+ Class typically used to create normal tokens as
+ opposed to special tokens like literal.
+
+ It sets the standard options so that each individual
+ token class doesn't need to.
"""
force_lower = False
force_upper = False
@@ -94,7 +103,9 @@
if not self.numeric:
self.text = True
-
+# -----------------------------------------------------------------------------
+# Special token classes
+# -----------------------------------------------------------------------------
class Literal(BaseToken):
"""
A literal string that the developer wants in the string.
@@ -138,14 +149,30 @@
##
##
##
+# =============================================================================
+# Input mask parser
+# =============================================================================
class InputMaskParser(Scanner):
+ """
+ Takes a file handle containing an input mask and creates a
+ list of Tokens which define the input mask.
+ """
def getType(self):
"""
- Returns the apparent type of this mask. One of text, numeric, or date.
+ Returns the apparent type of this mask.
+
+ @rtype: string
+ @return: The value 'text', 'numeric', or 'date'
"""
return type
def getTokens(self):
+ """
+ Returns a list of the tokens after parsing the input mask.
+
+ @rtype: list
+ @return: The list of tokens
+ """
return self.tokens[:]
#
@@ -227,17 +254,21 @@
(Any('+.,'), _check_single),
(Any('_?AaLlCc'), TextToken),
(Any('MDYyHISPp:/'), DateToken),
- (Any('#0'), NumberToken),
+ (Any('#0'), NumberToken),
(Any('<>'), CaseModifier)
]
-
# Lexicon used by input masks
_extra_lexicon = [
(Any('!'), _check_single),
]
def __process(self, token):
+ """
+ Adds the standard tokens to the list of tokens
+ generated for this input mask. Deals with special
+ tokens.
+ """
if isinstance(token,Repeater):
for i in range(0, token.count-1):
self.__process(self.__last)
@@ -255,12 +286,23 @@
self.__last = token
def __init__(self, file, name, numeric=False, date=False):
-
+ """
+ @type file: input stream
+ @param file: The text to be used as the mask
+ @type name: string
+ @param name: The name of the input mask (TODO: ?)
+ @type numeric: boolean
+ @param numeric: Is this a numeric input mask
+ @type date: boolean
+ @param date: Is this a date input mask
+ """
self.__singles = []
self.tokens = []
self.__last = None
self.__modify = []
-
+ # -------------------------------------------------------------------------
+ # Read the input mask and convert into instances of Token classes
+ # -------------------------------------------------------------------------
try:
Scanner.__init__(self, Lexicon(self._lexicon + self._extra_lexicon),
file, name)
@@ -269,12 +311,17 @@
if token is None:
break
self.__process(token)
+
except Errors.PlexError, msg:
raise MaskDefinitionError, msg
if self.__modify:
print "WARNING: Modifier found at end of mask."
+ # -------------------------------------------------------------------------
+ # Set appropriate flags
+ # -------------------------------------------------------------------------
+
# If any two of these are non-zero, then the
# mask is a text mask, not date or numeric.
num_markers = 0
@@ -311,11 +358,11 @@
rtl_pos = 0
self.rtl_pos = rtl_pos
-
+
+ # Set the type of parser
if (num_markers and date_markers) or text_markers:
self.type = 'text'
elif num_markers:
self.type = 'numeric'
else:
self.type = 'date'
-
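
To see what the parser above produces, a minimal sketch of driving
InputMaskParser directly. The constructor arguments follow the call made in
InputMask.py earlier in this commit, the import path is an assumption based
on the source layout, the mask string is illustrative, and parser.type is
read directly rather than through getType():

    from cStringIO import StringIO
    from gnue.common.formatting.masks import MaskParser

    parser = MaskParser.InputMaskParser(StringIO('M/D/y'), 'inline')

    for token in parser.getTokens():
        # Each entry is one of the token classes defined above (Token,
        # Literal, ...); BaseToken stores the originating mask character
        # in token.token.
        print token.__class__.__name__, getattr(token, 'token', None)

    print "Mask type:", parser.type    # 'text', 'numeric', or 'date'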
Modified: trunk/gnue-common/src/formatting/masks/Tokens.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/Tokens.py 2005-03-26 21:37:40 UTC (rev 7265)
+++ trunk/gnue-common/src/formatting/masks/Tokens.py 2005-03-27 02:35:17 UTC (rev 7266)
@@ -23,8 +23,7 @@
#
# DESCRIPTION:
"""
-Input masks for GNUe Forms, et al
-Based on lex/yacc parsing (via Plex)
+Tokens used to define the components that make up an input mask
"""
# NOTES:
#
@@ -40,13 +39,17 @@
digit = Any(string.digits)
letter = Any(string.letters)
-######################################################################
+# =============================================================================
+# Base tokens
#
-# Base tokens
-#
+# These are inherited by other tokens that are actually
+# used in the input mask. Instances of these classes should not
+# be used directly.
+# =============================================================================
class Tok:
"""
- Base token
+ Base token containing all the flags and values that an
+ input token may require.
"""
# True if this character is optional
@@ -86,7 +89,7 @@
class TextTok(Tok):
"""
- Base text token
+ Base text token
"""
class DateTok(Tok):
@@ -115,10 +118,9 @@
*if* a forced marker is provided.
"""
-######################################################################
-#
+# =============================================================================
# Base tokens
-#
+# =============================================================================
class tChar(TextTok):
"""
Any character, required
@@ -295,6 +297,7 @@
class tLiteral(LitTok):
def __init__(self, char):
path = []
+
for ch in char:
path.append(Str(ch))
if len(char) == 1:
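
A small sketch of the kind of dispatch InputMask.py performs on these token
classes (compare the isinstance(..., Tokens.LitTok) checks earlier in this
commit). The helper function and import path are illustrative assumptions;
only the class names come from Tokens.py:

    from gnue.common.formatting.masks import Tokens

    def describe(token):
        # Literals are echoed straight into the display; the other token
        # families mark positions where the user has to type something.
        if isinstance(token, Tokens.LitTok):
            return "literal"
        elif isinstance(token, Tokens.TextTok):
            return "text input"
        elif isinstance(token, Tokens.DateTok):
            return "date input"
        return "other"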
Modified: trunk/gnue-common/src/formatting/masks/test.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/test.py 2005-03-26 21:37:40 UTC (rev 7265)
+++ trunk/gnue-common/src/formatting/masks/test.py 2005-03-27 02:35:17 UTC (rev 7266)
@@ -4,16 +4,21 @@
locale.setlocale(locale.LC_ALL,'')
-"""
-Uses ansi escape sequences to highlight the cursor position (hackish)
-"""
+# =============================================================================
+#
+# =============================================================================
def formatOutput(output, cursor):
+ """
+ Uses ansi escape sequences to highlight the cursor position (hackish)
+ """
output += "'"
output = output[:cursor] + chr(27) + '[7m' + \
output[cursor:cursor+1] + chr(27) + '[0m' + output[cursor+1:]
return "'" + output
-
+# =============================================================================
+# Test date mask
+# =============================================================================
m='"Date:" M/D/y'
mask = InputMask(m)
print "Mask: %s" % m
@@ -23,7 +28,9 @@
output, cursor = mask._parseInput(newtext='%s'%f)
print "Output: " + formatOutput(output, cursor)
-
+# =============================================================================
+# Test numeric mask
+# =============================================================================
m='\\$###,##0!.00'
mask = InputMask(m)
print "Mask: %s" % m
@@ -35,6 +42,10 @@
exit
+# =============================================================================
+# Test cursor positioning
+# =============================================================================
+
# Commands:
# < Left arrow
# > right arrow
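
The command legend above suggests a small driver loop; a hedged sketch
follows. The dispatch itself is illustrative, but begin(), add(), moveLeft(),
moveRight() and formatOutput() all come from the code in this commit:

    def run_commands(mask, commands):
        mask.begin()                          # reset the mask processor
        display, cursor = "", 0
        for cmd in commands:
            if cmd == '<':                    # left arrow, per the legend
                display, cursor = mask.moveLeft()
            elif cmd == '>':                  # right arrow
                display, cursor = mask.moveRight()
            else:                             # anything else is typed input
                display, cursor = mask.add(cmd)
            print "Output: " + formatOutput(display, cursor)

    # e.g. run_commands(mask, "12>34<")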