# HG changeset patch
# User cmlenz
# Date 1155769438 0
# Node ID 9c023c395e44ddb0e61730bcf212508fe95abc98
# Parent 456039594db97db00570bbd9a31269bdf4559ed2
Support for XPath number literals including decimal places.

diff --git a/markup/path.py b/markup/path.py
--- a/markup/path.py
+++ b/markup/path.py
@@ -224,14 +224,15 @@
     _QUOTES = (("'", "'"), ('"', '"'))
     _TOKENS = ('::', ':', '..', '.', '//', '/', '[', ']', '()', '(', ')', '@',
                '=', '!=', '!', '|', ',', '>=', '>', '<=', '<')
-    _tokenize = re.compile('("[^"]*")|(\'[^\']*\')|(%s)|([^%s\s]+)|\s+' % (
+    _tokenize = re.compile('("[^"]*")|(\'[^\']*\')|((?:\d+)?\.\d+)|(%s)|([^%s\s]+)|\s+' % (
         '|'.join([re.escape(t) for t in _TOKENS]),
         ''.join([re.escape(t[0]) for t in _TOKENS]))).findall
 
     def __init__(self, text, filename=None, lineno=-1):
         self.filename = filename
         self.lineno = lineno
-        self.tokens = filter(None, [a or b or c or d for a, b, c, d in
+        self.tokens = filter(None, [dqstr or sqstr or number or token or name
+                                    for dqstr, sqstr, number, token, name in
                                     self._tokenize(text)])
         self.pos = 0
 
@@ -401,7 +402,7 @@
         if len(token) > 1 and (token[0], token[-1]) in self._QUOTES:
             self.next_token()
             return StringLiteral(token[1:-1])
-        elif token[0].isdigit():
+        elif token[0].isdigit() or token[0] == '.':
             self.next_token()
             return NumberLiteral(float(token))
         elif not self.at_end and self.peek_token().startswith('('):
diff --git a/markup/tests/path.py b/markup/tests/path.py
--- a/markup/tests/path.py
+++ b/markup/tests/path.py
@@ -345,6 +345,10 @@
         xml = XML('bar')
         path = Path('*[number("3.0")=3]')
         self.assertEqual('bar', path.select(xml).render())
+        path = Path('*[number("3.0")=3.0]')
+        self.assertEqual('bar', path.select(xml).render())
+        path = Path('*[number("0.1")=.1]')
+        self.assertEqual('bar', path.select(xml).render())
 
     def test_predicate_round_function(self):
         xml = XML('bar')
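
For reference, here is a small standalone sketch (not part of the changeset) of what the amended tokenizer regex does with decimal literals such as ".1": because the new number group sits ahead of the single "." token in the alternation, ".1" comes back as one number token rather than a "." followed by "1". The _TOKENS tuple and the regex are copied from the first hunk; the raw string, the list comprehension standing in for filter(None, ...), and the tokens() helper name are modernized, illustrative choices rather than code from the patch.

import re

# _TOKENS and the pattern below are lifted from the patched markup/path.py;
# the tokens() helper is illustrative only.
_TOKENS = ('::', ':', '..', '.', '//', '/', '[', ']', '()', '(', ')', '@',
           '=', '!=', '!', '|', ',', '>=', '>', '<=', '<')
_tokenize = re.compile(r'("[^"]*")|(\'[^\']*\')|((?:\d+)?\.\d+)|(%s)|([^%s\s]+)|\s+' % (
    '|'.join([re.escape(t) for t in _TOKENS]),
    ''.join([re.escape(t[0]) for t in _TOKENS]))).findall

def tokens(text):
    # Each match is a 5-tuple of groups; whitespace matches leave every group
    # empty and are dropped, mirroring the filter(None, ...) call in __init__.
    return [dqstr or sqstr or number or token or name
            for dqstr, sqstr, number, token, name in _tokenize(text)
            if dqstr or sqstr or number or token or name]

print(tokens('*[number("0.1")=.1]'))
# ['*', '[', 'number', '(', '"0.1"', ')', '=', '.1', ']']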