panelparser.py :  » Language-Interface » ChinesePython » chinesepython2.1.3-0.4 » Lib » plat-irix6 » Python Open Source

Home
Python Open Source
1.3.1.2 Python
2.Ajax
3.Aspect Oriented
4.Blog
5.Build
6.Business Application
7.Chart Report
8.Content Management Systems
9.Cryptographic
10.Database
11.Development
12.Editor
13.Email
14.ERP
15.Game 2D 3D
16.GIS
17.GUI
18.IDE
19.Installer
20.IRC
21.Issue Tracker
22.Language Interface
23.Log
24.Math
25.Media Sound Audio
26.Mobile
27.Network
28.Parser
29.PDF
30.Project Management
31.RSS
32.Search
33.Security
34.Template Engines
35.Test
36.UML
37.USB Serial
38.Web Frameworks
39.Web Server
40.Web Services
41.Web Unit
42.Wiki
43.Windows
44.XML
Python Open Source » Language Interface » ChinesePython 
ChinesePython » chinesepython2.1.3 0.4 » Lib » plat irix6 » panelparser.py
# Module 'panelparser'
#
# Parse S-expressions output by the Panel Editor
# (which is written in Scheme so it can't help writing S-expressions).
#
# See notes at end of file.


# Character classes used by the tokenizer.
whitespace = ' \t\n'
operators = '()\''
separators = operators + whitespace + ';' + '"'


# Tokenize a string.
# Return a list of tokens (strings).
#
def tokenize_string(s):
  """Split the string s into a list of S-expression tokens.

  Whitespace separates tokens; each of ( ) ' is a token by itself;
  a double-quoted string (backslash escapes allowed) is one token
  including both quote characters; a semicolon discards the rest of
  the string (comment).  Returns the list of tokens, in order.
  """
  result = []
  pos = 0
  end = len(s)
  while pos < end:
    ch = s[pos]
    if ch in whitespace:
      pos = pos + 1
    elif ch == ';':
      # Comment: everything up to the end of the string is dropped.
      break
    elif ch == '"':
      # Scan to the closing quote, skipping the character after each
      # backslash.  An unterminated string swallows the remainder.
      j = pos + 1
      while j < end:
        cur = s[j]
        j = j + 1
        if cur == '"':
          break
        if cur == '\\':
          j = j + 1
      result.append(s[pos:j])
      pos = j
    elif ch in operators:
      result.append(ch)
      pos = pos + 1
    else:
      # Ordinary atom: runs until the next separator character.
      j = pos + 1
      while j < end and s[j] not in separators:
        j = j + 1
      result.append(s[pos:j])
      pos = j
  return result


# Tokenize a whole file (given as file object, not as file name).
# Return a list of tokens (strings).
#
def tokenize_file(fp):
  """Tokenize every line of the open file object fp.

  Returns one flat list of tokens for the whole file.  The explicit
  readline() loop is kept because this file targets Python 2.1, which
  has no file iterators.
  """
  tokens = []
  while 1:
    line = fp.readline()
    if not line: break
    # extend() appends in place; the original rebuilt the list each
    # line with `tokens = tokens + ...`, which is O(n**2) overall.
    tokens.extend(tokenize_string(line))
  return tokens


# Exception raised by parse_expr().
#
# Defined as an Exception subclass rather than the historical string
# exception (string exceptions are deprecated and later removed).  This
# is backward compatible: `except syntax_error:` handlers still match,
# and the Python 2 `raise syntax_error, detail` statements elsewhere in
# this file instantiate the class with the detail message.
class syntax_error(Exception):
  pass


# Parse an S-expression.
# Input is a list of tokens as returned by tokenize_*().
# Return a pair (expr, tokens)
# where expr is a list representing the s-expression,
# and tokens contains the remaining tokens.
# May raise syntax_error.
#
def parse_expr(tokens):
  """Parse one S-expression from the front of a token list.

  tokens is a list as produced by tokenize_string()/tokenize_file().
  Returns a pair (expr, rest) where expr is a (possibly nested) list
  of token strings representing the expression, and rest is the list
  of tokens left over after it.  Raises syntax_error if the first
  token is not '(' or if the matching ')' is missing.
  """
  # The expression must open with a parenthesis.
  if (not tokens) or tokens[0] != '(':
    raise syntax_error, 'expected "("'
  tokens = tokens[1:]
  expr = []
  while 1:
    # Ran out of tokens before the expression was closed.
    if not tokens:
      raise syntax_error, 'missing ")"'
    if tokens[0] == ')':
      # End of this expression: hand back what follows it.
      return expr, tokens[1:]
    elif tokens[0] == '(':
      # Nested expression: recurse, then continue with its leftovers.
      subexpr, tokens = parse_expr(tokens)
      expr.append(subexpr)
    else:
      # Atom (or string token): kept verbatim.
      expr.append(tokens[0])
      tokens = tokens[1:]


# Parse a file (given as file object, not as file name).
# Return a list of parsed S-expressions found at the top level.
#
def parse_file(fp):
  """Parse all top-level S-expressions in the open file object fp.

  Returns a list of parsed expressions in order of appearance.
  May raise syntax_error if the token stream is malformed.
  """
  parsed = []
  remaining = tokenize_file(fp)
  while remaining:
    expr, remaining = parse_expr(remaining)
    parsed.append(expr)
  return parsed


# EXAMPLE:
#
# The input
#  '(hip (hop hur-ray))'
#
# passed to tokenize_string() returns the token list
#  ['(', 'hip', '(', 'hop', 'hur-ray', ')', ')']
#
# When this is passed to parse_expr() it returns the expression
#  ['hip', ['hop', 'hur-ray']]
# plus an empty token list (because there are no tokens left).
#
# When a file containing the example is passed to parse_file() it returns
# a list whose only element is the output of parse_expr() above:
#  [['hip', ['hop', 'hur-ray']]]


# TOKENIZING:
#
# Comments start with semicolon (;) and continue till the end of the line.
#
# Tokens are separated by whitespace, except the following characters
# always form a separate token (outside strings):
#  ( ) '
# Strings are enclosed in double quotes (") and backslash (\) is used
# as escape character in strings.
www.java2java.com | Contact Us
Copyright 2009 - 12 Demo Source and Support. All rights reserved.
All other trademarks are property of their respective owners.