#!/usr/bin/env python
### BITPIM
###
### Copyright (C) 2003-2004 Roger Binns <rogerb@rogerbinns.com>
###
### This program is free software; you can redistribute it and/or modify
### it under the terms of the BitPim license as detailed in the LICENSE file.
###
### $Id: protogen.py 4369 2007-08-20 01:59:33Z djpham $

"Generate Python code from packet descriptions"

import contextlib
import io
import os
import sys
import token
import tokenize


class protoerror(Exception):
    """Raised when a packet description file cannot be parsed.

    Carries the offending tokenize token so the message can point at the
    exact line and column of the problem.
    """

    def __init__(self, desc, token):
        Exception.__init__(self, desc)
        self.desc = desc
        self.token = token

    def __repr__(self):
        # token layout: (type-name, string, (srow, scol), (erow, ecol), line)
        res = self.desc + "\nLine " + repr(self.token[2][0]) + ":\n"
        res += self.token[4]
        res += " " * self.token[2][1] + "^\n"
        res += repr(self.token[:4])
        return res

    def __str__(self):
        return self.__repr__()


class protogentokenizer:
    """Turns the Python token stream of a .p file into protocol events.

    Iterating over an instance yields tuples whose first element is one of
    the event constants below; the remaining elements depend on the event.
    """

    # A section enclosed in %{ ... }%. One item follows which is the string
    LITERAL = "LITERAL"

    # The start of a packet.
    # Followed by name, generatordict (as a string), userspecced dict (as a string), a comment (or None)
    PACKETSTART = "PACKETSTART"

    # End of a packet. Nothing follows
    PACKETEND = "PACKETEND"

    # Start of an 'if' section. Followed by condition as string including trailing ':'
    CONDITIONALSTART = "CONDITIONALSTART"

    # start of an 'else' or 'elif' section.
    CONDITIONALRESTART = "CONDITIONALRESTART"

    # End of an 'if' section. Nothing follows
    CONDITIONALEND = "CONDITIONALEND"

    # An actual field. Followed by name, size [-1 means unknown size], type,
    # generatordict, userspecced dict, comment, modifiers
    FIELD = "FIELD"

    # Embedded codes: similar to LITERAL, but defined inside a PACKET.
    CODE = "CODE"

    # An assertion (validity check). Followed by string of assertion expression
    ASSERTION = "ASSERTION"

    STATE_TOPLEVEL = "STATE_TOPLEVEL"
    STATE_PACKET = "STATE_PACKET"
    STATE_CONDITIONAL = "STATE_CONDITIONAL"

    def __init__(self, tokenizer, autogennamebase):
        self.tokenizer = tokenizer
        self.pb = []                         # pushback list from what we are tokenizing
        self.state = [self.STATE_TOPLEVEL]   # state stack
        self.packetstack = []                # packets being described stack
        self.resultspb = []                  # our results pushback stack
        self.lines = [None]                  # no zeroth line
        self.autogennamebase = autogennamebase
        self.deferredpackets = []            # used for nested packets

    def _getautogenname(self, line):
        # Name for an anonymous inner class, made unique by its line number
        return self.autogennamebase + repr(line)

    def _lookahead(self, howfar=1):
        "Returns a token howfar ahead"
        assert howfar >= 1
        while len(self.pb) < howfar:
            self.pb.append(self._realnext())
        return self.pb[howfar - 1]

    def _realnext(self):
        "Gets the next token from our input, ignoring the pushback list"
        while True:
            t = next(self.tokenizer)
            t = (token.tok_name[t[0]],) + tuple(t[1:])

            # we grab a copy of any lines we see for the first time
            if len(self.lines) == t[2][0]:
                ll = t[4].split('\n')
                self.lines.extend(ll[:-1])
            elif t[3][0] > len(self.lines):
                # multiline token
                ll = t[4].split('\n')
                ll = ll[:-1]
                for i, l in zip(range(t[2][0], t[3][0] + 1), ll):
                    # we may already have the line, hence the conditional
                    if len(self.lines) == i:
                        self.lines.append(l)

            # blank-line NL tokens are treated the same as logical newlines
            if t[0] == 'NL':
                t = ('NEWLINE',) + t[1:]
            if t[0] != 'COMMENT':
                break
        return t

    def _nextignorenl(self):
        "Gets next token ignoring newlines"
        while True:
            t = self._next()
            if t[0] != 'NEWLINE':
                return t

    def _next(self):
        "Gets next token from our input, looking in pushback list first"
        if self.pb:
            return self.pb.pop(0)
        return self._realnext()

    def _consumenl(self):
        "consumes any newlines"
        while self._lookahead()[0] == 'NEWLINE':
            self._next()

    def _getuptoeol(self):
        """Returns everything up to newline as a string.  If end of line has a
        backslash before it then the next line is returned as well"""
        t = self._lookahead()
        res = self._getline(t[2][0])[t[2][1]:]
        while True:
            while t[0] != 'NEWLINE':
                t = self._next()
            # guard against a line too short to hold a continuation backslash
            if len(res) < 2 or res[-2] != '\\':
                break
            t = self._next()
            res += self._getline(t[2][0])
        return res

    def _getline(self, line):
        return self.lines[line]

    def __iter__(self):
        return self

    def next(self):
        res = None
        if len(self.resultspb):
            res = self.resultspb.pop()

        if self.state[-1] == self.STATE_TOPLEVEL:
            if res is not None:
                return res

            # any deferred packets?
            if len(self.deferredpackets):
                res = self.deferredpackets[0]
                self.deferredpackets = self.deferredpackets[1:]
                return res

            # outermost level in file
            t = self._lookahead()
            if t[0] == 'NEWLINE':
                self._next()  # consume
                return self.next()
            if t[0] == 'OP' and t[1] == '%':
                return (self.LITERAL, self._getliteral())
            if t[0] == 'NAME' and t[1] == 'PACKET':
                return self._processpacketheader()
            if t[0] == 'ENDMARKER':
                raise StopIteration()
            raise protoerror("Unexpected token", t)

        if self.state[-1] == self.STATE_PACKET or self.state[-1] == self.STATE_CONDITIONAL:
            # describing fields in a packet
            if res is None:
                res = self._processpacketfield()
            # flatten nested packets: queue the whole inner packet and emit
            # it later at top level
            if res[0] == self.PACKETSTART:
                q = [res]
                while True:
                    res = self.next()
                    q.append(res)
                    if res[0] == self.PACKETEND:
                        break
                self.deferredpackets.extend(q)
                return self.next()
            # normal
            return res

        raise protoerror("Unexpected state", self._lookahead())

    # Python 3 iterator protocol; the py2-style next() name is kept for
    # backward compatibility with existing callers
    __next__ = next

    def _getliteral(self):
        "Returns the section enclosed in %{ ... }%. The %{ and }% must be on lines by themselves."
        t = self._next()
        if t[0] != 'OP' or t[1] != '%':
            raise protoerror("Expecting '%{'", t)
        t = self._next()
        if t[0] != 'OP' or t[1] != '{':
            raise protoerror("Expecting '%{'", t)
        t = self._next()
        if t[0] != 'NEWLINE':
            raise protoerror("Expecting newline", t)
        # now in middle of literal
        res = ""
        lastline = -1
        while True:
            t = self._lookahead()
            t2 = self._lookahead(2)
            if t[0] == 'OP' and t[1] == '%' and \
               t2[0] == 'OP' and t2[1] == '}':
                self._next()  # consume %
                self._next()  # consume }
                t = self._next()
                if t[0] != 'NEWLINE':
                    raise protoerror("Expecting newline", t)
                break
            t = self._next()
            res += t[4]
            lastline = t[2][0]
            while self._lookahead()[2][0] == lastline:
                # consume all tokens on the same line
                self._next()
        return res

    def _getdict(self):
        """Returns a text string representing a dict.  If the next token is
        not a dict start then None is returned"""
        res = None
        t = self._lookahead()
        if t[0] != 'OP' or t[1] != "{":
            return res
        res = ""
        t = self._next()
        start = t[2]
        mostrecent = t  # to aid in debugging
        nest = 1
        while nest > 0:
            t = self._next()
            if t[0] == 'OP' and t[1] == '}':
                nest -= 1
                continue
            if t[0] == 'OP' and t[1] == '{':
                mostrecent = t
                nest += 1
                continue
            if t[0] == 'DEDENT' or t[0] == 'INDENT' or t[0] == 'ENDMARKER':
                raise protoerror("Unterminated '{'", mostrecent)

        # reassemble the dict source text from the saved input lines
        end = t[3]
        for line in range(start[0], end[0] + 1):
            l = self._getline(line)
            if line == end[0]:
                l = l[:end[1]]
            if line == start[0]:
                l = l[start[1]:]
            res += l

        return res

    def _processpacketheader(self):
        t = self._next()
        if t[0] != 'NAME':
            raise protoerror("expecting 'PACKET'", t)
        thedict = self._getdict()
        t = self._next()
        if t[0] != 'NAME':
            raise protoerror("expecting packet name", t)
        thename = t[1]
        t = self._next()
        # bug fix: the original tested  t[0]!='OP' and t[1]!=':'  which let
        # a wrong OP token (e.g. ',') through unnoticed
        if t[0] != 'OP' or t[1] != ':':
            raise protoerror("expecting ':'", t)

        # we now have to see an indent and an optional string in either
        # order, with newlines ignored
        thecomment = None
        seenindent = False
        while True:
            t = self._lookahead()
            if t[0] == 'NEWLINE':
                self._next()
                continue
            if t[0] == 'STRING':
                if thecomment is not None:
                    raise protoerror("Duplicate string comment", t)
                thecomment = self._next()[1]
                continue
            if t[0] == 'INDENT':
                if seenindent:
                    raise protoerror("Unexpected repeat indent", t)
                seenindent = True
                self._next()
                continue
            break

        if not seenindent:
            raise protoerror("Expecting an indent", t)
        self._consumenl()
        # ok, now pointing to packet data
        self.state.append(self.STATE_PACKET)
        self.packetstack.append((thename, thedict, thecomment))
        return self.PACKETSTART, thename, None, thedict, thecomment

    def _processpacketfield(self):
        """Read in one packet field and return the corresponding event tuple"""
        self._consumenl()
        t = self._lookahead()

        if t[0] == 'DEDENT':
            # consume
            self._next()
            # pop a packet
            x = self.state.pop()
            if x == self.STATE_CONDITIONAL:
                # check if this is an else or elif; if so we fall through
                # below with t pointing at the else/elif NAME token
                t = self._lookahead()
                if t[0] == 'NAME' and t[1] in ('else', 'elif'):
                    self.state.append(self.STATE_CONDITIONAL)
                else:
                    return (self.CONDITIONALEND,)
            else:
                return (self.PACKETEND,)

        if t[0] == 'OP' and t[1] == '%':
            # embedded codes
            return self.CODE, self._getliteral()

        # Size
        if t[0] == 'NUMBER':
            self._next()
            thesize = int(t[1])
        elif t[0] == 'OP' and t[1] == '*':
            self._next()
            thesize = -1
        elif t[0] == 'NAME' and t[1].upper() == 'P':
            self._next()
            thesize = 'P'
        elif t[0] == 'NAME' and t[1].upper() == 'A':
            self._next()
            return self.ASSERTION, self._getuptoeol()
        elif t[0] == 'NAME' and t[1] == 'if':
            cond = self._getuptoeol()
            self._consumenl()
            t = self._next()
            if t[0] != 'INDENT':
                raise protoerror("Expecting an indent after if ...: statement", t)
            self.state.append(self.STATE_CONDITIONAL)
            return (self.CONDITIONALSTART, cond)
        elif t[0] == 'NAME' and t[1] in ('elif', 'else'):
            cond = self._getuptoeol()
            self._consumenl()
            t = self._next()
            if t[0] != 'INDENT':
                raise protoerror("Expecting an indent after else: or elif ...: statement", t)
            if self.state[-1] != self.STATE_CONDITIONAL:
                raise protoerror('An if must precede an else or elif.', t)
            return (self.CONDITIONALRESTART, cond)
        else:
            raise protoerror("Expecting field size as an integer, *, P, A or 'if' statement", t)

        # Type
        t = self._next()
        if t[0] != 'NAME':
            raise protoerror("Expecting field type", t)
        thetype = t[1]

        # a dot and another type (first was module)?
        t = self._lookahead()
        if t[0] == 'OP' and t[1] == '.':
            self._next()
            t = self._next()
            if t[0] != 'NAME':
                raise protoerror("Expecting a name after . in field type", t)
            thetype += "." + t[1]

        # Optional dict
        thedict = self._getdict()

        # Name, possibly preceded by modifier operator characters (e.g. +, *)
        themodifiers = ""
        t = self._next()
        while t[0] == 'OP':
            themodifiers += t[1]
            t = self._next()
        if t[0] != 'NAME':
            raise protoerror("Expecting field name", t)
        thename = t[1]

        # A colon (anonymous inner struct), newline, or string description
        thedesc = None
        t = self._lookahead()
        if t[0] == 'OP' and t[1] == ':':
            # consume :
            self._next()

            seenindent = False

            # optional newline
            self._consumenl()
            # optional description
            t = self._lookahead()
            if t[0] == 'STRING':
                thedesc = t[1]
                t = self._next()
            elif t[0] == 'INDENT':
                seenindent = True
                self._next()
            # optional newline
            self._consumenl()
            # there should be an indent
            if not seenindent:
                t = self._next()
                if t[0] != 'INDENT':
                    raise protoerror("Expected an indent after : based field", t)

            # put new packet on results pushback; it will be emitted as a
            # separate class and referenced via 'elementclass'
            autoclass = self._getautogenname(t[2][0])
            self.resultspb.append((self.PACKETSTART, autoclass, None, None, "'Anonymous inner class'"))
            self.state.append(self.STATE_PACKET)

            return self.FIELD, thename, thesize, thetype, "{'elementclass': " + autoclass + "}", \
                thedict, thedesc, themodifiers

        # optional string
        if t[0] == 'STRING':
            thedesc = t[1]
            self._next()
        # optional newline
        self._consumenl()
        # the string this time on the next line?
        if thedesc is None:
            t = self._lookahead()
            if t[0] == 'STRING':
                thedesc = t[1]
                self._next()
                self._consumenl()
        # return what we have digested
        return self.FIELD, thename, thesize, thetype, None, thedict, thedesc, themodifiers


def indent(level=1):
    "Returns the leading whitespace for the given indent level of generated code"
    return "    " * level


class codegen:
    """Consumes events from a protogentokenizer and emits Python source."""

    def __init__(self, tokenizer):
        self.tokenizer = tokenizer

    def gencode(self):
        """Drive the tokenizer and return the complete generated module text"""
        tokens = self.tokenizer
        out = io.StringIO()

        print("# THIS FILE IS AUTOMATICALLY GENERATED. EDIT THE SOURCE FILE NOT THIS ONE", file=out)

        classdetails = None
        classfields = []
        classcodes = []
        for t in tokens:
            if t[0] == tokens.LITERAL:
                out.write(t[1])
                continue
            if t[0] == tokens.PACKETSTART:
                classdetails = t
                classfields = []
                classcodes = []
                continue
            if t[0] == tokens.PACKETEND:
                self.genclasscode(out, classdetails, classfields, classcodes)
                continue
            if t[0] == tokens.CODE:
                classcodes.append(t)
            else:
                classfields.append(t)

        return out.getvalue()

    def genclasscode(self, out, namestuff, fields, codes):
        """Write the class definition for one packet.

        namestuff is the PACKETSTART tuple, fields the list of FIELD /
        CONDITIONAL* / ASSERTION tuples, codes the CODE tuples.
        """
        classname = namestuff[1]
        tokens = self.tokenizer
        print("class %s(BaseProtogenClass):" % (classname,), file=out)
        if namestuff[4] is not None:
            print(indent() + namestuff[4], file=out)

        # names of the real fields, in order
        fieldlist = []
        for f in fields:
            if f[0] == tokens.FIELD:
                fieldlist.append(f[1])

        print(indent(1) + "__fields=" + repr(fieldlist), file=out)
        print("", file=out)

        # Constructor
        print(indent() + "def __init__(self, *args, **kwargs):", file=out)
        print(indent(2) + "dict={}", file=out)
        if namestuff[2] is not None:
            print(indent(2) + "# Default generator arguments", file=out)
            print(indent(2) + "dict.update(" + namestuff[2] + ")", file=out)
        if namestuff[3] is not None:
            print(indent(2) + "# User specified arguments in the packet description", file=out)
            print(indent(2) + "dict.update(" + namestuff[3] + ")", file=out)
        print(indent(2) + "# What was supplied to this function", file=out)
        print(indent(2) + "dict.update(kwargs)", file=out)
        print(indent(2) + "# Parent constructor", file=out)
        print(indent(2) + "super(%s,self).__init__(**dict)" % (namestuff[1],), file=out)
        print(indent(2) + "if self.__class__ is %s:" % (classname,), file=out)
        print(indent(3) + "self._update(args,dict)", file=out)
        print("\n", file=out)
        # getfields
        print(indent() + "def getfields(self):", file=out)
        print(indent(2) + "return self.__fields", file=out)
        print("\n", file=out)
        # update function
        print(indent() + "def _update(self, args, kwargs):", file=out)
        print(indent(2) + "super(%s,self)._update(args,kwargs)" % (namestuff[1],), file=out)
        print(indent(2) + "keys=kwargs.keys()", file=out)
        print(indent(2) + "for key in keys:", file=out)
        print(indent(3) + "if key in self.__fields:", file=out)
        print(indent(4) + "setattr(self, key, kwargs[key])", file=out)
        print(indent(4) + "del kwargs[key]", file=out)
        print(indent(2) + "# Were any unrecognized kwargs passed in?", file=out)
        print(indent(2) + "if __debug__:", file=out)
        print(indent(3) + "self._complainaboutunusedargs(%s,kwargs)" % (namestuff[1],), file=out)
        # if only one field, pass any positional args straight on to it
        if len(fields) == 1:
            f = fields[0]  # original relied on leftover loop binding; same value
            print(indent(2) + "if len(args):", file=out)
            # we can't use makefield as we have to make a new dict
            d = []
            # bug fix: exclude 'P' sized fields as makefield does (the
            # original str/int comparison wrongly included them)
            if f[2] != 'P' and f[2] >= 0:
                d.append("{'sizeinbytes': " + repr(f[2]) + "}")
            for xx in 4, 5:
                if f[xx] is not None:
                    d.append(f[xx])
            for dd in d:
                assert dd[0] == "{" and dd[-1] == '}'
            d = [dd[1:-1] for dd in d]
            print(indent(3) + "dict2={%s}" % (", ".join(d),), file=out)
            print(indent(3) + "dict2.update(kwargs)", file=out)
            print(indent(3) + "kwargs=dict2", file=out)
            print(indent(3) + "self.__field_%s=%s(*args,**dict2)" % (f[1], f[3]), file=out)
        # else error if any args
        else:
            print(indent(2) + "if len(args): raise TypeError('Unexpected arguments supplied: '+`args`)", file=out)
        print(indent(2) + "# Make all P fields that haven't already been constructed", file=out)
        for f in fields:
            if f[0] == tokens.FIELD and f[2] == 'P':
                print(indent(2) + "try: self.__field_" + f[1], file=out)
                print(indent(2) + "except:", file=out)
                self.makefield(out, 3, f)

        print("\n", file=out)

        # Write to a buffer
        print(indent() + "def writetobuffer(self,buf,autolog=True,logtitle=\"<written data>\"):", file=out)
        print(indent(2) + "'Writes this packet to the supplied buffer'", file=out)
        print(indent(2) + "self._bufferstartoffset=buf.getcurrentoffset()", file=out)
        i = 2
        for f in fields:
            if f[0] == tokens.FIELD and f[2] != 'P':
                if '+' in f[7]:
                    # '+' modifier: construct the field on demand
                    print(indent(i) + "try: self.__field_%s" % (f[1],), file=out)
                    print(indent(i) + "except:", file=out)
                    self.makefield(out, i + 1, f, isreading=False)
                print(indent(i) + "self.__field_" + f[1] + ".writetobuffer(buf)", file=out)
            elif f[0] == tokens.CONDITIONALSTART:
                print(indent(i) + f[1], file=out)
                i += 1
            elif f[0] == tokens.CONDITIONALRESTART:
                print(indent(i - 1) + f[1], file=out)
            elif f[0] == tokens.CONDITIONALEND:
                i -= 1
        assert i == 2
        print(indent(2) + "self._bufferendoffset=buf.getcurrentoffset()", file=out)
        print(indent(2) + "if autolog and self._bufferstartoffset==0: self.autologwrite(buf, logtitle=logtitle)", file=out)
        print("\n", file=out)

        # Read from a buffer
        print(indent() + "def readfrombuffer(self,buf,autolog=True,logtitle=\"<read data>\"):", file=out)
        print(indent(2) + "'Reads this packet from the supplied buffer'", file=out)
        i = 2
        print(indent(2) + "self._bufferstartoffset=buf.getcurrentoffset()", file=out)
        print(indent(2) + "if autolog and self._bufferstartoffset==0: self.autologread(buf, logtitle=logtitle)", file=out)
        for f in fields:
            if f[0] == tokens.FIELD:
                if f[2] == 'P':
                    continue
                self.makefield(out, i, f)
                print(indent(i) + "self.__field_%s.readfrombuffer(buf)" % (f[1],), file=out)
            elif f[0] == tokens.CONDITIONALSTART:
                print(indent(i) + f[1], file=out)
                i += 1
            elif f[0] == tokens.CONDITIONALRESTART:
                print(indent(i - 1) + f[1], file=out)
            elif f[0] == tokens.CONDITIONALEND:
                i -= 1
        assert i == 2
        print(indent(2) + "self._bufferendoffset=buf.getcurrentoffset()", file=out)
        print("\n", file=out)

        # Setup each field as a property
        for f in fields:
            if f[0] == tokens.FIELD:
                # get
                print(indent() + "def __getfield_%s(self):" % (f[1],), file=out)
                if '+' in f[7]:
                    print(indent(2) + "try: self.__field_%s" % (f[1],), file=out)
                    print(indent(2) + "except:", file=out)
                    self.makefield(out, 3, f)
                print(indent(2) + "return self.__field_%s.getvalue()\n" % (f[1],), file=out)
                # set
                print(indent() + "def __setfield_%s(self, value):" % (f[1],), file=out)
                print(indent(2) + "if isinstance(value,%s):" % (f[3],), file=out)
                print(indent(3) + "self.__field_%s=value" % (f[1],), file=out)
                print(indent(2) + "else:", file=out)
                self.makefield(out, 3, f, "value,", isreading=False)
                print("", file=out)
                # del
                print(indent() + "def __delfield_%s(self): del self.__field_%s\n" % (f[1], f[1]), file=out)
                # Make it a property
                print(indent() + "%s=property(__getfield_%s, __setfield_%s, __delfield_%s, %s)\n" % (f[1], f[1], f[1], f[1], f[6]), file=out)
                if '++' in f[7]:
                    # allow setting attributes
                    print(indent() + "def set_%s_attr(self, **kwargs):" % f[1], file=out)
                    print(indent(2) + "self.%s" % f[1], file=out)
                    print(indent(2) + "self.__field_%s.update(**kwargs)\n" % f[1], file=out)

        # we are a container
        print(indent() + "def iscontainer(self):", file=out)
        print(indent(2) + "return True\n", file=out)

        print(indent() + "def containerelements(self):", file=out)
        i = 2
        for f in fields:
            if f[0] == tokens.FIELD:
                print(indent(i) + "yield ('%s', self.__field_%s, %s)" % (f[1], f[1], f[6]), file=out)
            elif f[0] == tokens.CONDITIONALSTART:
                print(indent(i) + f[1], file=out)
                i += 1
            elif f[0] == tokens.CONDITIONALRESTART:
                print(indent(i - 1) + f[1], file=out)
            elif f[0] == tokens.CONDITIONALEND:
                i -= 1
        assert i == 2

        # generate embedded codes
        print(file=out)
        for _l in codes:
            print(_l[1], file=out)

        print("\n\n", file=out)

    def makefield(self, out, indentamount, field, args="", isreading=True):
        """Emit the construction of one field as self.__field_<name>=...

        args is extra positional argument text placed before the keyword
        dict; when isreading and the field has the '*' modifier, the
        generator/user dicts are omitted.
        """
        d = []
        if field[2] != 'P' and field[2] >= 0:
            d.append("{'sizeinbytes': " + repr(field[2]) + "}")
        if not (isreading and '*' in field[7]):
            for xx in 4, 5:
                if field[xx] is not None:
                    d.append(field[xx])

        for dd in d:
            assert dd[0] == '{' and dd[-1] == '}'

        if len(d) == 0:
            print(indent(indentamount) + "self.__field_%s=%s(%s)" % (field[1], field[3], args), file=out)
            return

        d = [dd[1:-1] for dd in d]
        dd = "{" + ", ".join(d) + "}"
        print(indent(indentamount) + "self.__field_%s=%s(%s**%s)" % (field[1], field[3], args, dd), file=out)


def processfile(inputfilename, outputfilename):
    """Compile one .p packet-description file into a Python source file"""
    print("Processing", inputfilename, "to", outputfilename)
    fn = os.path.basename(outputfilename)
    fn = os.path.splitext(fn)[0]
    with open(inputfilename, "rt") as f, open(outputfilename, "wt") as f2:
        tokens = tokenize.generate_tokens(f.readline)
        tt = protogentokenizer(tokens, "_gen_" + fn + "_")
        cg = codegen(tt)
        f2.write(cg.gencode())


if __name__ == '__main__':
    if len(sys.argv) > 3 or (len(sys.argv) == 2 and sys.argv[1] == "--help"):
        print("protogen compiles all .p files in this directory to .py")
        print("protogen foo.p compiles foo.p to foo.py")
        print("protogen foo.p bar.py compiles foo.p to bar.py")
        sys.exit(1)
    elif len(sys.argv) == 3:
        processfile(sys.argv[1], sys.argv[2])
    elif len(sys.argv) == 2:
        processfile(sys.argv[1], sys.argv[1] + "y")
    elif len(sys.argv) == 1:
        import glob
        for f in glob.glob("*.p"):
            processfile(f, f + "y")
        for f in glob.glob("phones/*.p"):
            processfile(f, f + "y")
# Generated by PyXR 0.9.4