3 # WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
5 import re,string,traceback
6 from waflib import Logs,Utils,Errors
7 from waflib.Logs import debug,error
# NOTE(review): this chunk is a line-numbered dump of waf's C preprocessor
# module; the leading integer on each line is the original file's line number.
# Gaps in that numbering mean lines are MISSING from this view.
# Exception type raised for any tokenizing/parsing/evaluation error in this
# module (class body -- original lines 9-12 -- is not visible here).
8 class PreprocError(Errors.WafError):
# Fallback system include directory used by scan() at the bottom of the file.
13 standard_includes=['/usr/include']
# C++ alternative operator spellings mapped to their token equivalents.
18 g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
# One preprocessor directive per line: group(2) = keyword, group(3) = argument text.
19 re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
# Leading identifier (a macro name).
20 re_mac=re.compile("^[a-zA-Z_]\w*")
# Function-like macro: identifier immediately followed by '('.
21 re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
# '#pragma once' detection (argument part only).
22 re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
# Backslash line continuations, joined away before directive parsing.
23 re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
# Comments and string/char literals; substituted via a `repl` helper whose
# definition is on lines missing from this view.
24 re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE)
# Trigraph table: ('??X', replacement) pairs.
25 trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
# Simple C escape character -> ordinal value, used by parse_char for
# sequences such as '\n' or '\t'.
# BUG FIX: the original mapped 'f' -> 11 and 'v' -> 12, but in ASCII
# form-feed '\f' is 12 and vertical-tab '\v' is 11; the two were swapped.
chr_esc = {'0': 0, 'a': 7, 'b': 8, 't': 9, 'n': 10, 'v': 11, 'f': 12, 'r': 13, '\\': 92, "'": 39}
# Token kind tags in lexing priority order.  The NUM/STR/IDENT/OP constants
# themselves are defined on original lines 28-31, missing from this view.
32 tok_types=[NUM,STR,IDENT,OP]
# One regex alternative per token kind: numeric literals (hex, char, floats
# with exponents, octal, decimal), string literals, identifiers, operators
# (including digraphs such as '<%' and '%:').
33 exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
# Combined lexer regex: each alternative is wrapped in a named group keyed by
# its token kind so tokenize_private can tell which kind matched.
34 re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
def filter_comments(filename):
	"""Read *filename* and return its preprocessor directives as a list of
	(keyword, argument) tuples, e.g. ('include', '"foo.h"').

	Steps: translate trigraphs, join backslash-continued lines, strip
	comments and neutralize string literals (via the module-level `repl`
	helper), then collect the directive lines.
	"""
	code = Utils.readf(filename)
	# Trigraph translation.  BUG FIX: the original line was
	# `code=code.split(a).join(b)`, which raises AttributeError (a list has
	# no .join); the intended operation is a plain substring replacement.
	# NOTE(review): upstream guards this loop with a `use_trigraphs` flag on
	# a line not visible in this chunk -- confirm whether the guard belongs
	# here before relying on unconditional translation.
	for (a, b) in trig_def:
		code = code.replace(a, b)
	# Join lines that end with a backslash, then remove comments/strings.
	code = re_nl.sub('', code)
	code = re_cpp.sub(repl, code)
	# group(2) = directive keyword, group(3) = its argument text.
	return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
# Operator groups in decreasing binding strength; used to build the `prec`
# precedence map consulted by get_term.
52 ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
# Builds `prec[op] = group index`.  The dict creation and the `syms = ops[x]`
# binding are on lines missing from this view (original lines ~51, 54, 56-57).
53 for x in range(len(ops)):
55 for u in syms.split():
# Tail of `trimquotes(s)` -- its def line and preceding statements (original
# lines ~58-59) are missing.  Strips a single pair of surrounding quotes.
60 if s[0]=="'"and s[-1]=="'":return s[1:-1]
# Apply the binary operator `val_op` to `val_1`/`val_2` and return the result.
# Operands may arrive as strings; the missing `try:` lines (original 63/65)
# presumably coerce them numerically before the int() fallbacks below.
62 def reduce_nums(val_1,val_2,val_op):
64 except TypeError:a=int(val_1)
66 except TypeError:b=int(val_2)
# Comparison/logical results are normalized to int (C-style 0/1).  The
# arithmetic branches (* / % + - << >> & |), the `d = val_op` binding and the
# final return are on lines missing from this view (original 67-74, 76, 85+).
75 elif d=='||':c=int(a or b)
77 elif d=='&&':c=int(a and b)
78 elif d=='==':c=int(a==b)
79 elif d=='!=':c=int(a!=b)
80 elif d=='<=':c=int(a<=b)
81 elif d=='<':c=int(a<b)
82 elif d=='>':c=int(a>b)
83 elif d=='>=':c=int(a>=b)
84 elif d=='^':c=int(a^b)
# Fragment of `get_num(lst)` -- the def line (original 89) and most of the
# token dispatch are missing.  Contract (from the visible returns): consume a
# primary expression from a (type, value) token list and return
# (numeric value, remaining tokens).
90 if not lst:raise PreprocError("empty list for get_num")
# Parenthesized sub-expression: evaluate tokens up to the matching ')'.
107 raise PreprocError("rparen expected %r"%lst)
108 (num,_)=get_term(lst[1:i])
109 return(num,lst[i+1:])
# Unary '+': no-op.
111 return get_num(lst[1:])
# Unary '-': implemented as multiplication by -1.
113 num,lst=get_num(lst[1:])
114 return(reduce_nums('-1',num,'*'),lst)
# Unary '!': logical negation, normalized to 0/1.
116 num,lst=get_num(lst[1:])
117 return(int(not int(num)),lst)
# Unary '~': bitwise complement.
119 num,lst=get_num(lst[1:])
120 return(~int(num),lst)
122 raise PreprocError("Invalid op token %r for get_num"%lst)
# Branches for NUM/IDENT/'defined' tokens (original 123-127) are missing.
128 raise PreprocError("Invalid token %r for get_num"%lst)
# Fragment of `get_term(lst)` -- the def line (original 129) and several
# branches are missing.  Contract (from the visible code): evaluate a full
# constant expression from a token list, honouring the `prec` table and the
# C ternary `?:`, and return the numeric result.
130 if not lst:raise PreprocError("empty list for get_term")
# Comma operator: discard the left operand.
137 return get_term(lst[1:])
# Ternary `?:`: locate the matching ':' (scan loop is on missing lines) and
# recurse into the selected arm.
153 raise PreprocError("rparen expected %r"%lst)
155 return get_term(lst[1:i])
157 return get_term(lst[i+1:])
# Binary operator at end of list: fold immediately.
159 num2,lst=get_num(lst[1:])
161 num2=reduce_nums(num,num2,v)
162 return get_term([(NUM,num2)]+lst)
165 raise PreprocError("op expected %r"%lst)
# Two pending operators: fold the left pair first only if it binds at least
# as tightly (prec maps operator -> group index, lower = tighter).
166 if prec[v2]>=prec[v]:
167 num2=reduce_nums(num,num2,v)
168 return get_term([(NUM,num2)]+lst)
# Otherwise fold the right pair first, keeping the left operator pending.
170 num3,lst=get_num(lst[1:])
171 num3=reduce_nums(num2,num3,v2)
172 return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
173 raise PreprocError("cannot reduce %r"%lst)
# Evaluate a token list to a single NUM token.  The return statement
# (original line ~176) is missing from this view.
174 def reduce_eval(lst):
175 num,lst=get_term(lst)
# Line 178 belongs to the next function, `stringize(lst)`, whose def line
# (original 177) is missing: it renders token values back to text.
178 lst=[str(v2)for(p2,v2)in lst]
# Implements the C preprocessor '##' operator: concatenate two (type, value)
# tokens.  The branch bodies assigning the result type `p1` (original lines
# 181, 183, 185, 187-188) are missing from this view; the visible conditions
# show which kind pairs are considered a valid paste.
180 def paste_tokens(t1,t2):
182 if t1[0]==OP and t2[0]==OP:
184 elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
186 elif t1[0]==NUM and t2[0]==NUM:
# Reached when no valid pairing matched (guard line missing).
189 raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
190 return(p1,t1[1]+t2[1])
# Macro-expansion engine: substitute defined macros in the token list `lst`
# in place, handling object-like and function-like macros, the '#' stringize
# and '##' paste operators, and __VA_ARGS__.  `ban` lists macro names already
# being expanded, preventing infinite recursion.
# NOTE(review): `ban=[]` is a mutable default argument; it appears to be used
# read-only here (only `ban+[v]` below), but confirm -- `ban=None` would be safer.
# Many interior lines (loop headers, index bookkeeping) are missing from this view.
191 def reduce_tokens(lst,defs,ban=[]):
# `defined NAME` / `defined(NAME)`: replaced by a 0/1 NUM token.
195 if p==IDENT and v=="defined":
204 elif p2==OP and v2=='(':
213 raise PreprocError("Invalid define expression %r"%lst)
# Identifier that names a known macro (and is not banned -- guard on a missing line).
214 elif p==IDENT and v in defs:
# Lazily parse a raw '#define' body string into its structured form.
215 if isinstance(defs[v],str):
216 a,b=extract_macro(defs[v])
# macro_def[0] is a list for object-like macros without parameters:
# expand the replacement tokens recursively and splice them in.
220 if isinstance(macro_def[0],list):
223 reduce_tokens(accu,defs,ban+[v])
224 for x in range(len(accu)):
225 lst.insert(i,accu[x])
# Function-like macro: a '(' must follow the name.
231 raise PreprocError("expected '(' after %r (got nothing)"%v)
233 if p2!=OP or v2!='(':
234 raise PreprocError("expected '(' after %r"%v)
# Collect the call arguments, tracking paren nesting; a top-level ',' or ')'
# (count_paren == 0) terminates a parameter.
241 if p2==OP and count_paren==0:
243 one_param.append((p2,v2))
246 if one_param:args.append(one_param)
249 if not one_param:raise PreprocError("empty param in funcall %s"%p)
250 args.append(one_param)
253 one_param.append((p2,v2))
255 one_param.append((p2,v2))
256 if v2=='(':count_paren+=1
257 elif v2==')':count_paren-=1
# Ran out of tokens before the call's closing ')'.
259 raise PreprocError('malformed macro')
# arg_table maps formal parameter name -> position in `args`.
261 arg_table=macro_def[0]
# '#param' -> stringized argument token.
265 if p2==OP and v2=='#':
266 if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
267 toks=args[arg_table[to_add[j+1][1]]]
268 accu.append((STR,stringize(toks)))
# 'x ## y' -> paste the previous emitted token with the next one.
272 elif p2==OP and v2=='##':
273 if accu and j+1<len(to_add):
275 if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
276 toks=args[arg_table[to_add[j+1][1]]]
278 accu[-1]=paste_tokens(t1,toks[0])
279 accu.extend(toks[1:])
# ', ## __VA_ARGS__' handling: re-join the trailing variadic arguments with
# comma tokens (pt/st bindings are on missing lines).
283 elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
287 for x in args[pt-st+1:]:
289 va_toks.append((OP,','))
290 if va_toks:va_toks.pop()
300 accu[-1]=paste_tokens(t1,to_add[j+1])
# Plain parameter reference: substitute the (recursively expanded) argument.
304 elif p2==IDENT and v2 in arg_table:
305 toks=args[arg_table[v2]]
306 reduce_tokens(toks,defs,ban+[v])
# Finally expand the assembled replacement and splice it back into `lst`
# (reverse order keeps the insertion index stable).
311 reduce_tokens(accu,defs,ban+[v])
312 for x in range(len(accu)-1,-1,-1):
313 lst.insert(i,accu[x])
# Evaluate a '#if'/'#elif' expression: expand macros, then reduce to a value.
# The final return (original line ~319, presumably the truth value of `v`)
# is missing from this view.
315 def eval_macro(lst,defs):
316 reduce_tokens(lst,defs,[])
317 if not lst:raise PreprocError("missing tokens to evaluate")
318 (p,v)=reduce_eval(lst)
# Parse a '#define' body `txt` into (name, definition).  For function-like
# macros the visible return shows the definition as [params, body-tokens];
# the object-like branch and the tokenizing/scanning loop are on lines
# missing from this view.
320 def extract_macro(txt):
322 if re_fun.search(txt):
325 if p!=OP:raise PreprocError("expected open parenthesis")
# Parameter-list state machine: the numbered error messages below distinguish
# which token was unexpected at each parser state.
338 elif p==OP and v==')':
341 raise PreprocError("unexpected token (3)")
345 elif p==OP and v==')':
348 raise PreprocError("comma or ... expected")
# GNU-style named variadic parameters ('args...') are not supported.
354 elif p==OP and v=='...':
355 raise PreprocError("not implemented (1)")
357 raise PreprocError("comma or ... expected (2)")
359 raise PreprocError("not implemented (2)")
360 # (fallthrough)
361 raise PreprocError("unexpected else")
# params: formal-name table; t[i+1:]: replacement token list.
362 return(name,[params,t[i+1:]])
# '#include <...>' (group 'a') or '#include "..."' (group 'b').
366 re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
# Return (kind, path) for an include directive, where kind is '<' or '"'.
# The simple cases are handled by regex; otherwise the line is macro-expanded
# first (computed includes).  Several lines are missing from this view,
# including the tokenize call and the quoted-token branch.
367 def extract_include(txt,defs):
368 m=re_include.search(txt)
370 if m.group('a'):return'<',m.group('a')
371 if m.group('b'):return'"',m.group('b')
# Expand macros in the include argument; 'waf_include' is a dummy ban entry.
373 reduce_tokens(toks,defs,['waf_include'])
375 raise PreprocError("could not parse include %s"%txt)
# Multi-token form: re-assemble '<...>' from the expanded tokens.
380 if toks[0][1]=='<'and toks[-1][1]=='>':
381 return stringize(toks).lstrip('<').rstrip('>')
382 raise PreprocError("could not parse include %s."%txt)
# Fragment of `parse_char(txt)` -- the def line (original 383) and the
# non-escape fast path are missing.  Converts the text of a C character
# literal (without quotes) to its integer value.
384 if not txt:raise PreprocError("attempted to parse a null char")
# Hex escape '\xNN'.
389 if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
390 return int(txt[2:],16)
# Octal escapes; '\0' alone is zero.
392 if c=='0'and len(txt)==2:return 0
# NOTE(review): this branch returns a (length, value) tuple while the others
# return a bare int -- looks inconsistent; confirm against callers.
394 if len(txt)>i and txt[1:1+i].isdigit():
395 return(1+i,int(txt[1:1+i],8))
# Simple escapes via the chr_esc table.
397 try:return chr_esc[c]
398 except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
# Line 400 is the body of `tokenize(s)` whose def line (original 399) is
# missing; it returns a copy of the private tokenizer's result.
400 return tokenize_private(s)[:]
# Convert source text into a list of (token-kind, value) pairs using the
# combined lexer regex.  Many interior lines (the append calls, qualifier
# handling) are missing from this view.
402 def tokenize_private(s):
404 for match in re_clexer.finditer(s):
# Find which named group (token kind) matched.
406 for name in tok_types:
# Translate C++ alternative operator spellings (e.g. 'and' -> '&&').
410 try:v=g_optrans[v];name=OP
# 'true'/'false' keywords become numeric 1/0 tokens.
412 if v.lower()=="true":
415 elif v.lower()=="false":
# Numeric literals: pick the sub-group that matched and normalize the value.
419 if m('oct'):v=int(v,8)
420 elif m('hex'):v=int(m('hex'),16)
421 elif m('n0'):v=m('n0')
425 else:v=m('n2')or m('n4')
# Digraph '%:%:' is the same operator as '##'.
428 elif v=='%:%:':v='##'
def define_name(line):
	"""Return the macro name: the leading identifier of a '#define' body."""
	match = re_mac.match(line)
	return match.group(0)
# Recursive preprocessor-directive interpreter used for C/C++ dependency
# scanning: follows #include chains and tracks conditional-compilation state.
# Many interior lines are missing from this view (several try/else/elif arms,
# attribute initializations such as self.nodes/self.names/self.lines/self.state).
437 class c_parser(object):
# nodepaths: include directories (Node objects); defines: initial macro table.
438 def __init__(self,nodepaths=None,defines=None):
443 self.defs=dict(defines)
446 self.currentnode_stack=[]
447 self.nodepaths=nodepaths or[]
# Files excluded from re-inclusion (e.g. via '#pragma once').
451 self.ban_includes=set([])
# find_resource with a per-context memo dict stored on node.ctx.cache_nd
# (cache lookup line is missing; the except below creates the dict lazily).
452 def cached_find_resource(self,node,filename):
455 except AttributeError:
456 nd=node.ctx.cache_nd={}
461 ret=node.find_resource(filename)
# A node with children is a directory, not an includable file.
463 if getattr(ret,'children',None):
# Prefer the source-tree twin of a build-tree node when one exists.
465 elif ret.is_child_of(node.ctx.bldnode):
466 tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
467 if tmp and getattr(tmp,'children',None):
# Resolve an include: current directory first, then each include path.
471 def tryfind(self,filename):
472 self.curfile=filename
473 found=self.cached_find_resource(self.currentnode_stack[-1],filename)
474 for n in self.nodepaths:
477 found=self.cached_find_resource(n,filename)
478 if found and not found in self.ban_includes:
479 self.nodes.append(found)
# .moc files are generated later; do not read them now (addlines call on a
# missing line).
480 if filename[-4:]!='.moc':
# Unresolved includes are recorded by name.
483 if not filename in self.names:
484 self.names.append(filename)
# Push a file's directives onto the work list (with a parse cache -- the
# cache-hit branch is on missing lines).
486 def addlines(self,node):
487 self.currentnode_stack.append(node.parent)
488 filepath=node.abspath()
490 if self.count_files>recursion_limit:
491 raise PreprocError("recursion limit exceeded")
493 debug('preproc: reading file %r',filepath)
499 self.lines.extend(lns)
502 lines=filter_comments(filepath)
# POPFILE sentinel restores the including directory when reached.
503 lines.append((POPFILE,''))
506 self.lines.extend(lines)
508 raise PreprocError("could not read the file %s"%filepath)
# Parse failures are logged, not fatal: the scan is best-effort.
511 error("parsing %s failed"%filepath)
512 traceback.print_exc()
# Main loop: seed with the root file and env defines, then interpret each
# directive, maintaining self.state (accepted/ignored/skipped per #if level).
513 def start(self,node,env):
514 debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
# Share the parse cache across parsers via the build context.
517 self.parse_cache=bld.parse_cache
518 except AttributeError:
520 self.parse_cache=bld.parse_cache
521 self.current_file=node
# Convert env DEFINES ('NAME=VALUE') into synthetic '#define' directives.
525 lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
527 self.lines.extend([('define',x)for x in lst])
528 except AttributeError:
531 (token,line)=self.lines.pop()
# POPFILE sentinel: leave the current include directory.
534 self.currentnode_stack.pop()
538 if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
# Opening any #if* pushes a fresh 'undefined' state.
541 state.append(undefined)
# Inside a false branch only the conditional structure matters.
545 if skipped in self.state or ignored in self.state:
# '#if expr'
548 ret=eval_macro(tokenize(line),self.defs)
549 if ret:state[-1]=accepted
550 else:state[-1]=ignored
# '#ifdef NAME' (the re_mac.match binding is on a missing line)
553 if m and m.group(0)in self.defs:state[-1]=accepted
554 else:state[-1]=ignored
555 elif token=='ifndef':
557 if m and m.group(0)in self.defs:state[-1]=ignored
558 else:state[-1]=accepted
559 elif token=='include'or token=='import':
560 (kind,inc)=extract_include(line,self.defs)
561 if ve:debug('preproc: include found %s (%s) ',inc,kind)
# strict_quotes: module flag (definition not visible here) restricting the
# scan to quoted includes only.
562 if kind=='"'or not strict_quotes:
563 self.current_file=self.tryfind(inc)
# '#import' implies include-once semantics.
565 self.ban_includes.add(self.current_file)
# '#elif': only meaningful if no earlier branch was taken.
567 if state[-1]==accepted:
569 elif state[-1]==ignored:
570 if eval_macro(tokenize(line),self.defs):
# '#else': flip accepted <-> ignored.
573 if state[-1]==accepted:state[-1]=skipped
574 elif state[-1]==ignored:state[-1]=accepted
575 elif token=='define':
# Macro bodies are stored raw (str) and parsed lazily by reduce_tokens.
577 self.defs[define_name(line)]=line
579 raise PreprocError("Invalid define line %s"%line)
# '#undef NAME' (re_mac.match binding on a missing line).
582 if m and m.group(0)in self.defs:
583 self.defs.__delitem__(m.group(0))
584 elif token=='pragma':
585 if re_pragma_once.match(line.lower()):
586 self.ban_includes.add(self.current_file)
# Per-directive failures are logged and skipped (best-effort scanning).
589 debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
# Body of the task scanner entry point `scan(task)` -- its def line and the
# go_absolute branch structure (original ~591-592, 596, 598) are missing.
# Returns (resolved dependency nodes, unresolved include names).
593 incn=task.generator.includes_nodes
594 except AttributeError:
595 raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator)
# Absolute mode: also search the standard system include directories.
597 nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes]
# Default mode: only directories inside the source/build trees.
599 nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
600 tmp=c_parser(nodepaths)
601 tmp.start(task.inputs[0],task.env)
# NOTE(review): this debug call pre-formats with '%' instead of passing lazy
# args like the other debug calls -- works, but inconsistent.
603 debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names))
604 return(tmp.nodes,tmp.names)