18 """Generate an Abstract Syntax Tree (AST) for C++."""
20 __author__ =
'nnorwitz@google.com (Neal Norwitz)'
40 import __builtin__
as builtins
45 from cpp
import keywords
46 from cpp
import tokenize
50 if not hasattr(builtins,
'reversed'):
53 for i
in range(
len(seq)-1, -1, -1):
56 if not hasattr(builtins,
'next'):
VISIBILITY_PUBLIC, VISIBILITY_PROTECTED, VISIBILITY_PRIVATE = range(3)

FUNCTION_VIRTUAL = 0x02
FUNCTION_PURE_VIRTUAL = 0x04
FUNCTION_ATTRIBUTE = 0x20
FUNCTION_UNKNOWN_ANNOTATION = 0x40
FUNCTION_OVERRIDE = 0x100
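# Editor's note (a sketch, not from the original source): the parser
# combines these bit flags with |=, so a pure virtual method would carry,
# e.g.:
#   modifiers = FUNCTION_VIRTUAL | FUNCTION_PURE_VIRTUAL
#   assert modifiers & FUNCTION_PURE_VIRTUAL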
# These are currently unused. Should really handle these properly at some
# point.
TYPE_MODIFIER_INLINE = 0x010000
TYPE_MODIFIER_EXTERN = 0x020000
TYPE_MODIFIER_STATIC = 0x040000
TYPE_MODIFIER_CONST = 0x080000
TYPE_MODIFIER_REGISTER = 0x100000
TYPE_MODIFIER_VOLATILE = 0x200000
TYPE_MODIFIER_MUTABLE = 0x400000
'inline': TYPE_MODIFIER_INLINE,
'extern': TYPE_MODIFIER_EXTERN,
'static': TYPE_MODIFIER_STATIC,
'const': TYPE_MODIFIER_CONST,
'register': TYPE_MODIFIER_REGISTER,
'volatile': TYPE_MODIFIER_VOLATILE,
'mutable': TYPE_MODIFIER_MUTABLE,
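# Editor's sketch (the dict's own name is elided above, so 'modifier_map'
# is a hypothetical stand-in): keyword modifiers seen on a declaration
# could be folded into one bitmask:
#   flags = 0
#   for kw in ('static', 'const'):
#       flags |= modifier_map[kw]
#   # flags == TYPE_MODIFIER_STATIC | TYPE_MODIFIER_CONST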
_INTERNAL_TOKEN = 'internal'
_NAMESPACE_POP = 'ns-pop'
__contains__ = lambda self, item: False
keys = values = items = iterkeys = itervalues = iteritems = lambda self: ()
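# Editor's note (a sketch; the enclosing class header is elided above, so
# the name used here is hypothetical): these attributes make the class act
# as a permanently empty mapping, e.g.:
#   d = NullDict()   # hypothetical class name
#   'x' in d         # -> False
#   d.keys()         # -> ()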
117 """Returns bool if this node is a declaration."""
121 """Returns bool if this node is a definition."""
125 """Returns bool if this node exportable from a header file."""
129 """Does this AST node require the definition of the node passed in?"""
137 return '%s(%s)' % (name, suffix)
138 return '%s(%d, %d, %s)' % (name, self.
start, self.
end, suffix)
Node.__init__(self, start, end)

Node.__init__(self, start, end)

Node.__init__(self, start, end)

Node.__init__(self, start, end)

Expr.__init__(self, start, end, expr)

Node.__init__(self, start, end)

def __init__(self, start, end, name, parameter_type, default):
    Node.__init__(self, start, end)

return self.type.name == node.name

suffix = '%s %s' % (name, self.name)

suffix += ' = ' + ''.join([d.name for d in self.default])
Node.__init__(self, start, end)

prefix = '::'.join(self.namespace) + '::'
return prefix + self.name

names = [n or '<anonymous>' for n in self.namespace]
suffix += ' in ' + '::'.join(names)
def __init__(self, start, end, name, var_type, initial_value, namespace):
    _GenericDeclaration.__init__(self, start, end, name, namespace)

return self.type.name == node.name

"""Return a string that tries to reconstitute the variable decl."""
suffix = '%s %s' % (self.type, self.name)
def __init__(self, start, end, name, alias, namespace):
    _GenericDeclaration.__init__(self, start, end, name, namespace)

for token in self.alias:
    if token is not None and name == token.name:

suffix = '%s, %s' % (self.name, self.alias)
def __init__(self, start, end, name, fields, namespace):
    _GenericDeclaration.__init__(self, start, end, name, namespace)

suffix = '%s, {%s}' % (self.name, self.fields)
def __init__(self, start, end, name, bases, templated_types, body, namespace):
    _GenericDeclaration.__init__(self, start, end, name, namespace)

return self.bases is None and self.body is None

for token_list in self.bases:
    for token in token_list:
        if token.name == node.name:

suffix = '%s, %s, %s' % (name, self.bases, self.body)
def __init__(self, start, end, name, return_type, parameters,
             modifiers, templated_types, body, namespace):
    _GenericDeclaration.__init__(self, start, end, name, namespace)

return self.body is None

return self.body is not None

if p.name == node.name:

suffix = ('%s %s(%s), 0x%02x, %s' %
def __init__(self, start, end, name, in_class, return_type, parameters,
             modifiers, templated_types, body, namespace):
    Function.__init__(self, start, end, name, return_type, parameters,
                      modifiers, templated_types, body, namespace)
410 """Type used for any variable (eg class, primitive, struct, etc)."""
412 def __init__(self, start, end, name, templated_types, modifiers,
413 reference, pointer, array):
416 name: str name of main type
417 templated_types: [Class (Type?)] template type info between <>
418 modifiers: [str] type modifiers (keywords) eg, const, mutable, etc.
419 reference, pointer, array: bools
421 _GenericDeclaration.__init__(self, start, end, name, [])
423 if not name
and modifiers:
437 suffix = prefix + name
if token.name == '<':
elif token.name == '>':

return tokens[start:end-1], end
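# Editor's note (a sketch of this helper's apparent contract, inferred
# from the '<'/'>' counting above): called with tokens for "pair<int, Foo>"
# and start just past the first '<', it returns the tokens between the
# angle brackets and the index just past the matching '>'.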
478 """Convert [Token,...] to [Class(...), ] useful for base classes.
479 For example, code like class Foo : public Bar<x, y> { ... };
480 the "Bar<x, y>" portion gets converted to an AST.
487 reference = pointer = array =
False
489 def AddType(templated_types):
493 for t
in name_tokens:
494 if keywords.IsKeyword(t.name):
495 modifiers.append(t.name)
498 name =
''.join(names)
500 result.append(
Type(name_tokens[0].start, name_tokens[-1].end,
501 name, templated_types, modifiers,
502 reference, pointer, array))
509 if token.name ==
'<':
511 AddType(self.
ToType(new_tokens))
515 reference = pointer = array =
False
516 elif token.name ==
',':
518 reference = pointer = array =
False
519 elif token.name ==
'*':
521 elif token.name ==
'&':
523 elif token.name ==
'[':
525 elif token.name ==
']':
528 name_tokens.append(token)
if needs_name_removed:
    for i, t in enumerate(parts):
        default = parts[i+1:]
        name = parts[i-1].name
        if name == ']' and parts[i-2].name == '[':
            name = parts[i-3].name

if parts[-1].token_type == tokenize.NAME:
    name = parts.pop().name

if keywords.IsKeyword(p.name):
    modifiers.append(p.name)

templated_types = self.ToType(templated_tokens)

if next_index < end and parts[next_index].name == '::':
elif p.name in ('[', ']', '='):
    other_tokens.append(p)
elif p.name not in ('*', '&', '>'):
    if (type_name and type_name[-1].token_type == tokenize.NAME and
            p.token_type == tokenize.NAME):

    other_tokens.append(p)

type_name = ''.join([t.name for t in type_name])
return name, type_name, templated_types, modifiers, default, other_tokens
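# Editor's example (a sketch, not from the original): for a declaration
# like "const Foo* bar" this decomposition would yield roughly
#   name='bar', type_name='Foo', modifiers=['const'],
# with the '*' left among other_tokens for the caller to interpret.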
name = type_name = ''

pointer = reference = array = False

def AddParameter(end):

(name, type_name, templated_types, modifiers,
 unused_default, unused_other_tokens) = parts

parameter_type = Type(first_token.start, first_token.end,
                      type_name, templated_types, modifiers,
                      reference, pointer, array)

p = Parameter(first_token.start, end, name,
              parameter_type, default)

if template_count > 0:
    type_modifiers.append(s)

AddParameter(s.start)
name = type_name = ''
pointer = reference = array = False

type_modifiers.append(s)
AddParameter(tokens[-1].end)

if not return_type_seq:

start = return_type_seq[0].start
end = return_type_seq[-1].end
_, name, templated_types, modifiers, default, other_tokens = \

names = [n.name for n in other_tokens]
reference = '&' in names
pointer = '*' in names

return Type(start, end, name, templated_types, modifiers,
            reference, pointer, array)

start = names.index('<')

if names[end] == '>':
def __init__(self, token_stream, filename, in_class='', visibility=None,

sys.stderr.write('Got %s in %s @ %s %s\n' %
                 (msg, self.filename, token, printable_queue))

if token.token_type == _INTERNAL_TOKEN:
    if token.name == _NAMESPACE_POP:

if result is not None:
ref_pointer_name_seq, templated_types, value=None):
reference = '&' in ref_pointer_name_seq
pointer = '*' in ref_pointer_name_seq
array = '[' in ref_pointer_name_seq
var_type = Type(pos_token.start, pos_token.end, type_name,
                templated_types, type_modifiers,
                reference, pointer, array)

if token.token_type == tokenize.NAME:
    if (keywords.IsKeyword(token.name) and
            not keywords.IsBuiltinType(token.name)):
        method = getattr(self, 'handle_' + token.name)

if next.token_type == tokenize.SYNTAX and next.name == '(':
    return self._GetMethod([token], FUNCTION_CTOR, None, True)
syntax = tokenize.SYNTAX
temp_tokens, last_token = \

temp_tokens.insert(0, token)
if last_token.name == '(':

expr = bool([e for e in temp_tokens if e.name == '='])

temp_tokens.append(last_token)
temp_tokens.extend(new_temp)

if last_token.name == '[':

temp_tokens.append(last_token)
if temp_tokens[-2].name == 'operator':

temp_tokens2, last_token = \

temp_tokens.extend(temp_tokens2)

if last_token.name == ';':
    parts = self.converter.DeclarationToParts(temp_tokens, True)
    (name, type_name, templated_types, modifiers, default,
     unused_other_tokens) = parts

names = [t.name for t in temp_tokens]

start, end = self.converter.GetTemplateIndices(names)
names = names[:start] + names[end:]
default = ''.join([t.name for t in default])

names, templated_types, default)
if last_token.name == '{':

method_name = temp_tokens[0].name
method = getattr(self, 'handle_' + method_name, None)

return self._GetMethod(temp_tokens, 0, None, False)
elif token.token_type == tokenize.SYNTAX:
    if token.name == '~' and self.in_class:

if (token.token_type == tokenize.NAME and

    return self._GetMethod([token], FUNCTION_DTOR, None, True)
elif token.token_type == tokenize.PREPROCESSOR:

name = token.name[1:].lstrip()
if name.startswith('include'):
    name = name[7:].strip()

if name.startswith('\\'):
    name = name[1:].strip()
assert name[0] in '<"', token
assert name[-1] in '>"', token
system = name[0] == '<'
filename = name[1:-1]
return Include(token.start, token.end, filename, system)
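# Editor's example (derived from the branch above): "#include <vector>"
# yields Include(..., filename='vector', system=True), while
# '#include "foo.h"' yields Include(..., filename='foo.h', system=False).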
if name.startswith('define'):
    name = name[6:].strip()

for i, c in enumerate(name):

value = name[i:].lstrip()

return Define(token.start, token.end, name, value)
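# Editor's example (derived from the loop above): "#define FOO 42" splits
# at the first whitespace, producing Define(..., name='FOO', value='42').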
if name.startswith('if') and name[2:3].isspace():
    condition = name[3:].strip()
    if condition.startswith('0') or condition.startswith('(0)'):

while (last_token.token_type != expected_token_type or
       last_token.name not in expected_tokens):
    tokens.append(last_token)

return tokens, last_token
if token.token_type != tokenize.PREPROCESSOR:

name = token.name[1:].lstrip()
if name.startswith('endif'):
elif name.startswith('if'):

if GetNextToken is None:

token = GetNextToken()

if token.token_type == tokenize.SYNTAX:
    if token.name == open_paren:
    elif token.name == close_paren:

token = GetNextToken()

if token.whence == tokenize.WHENCE_STREAM:
    token.whence = tokenize.WHENCE_QUEUE

assert token.whence == tokenize.WHENCE_QUEUE, token

if tokens[-1].whence == tokenize.WHENCE_STREAM:
    token.whence = tokenize.WHENCE_QUEUE

assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
929 """Returns ([tokens], next_token_info)."""
933 GetNextToken =
lambda:
next(it)
934 next_token = GetNextToken()
936 last_token_was_name =
False
937 while (next_token.token_type == tokenize.NAME
or
938 (next_token.token_type == tokenize.SYNTAX
and
939 next_token.name
in (
'::',
'<'))):
942 if last_token_was_name
and next_token.token_type == tokenize.NAME:
944 last_token_was_name = next_token.token_type == tokenize.NAME
945 tokens.append(next_token)
947 if next_token.name ==
'<':
949 last_token_was_name =
True
950 next_token = GetNextToken()
951 return tokens, next_token
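# Editor's sketch of GetName's contract (inferred from the loop above):
# given a stream spelling "std::map<int, Foo> x;", it collects tokens
# through the closing '>' and returns them with next_token = 'x'; two
# consecutive NAME tokens are what terminate the name.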
assert len(return_type_and_name) >= 1
return self._GetMethod(return_type_and_name, modifiers, templated_types,

def _GetMethod(self, return_type_and_name, modifiers, templated_types,

template_portion = None

assert token.token_type == tokenize.SYNTAX, token
if token.name == '<':

template_portion = [token]

assert token.token_type == tokenize.SYNTAX, token
assert token.name == '(', token

name = return_type_and_name.pop()

while return_type_and_name[index].name != '<':

template_portion = return_type_and_name[index:] + [name]
del return_type_and_name[index:]
name = return_type_and_name.pop()
elif name.name == ']':
    rt = return_type_and_name
    assert rt[-1].name == '[', return_type_and_name
    assert rt[-2].name == 'operator', return_type_and_name
    name_seq = return_type_and_name[-2:]
    del return_type_and_name[-2:]

name_seq[0].start, name.end)
return_type = return_type_and_name

indices = return_type[0]

if name.name == self.in_class and not modifiers:
    modifiers |= FUNCTION_CTOR

if name.name == 'operator' and not parameters:

assert token.name == '(', token

while token.token_type == tokenize.NAME:
    modifier_token = token

if modifier_token.name == 'const':
    modifiers |= FUNCTION_CONST
elif modifier_token.name == '__attribute__':

    modifiers |= FUNCTION_ATTRIBUTE
    assert token.name == '(', token

elif modifier_token.name == 'throw':
    modifiers |= FUNCTION_THROW
    assert token.name == '(', token

elif modifier_token.name == 'override':
    modifiers |= FUNCTION_OVERRIDE
elif modifier_token.name == modifier_token.name.upper():

    modifiers |= FUNCTION_UNKNOWN_ANNOTATION

self.HandleError('unexpected token', modifier_token)
assert token.token_type == tokenize.SYNTAX, token

if token.name == ':':

while token.name != ';' and token.name != '{':

if token.name == '(':
    if parameters[0].name == '*':

name = parameters.pop()

modifiers = [p.name for p in parameters]

del function_parameters[-1]

assert token.token_type == tokenize.SYNTAX, token
assert token.name == ';', token

modifiers, '', None)

real_name = parameters[-1]

modifiers, '', None)

if token.name == '{':

if token.name == '=':

if token.name == 'default' or token.name == 'delete':

assert token.token_type == tokenize.CONSTANT, token
assert token.name == '0', token
modifiers |= FUNCTION_PURE_VIRTUAL

if token.name == '[':

assert token.name == ';', (token, return_type_and_name, parameters)

if len(return_type) > 2 and return_type[-1].name == '::':
    return_type, in_class = \

return Method(indices.start, indices.end, name.name, in_class,
              return_type, parameters, modifiers, templated_types,

return Function(indices.start, indices.end, name.name, return_type,
                parameters, modifiers, templated_types, body,
if token_seq[0].name == '::':

end = len(token_seq) - 1
if token_seq[end-1].name == '::':

seq_copy = token_seq[i:end]

new_name, next = self.GetName(seq_copy[i:])
assert new_name, 'Got empty new_name, next=%s' % next

if next and next.token_type == tokenize.SYNTAX:
    new_name.append(next)
names.append(new_name)

return_type = [e for seq in names[:-1] for e in seq]

class_name = names[-1]
return return_type, class_name
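# Editor's sketch (an assumption about this helper's role): for an
# out-of-class definition such as "int Foo::Bar(...)", the tokens split
# into return type ("int") and class name ("Foo"), letting the caller
# build a Method attributed to class Foo.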
name_tokens, token = self.GetName()

name = ''.join([t.name for t in name_tokens])

if token.token_type == tokenize.SYNTAX and token.name == ';':
    return ctor(token.start, token.end, name, None,
                self.namespace_stack)

if token.token_type == tokenize.NAME and self._handling_typedef:
    self._AddBackToken(token)
    return ctor(token.start, token.end, name, None,
                self.namespace_stack)

fields = list(self._GetMatchingChar('{', '}'))

if token.token_type == tokenize.SYNTAX and token.name == '{':
    next = self._GetNextToken()
    new_type = ctor(token.start, token.end, name, fields,
                    self.namespace_stack)

if next.token_type != tokenize.NAME:

assert token.token_type == tokenize.NAME, token
return self._CreateVariable(token, token.name, name, [], '', None)
name_tokens, var_token = self.GetName()

is_syntax = (var_token.token_type == tokenize.SYNTAX and
             var_token.name[0] in '*&')
is_variable = (var_token.token_type == tokenize.NAME and
               next_token.name == ';')
variable = var_token
if is_syntax and not is_variable:
    variable = next_token

if temp.token_type == tokenize.SYNTAX and temp.name == '(':

t0.start-7, t0.start-2)
type_and_name = [struct]
type_and_name.extend(name_tokens)
type_and_name.extend((var_token, next_token))
return self._GetMethod(type_and_name, 0, None, False)

assert temp.name == ';', (temp, name_tokens, var_token)

modifiers = ['struct']
type_name = ''.join([t.name for t in name_tokens])
position = name_tokens[0]

modifiers, var_token.name, None)
name_tokens.extend((var_token, next_token))

return self._GetClass(Struct, VISIBILITY_PUBLIC, None)
if not (token.token_type == tokenize.NAME and token.name == 'class'):

token = token2 = self._GetNextToken()
if token.name == 'inline':

token2 = self._GetNextToken()
if token2.token_type == tokenize.SYNTAX and token2.name == '~':
    return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR, None)
assert token.token_type == tokenize.NAME or token.name == '::', token
return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')
return_type_and_name.insert(0, token)
if token2 is not token:
    return_type_and_name.insert(1, token2)
return self._GetMethod(return_type_and_name, FUNCTION_VIRTUAL,

assert self.in_class
self.visibility = VISIBILITY_PUBLIC
tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')

return Delete(tokens[0].start, tokens[0].end, tokens)

if (token.token_type == tokenize.NAME and
        keywords.IsKeyword(token.name)):

method = getattr(self, 'handle_' + token.name)

if name.name == ')':

if (len(tokens) >= 4 and
        tokens[1].name == '(' and tokens[2].name == '*'):

elif name.name == ']':

if len(tokens) >= 2:

new_type = self.converter.ToType(tokens)[0]
return Typedef(indices.start, indices.end, name.name,

len_tokens = len(tokens) - 1

while i < len_tokens:
    key = tokens[i].name

if keywords.IsKeyword(key) or key == ',':

type_name = default = None

if tokens[i-1].name == '=':
    assert i < len_tokens, '%s %s' % (i, tokens)
    default, unused_next_token = self.GetName(tokens[i:])

if tokens[i-1].name != ',':

key = tokens[i-1].name
type_name = tokens[i-2]

result[key] = (type_name, default)
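# Editor's example (a sketch): for "template <typename T, int N = 2>" the
# loop above would build roughly {'T': (None, None), 'N': (<int>, [<2>])},
# i.e. a name -> (type token, default tokens) mapping.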
assert token.token_type == tokenize.SYNTAX, token
assert token.name == '<', token

if token.token_type == tokenize.NAME:
    if token.name == 'class':
        return self._GetClass(Class, VISIBILITY_PRIVATE, templated_types)
    elif token.name == 'struct':
        return self._GetClass(Struct, VISIBILITY_PUBLIC, templated_types)
    elif token.name == 'friend':

if last.name == '(':
    return self.GetMethod(FUNCTION_NONE, templated_types)

return self._GetClass(Class, VISIBILITY_PRIVATE, None)

assert token.token_type == tokenize.NAME, token

if token.name not in ('public', 'protected', 'private'):

if token.name != 'virtual':

base, next_token = self.GetName()

assert len(bases_ast) == 1, bases_ast
bases.append(bases_ast[0])
assert next_token.token_type == tokenize.SYNTAX, next_token
if next_token.name == '{':

assert next_token.name == ',', next_token
def _GetClass(self, class_type, visibility, templated_types):

if class_token.token_type != tokenize.NAME:
    assert class_token.token_type == tokenize.SYNTAX, class_token

if next_token.token_type == tokenize.NAME:

name_tokens, token = self.GetName()
class_name = ''.join([t.name for t in name_tokens])

if token.token_type == tokenize.SYNTAX:
    if token.name == ';':

return class_type(class_token.start, class_token.end,
                  class_name, None, templated_types, None,

if token.name in '*&':

if next_token.name == ';':

modifiers = ['class']

modifiers, token.name, None)

tokens = (class_token, token, name_token, next_token)

return self.GetMethod(FUNCTION_NONE, None)
if token.name == ':':

if token.token_type == tokenize.SYNTAX and token.name == '{':
    assert token.token_type == tokenize.SYNTAX, token
    assert token.name == '{', token

body = list(ast.Generate())

if token.token_type != tokenize.NAME:
    assert token.token_type == tokenize.SYNTAX, token
    assert token.name == ';', token

new_class = class_type(class_token.start, class_token.end,
                       class_name, bases, None,

token.name, new_class,
modifiers, token.name, None)

return class_type(class_token.start, class_token.end, class_name,
if token.token_type == tokenize.NAME:

assert token.token_type == tokenize.SYNTAX, token

internal_token.whence = token.whence
if token.name == '=':

name, next_token = self.GetName()
assert next_token.name == ';', next_token

assert token.name == '{', token

tokens[-1] = internal_token

return Using(tokens[0].start, tokens[0].end, tokens)

return self.GetMethod(FUNCTION_CTOR, None)

token = self._GetNextToken()
assert token.token_type == tokenize.SYNTAX
assert token.name == ':'

tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')

return Return(self.current_token.start, self.current_token.end,
              None)
return Return(tokens[0].start, tokens[0].end, tokens)

assert len(tokens) == 1, str(tokens)
return Goto(tokens[0].start, tokens[0].end, tokens[0].name)

self._IgnoreUpTo(tokenize.SYNTAX, ';')
1667 """Utility method that returns an AstBuilder from source code.
1670 source: 'C++ source code'
1676 return AstBuilder(tokenize.GetTokens(source), filename)
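# Editor's usage sketch ('<test>' is an arbitrary filename label):
#   builder = BuilderFromSource('class Foo {};', '<test>')
#   for node in builder.Generate():
#       print(node)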
1680 """Prints all identifiers for a C++ source file.
1684 should_print: predicate with signature: bool Function(token)
1686 source = utils.ReadFile(filename,
False)
1688 sys.stderr.write(
'Unable to find: %s\n' % filename)
1694 for node
in builder.Generate():
1695 if should_print(node):
1697 except KeyboardInterrupt:
1704 """Prints all identifiers for each C++ source file in filenames.
1707 filenames: ['file1', 'file2', ...]
1708 should_print: predicate with signature: bool Function(token)
1710 for path
in filenames:
1715 for filename
in argv[1:]:
1716 source = utils.ReadFile(filename)
1720 print(
'Processing %s' % filename)
1723 entire_ast = filter(
None, builder.Generate())
1724 except KeyboardInterrupt:
1728 traceback.print_exc()
1731 for ast
in entire_ast:
1735 if __name__ ==
'__main__':