Added initial support for macros

kevstone 2024-01-11 08:01:10 +00:00
parent 720fa8b03f
commit ef0a88b1dc
4 changed files with 27 additions and 59 deletions

View File

@@ -14,7 +14,7 @@ import code_object.struct_arg as struct_arg
 # "flat" to show a flat summary
 # "list" to show a detailed list
 # "" to not show tokens
-def dump_node(cnode, depth=0, tokens=""):
+def dump_node(cnode, depth=0, tokens="flat"):
     debug.line("dump_node", f"{' ' * depth}[{cnode.kind}] spelling=[{cnode.spelling}] type=[{cnode.type.spelling}] extent=[{cnode.extent.start.line}:{cnode.extent.start.column}]->[{cnode.extent.end.line}:{cnode.extent.end.column}]")
     if tokens == "flat":
         debug.line("dump_node", f"{' ' * depth} -> tokens=[{[token.spelling for token in cnode.get_tokens()]}]")

View File

@@ -47,9 +47,11 @@ class CNodeConverter:
     def convert_node(self, node):
         # Handle macros
-        if self._macros.is_instantiation(node):
-            return self.convert_macro_instantiation(node)
-        elif node.kind.is_statement():
+        macro_instantiation_node = self._macros.instantiation_node_for(node)
+        if macro_instantiation_node:
+            return self.convert_macro_instantiation(macro_instantiation_node)
+        if node.kind.is_statement():
             return self.convert_STMT_node(node)
         elif node.kind.is_declaration():
             return self.convert_DECL_node(node)
@@ -59,6 +61,7 @@ class CNodeConverter:
             return self.convert_REF_node(node)
         elif node.kind == clang.cindex.CursorKind.MACRO_DEFINITION:
             debug.line("convert_node", f"IGNORING MACRO spelling=[{node.spelling}] kind=[{node.kind}]")
+            assert False
         else:
             assert False, f"Unclassified kind=[{node.kind}] spelling=[{node.spelling}]"
@@ -79,32 +82,35 @@ class CNodeConverter:
         debug.line("convert_node_not_implemented", f"*** kind=[{node.kind}] not implemented ***")
         cnode_utils.dump_node(node,5)
         debug.line("convert_node_not_implemented", f"No convert routine implemented for kind=[{node.kind}]")
-        #assert False, f"No convert routine implemented for kind=[{node.kind}]"
+        assert False, f"No convert routine implemented for kind=[{node.kind}]"
         return None
 
     # =================================== Macros Convert functions [BEGIN] ===================================
 
     def convert_macro_instantiation(self, node):
-        debug.line("convert_macro_instantiation", f"MACRO INST spelling=[{node.spelling}] kind=[{node.kind}]")
+        debug.line("convert_macro_instantiation", f"MACRO spelling=[{node.spelling}] kind=[{node.kind}] extent=[{node.extent.start.line}:{node.extent.start.column} -> {node.extent.end.line}:{node.extent.end.column}]")
+        token_text = "".join([token.spelling for token in node.get_tokens()]) + ";"
+        debug.line("convert_macro_instantiation", f"MACRO token text=[{token_text}]")
+        macro_lines = code_lines.CodeLines()
+        macro_lines.add_line(token_text)
+        return macro_lines
+
+        '''debug.line("convert_macro_instantiation", f"MACRO INST spelling=[{node.spelling}] kind=[{node.kind}] extent=[{node.extent.start.line}:{node.extent.start.column} -> {node.extent.end.line}:{node.extent.end.column}]")
+        # Iterate children to find node with tokens and process...
+        for child in node.get_children():
+            tokens = child.get_tokens()
+            token = next(tokens, None)
+            if token != None:
+                debug.line("convert_macro_instantiation", f"MACRO INST [FOUND] spelling=[{child.spelling}] kind=[{child.kind}]")
+                debug.line("convert_macro_instantiation", f" [FOUND] displayname=[{child.displayname}]")
+                debug.line("convert_macro_instantiation", f" [FOUND] token=[{token.spelling}] extent=[{token.extent.start.line}:{token.extent.start.column} -> {token.extent.end.line}:{token.extent.end.column}]")
+                for child_arg in child.get_arguments():
+                    debug.line("convert_macro_instantiation", f" [FOUND] argument spelling=[{child_arg.spelling}] type=[{child_arg.type.spelling}] kind=[{child_arg.kind}]")
+                debug.line("convert_macro_instantiation", f" [FOUND] lexical_parent=[{child.lexical_parent}]")
+                debug.line("convert_macro_instantiation", f" [FOUND] semantic_parent=[{child.semantic_parent}]")
+                debug.line("convert_macro_instantiation", f"MACRO INST [1] FOUND node with tokens: spelling=[{child.spelling}] kind=[{child.kind}] extent=[{child.extent.start.line}:{child.extent.start.column} -> {child.extent.end.line}:{child.extent.end.column}]")
+                cnode_utils.dump_node(child, 2)
+                debug.line("convert_macro_instantiation", f"MACRO INST [2] Converting node spelling=[{child.spelling}] kind=[{child.kind}] extent=[{child.extent.start.line}:{child.extent.start.column} -> {child.extent.end.line}:{child.extent.end.column}]")
+                return self.convert_node(child)
+            else:
+                self.convert_macro_instantiation(child)
+                return None
+        return None'''
 
     # =================================== Macros Convert functions [END] ===================================
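The new convert_macro_instantiation no longer descends into the instantiation's children: it reproduces the macro call verbatim from its tokens and emits it as a single line. A minimal sketch of that core idea, independent of the CodeLines container (the helper name below is illustrative):

def macro_call_text(macro_inst_node):
    # The tokens of a MACRO_INSTANTIATION cursor spell out the call site,
    # e.g. ["MY_MACRO", "(", "a", ",", "b", ")"]. Joining the spellings and
    # appending a semicolon turns it back into one statement-like line.
    # Note: original whitespace between tokens is not preserved.
    return "".join(token.spelling for token in macro_inst_node.get_tokens()) + ";"

So a call written as MY_MACRO(a, b) comes back as "MY_MACRO(a,b);"; inter-token spacing is lost, which the token-join approach accepts.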

View File

@@ -93,7 +93,8 @@ class CParser:
         parse_options = clang.cindex.TranslationUnit.PARSE_NONE
         # Enable this to get macros...
-        #parse_options = clang.cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD
+        parse_options = clang.cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD
+        debug.line("parse", f"MACRO generation enabled [PARSE_DETAILED_PROCESSING_RECORD]")
 
         self._translation_unit = index.parse(self._cfilepath + self._cfilename,
                                              args=CParser.parse_args,
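PARSE_DETAILED_PROCESSING_RECORD is what makes libclang report preprocessing cursors (MACRO_DEFINITION, MACRO_INSTANTIATION, INCLUSION_DIRECTIVE) in the translation unit, so the converter can see macros at all. A standalone sketch of the effect (file name and compile args are placeholders):

import clang.cindex

index = clang.cindex.Index.create()
tu = index.parse(
    "example.c",                 # placeholder input file
    args=["-I."],                # placeholder compile args
    options=clang.cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD,
)

# With the detailed processing record enabled, macro cursors show up in the walk
for node in tu.cursor.walk_preorder():
    if node.kind == clang.cindex.CursorKind.MACRO_INSTANTIATION:
        print(node.spelling, node.extent.start.line, node.extent.start.column)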

View File

@@ -24,54 +24,15 @@ class MacroDetails:
         self._inst_nodes.append(inst_node)
 
     def is_instantiation(self, ast_node):
+        return self.instantiation_node_for(ast_node) is not None
+
+    def instantiation_node_for(self, ast_node):
         for node in self._inst_nodes:
             if (node.extent.start.line == ast_node.extent.start.line and
                 node.extent.start.column == ast_node.extent.start.column and
                 node.extent.end.line == ast_node.extent.end.line and
                 node.extent.end.column == ast_node.extent.end.column):
-                return True
+                return node
-        return False
+        return None
-    '''
-    # In case it is needed...
-    def process_macro_definition(self, node):
-        param_tokens = []
-        body_tokens = []
-        is_func_macro=False
-        token_iter = node.get_tokens()
-        token_iter_end = "end"
-        token = next(token_iter, token_iter_end)
-        assert isinstance(token, clang.cindex.Token) and token.spelling == node.spelling, f"First token is not the macro name [{node.spelling}]"
-        macro_name_token = token
-        token = next(token_iter, token_iter_end)
-        if token != token_iter_end and token.spelling == "(":
-            # Must be a function macro
-            token = next(token_iter, token_iter_end)
-            while token != token_iter_end:
-                if token.spelling == ")":
-                    break
-                param_tokens.append(token)
-                token = next(token_iter, token_iter_end)
-            assert token != token_iter_end, f"Function macro ill-formed - could not find \")\" at end of param list!"
-        token = next(token_iter, token_iter_end)
-        while token != token_iter_end:
-            body_tokens.append(token)
-            token = next(token_iter, token_iter_end)
-        body_spellings = [token.spelling for token in body_tokens]
-        if all(param.spelling in body_spellings for param in param_tokens):
-            is_func_macro=True
-        debug.line("process_macro_definition", f" Macro Name=[{node.spelling}] extent=[{node.extent.start.line}:{node.extent.start.column} -> {node.extent.end.line}:{node.extent.end.column}]")
-        debug.line("process_macro_definition", f" Name Token=[{macro_name_token.spelling}] extent=[{macro_name_token.extent.start.line}:{macro_name_token.extent.start.column} -> {macro_name_token.extent.end.line}:{macro_name_token.extent.end.column}]")
-        for token in param_tokens:
-            debug.line("process_macro_definition", f" Param Token=[{token.spelling}] extent=[{token.extent.start.line}:{token.extent.start.column} -> {token.extent.end.line}:{token.extent.end.column}]")
-        for token in body_tokens:
-            debug.line("process_macro_definition", f" Body Token=[{token.spelling}] extent=[{token.extent.start.line}:{token.extent.start.column} -> {token.extent.end.line}:{token.extent.end.column}]")
-        debug.line("process_macro_definition", f" is_func_macro=[{is_func_macro}]")
-    '''
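The refactored MacroDetails matches an AST node to a recorded macro instantiation purely by source extent: identical start and end line/column means the node the parser produced is really a macro expansion. A standalone sketch of that matching rule (the collection step is an assumption about how _inst_nodes is populated elsewhere):

import clang.cindex

def collect_macro_instantiations(translation_unit):
    # Assumed population step: gather every MACRO_INSTANTIATION cursor up front
    # (these only appear when parsing with PARSE_DETAILED_PROCESSING_RECORD).
    return [node for node in translation_unit.cursor.walk_preorder()
            if node.kind == clang.cindex.CursorKind.MACRO_INSTANTIATION]

def instantiation_node_for(inst_nodes, ast_node):
    # Same extent-matching rule as MacroDetails.instantiation_node_for: an AST
    # node is treated as a macro expansion if its source range exactly matches
    # a recorded instantiation.
    for node in inst_nodes:
        if (node.extent.start.line == ast_node.extent.start.line and
                node.extent.start.column == ast_node.extent.start.column and
                node.extent.end.line == ast_node.extent.end.line and
                node.extent.end.column == ast_node.extent.end.column):
            return node
    return None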