🚨 removed linter warnings for Python code

parent f4a55f26b0
commit a66b2d20c6

2 changed files with 259 additions and 245 deletions

develop/amalgamate/CHANGES.md (new file, 10 additions)
@@ -0,0 +1,10 @@
The following changes have been made to the code with respect to <https://github.com/edlund/amalgamate/commit/c91f07eea1133aa184f652b8f1398eaf03586208>:

- Resolved inspection results from PyCharm:
    - replaced tabs with spaces
    - added encoding annotation
    - reindented file to remove trailing whitespaces
    - unused import `sys`
    - membership check
    - made function from `_is_within`
    - removed unused variable `actual_path`
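The "membership check" item refers to a one-line idiom fix that appears in the diff below. A minimal before/after sketch, using a hypothetical list and path rather than the commit's actual data:

    included_files = ["config.h", "util.h"]   # hypothetical stand-in for Amalgamation.included_files
    path = "parser.h"

    # Before: PyCharm flags "not x in y" as a non-idiomatic membership test.
    if not path in included_files:
        print("not yet included")

    # After: the "not in" operator, same behavior, no warning.
    if path not in included_files:
        print("not yet included")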
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# coding=utf-8

 # amalgamate.py - Amalgamate C source and header files.
 # Copyright (c) 2012, Erik Edlund <erik.edlund@32767.se>
@@ -37,259 +38,262 @@ import datetime
 import json
 import os
 import re
-import sys


 class Amalgamation(object):

     # Prepends self.source_path to file_path if needed.
     def actual_path(self, file_path):
         if not os.path.isabs(file_path):
             file_path = os.path.join(self.source_path, file_path)
         return file_path

     # Search included file_path in self.include_paths and
     # in source_dir if specified.
     def find_included_file(self, file_path, source_dir):
         search_dirs = self.include_paths[:]
         if source_dir:
             search_dirs.insert(0, source_dir)

         for search_dir in search_dirs:
             search_path = os.path.join(search_dir, file_path)
             if os.path.isfile(self.actual_path(search_path)):
                 return search_path
         return None

     def __init__(self, args):
         with open(args.config, 'r') as f:
             config = json.loads(f.read())
             for key in config:
                 setattr(self, key, config[key])

             self.verbose = args.verbose == "yes"
             self.prologue = args.prologue
             self.source_path = args.source_path
             self.included_files = []

     # Generate the amalgamation and write it to the target file.
     def generate(self):
         amalgamation = ""

         if self.prologue:
             with open(self.prologue, 'r') as f:
                 amalgamation += datetime.datetime.now().strftime(f.read())

         if self.verbose:
             print("Config:")
             print(" target = {0}".format(self.target))
             print(" working_dir = {0}".format(os.getcwd()))
             print(" include_paths = {0}".format(self.include_paths))
         print("Creating amalgamation:")
         for file_path in self.sources:
             # Do not check the include paths while processing the source
             # list, all given source paths must be correct.
-            actual_path = self.actual_path(file_path)
+            # actual_path = self.actual_path(file_path)
             print(" - processing \"{0}\"".format(file_path))
             t = TranslationUnit(file_path, self, True)
             amalgamation += t.content

         with open(self.target, 'w') as f:
             f.write(amalgamation)

         print("...done!\n")
         if self.verbose:
             print("Files processed: {0}".format(self.sources))
             print("Files included: {0}".format(self.included_files))
             print("")


+def _is_within(match, matches):
+    for m in matches:
+        if match.start() > m.start() and \
+                match.end() < m.end():
+            return True
+    return False
+
+
 class TranslationUnit(object):
     # // C++ comment.
     cpp_comment_pattern = re.compile(r"//.*?\n")

     # /* C comment. */
     c_comment_pattern = re.compile(r"/\*.*?\*/", re.S)

     # "complex \"stri\\\ng\" value".
     string_pattern = re.compile("[^']" r'".*?(?<=[^\\])"', re.S)

     # Handle simple include directives. Support for advanced
     # directives where macros and defines needs to expanded is
     # not a concern right now.
     include_pattern = re.compile(
         r'#\s*include\s+(<|")(?P<path>.*?)("|>)', re.S)

     # #pragma once
     pragma_once_pattern = re.compile(r'#\s*pragma\s+once', re.S)

     # Search for pattern in self.content, add the match to
     # contexts if found and update the index accordingly.
     def _search_content(self, index, pattern, contexts):
         match = pattern.search(self.content, index)
         if match:
             contexts.append(match)
             return match.end()
         return index + 2

     # Return all the skippable contexts, i.e., comments and strings
     def _find_skippable_contexts(self):
         # Find contexts in the content in which a found include
         # directive should not be processed.
         skippable_contexts = []

         # Walk through the content char by char, and try to grab
         # skippable contexts using regular expressions when found.
         i = 1
         content_len = len(self.content)
         while i < content_len:
             j = i - 1
             current = self.content[i]
             previous = self.content[j]

             if current == '"':
                 # String value.
                 i = self._search_content(j, self.string_pattern,
                                          skippable_contexts)
             elif current == '*' and previous == '/':
                 # C style comment.
                 i = self._search_content(j, self.c_comment_pattern,
                                          skippable_contexts)
             elif current == '/' and previous == '/':
                 # C++ style comment.
                 i = self._search_content(j, self.cpp_comment_pattern,
                                          skippable_contexts)
             else:
                 # Skip to the next char.
                 i += 1

         return skippable_contexts

-    # Returns True if the match is within list of other matches
-    def _is_within(self, match, matches):
-        for m in matches:
-            if match.start() > m.start() and \
-                match.end() < m.end():
-                return True
-        return False
-
     # Removes pragma once from content
     def _process_pragma_once(self):
         content_len = len(self.content)
         if content_len < len("#include <x>"):
             return 0

         # Find contexts in the content in which a found include
         # directive should not be processed.
         skippable_contexts = self._find_skippable_contexts()

         pragmas = []
         pragma_once_match = self.pragma_once_pattern.search(self.content)
         while pragma_once_match:
-            if not self._is_within(pragma_once_match, skippable_contexts):
+            if not _is_within(pragma_once_match, skippable_contexts):
                 pragmas.append(pragma_once_match)

             pragma_once_match = self.pragma_once_pattern.search(self.content,
                                                                 pragma_once_match.end())

         # Handle all collected pragma once directives.
         prev_end = 0
         tmp_content = ''
         for pragma_match in pragmas:
             tmp_content += self.content[prev_end:pragma_match.start()]
             prev_end = pragma_match.end()
         tmp_content += self.content[prev_end:]
         self.content = tmp_content

     # Include all trivial #include directives into self.content.
     def _process_includes(self):
         content_len = len(self.content)
         if content_len < len("#include <x>"):
             return 0

         # Find contexts in the content in which a found include
         # directive should not be processed.
         skippable_contexts = self._find_skippable_contexts()

         # Search for include directives in the content, collect those
         # which should be included into the content.
         includes = []
         include_match = self.include_pattern.search(self.content)
         while include_match:
-            if not self._is_within(include_match, skippable_contexts):
+            if not _is_within(include_match, skippable_contexts):
                 include_path = include_match.group("path")
                 search_same_dir = include_match.group(1) == '"'
                 found_included_path = self.amalgamation.find_included_file(
                     include_path, self.file_dir if search_same_dir else None)
                 if found_included_path:
                     includes.append((include_match, found_included_path))

             include_match = self.include_pattern.search(self.content,
                                                         include_match.end())

         # Handle all collected include directives.
         prev_end = 0
         tmp_content = ''
         for include in includes:
             include_match, found_included_path = include
             tmp_content += self.content[prev_end:include_match.start()]
             tmp_content += "// {0}\n".format(include_match.group(0))
-            if not found_included_path in self.amalgamation.included_files:
+            if found_included_path not in self.amalgamation.included_files:
                 t = TranslationUnit(found_included_path, self.amalgamation, False)
                 tmp_content += t.content
             prev_end = include_match.end()
         tmp_content += self.content[prev_end:]
         self.content = tmp_content

         return len(includes)

     # Make all content processing
     def _process(self):
         if not self.is_root:
             self._process_pragma_once()
         self._process_includes()

     def __init__(self, file_path, amalgamation, is_root):
         self.file_path = file_path
         self.file_dir = os.path.dirname(file_path)
         self.amalgamation = amalgamation
         self.is_root = is_root

         self.amalgamation.included_files.append(self.file_path)

         actual_path = self.amalgamation.actual_path(file_path)
         if not os.path.isfile(actual_path):
             raise IOError("File not found: \"{0}\"".format(file_path))
         with open(actual_path, 'r') as f:
             self.content = f.read()
             self._process()


 def main():
     description = "Amalgamate C source and header files."
     usage = " ".join([
         "amalgamate.py",
         "[-v]",
         "-c path/to/config.json",
         "-s path/to/source/dir",
         "[-p path/to/prologue.(c|h)]"
     ])
     argsparser = argparse.ArgumentParser(
         description=description, usage=usage)

     argsparser.add_argument("-v", "--verbose", dest="verbose",
                             choices=["yes", "no"], metavar="", help="be verbose")

     argsparser.add_argument("-c", "--config", dest="config",
                             required=True, metavar="", help="path to a JSON config file")

     argsparser.add_argument("-s", "--source", dest="source_path",
                             required=True, metavar="", help="source code path")

     argsparser.add_argument("-p", "--prologue", dest="prologue",
                             required=False, metavar="", help="path to a C prologue file")

     amalgamation = Amalgamation(argsparser.parse_args())
     amalgamation.generate()


 if __name__ == "__main__":
     main()
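For context on how the script is driven: Amalgamation.__init__ reads a JSON config and copies its keys onto the object, and generate() then uses the target, sources and include_paths attributes. A minimal hypothetical example (file names invented for illustration, not part of this commit):

    config.json:

    {
        "target": "build/example_amalgamated.c",
        "sources": ["src/example.c"],
        "include_paths": ["include"]
    }

    Invocation, following the usage string in main():

    python amalgamate.py -v yes -c config.json -s .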