tests: replace pycodestyle with black
Drop pycodestyle for code style checking in favor of black. Black is a much faster, stable, PEP8-compliant code style checker that also offers automatic formatting. It aims to produce the smallest possible diffs and is used by many projects, small and large. Running checkstyle with black takes a few seconds and produces terse output, so test-checkstyle-diff is no longer necessary.

Expand the scope of checkstyle to all Python files in the repo, replacing test-checkstyle with checkstyle-python. A fixstyle-python target is also now available for automatic style formatting.

Note: the Python virtualenv has been consolidated in test/Makefile and test/requirements*.txt, which will eventually be moved to a central location. This is required to simplify the automated generation of docker executor images in the CI.

Type: improvement
Change-Id: I022a326603485f58585e879ac0f697fceefbc9c8
Signed-off-by: Klement Sekera <klement.sekera@gmail.com>
Signed-off-by: Dave Wallace <dwallacelf@gmail.com>
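A minimal, illustrative sketch of the reformatting black enforces (not part of this commit; it uses black's Python API, whereas the new make targets would normally invoke the black command line over the source tree):

    # Illustrative sketch: reproduce the quote normalization visible in the
    # diff below using black's Python API (black.format_str / black.Mode).
    import black

    old_line = "details['file'] = filename\n"
    print(black.format_str(old_line, mode=black.Mode()), end="")
    # -> details["file"] = filename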
Committed by: Ole Trøan
Parent: f90348bcb4
Commit: d9b0c6fbf7
@@ -24,8 +24,10 @@ import re
themselves on this list."""
siphon_patterns = []
class Generate(object):
"""Matches a siphon comment block start"""
siphon_block_start = re.compile("^\s*/\*\?\s*(.*)$")
"""Matches a siphon comment block stop"""
@@ -36,8 +38,10 @@ class Generate(object):
"""Matches a siphon block directive such as
'%clicmd:group_label Debug CLI%'"""
- siphon_block_directive = re.compile("(%s)\s*([a-zA-Z0-9_:]+)\s+(.*)\s*(%s)" % \
- (siphon_block_delimiter, siphon_block_delimiter))
+ siphon_block_directive = re.compile(
+ "(%s)\s*([a-zA-Z0-9_:]+)\s+(.*)\s*(%s)"
+ % (siphon_block_delimiter, siphon_block_delimiter)
+ )
"""Matches the start of an initializer block"""
siphon_initializer = re.compile("\s*=")
@@ -54,7 +58,6 @@ class Generate(object):
"""Logging handler"""
log = None
def __init__(self, output_directory, input_prefix):
super(Generate, self).__init__()
self.log = logging.getLogger("siphon.generate")
@@ -70,14 +73,13 @@ class Generate(object):
self.output = {}
for siphon in self.known_siphons:
self.output[siphon] = {
- "file": "%s/%s.siphon" % (output_directory, siphon),
- "global": {},
- "items": [],
- }
+ "file": "%s/%s.siphon" % (output_directory, siphon),
+ "global": {},
+ "items": [],
+ }
self.input_prefix = input_prefix
"""
count open and close braces in str
return (0, index) when braces were found and count becomes 0.
@@ -87,16 +89,17 @@ class Generate(object):
return (count, -1) if not all opening braces are closed, count is the
current depth
"""
def count_braces(self, str, count=0, found=False):
for index in range(0, len(str)):
- if str[index] == '{':
- count += 1;
+ if str[index] == "{":
+ count += 1
found = True
- elif str[index] == '}':
+ elif str[index] == "}":
if count == 0:
# means we never found an open brace
return (-1, -1)
- count -= 1;
+ count -= 1
if count == 0 and found:
return (count, index)
@@ -106,8 +109,8 @@ class Generate(object):
def parse(self, filename):
# Strip the current directory off the start of the
# filename for brevity
- if filename[0:len(self.input_prefix)] == self.input_prefix:
- filename = filename[len(self.input_prefix):]
+ if filename[0 : len(self.input_prefix)] == self.input_prefix:
+ filename = filename[len(self.input_prefix) :]
if filename[0] == "/":
filename = filename[1:]
@@ -115,8 +118,8 @@ class Generate(object):
directory = os.path.dirname(filename)
if directory[0:2] == "./":
directory = directory[2:]
- elif directory[0:len(self.input_prefix)] == self.input_prefix:
- directory = directory[len(self.input_prefix):]
+ elif directory[0 : len(self.input_prefix)] == self.input_prefix:
+ directory = directory[len(self.input_prefix) :]
if directory[0] == "/":
directory = directory[1:]
@@ -133,9 +136,10 @@ class Generate(object):
for line in fd:
line_num += 1
- str = line[:-1] # filter \n
+ str = line[:-1]  # filter \n
"""See if there is a block directive and if so extract it"""
def process_block_directive(str, directives):
m = self.siphon_block_directive.search(str)
if m is not None:
@@ -143,7 +147,7 @@ class Generate(object):
v = m.group(3).strip()
directives[k] = v
# Return only the parts we did not match
- return str[0:m.start(1)] + str[m.end(4):]
+ return str[0 : m.start(1)] + str[m.end(4) :]
return str
@@ -200,27 +204,25 @@ class Generate(object):
# Skip to next line
continue
if siphon is None:
# Look for blocks we need to siphon
for p in siphon_patterns:
if p[0].match(str):
- siphon = [ p[1], str + "\n", 0 ]
+ siphon = [p[1], str + "\n", 0]
siphon_line = line_num
# see if we have an initializer
m = self.siphon_initializer.search(str)
if m is not None:
# count the braces on this line
- (count, index) = \
- self.count_braces(str[m.start():])
+ (count, index) = self.count_braces(str[m.start() :])
siphon[2] = count
# TODO - it's possible we have the
# initializer all on the first line
# we should check for it, but also
# account for the possibility that
# the open brace is on the next line
- #if count == 0:
+ # if count == 0:
# # braces balanced
# close_siphon = siphon
# siphon = None
@@ -231,12 +233,11 @@ class Generate(object):
else:
# See if we should end the siphon here - do we have
# balanced braces?
- (count, index) = self.count_braces(str,
- count=siphon[2], found=True)
+ (count, index) = self.count_braces(str, count=siphon[2], found=True)
if count == 0:
# braces balanced - add the substring and
# close the siphon
- siphon[1] += str[:index+1] + ";\n"
+ siphon[1] += str[: index + 1] + ";\n"
close_siphon = siphon
siphon = None
else:
@@ -259,15 +260,15 @@ class Generate(object):
details[key] = directives[key]
# Copy details for this block
- details['file'] = filename
- details['directory'] = directory
- details['line_start'] = siphon_line
- details['line_end'] = line_num
- details['siphon_block'] = siphon_block.strip()
+ details["file"] = filename
+ details["directory"] = directory
+ details["line_start"] = siphon_line
+ details["line_end"] = line_num
+ details["siphon_block"] = siphon_block.strip()
details["block"] = close_siphon[1]
# Store the item
- self.output[siphon_name]['items'].append(details)
+ self.output[siphon_name]["items"].append(details)
# All done
close_siphon = None
@@ -275,7 +276,7 @@ class Generate(object):
# Update globals
for key in directives.keys():
- if ':' not in key:
+ if ":" not in key:
continue
if filename.endswith("/dir.dox"):
@@ -288,19 +289,17 @@ class Generate(object):
if sn not in self.output:
self.output[sn] = {}
- if 'global' not in self.output[sn]:
- self.output[sn]['global'] = {}
- if l not in self.output[sn]['global']:
- self.output[sn]['global'][l] = {}
+ if "global" not in self.output[sn]:
+ self.output[sn]["global"] = {}
+ if l not in self.output[sn]["global"]:
+ self.output[sn]["global"][l] = {}
- self.output[sn]['global'][l][label] = directives[key]
+ self.output[sn]["global"][l][label] = directives[key]
def deliver(self):
# Write out the data
for siphon in self.output.keys():
self.log.info("Saving siphon data %s." % siphon)
s = self.output[siphon]
- with open(s['file'], "a") as fp:
- json.dump(s, fp,
- separators=(',', ': '), indent=4, sort_keys=True)
+ with open(s["file"], "a") as fp:
+ json.dump(s, fp, separators=(",", ": "), indent=4, sort_keys=True)
@@ -17,8 +17,11 @@ import re
from . import generate
# Register our regexp
- generate.siphon_patterns.append((
- re.compile("(?P<m>VLIB_CLI_COMMAND)\s*"
- "[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"),
- "clicmd"
- ))
+ generate.siphon_patterns.append(
+ (
+ re.compile(
+ "(?P<m>VLIB_CLI_COMMAND)\s*" "[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"
+ ),
+ "clicmd",
+ )
+ )
@@ -17,8 +17,12 @@ import re
from . import generate
# Register our regexp
- generate.siphon_patterns.append((
- re.compile("(?P<m>VLIB_CONFIG_FUNCTION)\s*"
- '[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]'),
- "syscfg"
- ))
+ generate.siphon_patterns.append(
+ (
+ re.compile(
+ "(?P<m>VLIB_CONFIG_FUNCTION)\s*"
+ '[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]'
+ ),
+ "syscfg",
+ )
+ )
@@ -18,9 +18,10 @@ ident = pp.Word(pp.alphas + "_", pp.alphas + pp.nums + "_")
intNum = pp.Word(pp.nums)
hexNum = pp.Literal("0x") + pp.Word(pp.hexnums)
octalNum = pp.Literal("0") + pp.Word("01234567")
- integer = (hexNum | octalNum | intNum) + \
- pp.Optional(pp.Literal("ULL") | pp.Literal("LL") | pp.Literal("L"))
- floatNum = pp.Regex(r'\d+(\.\d*)?([eE]\d+)?') + pp.Optional(pp.Literal("f"))
+ integer = (hexNum | octalNum | intNum) + pp.Optional(
+ pp.Literal("ULL") | pp.Literal("LL") | pp.Literal("L")
+ )
+ floatNum = pp.Regex(r"\d+(\.\d*)?([eE]\d+)?") + pp.Optional(pp.Literal("f"))
char = pp.Literal("'") + pp.Word(pp.printables, exact=1) + pp.Literal("'")
arrayIndex = integer | ident
@@ -36,23 +37,29 @@ semicolon = pp.Literal(";").suppress()
# initializer := { [member = ] (variable | expression | { initializer } ) }
typeName = ident
varName = ident
- typeSpec = pp.Optional("unsigned") + \
- pp.oneOf("int long short float double char u8 i8 void") + \
- pp.Optional(pp.Word("*"), default="")
- typeCast = pp.Combine( "(" + ( typeSpec | typeName ) + ")" ).suppress()
+ typeSpec = (
+ pp.Optional("unsigned")
+ + pp.oneOf("int long short float double char u8 i8 void")
+ + pp.Optional(pp.Word("*"), default="")
+ )
+ typeCast = pp.Combine("(" + (typeSpec | typeName) + ")").suppress()
- string = pp.Combine(pp.OneOrMore(pp.QuotedString(quoteChar='"',
- escChar='\\', multiline=True)), adjacent=False)
+ string = pp.Combine(
+ pp.OneOrMore(pp.QuotedString(quoteChar='"', escChar="\\", multiline=True)),
+ adjacent=False,
+ )
literal = pp.Optional(typeCast) + (integer | floatNum | char | string)
- var = pp.Combine(pp.Optional(typeCast) + varName +
- pp.Optional("[" + arrayIndex + "]"))
+ var = pp.Combine(pp.Optional(typeCast) + varName + pp.Optional("[" + arrayIndex + "]"))
# This could be more complete, but suffices for our uses
- expr = (literal | var)
+ expr = literal | var
"""Parse and render a block of text into a Python dictionary."""
class Parser(object):
"""Compiled PyParsing BNF"""
_parser = None
def __init__(self):
@@ -71,6 +78,8 @@ class Parser(object):
"""Parser for function-like macros - without the closing semi-colon."""
class ParserFunctionMacro(Parser):
def BNF(self):
# VLIB_CONFIG_FUNCTION (unix_config, "unix")
@@ -91,6 +100,8 @@ class ParserFunctionMacro(Parser):
"""Parser for function-like macros with a closing semi-colon."""
class ParseFunctionMacroStmt(ParserFunctionMacro):
def BNF(self):
# VLIB_CONFIG_FUNCTION (unix_config, "unix");
@@ -106,6 +117,8 @@ Parser for our struct initializers which are composed from a
function-like macro, equals sign, and then a normal C struct initializer
block.
"""
class MacroInitializer(ParserFunctionMacro):
def BNF(self):
# VLIB_CLI_COMMAND (show_sr_tunnel_command, static) = {
@@ -115,14 +128,15 @@ class MacroInitializer(ParserFunctionMacro):
# };
cs = pp.Forward()
- member = pp.Combine(dot + varName + pp.Optional("[" + arrayIndex + "]"),
- adjacent=False)
- value = (expr | cs)
+ member = pp.Combine(
+ dot + varName + pp.Optional("[" + arrayIndex + "]"), adjacent=False
+ )
+ value = expr | cs
entry = pp.Group(pp.Optional(member + equals, default="") + value)
- entries = (pp.ZeroOrMore(entry + comma) + entry + pp.Optional(comma)) | \
- (pp.ZeroOrMore(entry + comma))
+ entries = (pp.ZeroOrMore(entry + comma) + entry + pp.Optional(comma)) | (
+ pp.ZeroOrMore(entry + comma)
+ )
cs << (lbrace + entries + rbrace)
@@ -88,7 +88,8 @@ class Siphon(object):
loader=loader,
trim_blocks=True,
autoescape=False,
- keep_trailing_newline=True)
+ keep_trailing_newline=True,
+ )
# Convenience, get a reference to the internal escape and
# unescape methods in html.parser. These then become
@@ -103,32 +104,38 @@ class Siphon(object):
# Output renderers
"""Returns an object to be used as the sorting key in the item index."""
def index_sort_key(self, group):
return group
"""Returns a string to use as the header at the top of the item index."""
def index_header(self):
return self.template("index_header")
"""Returns the string fragment to use for each section in the item
index."""
def index_section(self, group):
return self.template("index_section", group=group)
"""Returns the string fragment to use for each entry in the item index."""
def index_entry(self, meta, item):
return self.template("index_entry", meta=meta, item=item)
"""Returns an object, typically a string, to be used as the sorting key
for items within a section."""
def item_sort_key(self, item):
- return item['name']
+ return item["name"]
"""Returns a key for grouping items together."""
- def group_key(self, directory, file, macro, name):
- _global = self._cmds['_global']
- if file in _global and 'group_label' in _global[file]:
+ def group_key(self, directory, file, macro, name):
+ _global = self._cmds["_global"]
+ if file in _global and "group_label" in _global[file]:
self._group[file] = (directory, file)
return file
@@ -136,60 +143,59 @@ class Siphon(object):
return directory
"""Returns a key for identifying items within a grouping."""
def item_key(self, directory, file, macro, name):
return name
"""Returns a string to use as the header when rendering the item."""
def item_header(self, group):
return self.template("item_header", group=group)
"""Returns a string to use as the body when rendering the item."""
def item_format(self, meta, item):
return self.template("item_format", meta=meta, item=item)
"""Returns a string to use as the label for the page reference."""
def page_label(self, group):
- return "_".join((
- self.name,
- self.sanitize_label(group)
- ))
+ return "_".join((self.name, self.sanitize_label(group)))
"""Returns a title to use for a page."""
def page_title(self, group):
- _global = self._cmds['_global']
+ _global = self._cmds["_global"]
(directory, file) = self._group[group]
- if file and file in _global and 'group_label' in _global[file]:
- return _global[file]['group_label']
+ if file and file in _global and "group_label" in _global[file]:
+ return _global[file]["group_label"]
- if directory in _global and 'group_label' in _global[directory]:
- return _global[directory]['group_label']
+ if directory in _global and "group_label" in _global[directory]:
+ return _global[directory]["group_label"]
return directory
"""Returns a string to use as the label for the section reference."""
def item_label(self, group, item):
- return "__".join((
- self.name,
- item
- ))
+ return "__".join((self.name, item))
"""Label sanitizer; for creating Doxygen references"""
def sanitize_label(self, value):
- return value.replace(" ", "_") \
- .replace("/", "_") \
- .replace(".", "_")
+ return value.replace(" ", "_").replace("/", "_").replace(".", "_")
"""Template processor"""
def template(self, name, **kwargs):
tpl = self._tplenv.get_template(name + self._format.extension)
- return tpl.render(
- this=self,
- **kwargs)
+ return tpl.render(this=self, **kwargs)
# Processing methods
"""Parse the input file into a more usable dictionary structure."""
def load_json(self, files):
self._cmds = {}
self._group = {}
@@ -198,34 +204,37 @@ class Siphon(object):
line_start = 0
for filename in files:
filename = os.path.relpath(filename)
- self.log.info("Parsing items in file \"%s\"." % filename)
+ self.log.info('Parsing items in file "%s".' % filename)
data = None
with open(filename, "r") as fd:
data = json.load(fd)
- self._cmds['_global'] = data['global']
+ self._cmds["_global"] = data["global"]
# iterate the items loaded and regroup it
for item in data["items"]:
try:
- o = self._parser.parse(item['block'])
+ o = self._parser.parse(item["block"])
except Exception:
- self.log.error("Exception parsing item: %s\n%s"
- % (json.dumps(item, separators=(',', ': '),
- indent=4),
- item['block']))
+ self.log.error(
+ "Exception parsing item: %s\n%s"
+ % (
+ json.dumps(item, separators=(",", ": "), indent=4),
+ item["block"],
+ )
+ )
raise
# Augment the item with metadata
o["meta"] = {}
for key in item:
- if key == 'block':
+ if key == "block":
continue
- o['meta'][key] = item[key]
+ o["meta"][key] = item[key]
# Load some interesting fields
- directory = item['directory']
- file = item['file']
+ directory = item["directory"]
+ file = item["file"]
macro = o["macro"]
name = o["name"]
@@ -240,6 +249,7 @@ class Siphon(object):
"""Iterate over the input data, calling render methods to generate the
output."""
def process(self, out=None):
if out is None:
@@ -257,11 +267,12 @@ class Siphon(object):
# Iterate the dictionary and process it
for group in sorted(self._cmds.keys(), key=group_sort_key):
- if group.startswith('_'):
+ if group.startswith("_"):
continue
- self.log.info("Processing items in group \"%s\" (%s)." %
- (group, group_sort_key(group)))
+ self.log.info(
+ 'Processing items in group "%s" (%s).' % (group, group_sort_key(group))
+ )
# Generate the section index entry (write it now)
out.write(self.index_section(group))
@@ -273,15 +284,16 @@ class Siphon(object):
return self.item_sort_key(self._cmds[group][key])
for key in sorted(self._cmds[group].keys(), key=item_sort_key):
- self.log.debug("--- Processing key \"%s\" (%s)." %
- (key, item_sort_key(key)))
+ self.log.debug(
+ '--- Processing key "%s" (%s).' % (key, item_sort_key(key))
+ )
o = self._cmds[group][key]
meta = {
- "directory": o['meta']['directory'],
- "file": o['meta']['file'],
- "macro": o['macro'],
- "name": o['name'],
+ "directory": o["meta"]["directory"],
+ "file": o["meta"]["file"],
+ "macro": o["macro"],
+ "name": o["name"],
"key": key,
"label": self.item_label(group, key),
}
@@ -304,7 +316,7 @@ class Siphon(object):
def do_cliexstart(self, matchobj):
title = matchobj.group(1)
- title = ' '.join(title.splitlines())
+ title = " ".join(title.splitlines())
content = matchobj.group(2)
content = re.sub(r"\n", r"\n ", content)
return "\n\n.. code-block:: console\n\n %s\n %s\n\n" % (title, content)
@@ -316,7 +328,7 @@ class Siphon(object):
def do_cliexcmd(self, matchobj):
content = matchobj.group(1)
- content = ' '.join(content.splitlines())
+ content = " ".join(content.splitlines())
return "\n\n.. code-block:: console\n\n %s\n\n" % content
def process_list(self, matchobj):
@@ -351,7 +363,9 @@ class Siphon(object):
s = re.sub(r"@TODO[^\n]*", "", s)
# ----------- code blocks
s = re.sub(r"@cliexcmd{(.+?)}", self.do_cliexcmd, s, flags=re.DOTALL)
- s = re.sub(r"@cliexstart{(.+?)}(.+?)@cliexend", self.do_cliexstart, s, flags=re.DOTALL)
+ s = re.sub(
+ r"@cliexstart{(.+?)}(.+?)@cliexend", self.do_cliexstart, s, flags=re.DOTALL
+ )
s = re.sub(r"@clistart(.+?)@cliend", self.do_clistart, s, flags=re.DOTALL)
# ----------- lists
s = re.sub(r"^\s*-", r"\n@@@@", s, flags=re.MULTILINE)
@@ -377,6 +391,7 @@ class Siphon(object):
s = re.sub(r"\n[ \f\v\t]*", "\n", s)
return s
class Format(object):
"""Output format class"""
@@ -389,6 +404,7 @@ class Format(object):
class FormatMarkdown(Format):
"""Markdown output format"""
name = "markdown"
extension = ".md"
@@ -399,6 +415,7 @@ formats["markdown"] = FormatMarkdown
class FormatItemlist(Format):
"""Itemlist output format"""
name = "itemlist"
extension = ".itemlist"
@@ -17,6 +17,7 @@
from . import process, parsers
import os
class SiphonCLICMD(process.Siphon):
name = "clicmd"
@@ -32,37 +33,36 @@ class SiphonCLICMD(process.Siphon):
return self.page_label(group) + ".rst"
def index_sort_key(self, group):
- _global = self._cmds['_global']
+ _global = self._cmds["_global"]
if group not in self._group:
return group
(directory, file) = self._group[group]
- if file in _global and 'group_label' in _global[file]:
- return _global[file]['group_label']
+ if file in _global and "group_label" in _global[file]:
+ return _global[file]["group_label"]
- if directory in _global and 'group_label' in _global[directory]:
- return _global[directory]['group_label']
+ if directory in _global and "group_label" in _global[directory]:
+ return _global[directory]["group_label"]
return group
def item_sort_key(self, item):
- return item['value']['path']
+ return item["value"]["path"]
def item_label(self, group, item):
- return "_".join((
- self.name,
- self.sanitize_label(self._cmds[group][item]['value']['path'])
- ))
+ return "_".join(
+ (self.name, self.sanitize_label(self._cmds[group][item]["value"]["path"]))
+ )
def page_title(self, group):
- _global = self._cmds['_global']
+ _global = self._cmds["_global"]
(directory, file) = self._group[group]
- if file and file in _global and 'group_label' in _global[file]:
- return _global[file]['group_label']
+ if file and file in _global and "group_label" in _global[file]:
+ return _global[file]["group_label"]
- if directory in _global and 'group_label' in _global[directory]:
- return _global[directory]['group_label']
+ if directory in _global and "group_label" in _global[directory]:
+ return _global[directory]["group_label"]
file_ext = os.path.basename(directory)
fname, ext = os.path.splitext(file_ext)