feat: add API JSON generator (#5677)

Co-authored-by: Liam <30486941+liamHowatt@users.noreply.github.com>
Kevin Schlosser 2024-06-20 14:02:25 -06:00 committed by GitHub
parent 25e993a137
commit ec80fe49fa
12 changed files with 3662 additions and 106 deletions

.github/workflows/gen_json.yml vendored Normal file

@ -0,0 +1,28 @@
name: Test API JSON generator
on:
push:
pull_request:
jobs:
test_api_json:
if: github.repository == 'lvgl/lvgl'
runs-on: ubuntu-22.04
name: Test API JSON
steps:
- uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install Doxygen and Latex dependencies
run: |
sudo apt-get update
sudo apt-get install doxygen texlive-xetex texlive-binaries texlive-lang-english latexmk fonts-freefont-otf
- name: Install requirements
run: pip install -r scripts/gen_json/requirements.txt
- name: Run test
run: python3 tests/gen_json/test_gen_json.py


@ -16,6 +16,7 @@ import shutil
import tempfile
import config_builder
import add_translation
from docbuilder_utils import spawn
# due to the modifications that take place to the documentation files
# when the documentation builds it is better to copy the source files to a
@ -70,10 +71,11 @@ def cmd(s):
print("")
print(s)
print("-------------------------------------")
r = os.system(s)
if r != 0:
result = os.system(s)
if result != 0:
print("Exit build due to previous error")
exit(-1)
sys.exit(result)
# Get the current branch name
@ -140,7 +142,7 @@ print("Add translation")
add_translation.exec(temp_directory)
print("Running doxygen")
cmd('cd "{0}" && doxygen Doxyfile'.format(temp_directory))
cmd('cd "{temp_directory}" && doxygen Doxyfile'.format(temp_directory=temp_directory))
print('Reading Doxygen output')


@ -10,20 +10,34 @@ src_config = os.path.abspath(os.path.join(
))
def run():
def run(c_path=None):
global dst_config
if c_path is not None:
dst_config = c_path
with open(src_config, 'r') as f:
data = f.read()
data = data.split('\n')
for i, line in enumerate(data):
if 'LV_USE' in line or 'LV_FONT' in line:
if 'LV_USE_PROFILER' in line:
continue
if 'LV_USE' in line or 'LV_FONT' in line and '#define' in line:
line = [item for item in line.split(' ') if item]
for j, item in enumerate(line):
if item == '0':
line[j] = '1'
line = ' '.join(line)
data[i] = line
elif line.startswith('#if 0'):
line = line.replace('#if 0', '#if 1')
data[i] = line
data = '\n'.join(data)
with open(dst_config, 'w') as f:


@ -1,9 +1,79 @@
import os
import sys
from xml.etree import ElementTree as ET
base_path = ''
xml_path = ''
EMIT_WARNINGS = True
DOXYGEN_OUTPUT = True
MISSING_FUNC = 'MissingFunctionDoc'
MISSING_FUNC_ARG = 'MissingFunctionArgDoc'
MISSING_FUNC_RETURN = 'MissingFunctionReturnDoc'
MISSING_FUNC_ARG_MISMATCH = 'FunctionArgMissing'
MISSING_STRUCT = 'MissingStructureDoc'
MISSING_STRUCT_FIELD = 'MissingStructureFieldDoc'
MISSING_UNION = 'MissingUnionDoc'
MISSING_UNION_FIELD = 'MissingUnionFieldDoc'
MISSING_ENUM = 'MissingEnumDoc'
MISSING_ENUM_ITEM = 'MissingEnumItemDoc'
MISSING_TYPEDEF = 'MissingTypedefDoc'
MISSING_VARIABLE = 'MissingVariableDoc'
MISSING_MACRO = 'MissingMacroDoc'
def warn(warning_type, *args):
if EMIT_WARNINGS:
args = ' '.join(str(arg) for arg in args)
if warning_type is None:
output = f'\033[31;1m {args}\033[0m\n'
else:
output = f'\033[31;1m{warning_type}: {args}\033[0m\n'
sys.stdout.write(output)
sys.stdout.flush()
def build_docstring(element):
docstring = None
if element.tag == 'parameterlist':
return None
if element.text:
docstring = element.text.strip()
for item in element:
ds = build_docstring(item)
if ds:
if docstring:
docstring += ' ' + ds
else:
docstring = ds.strip()
if element.tag == 'para':
if docstring:
docstring = '\n\n' + docstring
if element.tag == 'ref':
docstring = f':ref:`{docstring}`'
if element.tail:
if docstring:
docstring += ' ' + element.tail.strip()
else:
docstring = element.tail.strip()
return docstring
def read_as_xml(d):
try:
return ET.fromstring(d)
except: # NOQA
return None
def load_xml(fle):
fle = os.path.join(xml_path, fle + '.xml')
@ -39,7 +109,24 @@ namespaces = {}
files = {}
# things to remove from description
# <para> </para>
class STRUCT_FIELD(object):
def __init__(self, name, type, description, file_name, line_no):
self.name = name
self.type = type
self.description = description
self.file_name = file_name
self.line_no = line_no
class STRUCT(object):
_missing = MISSING_STRUCT
_missing_field = MISSING_STRUCT_FIELD
template = '''\
.. doxygenstruct:: {name}
:project: lvgl
@ -52,36 +139,83 @@ class STRUCT(object):
def __init__(self, parent, refid, name, **_):
if name in structures:
self.__dict__.update(structures[name].__dict__)
return
else:
structures[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.types = set()
self._deps = None
self.header_file = ''
self.description = None
self.fields = []
self.file_name = None
self.line_no = None
structures[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.types = set()
self._deps = None
self.header_file = ''
if parent and refid:
root = load_xml(refid)
root = load_xml(refid)
for compounddef in root:
if compounddef.attrib['id'] != self.refid:
continue
for child in compounddef:
if child.tag == 'includes':
self.header_file = os.path.splitext(child.text)[0]
if child.tag != 'sectiondef':
for compounddef in root:
if compounddef.attrib['id'] != self.refid:
continue
for memberdef in child:
t = get_type(memberdef)
if t is None:
for child in compounddef:
if child.tag == 'includes':
self.header_file = os.path.splitext(child.text)[0]
continue
self.types.add(t)
elif child.tag == 'location':
self.file_name = child.attrib['file']
self.line_no = child.attrib['line']
elif child.tag == 'detaileddescription':
self.description = build_docstring(child)
elif child.tag == 'sectiondef':
for memberdef in child:
t = get_type(memberdef)
description = None
name = ''
file_name = None
line_no = None
for element in memberdef:
if element.tag == 'location':
file_name = element.attrib['file']
line_no = element.attrib['line']
elif element.tag == 'name':
name = element.text
elif element.tag == 'detaileddescription':
description = build_docstring(element)
field = STRUCT_FIELD(name, t, description, file_name, line_no)
self.fields.append(field)
if t is None:
continue
self.types.add(t)
if not self.description:
warn(self._missing, self.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
for field in self.fields:
if not field.description:
warn(self._missing_field, self.name)
warn(None, 'FIELD:', field.name)
warn(None, 'FILE:', field.file_name)
warn(None, 'LINE:', field.line_no)
warn(None)
def get_field(self, name):
for field in self.fields:
if field.name == name:
return field
@property
def deps(self):
@ -117,6 +251,9 @@ class STRUCT(object):
class UNION(STRUCT):
_missing = MISSING_UNION
_missing_field = MISSING_UNION_FIELD
template = '''\
.. doxygenunion:: {name}
:project: lvgl
@ -148,12 +285,48 @@ class VARIABLE(object):
def __init__(self, parent, refid, name, **_):
if name in variables:
self.__dict__.update(variables[name].__dict__)
return
else:
variables[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.description = None
self.type = ''
self.file_name = None
self.line_no = None
variables[name] = self
self.parent = parent
self.refid = refid
self.name = name
if parent is not None:
root = load_xml(parent.refid)
for compounddef in root:
if compounddef.attrib['id'] != parent.refid:
continue
for child in compounddef:
if (
child.tag == 'sectiondef' and
child.attrib['kind'] == 'var'
):
for memberdef in child:
if memberdef.attrib['id'] == refid:
break
else:
continue
self.type = get_type(memberdef)
for element in memberdef:
if element.tag == 'location':
self.file_name = element.attrib['file']
self.line_no = element.attrib['line']
elif element.tag == 'detaileddescription':
self.description = build_docstring(element)
if not self.description:
warn(MISSING_VARIABLE, self.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
def __str__(self):
return self.template.format(name=self.name)
@ -172,17 +345,92 @@ class NAMESPACE(object):
def __init__(self, parent, refid, name, **_):
if name in namespaces:
self.__dict__.update(namespaces[name].__dict__)
return
else:
namespaces[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.description = None
self.line_no = None
self.file_name = None
self.enums = []
self.funcs = []
self.vars = []
self.typedefs = []
self.structs = []
self.unions = []
self.classes = []
namespaces[name] = self
self.parent = parent
self.refid = refid
self.name = name
# root = load_xml(refid)
#
# for compounddef in root:
# if compounddef.attrib['id'] != refid:
# continue
#
# for sectiondef in compounddef:
# if sectiondef.tag != 'sectiondef':
# continue
#
# enum
# typedef
# func
# struct
# union
#
#
# cls = globals()[sectiondef.attrib['kind'].upper()]
# if cls == ENUM:
# if sectiondef[0].text:
# sectiondef.attrib['name'] = sectiondef[0].text.strip()
# enums_.append(cls(self, **sectiondef.attrib))
# else:
# sectiondef.attrib['name'] = None
# enums_.append(cls(self, **sectiondef.attrib))
#
# elif cls == ENUMVALUE:
# if enums_[-1].is_member(sectiondef):
# enums_[-1].add_member(sectiondef)
#
# else:
# sectiondef.attrib['name'] = sectiondef[0].text.strip()
# cls(self, **sectiondef.attrib)
def __str__(self):
return self.template.format(name=self.name)
class FUNC_ARG(object):
def __init__(self, name, type):
self.name = name
self.type = type
self.description = None
groups = {}
class GROUP(object):
template = '''\
.. doxygengroup:: {name}
:project: lvgl
'''
def __init__(self, parent, refid, name, **_):
if name in groups:
self.__dict__.update(functions[name].__dict__)
else:
functions[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.description = None
def __str__(self):
return self.template.format(name=self.name)
class FUNCTION(object):
template = '''\
.. doxygenfunction:: {name}
@ -192,15 +440,20 @@ class FUNCTION(object):
def __init__(self, parent, refid, name, **_):
if name in functions:
self.__dict__.update(functions[name].__dict__)
return
functions[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.types = set()
self.restype = None
self._deps = None
else:
functions[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.types = set()
self.restype = None
self.args = []
self._deps = None
self.description = None
self.res_description = None
self.file_name = None
self.line_no = None
self.void_return = False
if parent is not None:
root = load_xml(parent.refid)
@ -212,10 +465,14 @@ class FUNCTION(object):
for child in compounddef:
if child.tag != 'sectiondef':
continue
if child.attrib['kind'] != 'func':
continue
for memberdef in child:
if 'id' not in memberdef.attrib:
continue
if memberdef.attrib['id'] == refid:
break
else:
@ -232,11 +489,88 @@ class FUNCTION(object):
self.restype = get_type(memberdef)
for child in memberdef:
if child.tag == 'type':
if child.text and child.text.strip() == 'void':
self.void_return = True
if child.tag == 'param':
t = get_type(child)
if t is not None:
self.types.add(t)
for element in child:
if element.tag == 'declname':
arg = FUNC_ARG(element.text, t)
self.args.append(arg)
for child in memberdef:
if child.tag == 'location':
self.file_name = child.attrib['file']
self.line_no = child.attrib['line']
elif child.tag == 'detaileddescription':
self.description = build_docstring(child)
for element in child:
if element.tag != 'para':
continue
for desc_element in element:
if desc_element.tag == 'simplesect' and desc_element.attrib['kind'] == 'return':
self.res_description = build_docstring(desc_element)
if desc_element.tag != 'parameterlist':
continue
for parameter_item in desc_element:
parameternamelist = parameter_item[0]
if parameternamelist.tag != 'parameternamelist':
continue
parameter_name = parameternamelist[0].text
try:
parameterdescription = parameter_item[1]
if parameterdescription.tag == 'parameterdescription':
parameter_description = build_docstring(parameterdescription)
else:
parameter_description = None
except IndexError:
parameter_description = None
if parameter_name is not None:
for arg in self.args:
if arg.name != parameter_name:
continue
arg.description = parameter_description
break
else:
warn(MISSING_FUNC_ARG_MISMATCH, self.name)
warn(None, 'ARG:', parameter_name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
if not self.description:
warn(MISSING_FUNC, self.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
else:
for arg in self.args:
if not arg.description:
warn(MISSING_FUNC_ARG, self.name)
warn(None, 'ARG:', arg.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
if not self.res_description and not self.void_return:
warn(MISSING_FUNC_RETURN, self.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
if self.restype in self.types:
self.restype = None
@ -277,6 +611,7 @@ class FUNCTION(object):
class FILE(object):
def __init__(self, _, refid, name, node, **__):
if name in files:
self.__dict__.update(files[name].__dict__)
@ -296,8 +631,13 @@ class FILE(object):
cls = globals()[member.attrib['kind'].upper()]
if cls == ENUM:
member.attrib['name'] = member[0].text.strip()
enums_.append(cls(self, **member.attrib))
if member[0].text:
member.attrib['name'] = member[0].text.strip()
enums_.append(cls(self, **member.attrib))
else:
member.attrib['name'] = None
enums_.append(cls(self, **member.attrib))
elif cls == ENUMVALUE:
if enums_[-1].is_member(member):
enums_[-1].add_member(member)
@ -316,14 +656,102 @@ class ENUM(object):
def __init__(self, parent, refid, name, **_):
if name in enums:
self.__dict__.update(enums[name].__dict__)
return
else:
enums[name] = self
enums[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.members = []
self.parent = parent
self.refid = refid
self.name = name
self.members = []
self.description = None
self.file_name = None
self.line_no = None
if parent is not None:
root = load_xml(parent.refid)
for compounddef in root:
if compounddef.attrib['id'] != parent.refid:
continue
for child in compounddef:
if child.tag != 'sectiondef':
continue
if child.attrib['kind'] != 'enum':
continue
for memberdef in child:
if 'id' not in memberdef.attrib:
continue
if memberdef.attrib['id'] == refid:
break
else:
continue
break
else:
continue
break
else:
return
# raise RuntimeError(f'not able to locate enum {name} ({refid})')
for element in memberdef:
if element.tag == 'location':
self.file_name = element.attrib['file']
self.line_no = element.attrib['line']
if element.tag == 'detaileddescription':
self.description = build_docstring(element)
elif element.tag == 'enumvalue':
item_name = None
item_description = None
item_file_name = None
item_line_no = None
for s_element in element:
if s_element.tag == 'name':
item_name = s_element.text
elif s_element.tag == 'detaileddescription':
item_description = build_docstring(s_element)
elif s_element.tag == 'location':
item_file_name = child.attrib['file']
item_line_no = child.attrib['line']
if item_name is not None:
for ev in self.members:
if ev.name != item_name:
continue
break
else:
ev = ENUMVALUE(
self,
element.attrib['id'],
item_name
)
self.members.append(ev)
ev.description = item_description
if not self.description:
warn(MISSING_ENUM, self.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
for member in self.members:
if not member.description:
warn(MISSING_ENUM_ITEM, self.name)
warn(None, 'MEMBER:', member.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
def is_member(self, member):
return (
@ -332,11 +760,16 @@ class ENUM(object):
)
def add_member(self, member):
name = member[0].text.strip()
for ev in self.members:
if ev.name == name:
return
self.members.append(
ENUMVALUE(
self,
member.attrib['refid'],
member[0].text.strip()
name
)
)
@ -350,6 +783,29 @@ class ENUM(object):
defines = {}
def build_define(element):
define = None
if element.text:
define = element.text.strip()
for item in element:
ds = build_define(item)
if ds:
if define:
define += ' ' + ds
else:
define = ds.strip()
if element.tail:
if define:
define += ' ' + element.tail.strip()
else:
define = element.tail.strip()
return define
class DEFINE(object):
template = '''\
.. doxygendefine:: {name}
@ -359,51 +815,17 @@ class DEFINE(object):
def __init__(self, parent, refid, name, **_):
if name in defines:
self.__dict__.update(defines[name].__dict__)
return
else:
defines[name] = self
defines[name] = self
self.parent = parent
self.refid = refid
self.name = name
def __str__(self):
return self.template.format(name=self.name)
class ENUMVALUE(object):
template = '''\
.. doxygenenumvalue:: {name}
:project: lvgl
'''
def __init__(self, parent, refid, name, **_):
self.parent = parent
self.refid = refid
self.name = name
def __str__(self):
return self.template.format(name=self.name)
class TYPEDEF(object):
template = '''\
.. doxygentypedef:: {name}
:project: lvgl
'''
def __init__(self, parent, refid, name, **_):
if name in typedefs:
self.__dict__.update(typedefs[name].__dict__)
return
typedefs[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.type = None
self._deps = None
self.parent = parent
self.refid = refid
self.name = name
self.description = None
self.file_name = None
self.line_no = None
self.params = None
self.initializer = None
if parent is not None:
root = load_xml(parent.refid)
@ -415,7 +837,8 @@ class TYPEDEF(object):
for child in compounddef:
if child.tag != 'sectiondef':
continue
if child.attrib['kind'] != 'typedef':
if child.attrib['kind'] != 'define':
continue
for memberdef in child:
@ -432,6 +855,123 @@ class TYPEDEF(object):
else:
return
for element in memberdef:
if element.tag == 'location':
self.file_name = element.attrib['file']
self.line_no = element.attrib['line']
elif element.tag == 'detaileddescription':
self.description = build_docstring(element)
elif element.tag == 'param':
for child in element:
if child.tag == 'defname':
if self.params is None:
self.params = []
if child.text:
self.params.append(child.text)
elif element.tag == 'initializer':
initializer = build_define(element)
if initializer is None:
self.initializer = ''
else:
self.initializer = initializer
if not self.description:
warn(MISSING_MACRO, self.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
def __str__(self):
return self.template.format(name=self.name)
class ENUMVALUE(object):
template = '''\
.. doxygenenumvalue:: {name}
:project: lvgl
'''
def __init__(self, parent, refid, name, **_):
self.parent = parent
self.refid = refid
self.name = name
self.description = None
self.file_name = None
self.line_no = None
def __str__(self):
return self.template.format(name=self.name)
class TYPEDEF(object):
template = '''\
.. doxygentypedef:: {name}
:project: lvgl
'''
def __init__(self, parent, refid, name, **_):
if name in typedefs:
self.__dict__.update(typedefs[name].__dict__)
else:
typedefs[name] = self
self.parent = parent
self.refid = refid
self.name = name
self.type = None
self._deps = None
self.description = None
self.file_name = None
self.line_no = None
if parent is not None:
root = load_xml(parent.refid)
for compounddef in root:
if compounddef.attrib['id'] != parent.refid:
continue
for child in compounddef:
if child.tag != 'sectiondef':
continue
if child.attrib['kind'] != 'typedef':
continue
for memberdef in child:
if 'id' not in memberdef.attrib:
continue
if memberdef.attrib['id'] == refid:
break
else:
continue
break
else:
continue
break
else:
return
for element in memberdef:
if element.tag == 'location':
self.file_name = element.attrib['file']
self.line_no = element.attrib['line']
if element.tag == 'detaileddescription':
self.description = build_docstring(element)
if not self.description:
warn(MISSING_TYPEDEF, self.name)
warn(None, 'FILE:', self.file_name)
warn(None, 'LINE:', self.line_no)
warn(None)
self.type = get_type(memberdef)
@property
@ -622,7 +1162,7 @@ def get_includes(name1, name2, obj, includes):
if not is_name_match(name1, name2):
return
if obj.parent is not None:
if obj.parent is not None and hasattr(obj.parent, 'header_file'):
header_file = obj.parent.header_file
elif hasattr(obj, 'header_file'):
header_file = obj.header_file
@ -638,12 +1178,112 @@ def get_includes(name1, name2, obj, includes):
includes.add((header_file, html_files[header_file]))
class XMLSearch(object):
def __init__(self, temp_directory):
global xml_path
import subprocess
import re
import sys
bp = os.path.abspath(os.path.dirname(__file__))
lvgl_path = os.path.join(temp_directory, 'lvgl')
src_path = os.path.join(lvgl_path, 'src')
doxy_path = os.path.join(bp, 'Doxyfile')
with open(doxy_path, 'rb') as f:
data = f.read().decode('utf-8')
data = data.replace(
'#*#*LV_CONF_PATH*#*#',
os.path.join(temp_directory, 'lv_conf.h')
)
data = data.replace('*#*#SRC#*#*', '"{0}"'.format(src_path))
with open(os.path.join(temp_directory, 'Doxyfile'), 'wb') as f:
f.write(data.encode('utf-8'))
status, br = subprocess.getstatusoutput("git branch")
_, gitcommit = subprocess.getstatusoutput("git rev-parse HEAD")
br = re.sub('\* ', '', br)
urlpath = re.sub('release/', '', br)
os.environ['LVGL_URLPATH'] = urlpath
os.environ['LVGL_GITCOMMIT'] = gitcommit
p = subprocess.Popen(
f'cd "{temp_directory}" && doxygen Doxyfile',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
out, err = p.communicate()
if p.returncode:
if out:
sys.stdout.write(out)
sys.stdout.flush()
if err:
sys.stderr.write(err)
sys.stdout.flush()
sys.exit(p.returncode)
xml_path = os.path.join(temp_directory, 'xml')
self.index = load_xml('index')
for compound in self.index:
compound.attrib['name'] = compound[0].text.strip()
if compound.attrib['kind'] in ('example', 'page', 'dir'):
continue
globals()[compound.attrib['kind'].upper()](
None,
node=compound,
**compound.attrib
)
def get_macros(self):
return list(defines.values())
def get_enum_item(self, e_name):
for enum, obj in enums.items():
for enum_item in obj.members:
if enum_item.name == e_name:
return enum_item
def get_enum(self, e_name):
return enums.get(e_name, None)
def get_function(self, f_name):
return functions.get(f_name, None)
def get_variable(self, v_name):
return variables.get(v_name, None)
def get_union(self, u_name):
return unions.get(u_name, None)
def get_structure(self, s_name):
return structures.get(s_name, None)
def get_typedef(self, t_name):
return typedefs.get(t_name, None)
def get_macro(self, m_name):
return defines.get(m_name, None)
def run(project_path, temp_directory, *doc_paths):
global base_path
global xml_path
global lvgl_src_path
global api_path
base_path = temp_directory
xml_path = os.path.join(base_path, 'xml')
api_path = os.path.join(base_path, 'API')
@ -651,7 +1291,7 @@ def run(project_path, temp_directory, *doc_paths):
if not os.path.exists(api_path):
os.makedirs(api_path)
iter_src('API', '')
index = load_xml('index')


@ -0,0 +1,504 @@
Output API as JSON data
=======================
We have written a script that reads the header files in LVGL and outputs a
more friendly JSON format for the API. This is done so that bindings that generate
code automatically have an easy way to collect the needed information without
having to reinvent the wheel. Libraries for reading the JSON format already exist
for just about every programming language out there.
The script has a few requirements in order to run:
- Python >= 3.10
- Pycparser >= 2.21: Python library for reading the preprocessor output from the C compiler
- PyMSVC >= 0.4.0: Python library for setting up the MSVC compiler environment (Windows only)
- C compiler: gcc for Linux, clang for macOS and MSVC for Windows
- Doxygen: used to read the docstrings from the header files.
There are several options when running the script. They are as follows:
- `--output-path`: output directory for the JSON file. If one is not supplied
then the JSON will be written to stdout
- `--lvgl-config`: path to lv_conf.h (including the file name). If this is not
set then a config file will be generated that has the most common things turned on
- `--develop`: leaves the temporary folder in place.
To use the script:
.. code:: shell
python /scripts/gen_json/gen_json.py --output-path=json/output/directory --lvgl-config=path/to/lv_conf.h
Or, if you want to run it as a subprocess from inside a generation script and read the output from stdout:
.. code:: shell
python /scripts/gen_json/gen_json.py --lvgl-config=path/to/lv_conf.h
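In a generation script, that typically looks something like the following minimal sketch (assuming it is run from the LVGL repository root, with ``path/to/lv_conf.h`` replaced by a real config path, and that the generator writes only the JSON document to stdout in this mode):
.. code:: python

    import json
    import subprocess
    import sys

    # With no --output-path the generator prints the JSON document to stdout.
    result = subprocess.run(
        [sys.executable, 'scripts/gen_json/gen_json.py',
         '--lvgl-config=path/to/lv_conf.h'],
        capture_output=True,
        text=True,
        check=True,
    )

    api = json.loads(result.stdout)
    print(len(api['functions']), 'functions,', len(api['enums']), 'enums')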
The JSON data is broken apart into a couple of main categories.
- enums
- functions
- function_pointers
- structures
- unions
- variables
- typedefs
- forward_decls
- macros
Those categories are the element names under the root of the JSON data.
The value for each category is an array of JSON elements. There is a bit of
nesting with the elements in the arrays, so I have created "json_types" that
will allow you to identify exactly what you are dealing with.
The different "json_types" are as follows:
- ``"array"``: The array type is used to identify arrays.
Available JSON fields:
- ``"dim"``: number of items in the array
- ``"quals"``: array of qualifiers, IE "const"
- ``"type"``: This may or may not be available.
- ``"name"``: the name of the data type
- ``"field"``: This type is used to describe fields in structures and unions.
It is used in the ``"fields"`` array of the ``"struct"`` and ``"union"`` JSON types.
Available JSON fields:
- ``"name"``: The name of the field.
- ``"type"``: This contains the type information for the field. Check the
``"json_type"`` to know what type you are dealing with.
- ``"bitsize"``: The number of bits the field has or ``null``
if there is no bit size defined
- ``"docstring"``: you should know what this is.
- ``"arg"``: Used to describe an argument/parameter in a function or a function pointer.
Available JSON fields:
- ``"name"``: The name of the argument/parameter.
- ``"type"``: This contains the type information for the field. Check the
``"json_type"`` to know what type you are dealing with.
- ``"docstring"``: you should know what this is.
- ``"quals"``: array of qualifiers, IE "const"
- ``"forward_decl"``: Describes a forward declaration.There are structures in
LVGL that are considered to be private and that is what these desccribe.
Available JSON fields:
- ``"name"``: The name of the formard declaration.
- ``"type"``: This contains the type information for the field. Check the
``"json_type"`` to know what type you are dealing with.
- ``"docstring"``: you should know what this is.
- ``"quals"``: array of qualifiers, IE "const"
- ``"function_pointer"``: Describes a function pointer. These are used when
registering callback functions in LVGL.
Available JSON fields:
- ``"name"``: The name of the function pointer.
- ``"type"``: This contains the return type information for the function pointer.
- ``"docstring"``: you should know what this is.
- ``"args"``: array of ``"arg"`` objects. This describes the fuction arguments/parameters.
- ``"quals"``: array of qualifiers, IE "const"
- ``"variable"``: Describes a global variable.
Available JSON fields:
- ``"name"``: The name of the variable.
- ``"type"``: This contains the type information for the field. Check the
``"json_type"`` to know what type you are dealing with.
- ``"docstring"``: you should know what this is.
- ``"quals"``: array of qualifiers, IE "const"
- ``"storage"``: array of storage classifiers, IE "extern"
- ``"special_type"``: Currently only used to describe an ellipsis argument
for a function.
Available JSON fields:
- ``"name"``: will always be "ellipsis".
- ``"primitive_type"``: This is a base type. There or no other types beneith this.
This tells you that the type is a basic or primitive C type.
IE: struct, union, int, unsigned int, etc...
Available JSON fields:
- ``"name"``: The name of the primitive type.
- ``"enum"``: Describes a grouping of enumeration items/members.
Available JSON fields:
- ``"name"``: The name of the enumeration group/type.
- ``"type"``: This contains the type information for the enumeration group.
This is always going to be an "int" type. Make sure you do not use this
type as the type for the members of this enumeration group. Check each
enumeration member's type to get the correct type.
- ``"docstring"``: you should know what this is.
- ``"members"``: array of ``"enum_member"`` objects
- ``"enum_member"``: Describes an enumeration item/member. Only found under
the ``"members"`` field of an ``"enum"`` JSON type
Available JSON fields:
- ``"name"``: The name of the enumeration.
- ``"type"``: This contains the type information for the enum member.
This gets a bit tricky because the type specified here is not always
going to be an "int". It will usually point to an lvgl type, and the type
of that lvgl type can be found in the ``"typedefs"`` section (see the
resolver sketch after this list).
- ``"docstring"``: you should know what this is.
- ``"value"``: the enumeration member/item's value
- ``"lvgl_type"``: This is a base type. There or no other types beneith this.
This tells you that the type is an LVGL data type.
Available JSON fields:
- ``"name"``: The name of the type.
- ``"quals"``: array of qualifiers, IE "const
- ``"struct"``: Describes a structure
Available JSON fields:
- ``"name"``: The name of the structure.
- ``"type"``: This contains the primitive type information for the structure.
- ``"docstring"``: you should know what this is.
- ``"fields"``: array of ``"field"`` elements.
- ``"quals"``: array of qualifiers, IE "const"
- ``"union"``: Describes a union
Available JSON fields:
- ``"name"``: The name of the union.
- ``"type"``: This contains the primitive type information for the union.
- ``"docstring"``: you should know what this is.
- ``"fields"``: array of ``"field"`` elements.
- ``"quals"``: array of qualifiers, IE "const"
- ``"macro"``: describes a macro. There is limited information that can be
collected about macros and in most cases a binding will need to have these
statically added to a binding. It is more for collecting the docstrings than
anything else.
Available JSON fields:
- ``"name"``: The name of the macro.
- ``"docstring"``: you should know what this is.
- ``"ret_type"``: return type from a function. This is only going to be seen in the ``"type"``
element of a ``"function"`` type.
Available JSON fields:
- ``"type"``: This contains the type information for the field. Check the
``"json_type"`` to know what type you are dealing with.
- ``"docstring"``: you should know what this is.
- ``"function"``: Describes a function.
Available JSON fields:
- ``"name"``: The name of the function.
- ``"type"``: This contains the type information for the return value.
- ``"docstring"``: you should know what this is.
- ``"args"``: array of ``"arg"`` json types. This describes the fuction arguments/parameters.
- ``"stdlib_type"``: This is a base type, meaning that there are no more
type levels beneith this. This tells us that the type is from the C stdlib.
Available JSON fields:
- ``"name"``: The name of the type.
- ``"quals"``: array of qualifiers, IE "const
- ``"unknown_type"``: This should not be seen. If it is then there needs to be
an adjustment made to the script. Please open an issue and let us know if you see this type.
Available JSON fields:
- ``"name"``: The name of the type.
- ``"quals"``: array of qualifiers, IE "const
- ``"pointer"``: This is a wrapper object to let you know that the type you
are dealing with is a pointer
Available JSON fields:
- ``"type"``: This contains the type information for the pointer. Check the
``"json_type"`` to know what type you are dealing with.
- ``"quals"``: array of qualifiers, IE "const", may or may not be available.
- ``"typedef"``: type definitions. I will explain more on this below.
Available JSON fields:
- ``"name"``: The name of the typedef.
- ``"type"``: This contains the type information for the field. Check the
``"json_type"`` to know what type you are dealing with.
- ``"docstring"``: you should know what this is.
- ``"quals"``: array of qualifiers, IE "const"
Here is an example of what the output will look like.
.. code:: json
{
"enums":[
{
"name":"_lv_result_t",
"type":{
"name":"int",
"json_type":"primitive_type"
},
"json_type":"enum",
"docstring":"LVGL error codes. ",
"members":[
{
"name":"LV_RESULT_INVALID",
"type":{
"name":"_lv_result_t",
"json_type":"lvgl_type"
},
"json_type":"enum_member",
"docstring":"",
"value":"0x0"
},
{
"name":"LV_RESULT_OK",
"type":{
"name":"_lv_result_t",
"json_type":"lvgl_type"
},
"json_type":"enum_member",
"docstring":"",
"value":"0x1"
}
]
}
],
"functions":[
{
"name":"lv_version_info",
"type":{
"type":{
"type":{
"name":"char",
"json_type":"primitive_type",
"quals":[
"const"
]
},
"json_type":"pointer",
"quals":[]
},
"json_type":"ret_type",
"docstring":""
},
"json_type":"function",
"docstring":"",
"args":[
{
"name":null,
"type":{
"name":"void",
"json_type":"primitive_type",
"quals":[]
},
"json_type":"arg",
"docstring":"",
"quals":[]
}
]
}
],
"function_pointers":[
{
"name":"lv_tlsf_walker",
"type":{
"type":{
"name":"void",
"json_type":"primitive_type",
"quals":[]
},
"json_type":"ret_type",
"docstring":""
},
"json_type":"function_pointer",
"docstring":"",
"args":[
{
"name":"ptr",
"type":{
"type":{
"name":"void",
"json_type":"primitive_type",
"quals":[]
},
"json_type":"pointer",
"quals":[]
},
"json_type":"arg",
"docstring":""
},
{
"name":"size",
"type":{
"name":"size_t",
"json_type":"stdlib_type",
"quals":[]
},
"json_type":"arg",
"docstring":""
},
{
"name":"used",
"type":{
"name":"int",
"json_type":"primitive_type",
"quals":[]
},
"json_type":"arg",
"docstring":""
},
{
"name":"user",
"type":{
"type":{
"name":"void",
"json_type":"primitive_type",
"quals":[]
},
"json_type":"pointer",
"quals":[]
},
"json_type":"arg",
"docstring":""
}
],
"quals":[]
}
],
"structures":[
{
"name":"_lv_gradient_cache_t",
"type":{
"name":"struct",
"json_type":"primitive_type"
},
"json_type":"struct",
"docstring":null,
"fields":[
{
"name":"color_map",
"type":{
"type":{
"name":"lv_color_t",
"json_type":"lvgl_type",
"quals":[]
},
"json_type":"pointer",
"quals":[]
},
"json_type":"field",
"bitsize":null,
"docstring":""
},
{
"name":"opa_map",
"type":{
"type":{
"name":"lv_opa_t",
"json_type":"lvgl_type",
"quals":[]
},
"json_type":"pointer",
"quals":[]
},
"json_type":"field",
"bitsize":null,
"docstring":""
},
{
"name":"size",
"type":{
"name":"uint32_t",
"json_type":"stdlib_type",
"quals":[]
},
"json_type":"field",
"bitsize":null,
"docstring":""
}
]
}
],
"unions":[],
"variables":[
{
"name":"lv_global",
"type":{
"name":"lv_global_t",
"json_type":"lvgl_type",
"quals":[]
},
"json_type":"variable",
"docstring":"",
"quals":[],
"storage":[
"extern"
]
}
],
"typedefs":[
{
"name":"lv_pool_t",
"type":{
"type":{
"name":"void",
"json_type":"primitive_type",
"quals":[]
},
"json_type":"pointer"
},
"json_type":"typedef",
"docstring":"",
"quals":[]
}
],
"forward_decls":[
{
"name":"lv_fragment_managed_states_t",
"type":{
"name":"struct",
"json_type":"primitive_type"
},
"json_type":"forward_decl",
"docstring":"",
"quals":[]
}
],
"macros":[
{
"name":"ZERO_MEM_SENTINEL",
"json_type":"macro",
"docstring":""
}
]
}
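As a usage sketch, the following loads a file produced with ``--output-path`` and prints a rough C-like signature for every function, using only the fields documented above (the ``lvgl.json`` file name assumes the default target header):
.. code:: python

    import json

    def type_name(type_info):
        # Build a readable name for a possibly nested type object.
        json_type = type_info.get('json_type')
        if json_type == 'pointer':
            return type_name(type_info['type']) + ' *'
        if json_type in ('ret_type', 'array'):
            return type_name(type_info.get('type') or {})
        return type_info.get('name') or '?'

    with open('lvgl.json', 'r') as f:
        api = json.load(f)

    for func in api['functions']:
        args = ', '.join(
            f"{type_name(arg['type'])} {arg['name'] or ''}".strip()
            for arg in func['args']
        )
        print(f"{type_name(func['type'])} {func['name']}({args})")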


@ -10,3 +10,4 @@ Bindings
cpp
pikascript
javascript
api_json

File diff suppressed because one or more lines are too long


@ -0,0 +1,378 @@
import os
import sys
import argparse
import shutil
import tempfile
import json
import subprocess
import threading
base_path = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, base_path)
project_path = os.path.abspath(os.path.join(base_path, '..', '..'))
docs_path = os.path.join(project_path, 'docs')
sys.path.insert(0, docs_path)
import create_fake_lib_c # NOQA
import pycparser_monkeypatch # NOQA
import pycparser # NOQA
DEVELOP = False
class STDOut:
def __init__(self):
self._stdout = sys.stdout
sys.__stdout__ = self
sys.stdout = self
def write(self, data):
pass
def __getattr__(self, item):
if item in self.__dict__:
return self.__dict__[item]
return getattr(self._stdout, item)
def reset(self):
sys.stdout = self._stdout
temp_directory = tempfile.mkdtemp(suffix='.lvgl_json')
def run(output_path, lvgl_config_path, output_to_stdout, target_header, filter_private, *compiler_args):
# stdout = STDOut()
pycparser_monkeypatch.FILTER_PRIVATE = filter_private
# The thread is to provide an indication that things are being processed.
# There are long periods where nothing gets output to the screen and this
# is to let the user know that it is still working.
if not output_to_stdout:
event = threading.Event()
def _do():
while not event.is_set():
event.wait(1)
sys.stdout.write('.')
sys.stdout.flush()
print()
t = threading.Thread(target=_do)
t.daemon = True
t.start()
lvgl_path = project_path
lvgl_src_path = os.path.join(lvgl_path, 'src')
temp_lvgl = os.path.join(temp_directory, 'lvgl')
target_header_base_name = (
os.path.splitext(os.path.split(target_header)[-1])[0]
)
try:
os.mkdir(temp_lvgl)
shutil.copytree(lvgl_src_path, os.path.join(temp_lvgl, 'src'))
shutil.copyfile(os.path.join(lvgl_path, 'lvgl.h'), os.path.join(temp_lvgl, 'lvgl.h'))
pp_file = os.path.join(temp_directory, target_header_base_name + '.pp')
if lvgl_config_path is None:
lvgl_config_path = os.path.join(lvgl_path, 'lv_conf_template.h')
with open(lvgl_config_path, 'rb') as f:
data = f.read().decode('utf-8').split('\n')
for i, line in enumerate(data):
if line.startswith('#if 0'):
data[i] = '#if 1'
else:
for item in (
'LV_USE_LOG',
'LV_USE_OBJ_ID',
'LV_USE_OBJ_ID_BUILTIN',
'LV_USE_FLOAT',
'LV_USE_BIDI',
'LV_USE_LODEPNG',
'LV_USE_LIBPNG',
'LV_USE_BMP',
'LV_USE_TJPGD',
'LV_USE_LIBJPEG_TURBO',
'LV_USE_GIF',
'LV_BIN_DECODER_RAM_LOAD',
'LV_USE_RLE',
'LV_USE_QRCODE',
'LV_USE_BARCODE',
'LV_USE_TINY_TTF',
'LV_USE_GRIDNAV',
'LV_USE_FRAGMENT',
'LV_USE_IMGFONT',
'LV_USE_SNAPSHOT',
'LV_USE_FREETYPE'
):
if line.startswith(f'#define {item} '):
data[i] = f'#define {item} 1'
break
with open(os.path.join(temp_directory, 'lv_conf.h'), 'wb') as f:
f.write('\n'.join(data).encode('utf-8'))
else:
src = lvgl_config_path
dst = os.path.join(temp_directory, 'lv_conf.h')
shutil.copyfile(src, dst)
include_dirs = [temp_directory, project_path]
if sys.platform.startswith('win'):
import get_sdl2
try:
import pyMSVC # NOQA
except ImportError:
sys.stderr.write(
'\nThe pyMSVC library is missing, '
'please run "pip install pyMSVC" to install it.\n'
)
sys.stderr.flush()
sys.exit(-500)
env = pyMSVC.setup_environment() # NOQA
cpp_cmd = ['cl', '/std:c11', '/nologo', '/P']
output_pp = f'/Fi"{pp_file}"'
sdl2_include, _ = get_sdl2.get_sdl2(temp_directory)
include_dirs.append(sdl2_include)
include_path_env_key = 'INCLUDE'
elif sys.platform.startswith('darwin'):
include_path_env_key = 'C_INCLUDE_PATH'
cpp_cmd = [
'clang', '-std=c11', '-E', '-DINT32_MIN=0x80000000',
]
output_pp = f' >> "{pp_file}"'
else:
include_path_env_key = 'C_INCLUDE_PATH'
cpp_cmd = [
'gcc', '-std=c11', '-E', '-Wno-incompatible-pointer-types',
]
output_pp = f' >> "{pp_file}"'
fake_libc_path = create_fake_lib_c.run(temp_directory)
if include_path_env_key not in os.environ:
os.environ[include_path_env_key] = ''
os.environ[include_path_env_key] = (
f'{fake_libc_path}{os.pathsep}{os.environ[include_path_env_key]}'
)
if 'PATH' not in os.environ:
os.environ['PATH'] = ''
os.environ['PATH'] = (
f'{fake_libc_path}{os.pathsep}{os.environ["PATH"]}'
)
cpp_cmd.extend(compiler_args)
cpp_cmd.extend([
'-DLV_LVGL_H_INCLUDE_SIMPLE',
'-DLV_CONF_INCLUDE_SIMPLE',
'-DLV_USE_DEV_VERSION'
])
cpp_cmd.extend(['-DPYCPARSER', f'"-I{fake_libc_path}"'])
cpp_cmd.extend([f'"-I{item}"' for item in include_dirs])
cpp_cmd.append(f'"{target_header}"')
if sys.platform.startswith('win'):
cpp_cmd.insert(len(cpp_cmd) - 2, output_pp)
else:
cpp_cmd.append(output_pp)
cpp_cmd = ' '.join(cpp_cmd)
p = subprocess.Popen(
cpp_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=os.environ,
shell=True
)
out, err = p.communicate()
exit_code = p.returncode
if not os.path.exists(pp_file):
sys.stdout.write(out.decode('utf-8').strip() + '\n')
sys.stdout.write('EXIT CODE: ' + str(exit_code) + '\n')
sys.stderr.write(err.decode('utf-8').strip() + '\n')
sys.stdout.flush()
sys.stderr.flush()
raise RuntimeError('Unknown Failure')
with open(pp_file, 'r') as f:
pp_data = f.read()
cparser = pycparser.CParser()
ast = cparser.parse(pp_data, target_header)
ast.setup_docs(temp_directory)
if not output_to_stdout and output_path is None:
# stdout.reset()
if not DEVELOP:
shutil.rmtree(temp_directory)
return ast
elif output_to_stdout:
# stdout.reset()
print(json.dumps(ast.to_dict(), indent=4))
else:
if not os.path.exists(output_path):
os.makedirs(output_path)
output_path = os.path.join(output_path, target_header_base_name + '.json')
with open(output_path, 'w') as f:
f.write(json.dumps(ast.to_dict(), indent=4))
# stdout.reset()
if not output_to_stdout:
event.set() # NOQA
t.join() # NOQA
except Exception as err:
if not output_to_stdout:
event.set() # NOQA
t.join() # NOQA
print()
try:
print(cpp_cmd) # NOQA
print()
except: # NOQA
pass
for key, value in os.environ.items():
print(key + ':', value)
print()
import traceback
traceback.print_exc()
print()
exceptions = [
ArithmeticError,
AssertionError,
AttributeError,
EOFError,
FloatingPointError,
GeneratorExit,
ImportError,
IndentationError,
IndexError,
KeyError,
KeyboardInterrupt,
LookupError,
MemoryError,
NameError,
NotImplementedError,
OverflowError,
ReferenceError,
RuntimeError,
StopIteration,
SyntaxError,
TabError,
SystemExit,
TypeError,
UnboundLocalError,
UnicodeError,
UnicodeEncodeError,
UnicodeDecodeError,
UnicodeTranslateError,
ValueError,
ZeroDivisionError,
SystemError
]
if isinstance(err, OSError):
error = err.errno
else:
if type(err) in exceptions:
error = ~exceptions.index(type(err))
else:
error = -100
else:
error = 0
if DEVELOP:
print('temporary file path:', temp_directory)
else:
shutil.rmtree(temp_directory)
sys.exit(error)
if __name__ == '__main__':
parser = argparse.ArgumentParser('-')
parser.add_argument(
'--output-path',
dest='output_path',
help=(
'output directory for JSON file. If one is not '
'supplied then it will be output to stdout'
),
action='store',
default=None
)
parser.add_argument(
'--lvgl-config',
dest='lv_conf',
help=(
'path to lv_conf.h (including file name), if this is not set then '
'a config file will be generated that has everything turned on.'
),
action='store',
default=None
)
parser.add_argument(
'--develop',
dest='develop',
help='this option leaves the temporary folder in place.',
action='store_true',
)
parser.add_argument(
"--target-header",
dest="target_header",
help=(
"path to a custom header file. When using this to supply a custom"
"header file you MUST insure that any LVGL includes are done so "
"they are relitive to the LVGL repository root folder.\n\n"
'#include "src/lvgl_private.h"\n\n'
"If you have includes to header files that are not LVGL then you "
"will need to add the include locations for those header files "
"when running this script. It is done using the same manner that "
"is used when calling a C compiler\n\n"
"You need to provide the absolute path to the header file when "
"using this feature."
),
action="store",
default=os.path.join(temp_directory, "lvgl.h")
)
parser.add_argument(
'--filter-private',
dest='filter_private',
help='Internal Use',
action='store_true',
)
args, extra_args = parser.parse_known_args()
DEVELOP = args.develop
run(args.output_path, args.lv_conf, args.output_path is None, args.target_header, args.filter_private, *extra_args)


@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
import zipfile
import io
import os
SDL2_URL = 'https://github.com/libsdl-org/SDL/releases/download/release-2.26.5/SDL2-devel-2.26.5-VC.zip' # NOQA
def get_path(name: str, p: str) -> str:
for file in os.listdir(p):
file = os.path.join(p, file)
if file.endswith(name):
return file
if os.path.isdir(file):
if res := get_path(name, file):
return res
def get_sdl2(path, url=SDL2_URL):
import requests # NOQA
stream = io.BytesIO()
with requests.get(url, stream=True) as r:
r.raise_for_status()
content_length = int(r.headers['Content-Length'])
chunks = 0
# print()
# sys.stdout.write('\r' + str(chunks) + '/' + str(content_length))
# sys.stdout.flush()
for chunk in r.iter_content(chunk_size=1024):
stream.write(chunk)
chunks += len(chunk)
# sys.stdout.write('\r' + str(chunks) + '/' + str(content_length))
# sys.stdout.flush()
# print()
stream.seek(0)
zf = zipfile.ZipFile(stream)
for z_item in zf.infolist():
for ext in ('.h', '.dll', '.lib'):
if not z_item.filename.endswith(ext):
continue
zf.extract(z_item, path=path)
break
include_path = get_path('include', path)
lib_path = get_path('lib\\x64', path)
dll_path = get_path('SDL2.dll', lib_path)
sdl_include_path = os.path.split(include_path)[0]
if not os.path.exists(os.path.join(sdl_include_path, 'SDL2')):
os.rename(include_path, os.path.join(sdl_include_path, 'SDL2'))
zf.close()
stream.close()
return os.path.abspath(sdl_include_path), dll_path

File diff suppressed because it is too large


@ -0,0 +1,16 @@
pycparser>=2.22
pyMSVC>=0.5.3; platform_system == "Windows"
Sphinx
breathe
imagesize
importlib-metadata
sphinx-rtd-theme
sphinx-sitemap
sphinxcontrib-applehelp
sphinxcontrib-devhelp
sphinxcontrib-htmlhelp
sphinxcontrib-jsmath
sphinxcontrib-qthelp
sphinxcontrib-serializinghtml
sphinx-rtd-dark-mode
typing-extensions


@ -0,0 +1,41 @@
# Basic test to see if the API JSON generator is able to run without any errors.
# This test does not check whether the output is correct. It is for the
# sole purpose of making sure it completes.
import os
import sys
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
SCRIPT_PATH = os.path.join(
BASE_PATH, '..', '..', 'scripts',
'gen_json', 'gen_json.py'
)
OUTPUT_FILE = os.path.join(BASE_PATH, 'lvgl.json')
cmd = f'{sys.executable} "{SCRIPT_PATH}" --output-path "{BASE_PATH}"'
print('running test')
print(cmd)
result = os.system(cmd)
if result != 0:
print()
sys.stderr.write('TEST FAILED!!\n\n')
sys.stderr.flush()
sys.exit(result)
if not os.path.exists(OUTPUT_FILE):
print()
sys.stderr.write(f'"{OUTPUT_FILE}" was not found.\n')
sys.stderr.write('TEST FAILED!!\n\n')
sys.stderr.flush()
sys.exit(-500)
try:
os.remove(OUTPUT_FILE)
except: # NOQA
pass
print()
print('TEST PASSED!')