Mirror of https://github.com/espressif/esp-idf

docs: Move Doxygen build to a Sphinx extension, load sdkconfig & soc caps headers

parent fcf76320c8
commit 783856d557
@@ -300,6 +300,7 @@ ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = YES
EXPAND_ONLY_PREDEF = YES
PREDEFINED = \
    $(ENV_DOXYGEN_DEFINES) \
    __attribute__(x)= \
    IDF_DEPRECATED(X)= \
    IRAM_ATTR= \
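
The new $(ENV_DOXYGEN_DEFINES) entry lets the docs build inject extra PREDEFINED macros (the sdkconfig and soc-caps defines) through an environment variable at run time. A minimal sketch of how a caller might drive this, assuming doxygen is installed and a Doxyfile is in the current directory; the macro names are placeholders, not values from the commit:

    import os
    import subprocess

    # Placeholder macro names standing in for the sdkconfig / *_caps.h defines
    # that the new Sphinx extension collects from the generated headers.
    defines = ["CONFIG_IDF_TARGET_ESP32S2", "SOC_BT_SUPPORTED"]

    # Doxygen expands $(ENV_DOXYGEN_DEFINES) inside the PREDEFINED value from
    # the environment of the doxygen process.
    env = dict(os.environ, ENV_DOXYGEN_DEFINES=" ".join(defines))
    subprocess.check_call(["doxygen", "Doxyfile"], env=env)
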
@@ -25,7 +25,7 @@ import subprocess
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute

from local_util import run_cmd_get_output, copy_if_modified
from local_util import run_cmd_get_output, copy_if_modified, call_with_python

# build_docs on the CI server sometimes fails under Python3. This is a workaround:
sys.setrecursionlimit(3500)
@@ -46,29 +46,10 @@ except KeyError:
# Set the idf_target chip. This is a hack right now.
idf_target = 'esp32s2'


def call_with_python(cmd):
    # using sys.executable ensures that the scripts are called with the same Python interpreter
    if os.system('{} {}'.format(sys.executable, cmd)) != 0:
        raise RuntimeError('{} failed'.format(cmd))


# Call Doxygen to get XML files from the header files
print("Calling Doxygen to generate latest XML files")
if os.system("doxygen ../Doxyfile") != 0:
    raise RuntimeError('Doxygen call failed')

# Doxygen has generated XML files in 'xml' directory.
# Copy them to 'xml_in', only touching the files which have changed.
copy_if_modified('xml/', 'xml_in/')

# Generate 'api_name.inc' files using the XML files by Doxygen
call_with_python('../gen-dxd.py')

# Generate 'esp_err_defs.inc' file with ESP_ERR_ error code definitions
esp_err_inc_path = '{}/inc/esp_err_defs.inc'.format(builddir)
call_with_python('../../tools/gen_esp_err_to_name.py --rst_output ' + esp_err_inc_path + '.in')
copy_if_modified(esp_err_inc_path + '.in', esp_err_inc_path)

try:
    os.mkdir(builddir)
except OSError:
    pass

# Generate version-related includes
#
@@ -79,7 +60,6 @@ def generate_version_specific_includes(app):
    call_with_python('../gen-version-specific-includes.py {} {}'.format(app.config.language, version_tmpdir))
    copy_if_modified(version_tmpdir, '{}/inc'.format(builddir))


# Generate toolchain download links
print("Generating toolchain download links")
base_url = 'https://dl.espressif.com/dl/'
@@ -118,6 +98,7 @@ extensions = ['breathe',
              'html_redirects',
              'idf_build_system',
              'kconfig_reference',
              'doxygen_idf',
              'sphinx.ext.todo',
              ]
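
Each local extension added to this list follows the usual Sphinx pattern: a module providing setup(app) that registers config values or event handlers and returns extension metadata, as the doxygen_idf setup() further down in this commit does. A minimal illustrative skeleton (module and handler names are made up):

    # my_local_extension.py -- illustrative skeleton of a local Sphinx extension
    def setup(app):
        # register handlers / config values here, for example:
        # app.connect('idf-info', my_handler)
        return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
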
@@ -0,0 +1,67 @@
# Extension to generate Doxygen XML include files, with IDF config & soc macros included
import glob
import os.path
import re
import sys
import subprocess

from local_util import copy_if_modified, call_with_python


def setup(app):
    # The idf_build_system extension will emit this event once it has generated the project build information
    app.connect('idf-info', generate_doxygen)


def _parse_defines(header_path):
    defines = {}
    # Note: we run C preprocessor here without any -I arguments, so assumption is
    # that these headers are all self-contained and don't include any other headers
    # not in the same directory
    print("Reading macros from %s..." % (header_path))
    processed_output = subprocess.check_output(["xtensa-esp32-elf-gcc", "-dM", "-E", header_path])
    for line in processed_output.split("\n"):
        line = line.strip()
        m = re.search("#define ([^ ]+) ?(.*)", line)
        if m and not m.group(1).startswith("_"):
            defines[m.group(1)] = m.group(2)

    return defines


def generate_doxygen(app, project_description):
    build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))

    # Parse kconfig macros to pass into doxygen
    #
    # TODO: this should use the set of "config which can't be changed" eventually,
    # not the header
    defines = _parse_defines(os.path.join(project_description["build_dir"],
                                          "config", "sdkconfig.h"))

    # Add all SOC _caps.h headers to the defines
    #
    # kind of a hack, be nicer to add a component info dict in project_description.json
    soc_path = [p for p in project_description["build_component_paths"] if p.endswith("/soc")][0]
    for soc_header in glob.glob(os.path.join(soc_path, project_description["target"],
                                             "include", "soc", "*_caps.h")):
        defines.update(_parse_defines(soc_header))

    # Call Doxygen to get XML files from the header files
    print("Calling Doxygen to generate latest XML files")
    doxy_env = {
        "ENV_DOXYGEN_DEFINES": " ".join(defines)
    }
    subprocess.check_call(["doxygen", "../Doxyfile"], env=doxy_env)

    # Doxygen has generated XML files in 'xml' directory.
    # Copy them to 'xml_in', only touching the files which have changed.
    copy_if_modified('xml/', 'xml_in/')

    # Generate 'api_name.inc' files using the XML files by Doxygen
    call_with_python('../gen-dxd.py')

    # Generate 'esp_err_defs.inc' file with ESP_ERR_ error code definitions from inc file
    esp_err_inc_path = '{}/inc/esp_err_defs.inc'.format(build_dir)
    call_with_python('../../tools/gen_esp_err_to_name.py --rst_output ' + esp_err_inc_path + '.in')
    copy_if_modified(esp_err_inc_path + '.in', esp_err_inc_path)
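
For reference, _parse_defines() runs the cross-compiler preprocessor with -dM -E, which prints every #define, and keeps the name/value pairs whose names do not start with an underscore. A rough illustration of its result for a hypothetical sdkconfig.h fragment (names and values are examples only, not taken from a real build):

    # Hypothetical fragment of `xtensa-esp32-elf-gcc -dM -E sdkconfig.h` output:
    #   #define CONFIG_IDF_TARGET "esp32s2"
    #   #define CONFIG_FREERTOS_HZ 100
    #   #define __GNUC__ 8            (dropped: name starts with an underscore)
    #
    # ...for which _parse_defines() would return:
    expected = {
        'CONFIG_IDF_TARGET': '"esp32s2"',
        'CONFIG_FREERTOS_HZ': '100',
    }
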
@@ -1,2 +0,0 @@
LANGUAGE=en
include ../docs_common.mk
@@ -46,7 +46,102 @@ all_kinds = [
"""


def get_doxyfile_input():
def setup(app):
    # The idf_build_system extension will emit this event once it has generated the project build information
    app.connect('idf-info', generate_doxygen)

    return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}


def _parse_defines(header_path):
    defines = {}
    # Note: we run C preprocessor here without any -I arguments, so assumption is
    # that these headers are all self-contained and don't include any other headers
    # not in the same directory
    print("Reading macros from %s..." % (header_path))
    processed_output = subprocess.check_output(["xtensa-esp32-elf-gcc", "-dM", "-E", header_path]).decode()
    for line in processed_output.split("\n"):
        line = line.strip()
        m = re.search("#define ([^ ]+) ?(.*)", line)
        if m and not m.group(1).startswith("_"):
            defines[m.group(1)] = m.group(2)

    return defines


def generate_doxygen(app, project_description):
    build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))

    # Parse kconfig macros to pass into doxygen
    #
    # TODO: this should use the set of "config which can't be changed" eventually,
    # not the header
    defines = _parse_defines(os.path.join(project_description["build_dir"],
                                          "config", "sdkconfig.h"))

    # Add all SOC _caps.h headers to the defines
    #
    # kind of a hack, be nicer to add a component info dict in project_description.json
    soc_path = [p for p in project_description["build_component_paths"] if p.endswith("/soc")][0]
    for soc_header in glob.glob(os.path.join(soc_path, project_description["target"],
                                             "include", "soc", "*_caps.h")):
        defines.update(_parse_defines(soc_header))

    # Call Doxygen to get XML files from the header files
    print("Calling Doxygen to generate latest XML files")
    doxy_env = {
        "ENV_DOXYGEN_DEFINES": " ".join(defines),
        "IDF_PATH": app.config.idf_path,
        "IDF_TARGET": app.config.idf_target,
    }
    doxyfile = os.path.join(app.config.docs_root, "Doxyfile")
    print("Running doxygen with doxyfile {}".format(doxyfile))
    # note: run Doxygen in the build directory, so the xml & xml_in files end up in there
    subprocess.check_call(["doxygen", doxyfile], env=doxy_env, cwd=build_dir)

    # Doxygen has generated XML files in 'xml' directory.
    # Copy them to 'xml_in', only touching the files which have changed.
    copy_if_modified(os.path.join(build_dir, 'xml/'), os.path.join(build_dir, 'xml_in/'))

    # Generate 'api_name.inc' files from the Doxygen XML files
    convert_api_xml_to_inc(app, doxyfile)


def convert_api_xml_to_inc(app, doxyfile):
    """ Generate header_file.inc files
    with API reference made of doxygen directives
    for each header file
    specified in the 'INPUT' statement of the Doxyfile.
    """
    build_dir = app.config.build_dir

    xml_directory_path = "{}/xml".format(build_dir)
    inc_directory_path = "{}/inc".format(build_dir)

    if not os.path.isdir(xml_directory_path):
        raise RuntimeError("Directory {} does not exist!".format(xml_directory_path))

    if not os.path.exists(inc_directory_path):
        os.makedirs(inc_directory_path)

    header_paths = get_doxyfile_input_paths(app, doxyfile)
    print("Generating 'api_name.inc' files with Doxygen directives")
    for header_file_path in header_paths:
        api_name = get_api_name(header_file_path)
        inc_file_path = inc_directory_path + "/" + api_name + ".inc"
        rst_output = generate_directives(header_file_path, xml_directory_path)

        previous_rst_output = ''
        if os.path.isfile(inc_file_path):
            with open(inc_file_path, "r", encoding='utf-8') as inc_file_old:
                previous_rst_output = inc_file_old.read()

        if previous_rst_output != rst_output:
            with open(inc_file_path, "w", encoding='utf-8') as inc_file:
                inc_file.write(rst_output)


def get_doxyfile_input_paths(app, doxyfile_path):
    """Get contents of Doxyfile's INPUT statement.

    Returns:
@@ -41,10 +41,12 @@ REDIRECT_TEMPLATE = """

def setup(app):
    app.add_config_value('html_redirect_pages', [], 'html')
    app.connect('build-finished', create_redirect_pages)
    # attaching to this event is a hack, but it's a convenient stage in the build
    # to create HTML redirects
    app.connect('html-collect-pages', create_redirect_pages)


def create_redirect_pages(app, docname):
def create_redirect_pages(app):
    if not isinstance(app.builder, StandaloneHTMLBuilder):
        return  # only relevant for standalone HTML output

@@ -66,3 +68,5 @@ def create_redirect_pages(app, docname):

    with open(out_file, "w") as rp:
        rp.write(content)

    return []
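
The signature change above follows from switching events: Sphinx calls 'html-collect-pages' handlers as handler(app), with no docname argument, and expects an iterable of (pagename, context, templatename) tuples in return. Returning an empty list is fine when, as here, the handler writes its output files itself. A minimal sketch of that handler shape (body elided):

    # Sketch: minimal shape of an 'html-collect-pages' handler.
    def create_redirect_pages(app):
        # ... write redirect HTML files directly into app.outdir ...
        return []  # no extra pages for Sphinx itself to render
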
@@ -13,7 +13,6 @@ import json

# this directory also contains the dummy IDF project
project_path = os.path.abspath(os.path.dirname(__file__))
project_build_dir = os.path.join(project_path, "build")


def setup(app):
    builddir = os.path.dirname(app.doctreedir.rstrip(os.sep))

@@ -27,21 +26,20 @@ def setup(app):

def generate_idf_info(app, env, added, changed, removed):
    print("Running CMake on dummy project to get build info...")
    build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
    cmake_build_dir = os.path.join(build_dir, "build_dummy_project")
    idf_py_path = os.path.join(app.config.idf_path, "tools", "idf.py")
    print("Running idf.py...")
    subprocess.check_call([sys.executable,
                           idf_py_path,
                           "-C",
                           project_path,
                           "set-target",
                           app.config.idf_target])
    # TODO: can call these in one execution pass?
    subprocess.check_call([sys.executable,
                           idf_py_path,
                           "-C",
                           project_path,
                           "reconfigure"])
    with open(os.path.join(project_build_dir, "project_description.json")) as f:
    idf_py = [sys.executable,
              idf_py_path,
              "-B",
              cmake_build_dir,
              "-C",
              project_path]
    subprocess.check_call(idf_py + [ "set-target", app.config.idf_target])
    # TODO: can call these two in one execution pass?
    subprocess.check_call(idf_py + [ "reconfigure"])
    with open(os.path.join(cmake_build_dir, "project_description.json")) as f:
        project_description = json.load(f)
    app.emit('idf-info', project_description)
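
The project_description.json loaded here is the payload handed to 'idf-info' listeners; elsewhere in this commit the doxygen_idf extension reads its build_dir, build_component_paths and target fields. A rough, abbreviated picture of that structure, shown as a Python dict with illustrative values rather than output of a real build:

    # Illustrative subset of project_description.json; only the keys used by
    # the docs extensions in this commit are shown.
    project_description = {
        "target": "esp32s2",
        "build_dir": "/path/to/docs/_build/build_dummy_project",
        "build_component_paths": [
            "/path/to/esp-idf/components/soc",
            "/path/to/esp-idf/components/esp_common",
        ],
    }
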
@@ -18,6 +18,7 @@ from __future__ import unicode_literals
from io import open
import os
import shutil
import sys

try:
    import urllib.request

@@ -75,3 +76,8 @@ def download_file_if_missing(from_url, to_path):
    with open(filename_with_path, 'wb') as fobj:
        with open(tmp_file, 'rb') as tmp:
            fobj.write(tmp.read())


def call_with_python(cmd):
    # using sys.executable ensures that the scripts are called with the same Python interpreter
    if os.system('{} {}'.format(sys.executable, cmd)) != 0:
        raise RuntimeError('{} failed'.format(cmd))
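
call_with_python() is the helper that conf.py and the doxygen_idf extension now import from local_util; it shells out to the interpreter that is running Sphinx, which keeps virtualenv and Python 2/3 selection consistent with the docs build itself. A short usage sketch, assuming local_util is importable as it is from conf.py:

    from local_util import call_with_python

    # Runs ../gen-dxd.py with the same Python interpreter as the docs build,
    # raising RuntimeError if the script exits non-zero.
    call_with_python('../gen-dxd.py')
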
@@ -1,2 +0,0 @@
LANGUAGE=zh_CN
include ../docs_common.mk