#!/usr/bin/env python3
#
# CI script to deploy docs to a webserver. Not useful outside of CI environment
#
#
# Copyright 2020 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import glob
import os
import os.path
import re
import stat
import subprocess
import sys
import tarfile

import packaging.version


def env(variable, default=None):
    """ Shortcut to return the expanded version of an environment variable """
    return os.path.expandvars(os.environ.get(variable, default) if default else os.environ[variable])
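    # (e.g. env('GIT_VER') raises KeyError when GIT_VER is unset, while env('CI_COMMIT_REF_NAME', git_ver)
    #  falls back to git_ver; any $VAR references inside the value are expanded by os.path.expandvars)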


# import sanitize_version from the docs directory, shared with here
sys.path.append(os.path.join(env('IDF_PATH'), 'docs'))
from sanitize_version import sanitize_version  # noqa


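# Deployment configuration comes entirely from the CI environment (see main() below):
# GIT_VER, DOCS_BUILD_DIR, DOCS_DEPLOY_URL_BASE, DOCS_DEPLOY_SERVER, DOCS_DEPLOY_SERVER_USER and
# DOCS_DEPLOY_PATH must all be set; CI_COMMIT_REF_NAME (falls back to GIT_VER) and TYPE (only used
# to label the printed docs URLs) are optional.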
def main():
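    """ Read deployment settings from the CI environment, build a docs tarball from the
        already-built docs in DOCS_BUILD_DIR and deploy it to the docs server; if this is
        the current stable release, deploy it a second time under the name 'stable'. """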
    # if you get KeyErrors on the following lines, it's probably because you're not running in Gitlab CI
    git_ver = env('GIT_VER')  # output of git describe --always
    ci_ver = env('CI_COMMIT_REF_NAME', git_ver)  # branch or tag we're building for (used for 'release' & URL)

    version = sanitize_version(ci_ver)
    print('Git version: {}'.format(git_ver))
    print('CI Version: {}'.format(ci_ver))
    print('Deployment version: {}'.format(version))

    if not version:
        raise RuntimeError('A version is needed to deploy')

    build_dir = env('DOCS_BUILD_DIR')  # top-level local build dir, where docs have already been built

    if not build_dir:
        raise RuntimeError('Valid DOCS_BUILD_DIR is needed to deploy')

    url_base = env('DOCS_DEPLOY_URL_BASE')  # base for HTTP URLs, used to print the URL to the log after deploying

    docs_server = env('DOCS_DEPLOY_SERVER')  # ssh server to deploy to
    docs_user = env('DOCS_DEPLOY_SERVER_USER')
    docs_path = env('DOCS_DEPLOY_PATH')  # filesystem path on DOCS_SERVER

    if not docs_server:
        raise RuntimeError('Valid DOCS_DEPLOY_SERVER is needed to deploy')

    if not docs_user:
        raise RuntimeError('Valid DOCS_DEPLOY_SERVER_USER is needed to deploy')

    docs_server = '{}@{}'.format(docs_user, docs_server)

    if not docs_path:
        raise RuntimeError('Valid DOCS_DEPLOY_PATH is needed to deploy')

    print('DOCS_DEPLOY_SERVER {} DOCS_DEPLOY_PATH {}'.format(docs_server, docs_path))

    tarball_path, version_urls = build_doc_tarball(version, git_ver, build_dir)

    deploy(version, tarball_path, docs_path, docs_server)

    print('Docs URLs:')
    doc_deploy_type = os.getenv('TYPE')
    for vurl in version_urls:
        language, _, target = vurl.split('/')
        tag = '{}_{}'.format(language, target)
        url = '{}/{}/index.html'.format(url_base, vurl)  # (index.html needed for the preview server)
        url = re.sub(r'([^:])//', r'\1/', url)  # get rid of any // that isn't in the https:// part
        print('[document {}][{}] {}'.format(doc_deploy_type, tag, url))
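
    # (illustration only, with hypothetical values: for url_base 'https://docs.example.com' and
    #  vurl 'en/v4.2/esp32', the line above prints a URL like 'https://docs.example.com/en/v4.2/esp32/index.html')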

    # note: it would be neater to use symlinks for stable, but because of the directory order
    # (language first) it's kind of a pain to do on a remote server, so we just repeat the
    # process but call the version 'stable' this time
    if is_stable_version(version):
        print('Deploying again as stable version...')
        tarball_path, version_urls = build_doc_tarball('stable', git_ver, build_dir)
        deploy('stable', tarball_path, docs_path, docs_server)


def deploy(version, tarball_path, docs_path, docs_server):
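    """ Copy the docs tarball for this version to docs_server via scp, then unpack it
        into docs_path on the server via ssh """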
    def run_ssh(commands):
        """ Log into docs_server and run a sequence of commands using ssh """
        print('Running ssh: {}'.format(commands))
        subprocess.run(['ssh', '-o', 'BatchMode=yes', docs_server, '-x', ' && '.join(commands)], check=True)

    # copy the version tarball to the server
    run_ssh(['mkdir -p {}'.format(docs_path)])
    print('Running scp {} to {}'.format(tarball_path, '{}:{}'.format(docs_server, docs_path)))
    subprocess.run(['scp', '-B', tarball_path, '{}:{}'.format(docs_server, docs_path)], check=True)

    tarball_name = os.path.basename(tarball_path)

    run_ssh(['cd {}'.format(docs_path),
             'rm -rf ./*/{}'.format(version),  # remove any pre-existing docs matching this version
             'tar -zxvf {}'.format(tarball_name),  # untar the archive with the new docs
             'rm {}'.format(tarball_name)])

    # Note: deleting and then extracting the archive is a bit awkward for updating stable/latest/etc
    # as the version will be invalid for a window of time. Better to do it atomically, but this is
    # another thing made much more complex by the directory structure putting language before version...


def build_doc_tarball(version, git_ver, build_dir):
    """ Make a tar.gz archive of the docs, in the directory structure used to deploy as
        the given version """
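    # returns (absolute path of the tarball, list of 'language/version/target' paths archived inside it)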
    version_paths = []
    tarball_path = '{}/{}.tar.gz'.format(build_dir, version)

    # find all the 'html/' directories under build_dir
    html_dirs = glob.glob('{}/**/html/'.format(build_dir), recursive=True)
    print('Found %d html directories' % len(html_dirs))

    pdfs = glob.glob('{}/**/latex/build/*.pdf'.format(build_dir), recursive=True)
    print('Found %d PDFs in latex directories' % len(pdfs))

    # add symlinks for 'stable'/'latest' and add them to the list of PDFs to archive
    symlinks = create_and_add_symlinks(version, git_ver, pdfs)

    def not_sources_dir(ti):
        """ Filter the _sources directories out of the tarballs """
        if ti.name.endswith('/_sources'):
            return None

        ti.mode |= stat.S_IWGRP  # make everything group-writeable
        return ti

    try:
        os.remove(tarball_path)
    except OSError:
        pass

    with tarfile.open(tarball_path, 'w:gz') as tarball:
        for html_dir in html_dirs:
            # html_dir has the form '<ignored>/<language>/<target>/html/'
            target_dirname = os.path.dirname(os.path.dirname(html_dir))
            target = os.path.basename(target_dirname)
            language = os.path.basename(os.path.dirname(target_dirname))

            # when deploying, we want the top-level directory layout 'language/version/target'
            archive_path = '{}/{}/{}'.format(language, version, target)
            print("Archiving '{}' as '{}'...".format(html_dir, archive_path))
            tarball.add(html_dir, archive_path, filter=not_sources_dir)
            version_paths.append(archive_path)

        for pdf_path in pdfs:
            # pdf_path has the form '<ignored>/<language>/<target>/latex/build/<pdf file>'
            latex_dirname = os.path.dirname(pdf_path)
            pdf_filename = os.path.basename(pdf_path)
            target_dirname = os.path.dirname(os.path.dirname(latex_dirname))
            target = os.path.basename(target_dirname)
            language = os.path.basename(os.path.dirname(target_dirname))

            # when deploying, we want the layout 'language/version/target/pdf'
            archive_path = '{}/{}/{}/{}'.format(language, version, target, pdf_filename)
            print("Archiving '{}' as '{}'...".format(pdf_path, archive_path))
            tarball.add(pdf_path, archive_path)
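
    # the symlinks were only needed so the PDFs could be archived under their deployed
    # ('stable'/'latest') names; remove them again now that the tarball is written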
    for symlink in symlinks:
        os.unlink(symlink)

    return (os.path.abspath(tarball_path), version_paths)


def create_and_add_symlinks(version, git_ver, pdfs):
    """ Create symbolic links for PDFs for 'latest' and 'stable' releases """

    symlinks = []
    if 'stable' in version or 'latest' in version:
        for pdf_path in pdfs:
            symlink_path = pdf_path.replace(git_ver, version)
            os.symlink(pdf_path, symlink_path)
            symlinks.append(symlink_path)

        pdfs.extend(symlinks)
        print('Found %d PDFs in latex directories after adding symlink' % len(pdfs))

    return symlinks


def is_stable_version(version):
    """ Heuristic for whether this is the latest stable release """
    if not version.startswith('v'):
        return False  # branch name
    if '-' in version:
        return False  # prerelease tag

    git_out = subprocess.check_output(['git', 'tag', '-l']).decode('utf-8')

    versions = [v.strip() for v in git_out.split('\n')]
    versions = [v for v in versions if re.match(r'^v[\d\.]+$', v)]  # include vX.Y.Z only

    versions = [packaging.version.parse(v) for v in versions]

    max_version = max(versions)

    if max_version.public != version[1:]:
        print('Stable version is v{}. This version is {}.'.format(max_version.public, version))
        return False
    else:
        print('This version {} is the stable version'.format(version))
        return True


if __name__ == '__main__':
    main()