Mirror of https://github.com/jupyterhub/repo2docker

Merge pull request #206 from yuvipanda/refactor

Refactor (most) traitlets out of buildpacks (pull/200/head)

commit 48e98fc421
@@ -134,25 +134,17 @@ class BuildPack(LoggingConfigurable):
     and there are *some* general guarantees of ordering.

     """
-    packages = Set(
-        help="""
-        List of packages that are installed in this BuildPack by default.
+    def get_packages(self):
+        """
+        List of packages that are installed in this BuildPack.

         Versions are not specified, and ordering is not guaranteed. These
         are usually installed as apt packages.
         """
-    )
+        return set()

-    base_packages = Set(
-        {
-            # Utils!
-            "less",
-
-            # FIXME: Use npm from nodesource!
-            # Everything seems to depend on npm these days, unfortunately.
-            "npm",
-        },
-        help="""
+    def get_base_packages(self):
+        """
         Base set of apt packages that are installed for all images.

         These contain useful images that are commonly used by a lot of images,
@@ -161,11 +153,17 @@ class BuildPack(LoggingConfigurable):

         These would be installed with a --no-install-recommends option.
         """
-    )
+        return {
+            # Utils!
+            "less",

-    env = List(
-        [],
-        help="""
+            # FIXME: Use npm from nodesource!
+            # Everything seems to depend on npm these days, unfortunately.
+            "npm",
+        }
+
+    def get_env(self):
+        """
         Ordered list of environment variables to be set for this image.

         Ordered so that environment variables can use other environment
@@ -174,29 +172,26 @@ class BuildPack(LoggingConfigurable):
         Expects tuples, with the first item being the environment variable
         name and the second item being the value.
         """
-    )
+        return []

-    path = List(
-        [],
-        help="""
+    def get_path(self):
+        """
         Ordered list of file system paths to look for executables in.

         Just sets the PATH environment variable. Separated out since
         it is very commonly set by various buildpacks.
         """
-    )
+        return []

-    labels = Dict(
-        {},
-        help="""
+    def get_labels(self):
+        """
         Docker labels to set on the built image.
         """
-    )
+        return {}

-    build_script_files = Dict(
-        {},
-        help="""
-        List of files to be copied to the container image for use in building.
+    def get_build_script_files(self):
+        """
+        Dict of files to be copied to the container image for use in building.

         This is copied before the `build_scripts` & `assemble_scripts` are
         run, so can be executed from either of them.
@@ -205,11 +200,10 @@ class BuildPack(LoggingConfigurable):
         system, and the value is the destination file path inside the
         container image.
         """
-    )
+        return {}

-    build_scripts = List(
-        [],
-        help="""
+    def get_build_scripts(self):
+        """
         Ordered list of shell script snippets to build the base image.

         A list of tuples, where the first item is a username & the
@@ -226,11 +220,10 @@ class BuildPack(LoggingConfigurable):
         You can use environment variable substitutions in both the
         username and the execution script.
         """
-    )
+        return []

-    assemble_scripts = List(
-        [],
-        help="""
+    def get_assemble_scripts(self):
+        """
         Ordered list of shell script snippets to build the repo into the image.

         A list of tuples, where the first item is a username & the
@@ -253,11 +246,10 @@ class BuildPack(LoggingConfigurable):
         You can use environment variable substitutions in both the
         username and the execution script.
         """
-    )
+        return []

-    post_build_scripts = List(
-        [],
-        help="""
+    def get_post_build_scripts(self):
+        """
         An ordered list of executable scripts to execute after build.

         Is run as a non-root user, and must be executable. Used for doing
@@ -266,7 +258,7 @@ class BuildPack(LoggingConfigurable):
         The scripts should be as deterministic as possible - running it twice
         should not produce different results!
         """
-    )
+        return []

     name = Unicode(
         help="""
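The hunks above replace every declarative traitlet with a plain get_*() accessor that subclasses override. A minimal sketch of what a buildpack looks like against the new API; the class, the packages, and the values below are illustrative stand-ins, not code from this commit:

# Illustrative stand-ins only; MiniBuildPack plays the role of
# repo2docker's BuildPack base class so the snippet runs on its own.
class MiniBuildPack:
    def get_packages(self):
        return set()

    def get_env(self):
        return []

    def get_path(self):
        return []

    def get_assemble_scripts(self):
        return []


class FortranBuildPack(MiniBuildPack):
    # Hypothetical buildpack: overrides accessors instead of traitlets.
    def get_packages(self):
        # apt packages; combined with other buildpacks by set union
        return {"gfortran"}

    def get_env(self):
        # ordered (name, value) pairs; later entries may reference earlier ones
        return [("FORTRAN_HOME", "${APP_BASE}/fortran")]

    def get_path(self):
        # directories to put on PATH in the generated image
        return ["${FORTRAN_HOME}/bin"]

    def get_assemble_scripts(self):
        # (user, shell snippet) tuples run while assembling the repository
        return [("${NB_USER}", "echo assembling repository")]


bp = FortranBuildPack()
print(bp.get_packages(), bp.get_env(), bp.get_path())
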
@@ -286,25 +278,24 @@ class BuildPack(LoggingConfigurable):
         for resolving them.
         """
         result = BuildPack(parent=self)
-        labels = {}
-        labels.update(self.labels)
-        labels.update(other.labels)
-        result.labels = labels
-        result.packages = self.packages.union(other.packages)
-        result.base_packages = self.base_packages.union(other.base_packages)
-        result.path = self.path + other.path
-        # FIXME: Deduplicate Env
-        result.env = self.env + other.env
-        result.build_scripts = self.build_scripts + other.build_scripts
-        result.assemble_scripts = (self.assemble_scripts +
-                                   other.assemble_scripts)
-        result.post_build_scripts = (self.post_build_scripts +
-                                     other.post_build_scripts)
+        # FIXME: Temporary hack so we can refactor this piece by piece instead of all at once!
+
+        def _merge_dicts(d1, d2):
+            md = {}
+            md.update(d1)
+            md.update(d2)
+            return md
+
+        result.get_packages = lambda: self.get_packages().union(other.get_packages())
+        result.get_base_packages = lambda: self.get_base_packages().union(other.get_base_packages())
+        result.get_path = lambda: self.get_path() + other.get_path()
+        result.get_env = lambda: self.get_env() + other.get_env()
+        result.get_labels = lambda: _merge_dicts(self.get_labels(), other.get_labels())
+        result.get_build_script_files = lambda: _merge_dicts(self.get_build_script_files(), other.get_build_script_files())
+        result.get_build_scripts = lambda: self.get_build_scripts() + other.get_build_scripts()
+        result.get_assemble_scripts = lambda: self.get_assemble_scripts() + other.get_assemble_scripts()
+        result.get_post_build_scripts = lambda: self.get_post_build_scripts() + other.get_post_build_scripts()

-        build_script_files = {}
-        build_script_files.update(self.build_script_files)
-        build_script_files.update(other.build_script_files)
-        result.build_script_files = build_script_files

         result.name = "{}-{}".format(self.name, other.name)

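The lambda rebinding above composes two buildpacks per accessor: package sets are unioned, env/path/script lists are concatenated in order, and label/file dicts are merged with the right-hand side winning on duplicate keys. A small stand-in sketch (none of these classes are repo2docker's) showing the same merge rules:

# Stand-in objects, not repo2docker classes: sets union, lists concatenate
# in order, dicts merge with the right-hand side winning on duplicate keys.
def _merge_dicts(d1, d2):
    md = {}
    md.update(d1)
    md.update(d2)
    return md


class Pack:
    def __init__(self, packages, env, labels):
        self._packages, self._env, self._labels = packages, env, labels

    def get_packages(self):
        return self._packages

    def get_env(self):
        return self._env

    def get_labels(self):
        return self._labels


def compose(a, b):
    # Mirrors the lambda rebinding above: the composed object's accessors
    # close over both components and combine their results lazily.
    result = Pack(set(), [], {})
    result.get_packages = lambda: a.get_packages().union(b.get_packages())
    result.get_env = lambda: a.get_env() + b.get_env()
    result.get_labels = lambda: _merge_dicts(a.get_labels(), b.get_labels())
    return result


base = Pack({"less"}, [("APP_BASE", "/srv")], {"maintainer": "someone"})
conda = Pack({"less", "bzip2"}, [("CONDA_DIR", "${APP_BASE}/conda")], {"stack": "conda"})
combined = compose(base, conda)
print(combined.get_packages())  # union, no duplicates (set order may vary)
print(combined.get_env())       # base env first, then conda's, order preserved
print(combined.get_labels())    # {'maintainer': 'someone', 'stack': 'conda'}
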
@@ -330,7 +321,7 @@ class BuildPack(LoggingConfigurable):

         build_script_directives = []
         last_user = 'root'
-        for user, script in self.build_scripts:
+        for user, script in self.get_build_scripts():
             if last_user != user:
                 build_script_directives.append("USER {}".format(user))
                 last_user = user
@@ -340,7 +331,7 @@ class BuildPack(LoggingConfigurable):

         assemble_script_directives = []
         last_user = 'root'
-        for user, script in self.assemble_scripts:
+        for user, script in self.get_assemble_scripts():
             if last_user != user:
                 assemble_script_directives.append("USER {}".format(user))
                 last_user = user
@@ -349,15 +340,15 @@ class BuildPack(LoggingConfigurable):
             ))

         return t.render(
-            packages=sorted(self.packages),
-            path=self.path,
-            env=self.env,
-            labels=self.labels,
+            packages=sorted(self.get_packages()),
+            path=self.get_path(),
+            env=self.get_env(),
+            labels=self.get_labels(),
             build_script_directives=build_script_directives,
             assemble_script_directives=assemble_script_directives,
-            build_script_files=self.build_script_files,
-            base_packages=sorted(self.base_packages),
-            post_build_scripts=self.post_build_scripts,
+            build_script_files=self.get_build_script_files(),
+            base_packages=sorted(self.get_base_packages()),
+            post_build_scripts=self.get_post_build_scripts(),
         )

     def build(self, image_spec, memory_limit, build_args):
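Both directive loops above group consecutive (user, script) tuples so that a USER line is emitted only when the user changes, and render() now pulls everything through the get_*() accessors. A rough, self-contained sketch of that grouping, with an illustrative RUN formatting rather than repo2docker's actual Dockerfile template:

# The USER-grouping loop, extracted as a stand-alone sketch; the RUN
# formatting is illustrative, not repo2docker's actual Dockerfile template.
import textwrap


def script_directives(scripts):
    directives = []
    last_user = 'root'
    for user, script in scripts:
        if last_user != user:
            directives.append("USER {}".format(user))
            last_user = user
        directives.append("RUN {}".format(textwrap.dedent(script).strip()))
    return directives


demo = [
    ('root', 'apt-get update'),
    ('root', 'apt-get install --yes less'),
    ('${NB_USER}', 'python3 -m venv ${VENV_PATH}'),
]
print('\n'.join(script_directives(demo)))
# No USER line is emitted for the initial root scripts (root is the
# starting user); one USER line is emitted when switching to ${NB_USER}.
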
@@ -383,7 +374,7 @@ class BuildPack(LoggingConfigurable):
             tar.gid = 1000
             return tar

-        for src in sorted(self.build_script_files):
+        for src in sorted(self.get_build_script_files()):
             src_parts = src.split('/')
             src_path = os.path.join(os.path.dirname(__file__), *src_parts)
             tar.add(src_path, src, filter=_filter_tar)
@@ -419,15 +410,15 @@ class BaseImage(BuildPack):
     name = "repo2docker"
     version = "0.1"

-    env = [
-        ("APP_BASE", "/srv")
-    ]
+    def get_env(self):
+        return [
+            ("APP_BASE", "/srv")
+        ]

     def detect(self):
         return True

-    @default('assemble_scripts')
-    def setup_assembly(self):
+    def get_assemble_scripts(self):
         assemble_scripts = []
         try:
             with open(self.binder_path('apt.txt')) as f:
@@ -458,8 +449,7 @@ class BaseImage(BuildPack):
             pass
         return assemble_scripts

-    @default('post_build_scripts')
-    def setup_post_build_scripts(self):
+    def get_post_build_scripts(self):
         post_build = self.binder_path('postBuild')
         if os.path.exists(post_build):
             if not stat.S_IXUSR & os.stat(post_build).st_mode:

@@ -19,30 +19,32 @@ HERE = os.path.dirname(os.path.abspath(__file__))
 class CondaBuildPack(BuildPack):
     name = "conda"
     version = "0.1"
-    env = [
-        ('CONDA_DIR', '${APP_BASE}/conda'),
-        ('NB_PYTHON_PREFIX', '${CONDA_DIR}'),
-    ]
+    def get_env(self):
+        return [
+            ('CONDA_DIR', '${APP_BASE}/conda'),
+            ('NB_PYTHON_PREFIX', '${CONDA_DIR}'),
+        ]

-    path = ['${CONDA_DIR}/bin']
+    def get_path(self):
+        return ['${CONDA_DIR}/bin']

-    build_scripts = [
-        (
-            "root",
-            r"""
-            bash /tmp/install-miniconda.bash && \
-            rm /tmp/install-miniconda.bash /tmp/environment.yml
-            """
-        )
-    ]
+    def get_build_scripts(self):
+        return [
+            (
+                "root",
+                r"""
+                bash /tmp/install-miniconda.bash && \
+                rm /tmp/install-miniconda.bash /tmp/environment.yml
+                """
+            )
+        ]

     major_pythons = {
         '2': '2.7',
         '3': '3.6',
     }

-    @default('build_script_files')
-    def setup_build_script_files(self):
+    def get_build_script_files(self):
         files = {
             'conda/install-miniconda.bash': '/tmp/install-miniconda.bash',
         }
@@ -68,45 +70,48 @@ class CondaBuildPack(BuildPack):
             files['conda/' + frozen_name] = '/tmp/environment.yml'
         return files

-    python_version = Unicode()
-    @default('python_version')
-    def detect_python_version(self):
-        """Detect the Python version for a given environment.yml
+    @property
+    def python_version(self):
+        """
+        Detect the Python version for a given environment.yml

         Will return 'x.y' if found, or Falsy '' if not.
         """
-        py_version = None
         environment_yml = self.binder_path('environment.yml')
         if not os.path.exists(environment_yml):
             return ''
-        with open(environment_yml) as f:
-            env = YAML().load(f)
-            for dep in env.get('dependencies', []):
-                if not isinstance(dep, str):
-                    continue
-                match = PYTHON_REGEX.match(dep)
-                if not match:
-                    continue
-                py_version = match.group(1)
-                break
-
-        # extract major.minor
-        if py_version:
-            if len(py_version) == 1:
-                return self.major_pythons.get(py_version[0])
+        if not hasattr(self, '_python_version'):
+            py_version = None
+            with open(environment_yml) as f:
+                env = YAML().load(f)
+                for dep in env.get('dependencies', []):
+                    if not isinstance(dep, str):
+                        continue
+                    match = PYTHON_REGEX.match(dep)
+                    if not match:
+                        continue
+                    py_version = match.group(1)
+                    break
+
+            # extract major.minor
+            if py_version:
+                if len(py_version) == 1:
+                    self._python_version = self.major_pythons.get(py_version[0])
+                else:
+                    # return major.minor
+                    self._python_version = '.'.join(py_version[:2])
             else:
-                # return major.minor
-                return '.'.join(py_version[:2])
+                self._python_version = ''

-        return ''
+        return self._python_version

     @property
     def py2(self):
         """Am I building a Python 2 kernel environment?"""
         return self.python_version and self.python_version.split('.')[0] == '2'

-    @default('assemble_scripts')
-    def setup_assembly(self):
+    def get_assemble_scripts(self):
         assembly_scripts = []
         environment_yml = self.binder_path('environment.yml')
         env_name = 'kernel' if self.py2 else 'root'

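The new python_version property computes the version once and caches it on the instance. A stand-in sketch of the same pattern; the regex below is an assumption in place of repo2docker's PYTHON_REGEX (which is defined outside this diff), and it accepts specs like "python=3.6":

# Stand-in sketch of the hasattr-based caching in python_version. The regex
# is an assumption replacing repo2docker's PYTHON_REGEX (defined outside
# this diff); it accepts specs like "python=3.6" and captures the version.
import re

PYTHON_REGEX = re.compile(r'python\s*=+\s*([\d.]+)')
MAJOR_PYTHONS = {'2': '2.7', '3': '3.6'}


class EnvironmentSpec:
    def __init__(self, dependencies):
        self.dependencies = dependencies

    @property
    def python_version(self):
        if not hasattr(self, '_python_version'):
            py_version = None
            for dep in self.dependencies:
                if not isinstance(dep, str):
                    continue
                match = PYTHON_REGEX.match(dep)
                if match:
                    py_version = match.group(1)
                    break
            if py_version:
                if len(py_version) == 1:
                    # bare major version: map "3" -> "3.6", "2" -> "2.7"
                    self._python_version = MAJOR_PYTHONS.get(py_version[0])
                else:
                    # normalize to major.minor
                    self._python_version = '.'.join(py_version.split('.')[:2])
            else:
                self._python_version = ''
        return self._python_version


print(EnvironmentSpec(['numpy', 'python=3']).python_version)     # 3.6
print(EnvironmentSpec(['python=2.7.13', 'pip']).python_version)  # 2.7
print(EnvironmentSpec(['r-base']).python_version)                # '' (falsy)
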
@@ -9,46 +9,47 @@ from .base import BuildPack
 class JuliaBuildPack(BuildPack):
     name = "julia"
     version = "0.1"
-    env = [
-        ('JULIA_PATH', '${APP_BASE}/julia'),
-        ('JULIA_HOME', '${JULIA_PATH}/bin'),
-        ('JULIA_PKGDIR', '${JULIA_PATH}/pkg'),
-        ('JULIA_VERSION', '0.6.0'),
-        ('JUPYTER', '${NB_PYTHON_PREFIX}/bin/jupyter')
-    ]
-
-    path = [
-        '${JULIA_PATH}/bin'
-    ]
+    def get_env(self):
+        return [
+            ('JULIA_PATH', '${APP_BASE}/julia'),
+            ('JULIA_HOME', '${JULIA_PATH}/bin'),
+            ('JULIA_PKGDIR', '${JULIA_PATH}/pkg'),
+            ('JULIA_VERSION', '0.6.0'),
+            ('JUPYTER', '${NB_PYTHON_PREFIX}/bin/jupyter')
+        ]

-    build_scripts = [
-        (
-            "root",
-            r"""
-            mkdir -p ${JULIA_PATH} && \
-            curl -sSL "https://julialang-s3.julialang.org/bin/linux/x64/${JULIA_VERSION%[.-]*}/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | tar -xz -C ${JULIA_PATH} --strip-components 1
-            """
-        ),
-        (
-            "root",
-            r"""
-            mkdir -p ${JULIA_PKGDIR} && \
-            chown ${NB_USER}:${NB_USER} ${JULIA_PKGDIR}
-            """
-        ),
-        (
-            "${NB_USER}",
-            # HACK: Can't seem to tell IJulia to install in sys-prefix
-            # FIXME: Find way to get it to install under /srv and not $HOME?
-            r"""
-            julia -e 'Pkg.init(); Pkg.add("IJulia"); using IJulia;' && \
-            mv ${HOME}/.local/share/jupyter/kernels/julia-0.6 ${NB_PYTHON_PREFIX}/share/jupyter/kernels/julia-0.6
-            """
-        )
-    ]
+    def get_path(self):
+        return ['${JULIA_PATH}/bin']

-    @default('assemble_scripts')
-    def setup_assembly(self):
+    def get_build_scripts(self):
+        return [
+            (
+                "root",
+                r"""
+                mkdir -p ${JULIA_PATH} && \
+                curl -sSL "https://julialang-s3.julialang.org/bin/linux/x64/${JULIA_VERSION%[.-]*}/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | tar -xz -C ${JULIA_PATH} --strip-components 1
+                """
+            ),
+            (
+                "root",
+                r"""
+                mkdir -p ${JULIA_PKGDIR} && \
+                chown ${NB_USER}:${NB_USER} ${JULIA_PKGDIR}
+                """
+            ),
+            (
+                "${NB_USER}",
+                # HACK: Can't seem to tell IJulia to install in sys-prefix
+                # FIXME: Find way to get it to install under /srv and not $HOME?
+                r"""
+                julia -e 'Pkg.init(); Pkg.add("IJulia"); using IJulia;' && \
+                mv ${HOME}/.local/share/jupyter/kernels/julia-0.6 ${NB_PYTHON_PREFIX}/share/jupyter/kernels/julia-0.6
+                """
+            )
+        ]
+
+    def get_assemble_scripts(self):
         require = self.binder_path('REQUIRE')
         return [(
             "${NB_USER}",

@@ -10,53 +10,57 @@ class PythonBuildPack(BuildPack):
     name = "python3.5"
     version = "0.1"

-    packages = {
-        'python3',
-        'python3-venv',
-        'python3-dev',
-    }
+    def get_packages(self):
+        return {
+            'python3',
+            'python3-venv',
+            'python3-dev',
+        }

-    env = [
-        ("VENV_PATH", "${APP_BASE}/venv"),
-        # Prefix to use for installing kernels and finding jupyter binary
-        ("NB_PYTHON_PREFIX", "${VENV_PATH}"),
-    ]
+    def get_env(self):
+        return [
+            ("VENV_PATH", "${APP_BASE}/venv"),
+            # Prefix to use for installing kernels and finding jupyter binary
+            ("NB_PYTHON_PREFIX", "${VENV_PATH}"),
+        ]

-    path = [
-        "${VENV_PATH}/bin"
-    ]
+    def get_path(self):
+        return [
+            "${VENV_PATH}/bin"
+        ]


-    build_script_files = {
-        'python/requirements.frozen.txt': '/tmp/requirements.frozen.txt',
-    }
+    def get_build_script_files(self):
+        return {
+            'python/requirements.frozen.txt': '/tmp/requirements.frozen.txt',
+        }

-    build_scripts = [
-        (
-            "root",
-            r"""
-            mkdir -p ${VENV_PATH} && \
-            chown -R ${NB_USER}:${NB_USER} ${VENV_PATH}
-            """
-        ),
-        (
-            "${NB_USER}",
-            r"""
-            python3 -m venv ${VENV_PATH}
-            """
-        ),
-        (
-            "${NB_USER}",
-            r"""
-            pip install --no-cache-dir -r /tmp/requirements.frozen.txt && \
-            jupyter nbextension enable --py widgetsnbextension --sys-prefix && \
-            jupyter serverextension enable --py jupyterlab --sys-prefix
-            """
-        )
-    ]
+    def get_build_scripts(self):
+        return [
+            (
+                "root",
+                r"""
+                mkdir -p ${VENV_PATH} && \
+                chown -R ${NB_USER}:${NB_USER} ${VENV_PATH}
+                """
+            ),
+            (
+                "${NB_USER}",
+                r"""
+                python3 -m venv ${VENV_PATH}
+                """
+            ),
+            (
+                "${NB_USER}",
+                r"""
+                pip install --no-cache-dir -r /tmp/requirements.frozen.txt && \
+                jupyter nbextension enable --py widgetsnbextension --sys-prefix && \
+                jupyter serverextension enable --py jupyterlab --sys-prefix
+                """
+            )
+        ]

-    @default('assemble_scripts')
-    def setup_assembly(self):
+    def get_assemble_scripts(self):
         # If we have a runtime.txt & that's set to python-2.7,
         # we will *not* install requirements.txt but will find &
         # install a requirements3.txt file if it exists.
@@ -88,49 +92,54 @@ class Python2BuildPack(BuildPack):
     name = "python2.7"
     version = "0.1"

-    packages = {
+    def get_packages(self):
+        return {
         'python',
         'python-dev',
         'virtualenv'
     }

-    build_script_files = {
-        'python/requirements2.frozen.txt': '/tmp/requirements2.frozen.txt',
-    }

-    env = [
-        ('VENV2_PATH', '${APP_BASE}/venv2')
-    ]
+    def get_env(self):
+        return [
+            ('VENV2_PATH', '${APP_BASE}/venv2')
+        ]

-    path = [
-        "${VENV2_PATH}/bin"
-    ]
+    def get_path(self):
+        return [
+            "${VENV2_PATH}/bin"
+        ]

-    build_scripts = [
-        (
-            "root",
-            r"""
-            mkdir -p ${VENV2_PATH} && \
-            chown -R ${NB_USER}:${NB_USER} ${VENV2_PATH}
-            """
-        ),
-        (
-            "${NB_USER}",
-            r"""
-            virtualenv -p python2 ${VENV2_PATH}
-            """
-        ),
-        (
-            "${NB_USER}",
-            r"""
-            pip2 install --no-cache-dir -r /tmp/requirements2.frozen.txt && \
-            python2 -m ipykernel install --prefix=${NB_PYTHON_PREFIX}
-            """
-        )
-    ]
+    def get_build_script_files(self):
+        return {
+            'python/requirements2.frozen.txt': '/tmp/requirements2.frozen.txt',
+        }

-    @default('assemble_scripts')
-    def setup_assembly(self):
+    def get_build_scripts(self):
+        return [
+            (
+                "root",
+                r"""
+                mkdir -p ${VENV2_PATH} && \
+                chown -R ${NB_USER}:${NB_USER} ${VENV2_PATH}
+                """
+            ),
+            (
+                "${NB_USER}",
+                r"""
+                virtualenv -p python2 ${VENV2_PATH}
+                """
+            ),
+            (
+                "${NB_USER}",
+                r"""
+                pip2 install --no-cache-dir -r /tmp/requirements2.frozen.txt && \
+                python2 -m ipykernel install --prefix=${NB_PYTHON_PREFIX}
+                """
+            )
+        ]
+
+    def get_assemble_scripts(self):
         return [
             (
                 '${NB_USER}',

@@ -1,5 +1,5 @@
 #!/bin/bash
-set -euo pipefail
+set -exuo pipefail
 which gfortran
 which unp
 which byacc