"""
Test that build time memory limits are actually enforced.

We give the container image at least 128M of RAM (so base things like
apt and pip can run), and then try to allocate & use 256MB in postBuild.
This should fail!
"""
|
2018-12-17 12:11:45 +00:00
|
|
|
|
2017-12-01 04:09:14 +00:00
|
|
|
import os
|
2018-12-17 12:11:45 +00:00
|
|
|
import shutil
|
2017-12-01 04:09:14 +00:00
|
|
|
import time
|
|
|
|
|
2018-12-17 12:11:45 +00:00
|
|
|
import pytest
|
|
|
|
|
|
|
|
from repo2docker.app import Repo2Docker
|
|
|
|
|
|
|
|
|
|
|
|
# Directory two levels above this file; the repos used by these tests
# are looked up under <basedir>/memlimit/ (see test parametrization below).
basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
|
|
|
|
|
|
|
|
|
|
|
def does_build(tmpdir, build_src_dir, mem_limit, mem_allocate_mb):
    """
    Attempt a repo2docker build of build_src_dir under a memory limit.

    Copies the repo at build_src_dir into a fresh build directory under
    tmpdir, writes the requested allocation size into a file that the
    repo's postBuild script reads, then runs the build with
    ``build_memory_limit`` set.

    Parameters:
        tmpdir: pytest ``tmpdir`` fixture (a ``py.path.local``)
        build_src_dir: path of the repository to copy and build
        mem_limit: build memory limit, in MB
        mem_allocate_mb: MB the repo's postBuild script should try to allocate

    Returns True if the build succeeded, False if it raised any exception.
    """
    builddir = tmpdir.join('build')
    shutil.copytree(build_src_dir, builddir)
    # Run the build with builddir as cwd (the build picks up the repo from
    # the current directory), but restore the previous cwd on exit so this
    # helper doesn't leak a changed working directory into other tests.
    with builddir.as_cwd():
        print(os.getcwd(), os.listdir('.'))
        mem_allocate_mb_file = os.path.join(builddir, 'mem_allocate_mb')

        # Cache bust so we actually do a rebuild each time this is run!
        with builddir.join('cachebust').open('w') as cachebust:
            cachebust.write(str(time.time()))

        # we don't have an easy way to pass env vars or whatever to
        # postBuild from here, so we write a file into the repo that is
        # read by the postBuild script!
        with open(mem_allocate_mb_file, 'w') as f:
            f.write(str(mem_allocate_mb))

        r2d = Repo2Docker(build_memory_limit=str(mem_limit) + 'M')
        r2d.initialize()
        try:
            r2d.build()
        except Exception:
            # Any failure during the build counts as "did not build";
            # the memory-limited builds are expected to land here.
            return False
        else:
            return True
|
2017-12-01 04:09:14 +00:00
|
|
|
|
|
|
|
|
2018-12-17 12:11:45 +00:00
|
|
|
@pytest.mark.parametrize(
    'test, mem_limit, mem_allocate_mb, expected',
    [
        ('dockerfile', 128, 256, False),
        ('dockerfile', 512, 256, True),
        ('non-dockerfile', 128, 256, False),
        ('non-dockerfile', 512, 256, True),
    ]
)
def test_memlimit_nondockerfile(tmpdir, test, mem_limit, mem_allocate_mb, expected):
    """
    Verify that build memory limits are enforced for both kinds of builds.

    A build allocating more than its limit must fail; one allocating
    less must succeed.
    """
    build_src_dir = os.path.join(basedir, 'memlimit', test)
    assert does_build(tmpdir, build_src_dir, mem_limit, mem_allocate_mb) == expected
|
2017-12-01 04:09:14 +00:00
|
|
|
|
2017-12-01 08:41:50 +00:00
|
|
|
|
|
|
|
def test_memlimit_same_postbuild():
    """
    Validate that the postBuild files for dockerfile & nondockerfile are same

    Until https://github.com/jupyter/repo2docker/issues/160 gets fixed.
    """
    contents = set()
    for flavor in ("dockerfile", "non-dockerfile"):
        postbuild_path = os.path.join(basedir, 'memlimit', flavor, "postBuild")
        with open(postbuild_path) as f:
            contents.add(f.read())
    # A set collapses identical contents, so exactly one distinct
    # entry means the scripts are byte-for-byte the same.
    assert len(contents) == 1
|