Mirror of https://github.com/jupyterhub/repo2docker
Merge pull request #1202 from consideRatio/pr/enable-pyupgrade
pre-commit: add pyupgrade, isort, and prettier for .md files
commit b6098c02f3
@@ -1,23 +1,27 @@
 ---
 name: Bug report
 about: Create a report to help us repair something that is currently broken
-title: ''
-labels: ''
-assignees: ''
+title: ""
+labels: ""
+assignees: ""
 ---

 <!-- Thank you for contributing. These HTML comments will not render in the issue, but you can delete them once you've read them if you prefer! -->

 ### Bug description

 <!-- Use this section to clearly and concisely describe the bug. -->

 #### Expected behaviour

 <!-- Tell us what you thought would happen. -->

 #### Actual behaviour

 <!-- Tell us what actually happens. -->

 ### How to reproduce

 <!-- Use this section to describe the steps that a user would take to experience this bug. -->

 1. Go to '...'

@@ -26,9 +30,9 @@ assignees: ''
 4. See error

 ### Your personal set up

 <!-- Tell us a little about the system you're using. You can see the guidelines for setting up and reporting this information at https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#setting-up-for-local-development. -->

-- OS: [e.g. linux, OSX]
-- Docker version: `docker version` <!-- Run this command to get your version. -->
-- repo2docker version `repo2docker --version` <!-- Run this command to get your version. -->
+- OS: [e.g. linux, OSX]
+- Docker version: `docker version` <!-- Run this command to get your version. -->
+- repo2docker version `repo2docker --version` <!-- Run this command to get your version. -->
@@ -1,29 +1,29 @@
 ---
 name: Feature request
 about: Suggest a new feature or a big change to repo2docker
-title: ''
-labels: 'needs: discussion'
-assignees: ''
+title: ""
+labels: "needs: discussion"
+assignees: ""
 ---

 <!-- Thank you for contributing. These HTML comments will not render in the issue, but you can delete them once you've read them if you prefer! -->

 ### Proposed change

 <!-- Use this section to describe the feature you'd like to be added. -->

 ### Alternative options

 <!-- Use this section to describe alternative options and why you've decided on the proposed feature above. -->

 ### Who would use this feature?

 <!-- Describe the audience for this feature. This information will affect who chooses to work on the feature with you. -->

 ### How much effort will adding it take?

 <!-- Try to estimate how much work adding this feature will require. This information will affect who chooses to work on the feature with you. -->

 ### Who can do this work?
-<!-- What skills are needed? Who can be recruited to add this feature? This information will affect who chooses to work on the feature with you. -->
+<!-- What skills are needed? Who can be recruited to add this feature? This information will affect who chooses to work on the feature with you. -->
@@ -1,10 +1,9 @@
 ---
 name: Support question
 about: Ask a question about using repo2docker
-title: ''
-labels: ''
-assignees: ''
+title: ""
+labels: ""
+assignees: ""
 ---

 🚨 Please do **not** open an issue for support questions. Instead please search for similar issues or post on http://discourse.jupyter.org/c/questions. 🚨
@@ -9,13 +9,17 @@
 # - Register git hooks: pre-commit install --install-hooks
 #
 repos:
-  # # Autoformat: Python code, syntax patterns are modernized
-  # - repo: https://github.com/asottile/pyupgrade
-  #   rev: v3.0.0
-  #   hooks:
-  #     - id: pyupgrade
-  #       args:
-  #         - --py38-plus
+  # Autoformat: Python code, syntax patterns are modernized
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.2.0
+    hooks:
+      - id: pyupgrade
+        args:
+          - --py36-plus
+        # check-tmp is a Python based test script run in created environments
+        # that can be at least Python 3.5 even though we require Python 3.6 for
+        # repo2docker itself.
+        exclude: check-tmp

   # Autoformat: Python code
   - repo: https://github.com/psf/black

@@ -28,17 +32,19 @@ repos:
           - --target-version=py38
           - --target-version=py39
           - --target-version=py310
           - --target-version=py311

-  # # Autoformat: Python code
-  # - repo: https://github.com/pycqa/isort
-  #   rev: 5.10.1
-  #   hooks:
-  #     - id: isort
-  #       args:
-  #         - --profile=black
+  # Autoformat: Python code
+  - repo: https://github.com/pycqa/isort
+    rev: 5.10.1
+    hooks:
+      - id: isort
+        args:
+          - --profile=black

-  # # Autoformat: markdown, yaml (but not helm templates)
-  # - repo: https://github.com/pre-commit/mirrors-prettier
-  #   rev: v2.7.1
-  #   hooks:
-  #     - id: prettier
+  # Autoformat: markdown
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: v2.7.1
+    hooks:
+      - id: prettier
+        files: ".md"
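For reference, the newly enabled `pyupgrade` hook rewrites older Python idioms into the modern forms that make up most of the code changes below. A minimal before/after sketch (illustrative snippets, not lines from this repository):

```python
# Typical pyupgrade --py36-plus rewrites, in miniature.
name = "repo2docker"

msg = "hello {}".format(name)  # becomes: msg = f"hello {name}"
refs = set([r for r in "abc"])  # becomes: refs = {r for r in "abc"}
f = open("README.md", "r")  # becomes: f = open("README.md")
f.close()
```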
@@ -8,19 +8,20 @@ The repo2docker developer documentation is all rendered on our documentation web
 If you're here, you're probably looking for the [Contributing to repo2docker development](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html) page.

 Please make sure you've read the following sections before opening an issue/pull request:
-* [Process for making a contribution](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#process-for-making-a-contribution).
-  * These steps talk you through choosing the right issue template (bug report or feature request) and making a change.
-* [Guidelines to getting a Pull Request merged](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#guidelines-to-getting-a-pull-request-merged).
-  * These are tips and tricks to help make your contribution as smooth as possible for you and for the repo2docker maintenance team.
+
+- [Process for making a contribution](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#process-for-making-a-contribution).
+  - These steps talk you through choosing the right issue template (bug report or feature request) and making a change.
+- [Guidelines to getting a Pull Request merged](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#guidelines-to-getting-a-pull-request-merged).
+  - These are tips and tricks to help make your contribution as smooth as possible for you and for the repo2docker maintenance team.

 There are a few other pages to highlight:

-* [Our roadmap](https://repo2docker.readthedocs.io/en/latest/contributing/roadmap.html)
-  * We use the roadmap to develop a shared understanding of the project's vision and direction amongst the community of users, contributors, and maintainers.
+- [Our roadmap](https://repo2docker.readthedocs.io/en/latest/contributing/roadmap.html)
+  - We use the roadmap to develop a shared understanding of the project's vision and direction amongst the community of users, contributors, and maintainers.
    This is a great place to get a feel for what the maintainers are thinking about for the short, medium, and long term future of the project.
-* [Design of repo2docker](https://repo2docker.readthedocs.io/en/latest/design.html)
-  * This page explains some of the design principles behind repo2docker.
+- [Design of repo2docker](https://repo2docker.readthedocs.io/en/latest/design.html)
+  - This page explains some of the design principles behind repo2docker.
    It's a good place to understand _why_ the team has made the decisions that they have along the way!
-  * We absolutely encourage discussion around refactoring, updating or extending repo2docker, but please make sure that you've understood this page before opening an issue to discuss the change you'd like to propose.
-* [Common developer tasks and how-tos](https://repo2docker.readthedocs.io/en/latest/contributing/tasks.html)
-  * Some notes on running tests, buildpack dependencies, creating a release, and keeping the pip files up to date.
+  - We absolutely encourage discussion around refactoring, updating or extending repo2docker, but please make sure that you've understood this page before opening an issue to discuss the change you'd like to propose.
+- [Common developer tasks and how-tos](https://repo2docker.readthedocs.io/en/latest/contributing/tasks.html)
+  - Some notes on running tests, buildpack dependencies, creating a release, and keeping the pip files up to date.
@@ -31,6 +31,7 @@ For more information, please visit
 ---

 ## Using repo2docker
+
 ### Prerequisites

 1. Docker to build & run the repositories. The [community edition](https://store.docker.com/search?type=edition&offering=community)

@@ -83,21 +84,19 @@ something like:
 If you copy paste that URL into your browser you will see a Jupyter Notebook
 with the contents of the repository you had just built!

-For more information on how to use ``repo2docker``, see the
+For more information on how to use `repo2docker`, see the
 [usage guide](http://repo2docker.readthedocs.io/en/latest/usage.html).

 ## Repository specifications

 Repo2Docker looks for configuration files in the source repository to
 determine how the Docker image should be built. For a list of the configuration
-files that ``repo2docker`` can use, see the
+files that `repo2docker` can use, see the
 [complete list of configuration files](https://repo2docker.readthedocs.io/en/latest/config_files.html).

 The philosophy of repo2docker is inspired by
 [Heroku Build Packs](https://devcenter.heroku.com/articles/buildpacks).

 ## Docker Image

 Repo2Docker can be run inside a Docker container if access to the Docker Daemon is provided, for example see [BinderHub](https://github.com/jupyterhub/binderhub). Docker images are [published to quay.io](https://quay.io/repository/jupyterhub/repo2docker?tab=tags). The old [Docker Hub image](https://hub.docker.com/r/jupyter/repo2docker) is no longer supported.
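Beyond the CLI, repo2docker can also be driven from Python. A minimal sketch, assuming `make_r2d` (defined in `repo2docker/__main__.py` later in this diff) and the application's `start()` behave like the CLI path; the `--no-run` flag and `output_image_spec` attribute do appear in this diff, but treat the snippet as illustrative rather than a documented API:

```python
# A minimal sketch of building an image from Python instead of the CLI.
# make_r2d() parses CLI-style arguments into a configured Repo2Docker app;
# --no-run is assumed here to mean "build the image but don't start it".
from repo2docker.__main__ import make_r2d

r2d = make_r2d(["--no-run", "https://github.com/you/your-repo"])
r2d.start()  # picks a content provider and buildpack, then builds
print(r2d.output_image_spec)  # the tag of the image that was built
```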
@@ -4,6 +4,7 @@ This is a living document talking about the architecture of repo2docker
 from various perspectives.

+(buildpacks)=

 ## Buildpacks

 The **buildpack** concept comes from [Heroku](https://devcenter.heroku.com/articles/buildpacks)

@@ -57,7 +58,7 @@ and basic notebook packages (from `repo2docker/buildpacks/conda/environment.yml`
 to be the same for most repositories built with `CondaBuildPack`, so we want to use
 [docker layer caching](https://thenewstack.io/understanding-the-docker-cache-for-faster-builds/) as
 much as possible for performance reasons. Next time a repository is built with `CondaBuildPack`,
-we can skip straight to the **copy** step (since the base environment docker image *layers* have
+we can skip straight to the **copy** step (since the base environment docker image _layers_ have
 already been built and cached).

 The `get_build_scripts` and `get_build_script_files` methods are primarily used for this.

@@ -65,11 +66,11 @@ The `get_build_scripts` and `get_build_script_files` methods are primarily used
 and `get_build_script_files` is used to copy specific scripts (such as a conda installer) into
 the image to be run as part of `get_build_scripts`. Code in either has the following constraints:

-1. You can *not* use the contents of the repository in them, since this happens before the repository
+1. You can _not_ use the contents of the repository in them, since this happens before the repository
    is copied into the image. For example, `pip install -r requirements.txt` will not work,
    since there's no `requirements.txt` inside the image at this point. This is an explicit
    design decision, to enable better layer caching.
-2. You *may*, however, read the contents of the repository and modify the scripts emitted based
+2. You _may_, however, read the contents of the repository and modify the scripts emitted based
    on that! For example, in `CondaBuildPack`, if there's Python 2 specified in `environment.yml`,
    a different kind of environment is set up. The reading of the `environment.yml` is performed
    in the BuildPack itself, and not in the scripts returned by `get_build_scripts`. This is fine.
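To make the `get_build_scripts` / `get_assemble_scripts` split concrete, here is a minimal sketch of a custom buildpack. The `(user, script)` tuple contract matches the loops in `repo2docker/buildpacks/base.py` shown later in this diff; the `FortranBuildPack` itself and its marker file are hypothetical:

```python
# Hypothetical buildpack illustrating the (user, script) contract and the
# build/assemble split described above.
import os

from repo2docker.buildpacks.base import BaseImage


class FortranBuildPack(BaseImage):  # hypothetical example, not in repo2docker
    def detect(self):
        # Claim the repository when a (made-up) marker file is present.
        return os.path.exists("fortran-requirements.txt")

    def get_build_scripts(self):
        # Runs before the repository is copied in, so it must not read
        # repository contents (constraint 1 above) -- good for layer caching.
        return super().get_build_scripts() + [
            ("root", "apt-get update && apt-get install --yes gfortran"),
        ]

    def get_assemble_scripts(self):
        # Runs after the repository is copied in, so per-repository files
        # such as fortran-requirements.txt exist by now.
        return super().get_assemble_scripts() + [
            ("${NB_USER}", "gfortran --version"),
        ]
```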
@@ -118,7 +119,7 @@ a path to a repository. This might be a local path or a URL. Upon being called,
 `repo2docker` will loop through all ContentProviders and perform the following
 commands:

-* Run the `detect()` method on the repository path given to `repo2docker`. This
+- Run the `detect()` method on the repository path given to `repo2docker`. This
   should return any value other than `None` if the path matches what the ContentProvider is looking
   for.

@@ -126,12 +127,11 @@ commands:
 > checks whether the argument is a valid local path. If so, then `detect()`
 > returns a dictionary: `{'path': source}` which defines the path to the repository.
 > This path is used by `fetch()` to check that it matches the output directory.
-* If `detect()` returns something other than `None`, run `fetch()` with the
+
+- If `detect()` returns something other than `None`, run `fetch()` with the
   returned value as its argument. This should
   result in the contents of the repository being placed locally in a folder.

 For more information on ContentProviders, take a look at
 [the ContentProvider base class](https://github.com/jupyterhub/repo2docker/blob/80b979f8580ddef184d2ba7d354e7a833cfa38a4/repo2docker/contentproviders/base.py#L16-L60)
 which has more explanation.
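A minimal sketch of that `detect()`/`fetch()` contract. The `detect(url, ref=ref)` call shape matches `app.py` later in this diff; the ZIP provider below is hypothetical (it happens to match the roadmap's "ZIP files as the repo" idea) and the `fetch` signature is an assumption based on the base class linked above:

```python
# Hypothetical ContentProvider showing the detect()/fetch() contract.
import zipfile
from urllib.request import urlretrieve

from repo2docker.contentproviders.base import ContentProvider


class ZipContentProvider(ContentProvider):  # hypothetical, for illustration
    def detect(self, source, ref=None, extra_args=None):
        # Return a non-None "spec" when this provider recognises the source;
        # returning None lets repo2docker try the next provider in the list.
        if source.endswith(".zip"):
            return {"url": source}
        return None

    def fetch(self, spec, output_dir, yield_output=False):
        # Place the repository contents into output_dir, yielding log lines.
        archive, _ = urlretrieve(spec["url"])
        with zipfile.ZipFile(archive) as zf:
            zf.extractall(output_dir)
        yield f"Extracted {spec['url']} into {output_dir}\n"
```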
@@ -4,7 +4,6 @@
 #
 import datetime

-
 # -- Project information -----------------------------------------------------
 # ref: https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
 #

@@ -61,12 +60,10 @@ from repo2docker.buildpacks.conda import CondaBuildPack

 default_python = CondaBuildPack.major_pythons["3"]

-rst_prolog = """
+rst_prolog = f"""
 .. |default_python| replace:: **Python {default_python}**
 .. |default_python_version| replace:: {default_python}
-""".format(
-    default_python=default_python
-)
+"""

 # -- Options for HTML output -------------------------------------------------
@@ -2,33 +2,33 @@

 Thank you for thinking about contributing to repo2docker!
 This is an open source project that is developed and maintained entirely by volunteers.
-*Your contribution* is integral to the future of the project.
+_Your contribution_ is integral to the future of the project.
 THANK YOU!

 ## Types of contribution

 There are many ways to contribute to repo2docker:

-* **Update the documentation.**
+- **Update the documentation.**
   If you're reading a page or docstring and it doesn't make sense (or doesn't exist!), please let us know by opening a bug report.
   It's even more amazing if you can give us a suggested change.
-* **Fix bugs or add requested features.**
+- **Fix bugs or add requested features.**
   Have a look through the [issue tracker](https://github.com/jupyterhub/repo2docker/issues) and see if there are any tagged as ["help wanted"](https://github.com/jupyterhub/repo2docker/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22).
   As the label suggests, we'd love your help!
-* **Report a bug.**
+- **Report a bug.**
   If repo2docker isn't doing what you thought it would do then open a [bug report](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md).
   That issue template will ask you a few questions described in more detail below.
-* **Suggest a new feature.**
+- **Suggest a new feature.**
   We know that there are lots of ways to extend repo2docker!
   If you're interested in adding a feature then please open a [feature request](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md).
   That issue template will ask you a few questions described in detail below.
-* **Review someone's Pull Request.**
+- **Review someone's Pull Request.**
   Whenever somebody proposes changes to the repo2docker codebase, the community reviews
   the changes, and provides feedback, edits, and suggestions. Check out the
   [open pull requests](https://github.com/jupyterhub/repo2docker/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-desc)
   and provide feedback that helps improve the PR and get it merged. Please keep your
   feedback positive and constructive!
-* **Tell people about repo2docker.**
+- **Tell people about repo2docker.**
   As we said above, repo2docker is built by and for its community.
   If you know anyone who would like to use repo2docker, please tell them about the project!
   You could give a talk about it, or run a demonstration.

@@ -42,31 +42,31 @@ This outlines the process for getting changes to the repo2docker project merged.

 1. Identify the correct issue template: [bug report](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md) or [feature request](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md).

-   **Bug reports** ([examples](https://github.com/jupyterhub/repo2docker/issues?q=is%3Aissue+is%3Aopen+label%3Abug), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md)) will ask you for a description of the problem, the expected behaviour, the actual behaviour, how to reproduce the problem, and your personal set up.
-   Bugs can include problems with the documentation, or code not running as expected.
+   **Bug reports** ([examples](https://github.com/jupyterhub/repo2docker/issues?q=is%3Aissue+is%3Aopen+label%3Abug), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md)) will ask you for a description of the problem, the expected behaviour, the actual behaviour, how to reproduce the problem, and your personal set up.
+   Bugs can include problems with the documentation, or code not running as expected.

-   It is really important that you make it easy for the maintainers to reproduce the problem you're having.
-   This guide on creating a [minimal, complete and verifiable example](https://stackoverflow.com/help/mcve) is a great place to start.
+   It is really important that you make it easy for the maintainers to reproduce the problem you're having.
+   This guide on creating a [minimal, complete and verifiable example](https://stackoverflow.com/help/mcve) is a great place to start.

-   **Feature requests** ([examples](https://github.com/jupyterhub/repo2docker/labels/needs%3A%20discussion), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md)) will ask you for the proposed change, any alternatives that you have considered, a description of who would use this feature, and a best-guess of how much work it will take and what skills are required to accomplish.
+   **Feature requests** ([examples](https://github.com/jupyterhub/repo2docker/labels/needs%3A%20discussion), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md)) will ask you for the proposed change, any alternatives that you have considered, a description of who would use this feature, and a best-guess of how much work it will take and what skills are required to accomplish.

-   Very easy feature requests might be updates to the documentation to clarify steps for new users.
-   Harder feature requests may be to add new functionality to the project and will need more in depth discussion about who can complete and maintain the work.
+   Very easy feature requests might be updates to the documentation to clarify steps for new users.
+   Harder feature requests may be to add new functionality to the project and will need more in depth discussion about who can complete and maintain the work.

-   Feature requests are a great opportunity for you to advocate for the use case you're suggesting.
-   They help others understand how much effort it would be to integrate the work, and, if you're successful at convincing them that this effort is worth it, make it more likely that they choose to work on it with you.
+   Feature requests are a great opportunity for you to advocate for the use case you're suggesting.
+   They help others understand how much effort it would be to integrate the work, and, if you're successful at convincing them that this effort is worth it, make it more likely that they choose to work on it with you.

 2. Open an issue.
-   Getting consensus with the community is a great way to save time later.
+   Getting consensus with the community is a great way to save time later.
 3. Make edits in [your fork](https://help.github.com/en/articles/fork-a-repo) of the [repo2docker repository](https://github.com/jupyterhub/repo2docker).
 4. Make a [pull request](https://help.github.com/en/articles/about-pull-requests).
-   Read the [next section](guidelines-to-getting-a-pull-request-merged) for guidelines for both reviewers and contributors on merging a PR.
-6. Wait for a community member to merge your changes.
-   Remember that **someone else must merge your pull request**.
-   That goes for new contributors and long term maintainers alike.
-   Because `main` is continuously deployed to mybinder.org it is essential
-   that `main` is always in a deployable state.
-7. (optional) Deploy a new version of repo2docker to mybinder.org by [following these steps](http://mybinder-sre.readthedocs.io/en/latest/deployment/how.html)
+   Read the [next section](guidelines-to-getting-a-pull-request-merged) for guidelines for both reviewers and contributors on merging a PR.
+5. Wait for a community member to merge your changes.
+   Remember that **someone else must merge your pull request**.
+   That goes for new contributors and long term maintainers alike.
+   Because `main` is continuously deployed to mybinder.org it is essential
+   that `main` is always in a deployable state.
+6. (optional) Deploy a new version of repo2docker to mybinder.org by [following these steps](http://mybinder-sre.readthedocs.io/en/latest/deployment/how.html)

 (guidelines-to-getting-a-pull-request-merged)=

@@ -74,26 +74,27 @@ Read the [next section](guidelines-to-getting-a-pull-request-merged) for guideli

 These are not hard rules to be enforced by 🚓 but they are suggestions written by the repo2docker maintainers to help complete your contribution as smoothly as possible for both you and for them.

-* **Create a PR as early as possible**, marking it with `[WIP]` while you work on it.
+- **Create a PR as early as possible**, marking it with `[WIP]` while you work on it.
   This avoids duplicated work, lets you get high level feedback on functionality or API changes, and/or helps find collaborators to work with you.
-* **Keep your PR focused.**
+- **Keep your PR focused.**
   The best PRs solve one problem.
   If you end up changing multiple things, please open separate PRs for the different conceptual changes.
-* **Add tests to your code.**
+- **Add tests to your code.**
   PRs will not be merged if Travis is failing.
-* **Apply [PEP8](https://www.python.org/dev/peps/pep-0008/)** as much as possible, but not too much.
+- **Apply [PEP8](https://www.python.org/dev/peps/pep-0008/)** as much as possible, but not too much.
   If in doubt, ask.
-* **Use merge commits** instead of merge-by-squashing/-rebasing.
+- **Use merge commits** instead of merge-by-squashing/-rebasing.
   This makes it easier to find all changes since the last deployment `git log --merges --pretty=format:"%h %<(10,trunc)%an %<(15)%ar %s" <deployed-revision>..` and your PR easier to review.
-* **Make it clear when your PR is ready for review.**
+- **Make it clear when your PR is ready for review.**
   Prefix the title of your pull request (PR) with `[MRG]` if the contribution is complete and should be subjected to a detailed review.
-* **Use commit messages to describe _why_ you are proposing the changes you are proposing.**
-* **Try to not rush changes** (the definition of rush depends on how big your changes are).
+- **Use commit messages to describe _why_ you are proposing the changes you are proposing.**
+- **Try to not rush changes** (the definition of rush depends on how big your changes are).
   Remember that everyone in the repo2docker team is a volunteer and we can not (nor would we want to) control their time or interests.
   Wait patiently for a reviewer to merge the PR.
   (Remember that **someone else** must merge your PR, even if you have the admin rights to do so.)

 (contributing:local-dev)=

 ## Setting up for Local Development

 To develop & test repo2docker locally, you need:

@@ -149,7 +150,6 @@ according to black's style guide. You can activate it with `pre-commit install`.
 As part of our continuous integration tests we will check that code is
 formatted properly and the tests will fail if this is not the case.

-
 ### Verify that docker is installed and running

 If you do not already have [Docker](https://www.docker.com/), you should be able
@@ -7,6 +7,7 @@ The goal is to communicate priorities and upcoming release plans.
 It is not aimed at limiting contributions to what is listed here.
+
 ## Using the roadmap

 ### Sharing Feedback on the Roadmap

 All of the community is encouraged to provide feedback as well as share new

@@ -16,24 +17,22 @@ After submitting the issue, others from the community will probably
 respond with questions or comments they have to clarify the issue. The
 maintainers will help identify what a good next step is for the issue.

-
 ### What do we mean by "next step"?

 When submitting an issue, think about what "next step" category best describes
 your issue:

-* **now**, concrete/actionable step that is ready for someone to start work on.
-  These might be items that have a link to an issue or more abstract like
-  "decrease typos and dead links in the documentation"
-* **soon**, less concrete/actionable step that is going to happen soon,
-  discussions around the topic are coming close to an end at which point it can
-  move into the "now" category
-* **later**, abstract ideas or tasks, need a lot of discussion or
-  experimentation to shape the idea so that it can be executed. Can also
-  contain concrete/actionable steps that have been postponed on purpose
-  (these are steps that could be in "now" but the decision was taken to work on
-  them later)
+- **now**, a concrete/actionable step that is ready for someone to start work on.
+  These might be items that have a link to an issue, or more abstract ones like
+  "decrease typos and dead links in the documentation"
+- **soon**, a less concrete/actionable step that is going to happen soon;
+  discussions around the topic are coming close to an end, at which point it can
+  move into the "now" category
+- **later**, abstract ideas or tasks that need a lot of discussion or
+  experimentation to shape the idea so that it can be executed. Can also
+  contain concrete/actionable steps that have been postponed on purpose
+  (these are steps that could be in "now" but the decision was taken to work on
+  them later)

 ### Reviewing and Updating the Roadmap

@@ -48,22 +47,21 @@ For those please create a
 The roadmap should give the reader an idea of what is happening next, what needs
 input and discussion before it can happen and what has been postponed.

-
 ## The roadmap proper

 ### Project vision

 Repo2docker is a dependable tool used by humans that reduces the complexity of
 creating the environment in which a piece of software can be executed.

-
 ### Now

 The "Now" items are being actively worked on by the project:
-* reduce documentation typos and syntax errors
-* increase test coverage to 80% (see https://codecov.io/gh/jupyterhub/repo2docker/tree/main/repo2docker for low coverage files)
-* mounting repository contents in locations that is not `/home/jovyan`
-* investigate options for pinning repo2docker versions ([#490](https://github.com/jupyterhub/repo2docker/issues/490))
+
+- reduce documentation typos and syntax errors
+- increase test coverage to 80% (see https://codecov.io/gh/jupyterhub/repo2docker/tree/main/repo2docker for low coverage files)
+- mounting repository contents in locations other than `/home/jovyan`
+- investigate options for pinning repo2docker versions ([#490](https://github.com/jupyterhub/repo2docker/issues/490))

 ### Soon

@@ -71,15 +69,16 @@ The "Soon" items are being discussed/a plan of action is being made. Once an
 item reaches the point of an actionable plan and a person who wants to work on
 it, the item will be moved to the "Now" section. Typically, these will be moved
 at a future review of the roadmap.
-* create the contributor highway, define the route from newcomer to project lead
-* add Julia Manifest support (https://docs.julialang.org/en/v1/stdlib/Pkg/index.html, [#486](https://github.com/jupyterhub/repo2docker/issues/486))
-* support different base images/build pack stacks ([#487](https://github.com/jupyterhub/repo2docker/issues/487))
+
+- create the contributor highway, define the route from newcomer to project lead
+- add Julia Manifest support (https://docs.julialang.org/en/v1/stdlib/Pkg/index.html, [#486](https://github.com/jupyterhub/repo2docker/issues/486))
+- support different base images/build pack stacks ([#487](https://github.com/jupyterhub/repo2docker/issues/487))

 ### Later

 The "Later" items are things that are at the back of the project's mind. At this
 time there is no active plan for an item. The project would like to find the
 resources and time to discuss and then execute these ideas.
-* support execution on a remote host (with more resources than available locally) via the command-line
-* add support for using ZIP files as the repo (`repo2docker https://example.com/an-archive.zip`)
+
+- support execution on a remote host (with more resources than available locally) via the command-line
+- add support for using ZIP files as the repo (`repo2docker https://example.com/an-archive.zip`)
@@ -32,17 +32,18 @@ py.test -s tests/<path-to-test>

 To skip the tests related to Mercurial repositories (to avoid installing
 Mercurial and hg-evolve), one can use the environment variable
-``REPO2DOCKER_SKIP_HG_TESTS``.
+`REPO2DOCKER_SKIP_HG_TESTS`.

 ### Troubleshooting Tests

 Some of the tests have non-python requirements for your development machine. They are:

 - `git-lfs` must be installed ([instructions](https://github.com/git-lfs/git-lfs)). It need not be activated -- there is no need to run the `git lfs install` command. It just needs to be available to the test suite.
-  - If your test failure messages include "`git-lfs filter-process: git-lfs: command not found`", this step should address the problem.
+
+  - If your test failure messages include "`git-lfs filter-process: git-lfs: command not found`", this step should address the problem.

 - Minimum Docker Image size of 128GB is required. If you are not running docker on a linux OS, you may need to expand the runtime image size for your installation. See Docker's instructions for [macOS](https://docs.docker.com/docker-for-mac/space/) or [Windows 10](https://docs.docker.com/docker-for-windows/#resources) for more information.
-  - If your test failure messages include "`No space left on device: '/home/...`", this step should address the problem.
+  - If your test failure messages include "`No space left on device: '/home/...`", this step should address the problem.

 ## Update and Freeze BuildPack Dependencies

@@ -51,35 +52,36 @@ dependencies that are installed by default for several buildpacks.

 For both the `conda` and `virtualenv` (`pip`) base environments in the **Conda BuildPack** and **Python BuildPack**,
 we install specific pinned versions of all dependencies. We explicitly list the dependencies
-we want, then *freeze* them at commit time to explicitly list all the
+we want, then _freeze_ them at commit time to explicitly list all the
 transitive dependencies at current versions. This way, we know that
 all dependencies will have the exact same version installed at all times.

 To update one of the dependencies shared across all `repo2docker` builds, you
 must follow these steps (with more detailed information in the sections below):

-1. Bump the version numbers of the dependencies you want to update in the `conda` environment ([link](tasks:conda-dependencies))
+1. Bump the version numbers of the dependencies you want to update in the `conda` environment ([link](tasks:conda-dependencies))
 2. Make a pull request with your changes ([link](https://github.com/jupyterhub/repo2docker/blob/HEAD/CONTRIBUTING.md#make-a-pull-request))

 See the subsections below for more detailed instructions.

 (tasks:conda-dependencies)=

 ### Conda dependencies

 1. There are two files related to conda dependencies. Edit as needed.

-   - `repo2docker/buildpacks/conda/environment.yml`
+   - `repo2docker/buildpacks/conda/environment.yml`

-     Contains the list of packages to install in Python3 conda environments,
-     which are the default. **This is where all Notebook versions &
-     notebook extensions (such as JupyterLab / nteract) go**.
+     Contains the list of packages to install in Python3 conda environments,
+     which are the default. **This is where all Notebook versions &
+     notebook extensions (such as JupyterLab / nteract) go**.

-   - `repo2docker/buildpacks/conda/environment.py-2.7.yml`
+   - `repo2docker/buildpacks/conda/environment.py-2.7.yml`

-     Contains the list of packages to install in Python2 conda environments, which
-     can be specifically requested by users. **This only needs `IPyKernel`
-     and kernel related libraries**. Notebook / Notebook Extension need
-     not be installed here.
+     Contains the list of packages to install in Python2 conda environments, which
+     can be specifically requested by users. **This only needs `IPyKernel`
+     and kernel related libraries**. Notebook / Notebook Extension need
+     not be installed here.

 2. Once you edit either of these files to add a new package / bump version on
    an existing package, you should then run:

@@ -147,14 +149,13 @@ Once this has completed, make sure that the new version has been updated.
 Once the new release has been pushed to PyPI, we need to create a new
 release on the [GitHub repository releases page](https://github.com/jupyterhub/repo2docker/releases). Once on that page, follow these steps:

-* Click "Draft a new release"
-* Choose a tag version using the same tag you just created above
-* The release name is simply the tag version
-* Finally, click "Publish release"
+- Click "Draft a new release"
+- Choose a tag version using the same tag you just created above
+- The release name is simply the tag version
+- Finally, click "Publish release"

 That's it!

 # Uncommon tasks

 ## Compare generated Dockerfiles between repo2docker versions
@@ -7,8 +7,7 @@ The philosophy for the repo2docker buildpacks includes:
 - using common configuration files for familiar installation and packaging tools
 - allowing configuration files to be combined to compose more complex setups
 - specifying default locations for configuration files
-  (in the repository's root, `binder` or `.binder` directory)
-
+  (in the repository's root, `binder` or `.binder` directory)

 When designing `repo2docker` and adding to it in the future, the
 developers are influenced by two primary use cases.

@@ -79,7 +78,7 @@ is a highly recommended quick read.
 Although other projects, like
 [s2i](https://github.com/openshift/source-to-image), exist to convert source to
 Docker images, `repo2docker` provides the additional functionality to support
-*composable* environments. We want to easily have an image with
+_composable_ environments. We want to easily have an image with
 Python3+Julia+R-3.2 environments, rather than just one single language
 environment. While generally one language environment per container works well,
 in many scientific / datascience computing environments you need multiple
|
|||
import argparse
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from . import __version__
|
||||
from .app import Repo2Docker
|
||||
from .engine import BuildError, ImageLoadError
|
||||
from . import __version__
|
||||
from .utils import (
|
||||
validate_and_generate_port_mapping,
|
||||
is_valid_docker_image_name,
|
||||
R2dState,
|
||||
is_valid_docker_image_name,
|
||||
validate_and_generate_port_mapping,
|
||||
)
|
||||
|
||||
|
||||
|
@ -52,7 +53,7 @@ class MimicDockerEnvHandling(argparse.Action):
|
|||
# key pass using current value, or don't pass
|
||||
if "=" not in values:
|
||||
try:
|
||||
value_to_append = "{}={}".format(values, os.environ[values])
|
||||
value_to_append = f"{values}={os.environ[values]}"
|
||||
except KeyError:
|
||||
# no local def, so don't pass
|
||||
return
|
||||
|
@ -304,8 +305,8 @@ def make_r2d(argv=None):
|
|||
r2d.volumes[os.path.abspath(args.repo)] = "."
|
||||
else:
|
||||
r2d.log.error(
|
||||
'Cannot mount "{}" in editable mode '
|
||||
"as it is not a directory".format(args.repo),
|
||||
f'Cannot mount "{args.repo}" in editable mode '
|
||||
"as it is not a directory",
|
||||
extra=dict(phase=R2dState.FAILED),
|
||||
)
|
||||
sys.exit(1)
|
||||
|
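The `# key pass using current value, or don't pass` branch above mimics `docker run -e`: `-e KEY=VALUE` is passed through as-is, while a bare `-e KEY` forwards the caller's current value for `KEY`, or is silently dropped if `KEY` is unset. A standalone sketch of that argparse pattern (the class and option names here are illustrative, not the exact repo2docker implementation):

```python
# Standalone sketch of the docker-style env handling in MimicDockerEnvHandling.
import argparse
import os


class EnvAction(argparse.Action):  # illustrative stand-in
    def __call__(self, parser, namespace, values, option_string=None):
        items = list(getattr(namespace, self.dest) or [])
        if "=" not in values:
            try:
                # bare KEY: forward the current value from our environment
                values = f"{values}={os.environ[values]}"
            except KeyError:
                return  # KEY is unset locally, so don't pass it at all
        items.append(values)
        setattr(namespace, self.dest, items)


parser = argparse.ArgumentParser()
parser.add_argument("-e", "--env", dest="environment", action=EnvAction, default=[])
print(parser.parse_args(["-e", "A=1", "-e", "HOME"]).environment)
```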
@@ -84,7 +84,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
                stderr=(subprocess.PIPE if hide_stderr else None),
            )
            break
-        except EnvironmentError:
+        except OSError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue

@@ -94,7 +94,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
            return None, None
    else:
        if verbose:
-            print("unable to find command, tried %s" % (commands,))
+            print(f"unable to find command, tried {commands}")
        return None, None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:

@@ -147,7 +147,7 @@ def git_get_keywords(versionfile_abs):
    # _version.py.
    keywords = {}
    try:
-        f = open(versionfile_abs, "r")
+        f = open(versionfile_abs)
        for line in f.readlines():
            if line.strip().startswith("git_refnames ="):
                mo = re.search(r'=\s*"(.*)"', line)

@@ -162,7 +162,7 @@ def git_get_keywords(versionfile_abs):
            if mo:
                keywords["date"] = mo.group(1)
        f.close()
-    except EnvironmentError:
+    except OSError:
        pass
    return keywords

@@ -186,11 +186,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
+    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
-    tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
+    tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d

@@ -199,7 +199,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r"\d", r)])
+        tags = {r for r in refs if re.search(r"\d", r)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:

@@ -293,7 +293,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
        mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
+            pieces["error"] = f"unable to parse git-describe output: '{describe_out}'"
            return pieces

        # tag

@@ -302,10 +302,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
-            pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
-                full_tag,
-                tag_prefix,
-            )
+            pieces[
+                "error"
+            ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix) :]
@@ -7,24 +7,23 @@ Usage:

    python -m repo2docker https://github.com/you/your-repo
 """
+import getpass
 import json
-import sys
 import logging
 import os
-import entrypoints
-import getpass
 import shutil
+import sys
 import tempfile
 import time
 from urllib.parse import urlparse

+import entrypoints
 import escapism
 from pythonjsonlogger import jsonlogger
-
-from traitlets import Any, Dict, Int, List, Unicode, Bool, default
+from traitlets import Any, Bool, Dict, Int, List, Unicode, default
 from traitlets.config import Application

-from . import __version__
+from . import __version__, contentproviders
 from .buildpacks import (
    CondaBuildPack,
    DockerBuildPack,

@@ -36,9 +35,8 @@ from .buildpacks import (
    PythonBuildPack,
    RBuildPack,
 )
-from . import contentproviders
 from .engine import BuildError, ContainerEngineException, ImageLoadError
-from .utils import ByteSpecification, chdir, R2dState
+from .utils import ByteSpecification, R2dState, chdir


 class Repo2Docker(Application):

@@ -425,9 +423,7 @@ class Repo2Docker(Application):
            entry = engines[self.engine]
        except KeyError:
            raise ContainerEngineException(
-                "Container engine '{}' not found. Available engines: {}".format(
-                    self.engine, ",".join(engines.keys())
-                )
+                f"Container engine '{self.engine}' not found. Available engines: {','.join(engines.keys())}"
            )
        engine_class = entry.load()
        return engine_class(parent=self)

@@ -447,16 +443,11 @@ class Repo2Docker(Application):
            spec = cp.detect(url, ref=ref)
            if spec is not None:
                picked_content_provider = cp
-                self.log.info(
-                    "Picked {cp} content "
-                    "provider.\n".format(cp=cp.__class__.__name__)
-                )
+                self.log.info(f"Picked {cp.__class__.__name__} content provider.\n")
                break

        if picked_content_provider is None:
-            self.log.error(
-                "No matching content provider found for " "{url}.".format(url=url)
-            )
+            self.log.error(f"No matching content provider found for {url}.")

        swh_token = self.config.get("swh_token", self.swh_token)
        if swh_token and isinstance(picked_content_provider, contentproviders.Swhid):

@@ -488,8 +479,7 @@ class Repo2Docker(Application):
        Avoids non-JSON output on errors when using --json-logs
        """
        self.log.error(
-            "Error during build: %s",
-            evalue,
+            f"Error during build: {evalue}",
            exc_info=(etype, evalue, traceback),
            extra=dict(phase=R2dState.FAILED),
        )

@@ -568,7 +558,7 @@ class Repo2Docker(Application):
            )
            last_emit_time = time.time()
        self.log.info(
-            "Successfully pushed {}".format(self.output_image_spec),
+            f"Successfully pushed {self.output_image_spec}",
            extra=dict(phase=R2dState.PUSHING),
        )

@@ -619,11 +609,9 @@ class Repo2Docker(Application):
            run_cmd = [
                "jupyter",
                "notebook",
-                "--ip",
-                "0.0.0.0",
-                "--port",
-                container_port,
-                f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}"
+                "--ip=0.0.0.0",
+                f"--port={container_port}",
+                f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}",
                "--NotebookApp.default_url=/lab",
            ]
        else:

@@ -730,7 +718,7 @@ class Repo2Docker(Application):
        try:
            docker_client = self.get_engine()
        except ContainerEngineException as e:
-            self.log.error("\nContainer engine initialization error: %s\n", e)
+            self.log.error(f"\nContainer engine initialization error: {e}\n")
            self.exit(1)

        # If the source to be executed is a directory, continue using the

@@ -751,8 +739,7 @@ class Repo2Docker(Application):

        if self.find_image():
            self.log.info(
-                "Reusing existing image ({}), not "
-                "building.".format(self.output_image_spec)
+                f"Reusing existing image ({self.output_image_spec}), not building."
            )
            # no need to build, so skip to the end by `return`ing here
            # this will still execute the finally clause and lets us

@@ -763,11 +750,10 @@ class Repo2Docker(Application):
            checkout_path = os.path.join(checkout_path, self.subdir)
            if not os.path.isdir(checkout_path):
                self.log.error(
-                    "Subdirectory %s does not exist",
-                    self.subdir,
+                    f"Subdirectory {self.subdir} does not exist",
                    extra=dict(phase=R2dState.FAILED),
                )
-                raise FileNotFoundError("Could not find {}".format(checkout_path))
+                raise FileNotFoundError(f"Could not find {checkout_path}")

        with chdir(checkout_path):
            for BP in self.buildpacks:

@@ -808,8 +794,7 @@ class Repo2Docker(Application):
            )

            self.log.info(
-                "Using %s builder\n",
-                bp.__class__.__name__,
+                f"Using {bp.__class__.__name__} builder\n",
                extra=dict(phase=R2dState.BUILDING),
            )
@@ -1,10 +1,9 @@
-from .base import BuildPack, BaseImage
-from .python import PythonBuildPack
-from .pipfile import PipfileBuildPack
+from .base import BaseImage, BuildPack
 from .conda import CondaBuildPack
-from .julia import JuliaProjectTomlBuildPack
-from .julia import JuliaRequireBuildPack
 from .docker import DockerBuildPack
+from .julia import JuliaProjectTomlBuildPack, JuliaRequireBuildPack
 from .legacy import LegacyBinderDockerBuildPack
-from .r import RBuildPack
 from .nix import NixBuildPack
+from .pipfile import PipfileBuildPack
+from .python import PythonBuildPack
+from .r import RBuildPack
@@ -26,7 +26,7 @@ def rstudio_base_scripts(r_version):
            # we should have --no-install-recommends on all our apt-get install commands,
            # but here it's important because these recommend r-base,
            # which will upgrade the installed version of R, undoing our pinned version
-            r"""
+            rf"""
                curl --silent --location --fail {rstudio_url} > /tmp/rstudio.deb && \
                curl --silent --location --fail {shiny_server_url} > /tmp/shiny.deb && \
                echo '{rstudio_sha256sum} /tmp/rstudio.deb' | sha256sum -c - && \

@@ -37,24 +37,16 @@ def rstudio_base_scripts(r_version):
                apt-get -qq purge && \
                apt-get -qq clean && \
                rm -rf /var/lib/apt/lists/*
-            """.format(
-                rstudio_url=rstudio_url,
-                rstudio_sha256sum=rstudio_sha256sum,
-                shiny_server_url=shiny_server_url,
-                shiny_sha256sum=shiny_sha256sum,
-            ),
+            """,
        ),
        (
            "${NB_USER}",
            # Install jupyter-rsession-proxy
-            r"""
+            rf"""
                pip install --no-cache \
                    jupyter-rsession-proxy=={rsession_proxy_version} \
                    jupyter-shiny-proxy=={shiny_proxy_version}
-            """.format(
-                rsession_proxy_version=rsession_proxy_version,
-                shiny_proxy_version=shiny_proxy_version,
-            ),
+            """,
        ),
        (
            # Not all of these locations are configurable; so we make sure
@@ -1,14 +1,15 @@
-import textwrap
-import jinja2
-import tarfile
+import hashlib
 import io
+import logging
 import os
 import re
-import logging
 import string
 import sys
-import hashlib
+import tarfile
+import textwrap

 import escapism
+import jinja2

 # Only use syntax features supported by Docker 17.09
 TEMPLATE = r"""

@@ -462,7 +463,7 @@ class BuildPack:
        last_user = "root"
        for user, script in self.get_build_scripts():
            if last_user != user:
-                build_script_directives.append("USER {}".format(user))
+                build_script_directives.append(f"USER {user}")
                last_user = user
            build_script_directives.append(
                "RUN {}".format(textwrap.dedent(script.strip("\n")))

@@ -472,7 +473,7 @@ class BuildPack:
        last_user = "root"
        for user, script in self.get_assemble_scripts():
            if last_user != user:
-                assemble_script_directives.append("USER {}".format(user))
+                assemble_script_directives.append(f"USER {user}")
                last_user = user
            assemble_script_directives.append(
                "RUN {}".format(textwrap.dedent(script.strip("\n")))

@@ -482,7 +483,7 @@ class BuildPack:
        last_user = "root"
        for user, script in self.get_preassemble_scripts():
            if last_user != user:
-                preassemble_script_directives.append("USER {}".format(user))
+                preassemble_script_directives.append(f"USER {user}")
                last_user = user
            preassemble_script_directives.append(
                "RUN {}".format(textwrap.dedent(script.strip("\n")))

@@ -594,8 +595,8 @@ class BuildPack:
        # buildpacks/docker.py where it is duplicated
        if not isinstance(memory_limit, int):
            raise ValueError(
-                "The memory limit has to be specified as an"
-                "integer but is '{}'".format(type(memory_limit))
+                "The memory limit has to be specified as an "
+                f"integer but is '{type(memory_limit)}'"
            )
        limits = {}
        if memory_limit:

@@ -616,8 +617,7 @@ class BuildPack:

        build_kwargs.update(extra_build_kwargs)

-        for line in client.build(**build_kwargs):
-            yield line
+        yield from client.build(**build_kwargs)


 class BaseImage(BuildPack):

@@ -648,8 +648,7 @@ class BaseImage(BuildPack):
                # FIXME: Add support for specifying version numbers
                if not re.match(r"^[a-z0-9.+-]+", package):
                    raise ValueError(
-                        "Found invalid package name {} in "
-                        "apt.txt".format(package)
+                        f"Found invalid package name {package} in apt.txt"
                    )
                extra_apt_packages.append(package)
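The three directive-generation loops above share one pattern: a `USER` directive is emitted only when the user changes between consecutive `(user, script)` tuples, so consecutive scripts for the same user avoid redundant switches. A standalone sketch of that pattern (the input list is illustrative):

```python
# Standalone sketch of the USER/RUN emission pattern from BuildPack above.
import textwrap


def scripts_to_directives(scripts):
    directives = []
    last_user = "root"  # the Dockerfile starts out as root
    for user, script in scripts:
        if last_user != user:
            # only switch users when the next script needs a different one
            directives.append(f"USER {user}")
            last_user = user
        directives.append("RUN {}".format(textwrap.dedent(script.strip("\n"))))
    return directives


directives = scripts_to_directives(
    [("root", "apt-get update"), ("${NB_USER}", "pip install numpy")]
)
print("\n".join(directives))
```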
@@ -5,9 +5,9 @@ from collections.abc import Mapping

 from ruamel.yaml import YAML

-from ..base import BaseImage
-from .._r_base import rstudio_base_scripts
 from ...utils import is_local_pip_requirement
+from .._r_base import rstudio_base_scripts
+from ..base import BaseImage

 # pattern for parsing conda dependency line
 PYTHON_REGEX = re.compile(r"python\s*=+\s*([\d\.]*)")

@@ -341,15 +341,13 @@ class CondaBuildPack(BaseImage):
            scripts.append(
                (
                    "${NB_USER}",
-                    r"""
+                    rf"""
                TIMEFORMAT='time: %3R' \
-                bash -c 'time ${{MAMBA_EXE}} env update -p {0} --file "{1}" && \
+                bash -c 'time ${{MAMBA_EXE}} env update -p {env_prefix} --file "{environment_yml}" && \
                time ${{MAMBA_EXE}} clean --all -f -y && \
-                ${{MAMBA_EXE}} list -p {0} \
+                ${{MAMBA_EXE}} list -p {env_prefix} \
                '
-                """.format(
-                        env_prefix, environment_yml
-                    ),
+                """,
                )
            )

@@ -361,36 +359,30 @@ class CondaBuildPack(BaseImage):
            scripts.append(
                (
                    "${NB_USER}",
-                    r"""
-                ${{MAMBA_EXE}} install -p {0} r-base{1} r-irkernel r-devtools -y && \
+                    rf"""
+                ${{MAMBA_EXE}} install -p {env_prefix} r-base{r_pin} r-irkernel r-devtools -y && \
                ${{MAMBA_EXE}} clean --all -f -y && \
-                ${{MAMBA_EXE}} list -p {0}
-                """.format(
-                        env_prefix, r_pin
-                    ),
+                ${{MAMBA_EXE}} list -p {env_prefix}
+                """,
                )
            )
            scripts += rstudio_base_scripts(self.r_version)
            scripts += [
                (
                    "root",
-                    r"""
+                    rf"""
                echo auth-none=1 >> /etc/rstudio/rserver.conf && \
                echo auth-minimum-user-id=0 >> /etc/rstudio/rserver.conf && \
-                echo "rsession-which-r={0}/bin/R" >> /etc/rstudio/rserver.conf && \
+                echo "rsession-which-r={env_prefix}/bin/R" >> /etc/rstudio/rserver.conf && \
                echo www-frame-origin=same >> /etc/rstudio/rserver.conf
-                """.format(
-                        env_prefix
-                    ),
+                """,
                ),
                (
                    "${NB_USER}",
                    # Register the jupyter kernel
-                    r"""
-                R --quiet -e "IRkernel::installspec(prefix='{0}')"
-                """.format(
-                        env_prefix
-                    ),
+                    rf"""
+                R --quiet -e "IRkernel::installspec(prefix='{env_prefix}')"
+                """,
                ),
            ]
        return scripts
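One detail these conversions rely on: inside an f-string, doubled braces are literal, so `${{MAMBA_EXE}}` still renders as the shell text `${MAMBA_EXE}` while `{env_prefix}` is interpolated by Python. A quick illustration:

```python
# Brace escaping in rf-strings: {{ }} stay literal, { } interpolate.
env_prefix = "/srv/conda/envs/notebook"  # illustrative value
script = rf"""${{MAMBA_EXE}} env update -p {env_prefix}"""
print(script)  # ${MAMBA_EXE} env update -p /srv/conda/envs/notebook
```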
|
@@ -9,17 +9,16 @@ Usage:
     python freeze.py [3.8]
 """

-from argparse import ArgumentParser
-from datetime import datetime
 import os
 import pathlib
 import shutil
-from subprocess import check_call
 import sys
+from argparse import ArgumentParser
+from datetime import datetime
+from subprocess import check_call

 from ruamel.yaml import YAML

-
 HERE = pathlib.Path(os.path.dirname(os.path.abspath(__file__)))

 ENV_FILE = HERE / "environment.yml"
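The import hunks throughout this commit all converge on the same isort layout: plain `import` statements and `from` imports of the standard library first, a blank line, third-party packages, another blank line, then local/first-party modules, each group sorted by module name. A minimal sketch of the target shape (module names chosen for illustration):

```python
# standard library
import os
import sys
from argparse import ArgumentParser

# third-party packages
from ruamel.yaml import YAML

# first-party / local code
from repo2docker.utils import chdir
```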
@@ -1,7 +1,9 @@
 """Generates a variety of Dockerfiles based on an input matrix
 """
 import os
+
 import docker
+
 from .base import BuildPack

@@ -34,8 +36,8 @@ class DockerBuildPack(BuildPack):
         # buildpacks/base.py where it is duplicated
         if not isinstance(memory_limit, int):
             raise ValueError(
-                "The memory limit has to be specified as an"
-                "integer but is '{}'".format(type(memory_limit))
+                "The memory limit has to be specified as an "
+                f"integer but is '{type(memory_limit)}'"
             )
         limits = {}
         if memory_limit:

@@ -57,5 +59,4 @@ class DockerBuildPack(BuildPack):

         build_kwargs.update(extra_build_kwargs)

-        for line in client.build(**build_kwargs):
-            yield line
+        yield from client.build(**build_kwargs)
@@ -6,8 +6,8 @@ import requests
 import semver
 import toml

-from ..python import PythonBuildPack
 from ...semver import find_semver_match
+from ..python import PythonBuildPack


 class JuliaProjectTomlBuildPack(PythonBuildPack):

@@ -2,8 +2,8 @@

 import os

-from ..python import PythonBuildPack
 from ...semver import parse_version as V
+from ..python import PythonBuildPack


 class JuliaRequireBuildPack(PythonBuildPack):
@@ -20,7 +20,7 @@ class LegacyBinderDockerBuildPack:
         """Check if current repo should be built with the Legacy BuildPack."""
         log = logging.getLogger("repo2docker")
         try:
-            with open("Dockerfile", "r") as f:
+            with open("Dockerfile") as f:
                 for line in f:
                     if line.startswith("FROM"):
                         if "andrewosh/binder-base" in line.split("#")[0].lower():
@@ -1,7 +1,7 @@
 """BuildPack for nixpkgs environments"""
 import os

-from ..base import BuildPack, BaseImage
+from ..base import BaseImage, BuildPack


 class NixBuildPack(BaseImage):

@@ -62,13 +62,11 @@ class NixBuildPack(BaseImage):
         return super().get_assemble_scripts() + [
             (
                 "${NB_USER}",
-                """
+                f"""
                 nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs && \
                 nix-channel --update && \
-                nix-shell {}
-                """.format(
-                    self.binder_path("default.nix")
-                ),
+                nix-shell {self.binder_path("default.nix")}
+                """,
             )
         ]
@@ -123,9 +123,7 @@ class PipfileBuildPack(CondaBuildPack):
         assemble_scripts.append(
             (
                 "${NB_USER}",
-                '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format(
-                    nb_requirements_file
-                ),
+                f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"',
             )
         )
@@ -1,8 +1,8 @@
 """Generates Dockerfiles based on an input matrix based on Python."""
 import os

-from ..conda import CondaBuildPack
 from ...utils import is_local_pip_requirement, open_guess_encoding
+from ..conda import CondaBuildPack


 class PythonBuildPack(CondaBuildPack):

@@ -55,9 +55,7 @@ class PythonBuildPack(CondaBuildPack):
                 "${NB_USER}",
                 # want the $NB_PYHTON_PREFIX environment variable, not for
                 # Python's string formatting to try and replace this
-                '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format(
-                    nb_requirements_file
-                ),
+                f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"',
             )
         )

@@ -67,7 +65,7 @@ class PythonBuildPack(CondaBuildPack):
             scripts.append(
                 (
                     "${NB_USER}",
-                    '{} install --no-cache-dir -r "{}"'.format(pip, requirements_file),
+                    f'{pip} install --no-cache-dir -r "{requirements_file}"',
                 )
             )
         return scripts

@@ -126,9 +124,7 @@ class PythonBuildPack(CondaBuildPack):

         # setup.py exists *and* binder dir is not used
         if not self.binder_dir and os.path.exists(setup_py):
-            assemble_scripts.append(
-                ("${NB_USER}", "{} install --no-cache-dir .".format(pip))
-            )
+            assemble_scripts.append(("${NB_USER}", f"{pip} install --no-cache-dir ."))
         return assemble_scripts

     def detect(self):
@@ -1,12 +1,12 @@
-import re
-import os
 import datetime
+import os
+import re

 import requests

 from ..semver import parse_version as V
-from .python import PythonBuildPack
 from ._r_base import rstudio_base_scripts
+from .python import PythonBuildPack


 class RBuildPack(PythonBuildPack):

@@ -139,7 +139,7 @@ class RBuildPack(PythonBuildPack):
             self._checkpoint_date = datetime.date.today() - datetime.timedelta(
                 days=2
             )
-            self._runtime = "r-{}".format(str(self._checkpoint_date))
+            self._runtime = f"r-{str(self._checkpoint_date)}"
             return True

     def get_env(self):

@@ -223,7 +223,7 @@ class RBuildPack(PythonBuildPack):
         for i in range(max_days_prior):
             try_date = snapshot_date - datetime.timedelta(days=i)
             # Fall back to MRAN if packagemanager.rstudio.com doesn't have it
-            url = "https://mran.microsoft.com/snapshot/{}".format(try_date.isoformat())
+            url = f"https://mran.microsoft.com/snapshot/{try_date.isoformat()}"
             r = requests.head(url)
             if r.ok:
                 return url

@@ -336,12 +336,10 @@ class RBuildPack(PythonBuildPack):
             (
                 "${NB_USER}",
                 # Install a pinned version of devtools, IRKernel and shiny
-                r"""
-                R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{devtools_cran_mirror_url}')" && \
+                rf"""
+                R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{self.get_devtools_snapshot_url()}')" && \
                 R --quiet -e "IRkernel::installspec(prefix='$NB_PYTHON_PREFIX')"
-                """.format(
-                    devtools_cran_mirror_url=self.get_devtools_snapshot_url()
-                ),
+                """,
             ),
         ]

@@ -374,8 +372,7 @@ class RBuildPack(PythonBuildPack):
                 "${NB_USER}",
                 # Delete /tmp/downloaded_packages only if install.R fails, as the second
                 # invocation of install.R might be able to reuse them
-                "Rscript %s && touch /tmp/.preassembled || true && rm -rf /tmp/downloaded_packages"
-                % installR_path,
+                f"Rscript {installR_path} && touch /tmp/.preassembled || true && rm -rf /tmp/downloaded_packages",
             )
         ]

@@ -392,9 +389,7 @@ class RBuildPack(PythonBuildPack):
                 "${NB_USER}",
                 # only run install.R if the pre-assembly failed
                 # Delete any downloaded packages in /tmp, as they aren't reused by R
-                """if [ ! -f /tmp/.preassembled ]; then Rscript {}; rm -rf /tmp/downloaded_packages; fi""".format(
-                    installR_path
-                ),
+                f"""if [ ! -f /tmp/.preassembled ]; then Rscript {installR_path}; rm -rf /tmp/downloaded_packages; fi""",
             )
         ]
@@ -1,8 +1,8 @@
-from .git import Git
 from .base import Local
-from .zenodo import Zenodo
-from .figshare import Figshare
 from .dataverse import Dataverse
+from .figshare import Figshare
+from .git import Git
 from .hydroshare import Hydroshare
 from .mercurial import Mercurial
 from .swhid import Swhid
+from .zenodo import Zenodo

@@ -68,6 +68,6 @@ class Local(ContentProvider):

     def fetch(self, spec, output_dir, yield_output=False):
        # nothing to be done if your content is already in the output directory
-        msg = "Local content provider assumes {} == {}".format(spec["path"], output_dir)
+        msg = f'Local content provider assumes {spec["path"]} == {output_dir}'
         assert output_dir == spec["path"], msg
-        yield "Using local repo {}.\n".format(spec["path"])
+        yield f'Using local repo {spec["path"]}.\n'
@@ -1,11 +1,10 @@
-import os
 import json
+import os
 import shutil
+from urllib.parse import parse_qs, urlparse, urlunparse

-from urllib.parse import urlparse, urlunparse, parse_qs
-
-from .doi import DoiProvider
 from ..utils import copytree, deep_get
+from .doi import DoiProvider


 class Dataverse(DoiProvider):

@@ -20,7 +19,7 @@ class Dataverse(DoiProvider):

     def __init__(self):
         data_file = os.path.join(os.path.dirname(__file__), "dataverse.json")
-        with open(data_file, "r") as fp:
+        with open(data_file) as fp:
             self.hosts = json.load(fp)["installations"]
         super().__init__()

@@ -76,9 +75,7 @@ class Dataverse(DoiProvider):
         data = self.urlopen(search_url).json()["data"]
         if data["count_in_response"] != 1:
             self.log.debug(
-                "Dataverse search query failed!\n - doi: {}\n - url: {}\n - resp: {}\n".format(
-                    doi, url, json.dump(data)
-                )
+                f"Dataverse search query failed!\n - doi: {doi}\n - url: {url}\n - resp: {json.dump(data)}\n"
             )
             return

@@ -97,25 +94,22 @@ class Dataverse(DoiProvider):
         record_id = spec["record"]
         host = spec["host"]

-        yield "Fetching Dataverse record {}.\n".format(record_id)
-        url = "{}/api/datasets/:persistentId?persistentId={}".format(
-            host["url"], record_id
-        )
+        yield f"Fetching Dataverse record {record_id}.\n"
+        url = f'{host["url"]}/api/datasets/:persistentId?persistentId={record_id}'

         resp = self.urlopen(url, headers={"accept": "application/json"})
         record = resp.json()["data"]

         for fobj in deep_get(record, "latestVersion.files"):
-            file_url = "{}/api/access/datafile/{}".format(
-                host["url"], deep_get(fobj, "dataFile.id")
+            file_url = (
+                f'{host["url"]}/api/access/datafile/{deep_get(fobj, "dataFile.id")}'
             )
             filename = os.path.join(fobj.get("directoryLabel", ""), fobj["label"])

             file_ref = {"download": file_url, "filename": filename}
             fetch_map = {key: key for key in file_ref.keys()}

-            for line in self.fetch_file(file_ref, fetch_map, output_dir):
-                yield line
+            yield from self.fetch_file(file_ref, fetch_map, output_dir)

         new_subdirs = os.listdir(output_dir)
         # if there is only one new subdirectory move its contents
@@ -1,18 +1,15 @@
-import os
 import json
-import shutil
 import logging
-
-from os import makedirs
-from os import path
-from requests import Session, HTTPError
-
+import os
+import shutil
+from os import makedirs, path
 from zipfile import ZipFile, is_zipfile

-from .base import ContentProvider
-from ..utils import copytree, deep_get
-from ..utils import normalize_doi, is_doi
+from requests import HTTPError, Session
+
 from .. import __version__
+from ..utils import copytree, deep_get, is_doi, normalize_doi
+from .base import ContentProvider


 class DoiProvider(ContentProvider):

@@ -23,7 +20,7 @@ class DoiProvider(ContentProvider):
         self.session = Session()
         self.session.headers.update(
             {
-                "user-agent": "repo2docker {}".format(__version__),
+                "user-agent": f"repo2docker {__version__}",
             }
         )

@@ -38,7 +35,7 @@ class DoiProvider(ContentProvider):
         if not isinstance(req, request.Request):
             req = request.Request(req)

-        req.add_header("User-Agent", "repo2docker {}".format(__version__))
+        req.add_header("User-Agent", f"repo2docker {__version__}")
         if headers is not None:
             for key, value in headers.items():
                 req.add_header(key, value)

@@ -52,7 +49,7 @@ class DoiProvider(ContentProvider):
         doi = normalize_doi(doi)

         try:
-            resp = self._request("https://doi.org/{}".format(doi))
+            resp = self._request(f"https://doi.org/{doi}")
             resp.raise_for_status()
         # If the DOI doesn't resolve, just return URL
         except HTTPError:

@@ -67,26 +64,26 @@ class DoiProvider(ContentProvider):
         # file related to a record
         file_url = deep_get(file_ref, host["download"])
         fname = deep_get(file_ref, host["filename"])
-        logging.debug("Downloading file {} as {}\n".format(file_url, fname))
+        logging.debug(f"Downloading file {file_url} as {fname}\n")

-        yield "Requesting {}\n".format(file_url)
+        yield f"Requesting {file_url}\n"
         resp = self._request(file_url, stream=True)
         resp.raise_for_status()

         if path.dirname(fname):
             sub_dir = path.join(output_dir, path.dirname(fname))
             if not path.exists(sub_dir):
-                yield "Creating {}\n".format(sub_dir)
+                yield f"Creating {sub_dir}\n"
                 makedirs(sub_dir, exist_ok=True)

         dst_fname = path.join(output_dir, fname)
         with open(dst_fname, "wb") as dst:
-            yield "Fetching {}\n".format(fname)
+            yield f"Fetching {fname}\n"
             for chunk in resp.iter_content(chunk_size=None):
                 dst.write(chunk)

         if unzip and is_zipfile(dst_fname):
-            yield "Extracting {}\n".format(fname)
+            yield f"Extracting {fname}\n"
             zfile = ZipFile(dst_fname)
             zfile.extractall(path=output_dir)
             zfile.close()
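The `fetch_file` hunk above shows the pattern all the content providers share: a generator that streams a download in chunks while yielding human-readable progress lines for the build log. A stripped-down sketch of the same pattern under stated assumptions (`requests` for HTTP, placeholder URL and path; not the DoiProvider implementation itself):

```python
import requests

def fetch(url, dst_path):
    # yield progress messages; the caller decides whether to print or log them
    yield f"Requesting {url}\n"
    resp = requests.get(url, stream=True)
    resp.raise_for_status()
    with open(dst_path, "wb") as dst:
        # chunk_size=None streams data as it arrives instead of buffering it all
        for chunk in resp.iter_content(chunk_size=None):
            dst.write(chunk)
    yield f"Fetched {dst_path}\n"

for message in fetch("https://example.org/data.zip", "data.zip"):
    print(message, end="")
```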
@@ -106,4 +103,4 @@ class DoiProvider(ContentProvider):
             copytree(path.join(output_dir, d), output_dir)
             shutil.rmtree(path.join(output_dir, d))

-        yield "Fetched files: {}\n".format(os.listdir(output_dir))
+        yield f"Fetched files: {os.listdir(output_dir)}\n"
@@ -1,16 +1,14 @@
+import json
 import os
 import re
-import json
 import shutil
-
-from os import makedirs
-from os import path
-from urllib.request import Request
+from os import makedirs, path
 from urllib.error import HTTPError
+from urllib.request import Request
 from zipfile import is_zipfile

-from .doi import DoiProvider
 from ..utils import copytree, deep_get
+from .doi import DoiProvider


 class Figshare(DoiProvider):

@@ -75,11 +73,9 @@ class Figshare(DoiProvider):
         article_version = spec["version"]
         host = spec["host"]

-        yield "Fetching Figshare article {} in version {}.\n".format(
-            article_id, article_version
-        )
+        yield f"Fetching Figshare article {article_id} in version {article_version}.\n"
         resp = self.urlopen(
-            "{}{}/versions/{}".format(host["api"], article_id, article_version),
+            f'{host["api"]}{article_id}/versions/{article_version}',
             headers={"accept": "application/json"},
         )

@@ -91,10 +87,9 @@ class Figshare(DoiProvider):
         only_one_file = len(files) == 1
         for file_ref in files:
             unzip = file_ref["name"].endswith(".zip") and only_one_file
-            for line in self.fetch_file(file_ref, host, output_dir, unzip):
-                yield line
+            yield from self.fetch_file(file_ref, host, output_dir, unzip)

     @property
     def content_id(self):
         """The Figshare article ID"""
-        return "{}.v{}".format(self.article_id, self.article_version)
+        return f"{self.article_id}.v{self.article_version}"
@@ -1,7 +1,7 @@
 import subprocess

+from ..utils import R2dState, check_ref, execute_cmd
 from .base import ContentProvider, ContentProviderException
-from ..utils import execute_cmd, check_ref, R2dState


 class Git(ContentProvider):

@@ -29,13 +29,12 @@ class Git(ContentProvider):
                 # this prevents HEAD's submodules to be cloned if ref doesn't have them
                 cmd.extend(["--no-checkout"])
             cmd.extend([repo, output_dir])
-            for line in execute_cmd(cmd, capture=yield_output):
-                yield line
+            yield from execute_cmd(cmd, capture=yield_output)

         except subprocess.CalledProcessError as e:
-            msg = "Failed to clone repository from {repo}".format(repo=repo)
+            msg = f"Failed to clone repository from {repo}"
             if ref != "HEAD":
-                msg += " (ref {ref})".format(ref=ref)
+                msg += f" (ref {ref})"
             msg += "."
             raise ContentProviderException(msg) from e

@@ -44,7 +43,7 @@ class Git(ContentProvider):
             hash = check_ref(ref, output_dir)
             if hash is None:
                 self.log.error(
-                    "Failed to check out ref %s", ref, extra=dict(phase=R2dState.FAILED)
+                    f"Failed to check out ref {ref}", extra=dict(phase=R2dState.FAILED)
                 )
                 if ref == "master":
                     msg = (

@@ -54,23 +53,21 @@ class Git(ContentProvider):
                         "specifying `--ref`."
                     )
                 else:
-                    msg = "Failed to check out ref {}".format(ref)
+                    msg = f"Failed to check out ref {ref}"
                 raise ValueError(msg)
             # We don't need to explicitly checkout things as the reset will
             # take care of that. If the hash is resolved above, we should be
             # able to reset to it
-            for line in execute_cmd(
+            yield from execute_cmd(
                 ["git", "reset", "--hard", hash], cwd=output_dir, capture=yield_output
-            ):
-                yield line
+            )

             # ensure that git submodules are initialised and updated
-            for line in execute_cmd(
+            yield from execute_cmd(
                 ["git", "submodule", "update", "--init", "--recursive"],
                 cwd=output_dir,
                 capture=yield_output,
-            ):
-                yield line
+            )

         cmd = ["git", "rev-parse", "HEAD"]
         sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=output_dir)
@@ -1,14 +1,13 @@
-import zipfile
+import json
 import os
 import shutil
 import time
-import json
-from datetime import datetime, timezone, timedelta
-
+import zipfile
+from datetime import datetime, timedelta, timezone
 from urllib.request import urlretrieve

-from .doi import DoiProvider
 from .base import ContentProviderException
+from .doi import DoiProvider


 class Hydroshare(DoiProvider):

@@ -59,9 +58,9 @@ class Hydroshare(DoiProvider):
         resource_id = spec["resource"]
         host = spec["host"]

-        bag_url = "{}{}".format(host["django_irods"], resource_id)
+        bag_url = f'{host["django_irods"]}{resource_id}'

-        yield "Downloading {}.\n".format(bag_url)
+        yield f"Downloading {bag_url}.\n"

         # bag downloads are prepared on demand and may need some time
         conn = self.urlopen(bag_url)

@@ -76,13 +75,11 @@ class Hydroshare(DoiProvider):
                 msg = "Bag taking too long to prepare, exiting now, try again later."
                 yield msg
                 raise ContentProviderException(msg)
-            yield "Bag is being prepared, requesting again in {} seconds.\n".format(
-                wait_time
-            )
+            yield f"Bag is being prepared, requesting again in {wait_time} seconds.\n"
             time.sleep(wait_time)
             conn = self.urlopen(bag_url)
         if conn.status_code != 200:
-            msg = "Failed to download bag. status code {}.\n".format(conn.status_code)
+            msg = f"Failed to download bag. status code {conn.status_code}.\n"
             yield msg
             raise ContentProviderException(msg)
         # Bag creation seems to need a small time buffer after it says it's ready.

@@ -102,4 +99,4 @@ class Hydroshare(DoiProvider):
     @property
     def content_id(self):
         """The HydroShare resource ID"""
-        return "{}.v{}".format(self.resource_id, self.version)
+        return f"{self.resource_id}.v{self.version}"
@@ -1,7 +1,7 @@
 import subprocess

+from ..utils import R2dState, execute_cmd
 from .base import ContentProvider, ContentProviderException
-from ..utils import execute_cmd, R2dState

 args_enabling_topic = ["--config", "extensions.topic="]

@@ -41,8 +41,7 @@ class Mercurial(ContentProvider):
             # don't update so the clone will include an empty working
             # directory, the given ref will be updated out later
             cmd.extend(["--noupdate"])
-            for line in execute_cmd(cmd, capture=yield_output):
-                yield line
+            yield from execute_cmd(cmd, capture=yield_output)

         except subprocess.CalledProcessError as error:
             msg = f"Failed to clone repository from {repo}"

@@ -54,17 +53,16 @@ class Mercurial(ContentProvider):
         # check out the specific ref given by the user
         if ref is not None:
             try:
-                for line in execute_cmd(
+                yield from execute_cmd(
                     ["hg", "update", "--clean", ref] + args_enabling_topic,
                     cwd=output_dir,
                     capture=yield_output,
-                ):
-                    yield line
+                )
             except subprocess.CalledProcessError:
                 self.log.error(
-                    "Failed to update to ref %s", ref, extra=dict(phase=R2dState.FAILED)
+                    f"Failed to update to ref {ref}", extra=dict(phase=R2dState.FAILED)
                 )
-                raise ValueError("Failed to update to ref {}".format(ref))
+                raise ValueError(f"Failed to update to ref {ref}")

         cmd = ["hg", "identify", "-i"] + args_enabling_topic
         sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=output_dir)
@@ -1,17 +1,16 @@
 import io
 import os
+import re
 import shutil
 import tarfile
 import time
-import re
-
 from os import path

 import requests

-from .base import ContentProvider
-from ..utils import copytree
 from .. import __version__
+from ..utils import copytree
+from .base import ContentProvider


 def parse_swhid(swhid):

@@ -34,12 +33,12 @@ class Swhid(ContentProvider):
         self.session = requests.Session()
         self.session.headers.update(
             {
-                "user-agent": "repo2docker {}".format(__version__),
+                "user-agent": f"repo2docker {__version__}",
             }
         )

     def set_auth_token(self, token):
-        header = {"Authorization": "Bearer {}".format(token)}
+        header = {"Authorization": f"Bearer {token}"}
         self.session.headers.update(header)

     def _request(self, url, method="GET"):

@@ -72,8 +71,8 @@ class Swhid(ContentProvider):
         return {"swhid": swhid, "swhid_obj": swhid_dict}

     def fetch_directory(self, dir_hash, output_dir):
-        url = "{}/vault/directory/{}/".format(self.base_url, dir_hash)
-        yield "Fetching directory {} from {}\n".format(dir_hash, url)
+        url = f"{self.base_url}/vault/directory/{dir_hash}/"
+        yield f"Fetching directory {dir_hash} from {url}\n"
         resp = self._request(url, "POST")
         receipt = resp.json()
         status = receipt["status"]

@@ -92,7 +91,7 @@ class Swhid(ContentProvider):
         # move its content one level up
         copytree(path.join(output_dir, dir_hash), output_dir)
         shutil.rmtree(path.join(output_dir, dir_hash))
-        yield "Fetched files: {}\n".format(os.listdir(output_dir))
+        yield f"Fetched files: {os.listdir(output_dir)}\n"

     def fetch(self, spec, output_dir, yield_output=False):
         swhid = spec["swhid"]

@@ -101,12 +100,12 @@ class Swhid(ContentProvider):
         if swhid_obj["type"] == "rev":
             # need to get the directory for this revision
             sha1git = swhid_obj["hash"]
-            url = "{}/revision/{}/".format(self.base_url, sha1git)
-            yield "Fetching revision {} from {}\n".format(sha1git, url)
+            url = f"{self.base_url}/revision/{sha1git}/"
+            yield f"Fetching revision {sha1git} from {url}\n"
             resp = self._request(url)
             assert resp.ok, (resp.content, self.session.headers)
             directory = resp.json()["directory"]
-            self.swhid = "swh:1:dir:{}".format(directory)
+            self.swhid = f"swh:1:dir:{directory}"
             yield from self.fetch_directory(directory, output_dir)
         elif swhid_obj["type"] == "dir":
             self.swhid = swhid
@@ -1,14 +1,12 @@
-import os
 import json
+import os
 import shutil
-
-from os import makedirs
-from os import path
-from urllib.request import Request
+from os import makedirs, path
 from urllib.error import HTTPError
+from urllib.request import Request

-from .doi import DoiProvider
 from ..utils import copytree, deep_get
+from .doi import DoiProvider


 class Zenodo(DoiProvider):

@@ -66,9 +64,9 @@ class Zenodo(DoiProvider):
         record_id = spec["record"]
         host = spec["host"]

-        yield "Fetching Zenodo record {}.\n".format(record_id)
+        yield f"Fetching Zenodo record {record_id}.\n"
         resp = self.urlopen(
-            "{}{}".format(host["api"], record_id),
+            f'{host["api"]}{record_id}',
             headers={"accept": "application/json"},
         )

@@ -77,10 +75,7 @@ class Zenodo(DoiProvider):
         files = deep_get(record, host["filepath"])
         only_one_file = len(files) == 1
         for file_ref in files:
-            for line in self.fetch_file(
-                file_ref, host, output_dir, unzip=only_one_file
-            ):
-                yield line
+            yield from self.fetch_file(file_ref, host, output_dir, unzip=only_one_file)

     @property
     def content_id(self):
@@ -2,9 +2,10 @@
 Docker container engine for repo2docker
 """

-import docker
-from traitlets import Dict
 from iso8601 import parse_date
+from traitlets import Dict
+
+import docker

 from .engine import Container, ContainerEngine, ContainerEngineException, Image

@@ -3,8 +3,8 @@ Interface for a repo2docker container engine
 """

 from abc import ABC, abstractmethod
-from traitlets.config import LoggingConfigurable

+from traitlets.config import LoggingConfigurable

 # Based on https://docker-py.readthedocs.io/en/4.2.0/containers.html

@@ -131,7 +131,7 @@ class Image:
         return self._config

     def __repr__(self):
-        return "Image(tags={},config={})".format(self.tags, self.config)
+        return f"Image(tags={self.tags},config={self.config})"


 class ContainerEngine(LoggingConfigurable):
@@ -1,13 +1,12 @@
-from contextlib import contextmanager
-from enum import Enum
-from functools import partial
 import os
 import re
 import subprocess
+from contextlib import contextmanager
+from enum import Enum
+from functools import partial
+from shutil import copy2, copystat

 import chardet
-
-from shutil import copystat, copy2
-
 from traitlets import Integer, TraitError

@@ -136,13 +135,10 @@ def validate_and_generate_port_mapping(port_mappings):
         try:
             p = int(port)
         except ValueError as e:
-            raise ValueError(
-                'Port specification "{}" has ' "an invalid port.".format(mapping)
-            )
+            raise ValueError(f'Port specification "{mapping}" has an invalid port.')
         if not 0 < p <= 65535:
             raise ValueError(
-                'Port specification "{}" specifies '
-                "a port outside 1-65535.".format(mapping)
+                f'Port specification "{mapping}" specifies a port outside 1-65535.'
             )
         return port

@@ -152,8 +148,7 @@ def validate_and_generate_port_mapping(port_mappings):
         port, protocol = parts
         if protocol not in ("tcp", "udp"):
             raise ValueError(
-                'Port specification "{}" has '
-                "an invalid protocol.".format(mapping)
+                f'Port specification "{mapping}" has an invalid protocol.'
             )
     elif len(parts) == 1:
         port = parts[0]

@@ -310,14 +305,14 @@ class ByteSpecification(Integer):
             num = float(value[:-1])
         except ValueError:
             raise TraitError(
-                "{val} is not a valid memory specification. "
-                "Must be an int or a string with suffix K, M, G, T".format(val=value)
+                f"{value} is not a valid memory specification. "
+                "Must be an int or a string with suffix K, M, G, T"
             )
         suffix = value[-1]
         if suffix not in self.UNIT_SUFFIXES:
             raise TraitError(
-                "{val} is not a valid memory specification. "
-                "Must be an int or a string with suffix K, M, G, T".format(val=value)
+                f"{value} is not a valid memory specification. "
+                "Must be an int or a string with suffix K, M, G, T"
            )
        else:
            return int(float(num) * self.UNIT_SUFFIXES[suffix])
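The `ByteSpecification` hunk above only changes the error strings, but the underlying suffix arithmetic is easy to miss in the diff: `"1K"` resolves to 1024 bytes, `"2M"` to 2×1024², and plain integers pass through untouched. A simplified stand-alone sketch of that logic (not the traitlets implementation, which validates via a trait):

```python
UNIT_SUFFIXES = {"K": 1024, "M": 1024**2, "G": 1024**3, "T": 1024**4}

def parse_bytes(value):
    # integers are already byte counts
    if isinstance(value, int):
        return value
    num, suffix = float(value[:-1]), value[-1]
    if suffix not in UNIT_SUFFIXES:
        raise ValueError(
            f"{value} is not a valid memory specification. "
            "Must be an int or a string with suffix K, M, G, T"
        )
    return int(num * UNIT_SUFFIXES[suffix])

assert parse_bytes("1K") == 1024
assert parse_bytes("2M") == 2 * 1024 * 1024
```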
setup.py

@@ -1,6 +1,8 @@
-from distutils.cmd import Command
-from setuptools import setup, find_packages
 import sys
+from distutils.cmd import Command
+
+from setuptools import find_packages, setup

 import versioneer

 if sys.version_info[0] < 3:

@@ -23,8 +25,8 @@ class GenerateDataverseInstallationsFileCommand(Command):
         pass

     def run(self):
-        from urllib.request import urlopen
         import json
+        from urllib.request import urlopen

         resp = urlopen(self.url, timeout=5)
         resp_body = resp.read()
@@ -57,7 +57,7 @@

 `--target-repo-dir` is meant to support custom paths where repositories can be
 copied to besides `${HOME}`.

 This test makes use of the `test-extra-args.yaml` file to influence additional
 arguments passed to `repo2docker` during the test. In this test, specify
 `--target-repo-dir=/srv/repo`.
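The exact schema of `test-extra-args.yaml` is defined by the test harness and is not shown in this diff; a plausible shape, inferred from the description above (extra CLI arguments appended to the `repo2docker` invocation), would be a plain YAML list:

```yaml
# Hypothetical contents of test-extra-args.yaml for this test;
# each entry is passed through to repo2docker as-is.
- --target-repo-dir=/srv/repo
```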
@@ -30,4 +30,4 @@ for pkg in pkgs:
         assert pkg["version"].startswith("2.7.")
         break
 else:
-    assert False, "python not found in %s" % pkg_names
+    assert False, f"python not found in {pkg_names}"

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
-import sys
 import os
+import sys
 from glob import glob

 # conda should still be in /srv/conda
@@ -19,14 +19,13 @@ Test lifecycle:
 import os
 import pipes
 import shlex
-import requests
 import subprocess
 import time
 from tempfile import TemporaryDirectory

 import escapism
 import pytest
+import requests
 import yaml

 from repo2docker.__main__ import make_r2d

@@ -65,7 +64,7 @@ def make_test_func(args, skip_build=False, extra_run_kwargs=None):
         container = app.start_container()
         port = app.port
         # wait a bit for the container to be ready
-        container_url = "http://localhost:%s/api" % port
+        container_url = f"http://localhost:{port}/api"
         # give the container a chance to start
         time.sleep(1)
         try:

@@ -77,13 +76,13 @@ def make_test_func(args, skip_build=False, extra_run_kwargs=None):
                 try:
                     info = requests.get(container_url).json()
                 except Exception as e:
-                    print("Error: %s" % e)
+                    print(f"Error: {e}")
                     time.sleep(i * 3)
                 else:
                     print(info)
                     success = True
                     break
-            assert success, "Notebook never started in %s" % container
+            assert success, f"Notebook never started in {container}"
         finally:
             # stop the container
             container.stop()

@@ -211,8 +210,8 @@ class Repo2DockerTest(pytest.Function):
     def repr_failure(self, excinfo):
         err = excinfo.value
         if isinstance(err, SystemExit):
-            cmd = "jupyter-repo2docker %s" % " ".join(map(pipes.quote, self.args))
-            return "%s | exited with status=%s" % (cmd, err.code)
+            cmd = f'jupyter-repo2docker {" ".join(map(pipes.quote, self.args))}'
+            return f"{cmd} | exited with status={err.code}"
         else:
             return super().repr_failure(excinfo)
@@ -10,8 +10,8 @@ NOTE: This file has to be duplicated & present in all the following locations:
 - tests/memlimit/dockerfile/postBuild
 See https://github.com/jupyterhub/repo2docker/issues/160 for reason
 """
-from ctypes import cdll, c_void_p, memset
 import os
+from ctypes import c_void_p, cdll, memset

 libc = cdll.LoadLibrary("libc.so.6")
 libc.malloc.restype = c_void_p

@@ -20,7 +20,7 @@ with open("mem_allocate_mb") as f:
     mem_allocate_mb = int(f.read().strip())

 size = 1024 * 1024 * mem_allocate_mb
-print("trying to allocate {}MB".format(mem_allocate_mb))
+print(f"trying to allocate {mem_allocate_mb}MB")

 ret = libc.malloc(size)

@@ -10,8 +10,8 @@ NOTE: This file has to be duplicated & present in all the following locations:
 - tests/memlimit/dockerfile/postBuild
 See https://github.com/jupyterhub/repo2docker/issues/160 for reason
 """
-from ctypes import cdll, c_void_p, memset
 import os
+from ctypes import c_void_p, cdll, memset

 libc = cdll.LoadLibrary("libc.so.6")
 libc.malloc.restype = c_void_p

@@ -20,7 +20,7 @@ with open("mem_allocate_mb") as f:
     mem_allocate_mb = int(f.read().strip())

 size = 1024 * 1024 * mem_allocate_mb
-print("trying to allocate {}MB".format(mem_allocate_mb))
+print(f"trying to allocate {mem_allocate_mb}MB")

 ret = libc.malloc(size)
@@ -2,7 +2,6 @@
 import os
 import sys

-
 # Verify - kernel's Python: use Python 2
 print(sys.version_info)
 assert sys.version_info[:2] == (2, 7)

@@ -1,4 +1,4 @@
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup

 setup(
     name="Dummy",

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
-import there
 import dummy

 # This package should be available, as it was a dependency for dummy
 import pypi_pkg_test
+import there

@@ -1,4 +1,4 @@
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup

 setup(
     name="Dummy",

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
-import there
 import dummy

 # This package should be available, as it was a dependency for dummy
 import pypi_pkg_test
+import there

@@ -1,4 +1,4 @@
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup

 setup(
     name="Dummy",
@@ -1,19 +1,18 @@
 import json
 import os
-import pytest
 import re
-
 from io import BytesIO
 from tempfile import TemporaryDirectory
 from unittest.mock import patch
-from urllib.request import urlopen, Request
+from urllib.request import Request, urlopen
+
+import pytest

 from repo2docker.contentproviders import Dataverse


 test_dv = Dataverse()
-harvard_dv = next((_ for _ in test_dv.hosts if _["name"] == "Harvard Dataverse"))
-cimmyt_dv = next((_ for _ in test_dv.hosts if _["name"] == "CIMMYT Research Data"))
+harvard_dv = next(_ for _ in test_dv.hosts if _["name"] == "Harvard Dataverse")
+cimmyt_dv = next(_ for _ in test_dv.hosts if _["name"] == "CIMMYT Research Data")
 test_hosts = [
     (
         [

@@ -153,7 +152,7 @@ def test_dataverse_fetch(dv_files, requests_mock):
         for l in dv.fetch(spec, d):
             output.append(l)
         unpacked_files = set(os.listdir(d))
-        expected = set(["directory", "some-file.txt"])
+        expected = {"directory", "some-file.txt"}
         assert expected == unpacked_files
         assert os.path.isfile(
             os.path.join(d, "directory", "subdirectory", "the-other-file.txt")
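The `expected = {...}` change above is pyupgrade's set-literal rewrite, which recurs in the test hunks below: the literal builds the set directly instead of routing a throwaway list through the `set()` constructor. A one-line sketch of the equivalence:

```python
# Both expressions produce the same set; the literal skips the temporary list.
expected = {"directory", "some-file.txt"}
assert expected == set(["directory", "some-file.txt"])
```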
@ -1,17 +1,17 @@
|
|||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import urllib
|
||||
import pytest
|
||||
import tempfile
|
||||
import logging
|
||||
|
||||
from unittest.mock import patch, MagicMock, mock_open
|
||||
import urllib
|
||||
from unittest.mock import MagicMock, mock_open, patch
|
||||
from zipfile import ZipFile
|
||||
|
||||
from repo2docker.contentproviders.doi import DoiProvider
|
||||
from repo2docker.contentproviders.base import ContentProviderException
|
||||
import pytest
|
||||
|
||||
from repo2docker import __version__
|
||||
from repo2docker.contentproviders.base import ContentProviderException
|
||||
from repo2docker.contentproviders.doi import DoiProvider
|
||||
|
||||
|
||||
def test_content_id():
|
||||
|
@ -27,7 +27,7 @@ def test_url_headers(requests_mock):
|
|||
result = doi.urlopen("https://mybinder.org", headers=headers)
|
||||
assert "test1" in result.request.headers
|
||||
assert "Test2" in result.request.headers
|
||||
assert result.request.headers["User-Agent"] == "repo2docker {}".format(__version__)
|
||||
assert result.request.headers["User-Agent"] == f"repo2docker {__version__}"
|
||||
|
||||
|
||||
def test_unresolving_doi():
|
||||
|
|
|
@@ -1,18 +1,17 @@
 import json
 import os
 import re
-import pytest
-
 from contextlib import contextmanager
 from io import BytesIO
-from tempfile import TemporaryDirectory, NamedTemporaryFile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
 from unittest.mock import patch
-from urllib.request import urlopen, Request
+from urllib.request import Request, urlopen
 from zipfile import ZipFile

-from repo2docker.contentproviders import Figshare
-from repo2docker.__main__ import make_r2d
+import pytest
+
+from repo2docker.__main__ import make_r2d
+from repo2docker.contentproviders import Figshare

 test_content_ids = [
     ("https://figshare.com/articles/title/9782777", "9782777.v1"),

@@ -113,8 +112,8 @@ def test_detect_not_figshare():
 def figshare_archive(prefix="a_directory"):
     with NamedTemporaryFile(suffix=".zip") as zfile:
         with ZipFile(zfile.name, mode="w") as zip:
-            zip.writestr("{}/some-file.txt".format(prefix), "some content")
-            zip.writestr("{}/some-other-file.txt".format(prefix), "some more content")
+            zip.writestr(f"{prefix}/some-file.txt", "some content")
+            zip.writestr(f"{prefix}/some-other-file.txt", "some more content")

         yield zfile.name

@@ -127,7 +126,7 @@ def test_fetch_zip(requests_mock):
             {
                 "name": "afake.zip",
                 "is_link_only": False,
-                "download_url": "file://{}".format(fig_path),
+                "download_url": f"file://{fig_path}",
             }
         ]
     }

@@ -135,9 +134,7 @@ def test_fetch_zip(requests_mock):
         "https://api.figshare.com/v2/articles/123456/versions/42",
         json=mock_response,
     )
-    requests_mock.get(
-        "file://{}".format(fig_path), content=open(fig_path, "rb").read()
-    )
+    requests_mock.get(f"file://{fig_path}", content=open(fig_path, "rb").read())

     # with patch.object(Figshare, "urlopen", new=mock_urlopen):
     with TemporaryDirectory() as d:

@@ -146,7 +143,7 @@ def test_fetch_zip(requests_mock):
             output.append(l)

         unpacked_files = set(os.listdir(d))
-        expected = set(["some-other-file.txt", "some-file.txt"])
+        expected = {"some-other-file.txt", "some-file.txt"}
         assert expected == unpacked_files


@@ -157,12 +154,12 @@ def test_fetch_data(requests_mock):
         "files": [
             {
                 "name": "afake.file",
-                "download_url": "file://{}".format(a_path),
+                "download_url": f"file://{a_path}",
                 "is_link_only": False,
             },
             {
                 "name": "bfake.data",
-                "download_url": "file://{}".format(b_path),
+                "download_url": f"file://{b_path}",
                 "is_link_only": False,
             },
             {"name": "cfake.link", "is_link_only": True},

@@ -173,12 +170,8 @@ def test_fetch_data(requests_mock):
         "https://api.figshare.com/v2/articles/123456/versions/42",
         json=mock_response,
     )
-    requests_mock.get(
-        "file://{}".format(a_path), content=open(a_path, "rb").read()
-    )
-    requests_mock.get(
-        "file://{}".format(b_path), content=open(b_path, "rb").read()
-    )
+    requests_mock.get(f"file://{a_path}", content=open(a_path, "rb").read())
+    requests_mock.get(f"file://{b_path}", content=open(b_path, "rb").read())

     with TemporaryDirectory() as d:
         output = []
@@ -1,7 +1,9 @@
 import os
-import pytest
 import subprocess
 from tempfile import TemporaryDirectory

+import pytest
+
 from repo2docker.contentproviders import Git
@@ -1,16 +1,15 @@
 import os
-import pytest
+import re
 from contextlib import contextmanager
-from tempfile import TemporaryDirectory, NamedTemporaryFile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
 from unittest.mock import patch
 from zipfile import ZipFile
-import re
+
+import pytest

 from repo2docker.contentproviders import Hydroshare
 from repo2docker.contentproviders.base import ContentProviderException

 doi_responses = {
     "https://doi.org/10.4211/hs.b8f6eae9d89241cf8b5904033460af61": (
         "https://www.hydroshare.org/resource/b8f6eae9d89241cf8b5904033460af61"

@@ -103,8 +102,8 @@ def test_detect_hydroshare(requests_mock):
 def hydroshare_archive(prefix="b8f6eae9d89241cf8b5904033460af61/data/contents"):
     with NamedTemporaryFile(suffix=".zip") as zfile:
         with ZipFile(zfile.name, mode="w") as zip:
-            zip.writestr("{}/some-file.txt".format(prefix), "some content")
-            zip.writestr("{}/some-other-file.txt".format(prefix), "some more content")
+            zip.writestr(f"{prefix}/some-file.txt", "some content")
+            zip.writestr(f"{prefix}/some-other-file.txt", "some more content")

         yield zfile

@@ -149,7 +148,7 @@ def test_fetch_bag():
             output.append(l)

         unpacked_files = set(os.listdir(d))
-        expected = set(["some-other-file.txt", "some-file.txt"])
+        expected = {"some-other-file.txt", "some-file.txt"}
         assert expected == unpacked_files
@@ -1,5 +1,5 @@
 import os
-from tempfile import TemporaryDirectory, NamedTemporaryFile
+from tempfile import NamedTemporaryFile, TemporaryDirectory

 from repo2docker.contentproviders import Local

@@ -8,7 +8,6 @@ import pytest
 from repo2docker.contentproviders import Mercurial
 from repo2docker.contentproviders.mercurial import args_enabling_topic

-
 SKIP_HG = os.environ.get("REPO2DOCKER_SKIP_HG_TESTS", "").lower() not in {
     "",
     "0",
@@ -1,22 +1,22 @@
-import json
-import os
 import io
-import tarfile
-import shutil
-import re
-import urllib
-import pytest
-import tempfile
+import json
 import logging
-import requests_mock
-
+import os
+import re
+import shutil
+import tarfile
+import tempfile
+import urllib
 from os import makedirs
 from os.path import join
-from unittest.mock import patch, MagicMock, mock_open
+from unittest.mock import MagicMock, mock_open, patch
 from zipfile import ZipFile

-from repo2docker.contentproviders.swhid import Swhid, parse_swhid
+import pytest
+import requests_mock
+
 from repo2docker.contentproviders.base import ContentProviderException
+from repo2docker.contentproviders.swhid import Swhid, parse_swhid


 # this is a slightly stripped down copy of swh.model.cli.swhid_of_dir().

@@ -99,7 +99,7 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf):

     adapter.register_uri(
         "GET",
-        "mock://api/1/revision/{}/".format(NULLID),
+        f"mock://api/1/revision/{NULLID}/",
         json={
             "author": {"fullname": "John Doe <jdoe@example.com>"},
             "directory": dirhash,

@@ -107,25 +107,25 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf):
     )
     adapter.register_uri(
         "POST",
-        "mock://api/1/vault/directory/{}/".format(dirhash),
+        f"mock://api/1/vault/directory/{dirhash}/",
         json={
-            "fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash),
+            "fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/",
             "status": "new",
         },
     )
     adapter.register_uri(
         "GET",
-        "mock://api/1/vault/directory/{}/".format(dirhash),
+        f"mock://api/1/vault/directory/{dirhash}/",
         [
             {
                 "json": {
-                    "fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash),
+                    "fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/",
                     "status": "pending",
                 }
             },
             {
                 "json": {
-                    "fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash),
+                    "fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/",
                     "status": "done",
                 }
             },

@@ -133,7 +133,7 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf):
     )
     adapter.register_uri(
         "GET",
-        "mock://api/1/vault/directory/{}/raw/".format(dirhash),
+        f"mock://api/1/vault/directory/{dirhash}/raw/",
         content=tarfile_buf,
     )
     return provider
@@ -1,15 +1,15 @@
 import json
 import os
-import pytest
 import re
-
 from contextlib import contextmanager
 from io import BytesIO
-from tempfile import TemporaryDirectory, NamedTemporaryFile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
 from unittest.mock import patch
-from urllib.request import urlopen, Request
+from urllib.request import Request, urlopen
 from zipfile import ZipFile

+import pytest
+
 from repo2docker.contentproviders import Zenodo

 doi_responses = {

@@ -82,8 +82,8 @@ def test_detect_zenodo(test_input, expected, requests_mock):
 def zenodo_archive(prefix="a_directory"):
     with NamedTemporaryFile(suffix=".zip") as zfile:
         with ZipFile(zfile.name, mode="w") as zip:
-            zip.writestr("{}/some-file.txt".format(prefix), "some content")
-            zip.writestr("{}/some-other-file.txt".format(prefix), "some more content")
+            zip.writestr(f"{prefix}/some-file.txt", "some content")
+            zip.writestr(f"{prefix}/some-other-file.txt", "some more content")

         yield zfile.name

@@ -96,15 +96,13 @@ def test_fetch_software_from_github_archive(requests_mock):
         "files": [
             {
                 "filename": "some_dir/afake.zip",
-                "links": {"download": "file://{}".format(zen_path)},
+                "links": {"download": f"file://{zen_path}"},
             }
         ],
         "metadata": {"upload_type": "other"},
     }
     requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response)
-    requests_mock.get(
-        "file://{}".format(zen_path), content=open(zen_path, "rb").read()
-    )
+    requests_mock.get(f"file://{zen_path}", content=open(zen_path, "rb").read())

     zen = Zenodo()
     spec = {"host": test_zen.hosts[1], "record": "1234"}

@@ -115,7 +113,7 @@ def test_fetch_software_from_github_archive(requests_mock):
             output.append(l)

         unpacked_files = set(os.listdir(d))
-        expected = set(["some-other-file.txt", "some-file.txt"])
+        expected = {"some-other-file.txt", "some-file.txt"}
         assert expected == unpacked_files


@@ -129,15 +127,13 @@ def test_fetch_software(requests_mock):
                 # this is the difference to the GitHub generated one,
                 # the ZIP file isn't in a directory
                 "filename": "afake.zip",
-                "links": {"download": "file://{}".format(zen_path)},
+                "links": {"download": f"file://{zen_path}"},
             }
         ],
         "metadata": {"upload_type": "software"},
     }
     requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response)
-    requests_mock.get(
-        "file://{}".format(zen_path), content=open(zen_path, "rb").read()
-    )
+    requests_mock.get(f"file://{zen_path}", content=open(zen_path, "rb").read())

     with TemporaryDirectory() as d:
         zen = Zenodo()

@@ -147,7 +143,7 @@ def test_fetch_software(requests_mock):
             output.append(l)

         unpacked_files = set(os.listdir(d))
-        expected = set(["some-other-file.txt", "some-file.txt"])
+        expected = {"some-other-file.txt", "some-file.txt"}
         assert expected == unpacked_files


@@ -159,21 +155,21 @@ def test_fetch_data(requests_mock):
         "files": [
             {
                 "filename": "afake.zip",
-                "links": {"download": "file://{}".format(a_zen_path)},
+                "links": {"download": f"file://{a_zen_path}"},
             },
             {
                 "filename": "bfake.zip",
-                "links": {"download": "file://{}".format(b_zen_path)},
+                "links": {"download": f"file://{b_zen_path}"},
             },
         ],
         "metadata": {"upload_type": "data"},
     }
     requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response)
     requests_mock.get(
-        "file://{}".format(a_zen_path), content=open(a_zen_path, "rb").read()
+        f"file://{a_zen_path}", content=open(a_zen_path, "rb").read()
     )
     requests_mock.get(
-        "file://{}".format(b_zen_path), content=open(b_zen_path, "rb").read()
+        f"file://{b_zen_path}", content=open(b_zen_path, "rb").read()
     )

     with TemporaryDirectory() as d:
@@ -1,13 +1,13 @@
 import errno
-import pytest
 from tempfile import TemporaryDirectory
 from unittest.mock import patch

+import docker
 import escapism
+import pytest

-from repo2docker.app import Repo2Docker
-import docker
 from repo2docker.__main__ import make_r2d
+from repo2docker.app import Repo2Docker
 from repo2docker.utils import chdir

@@ -2,9 +2,11 @@
 Test argument parsing and r2d construction
 """
 import os
+
 import pytest
-from repo2docker.__main__ import make_r2d
+
 from repo2docker import __version__
+from repo2docker.__main__ import make_r2d


 def test_version(capsys):

@@ -13,7 +15,7 @@ def test_version(capsys):
     """
     with pytest.raises(SystemExit):
         make_r2d(["--version"])
-    assert capsys.readouterr().out == "{}\n".format(__version__)
+    assert capsys.readouterr().out == f"{__version__}\n"


 def test_simple():
@@ -7,7 +7,6 @@ import subprocess

 import pytest

-
 here = os.path.dirname(os.path.abspath(__file__))
 test_dir = os.path.dirname(here)
 docker_simple = os.path.join(test_dir, "dockerfile", "simple")

@@ -1,7 +1,8 @@
 from os.path import join as pjoin
+from tempfile import TemporaryDirectory

 import pytest
-from tempfile import TemporaryDirectory

 from repo2docker.buildpacks import LegacyBinderDockerBuildPack, PythonBuildPack
 from repo2docker.utils import chdir

@@ -5,7 +5,6 @@ Test that --cache-from is passed in to docker API properly.
 from unittest.mock import MagicMock

 import docker
-
 from repo2docker.buildpacks import (
     BaseImage,
     DockerBuildPack,

@@ -8,12 +8,10 @@ and that is the only thing that is tested.
 """
 import os
 import subprocess
-
 from tempfile import TemporaryDirectory
-
 from repo2docker.app import Repo2Docker

 URL = "https://github.com/binderhub-ci-repos/repo2docker-ci-clone-depth"
@@ -1,10 +1,11 @@
 """
 Test if the explict hostname is supplied correctly to the container
 """
-import requests
 import time
-from repo2docker.app import Repo2Docker
+
+import requests
+
+from repo2docker.app import Repo2Docker

 # Minimal Dockerfile to make build as fast as possible
 DOCKER_FILE = """

@@ -40,8 +41,8 @@ def test_connect_url(tmpdir):
     app.start()
     container = app.start_container()

-    container_url = "http://{}:{}/api".format(app.hostname, app.port)
-    expected_url = "http://{}:{}".format(app.hostname, app.port)
+    container_url = f"http://{app.hostname}:{app.port}/api"
+    expected_url = f"http://{app.hostname}:{app.port}"

     # wait a bit for the container to be ready
     # give the container a chance to start

@@ -59,13 +60,13 @@ def test_connect_url(tmpdir):
             try:
                 info = requests.get(container_url).json()
             except Exception as e:
-                print("Error: %s" % e)
+                print(f"Error: {e}")
                 time.sleep(i * 3)
             else:
                 print(info)
                 success = True
                 break
-        assert success, "Notebook never started in %s" % container
+        assert success, f"Notebook never started in {container}"
     finally:
         # stop the container
         container.stop()
@@ -5,7 +5,6 @@ import time

 from repo2docker.__main__ import make_r2d

-
 DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "dockerfile", "editable")

@@ -30,7 +30,7 @@ def test_env(capfd):
             "repo2docker",
             # 'key=value' are exported as is in docker
             "-e",
-            "FOO={}".format(ts),
+            f"FOO={ts}",
             "--env",
             "BAR=baz",
             # 'key' is exported with the currently exported value

@@ -65,7 +65,7 @@ def test_env(capfd):

     # stderr should contain lines of output
     declares = [x for x in captured.err.splitlines() if x.startswith("declare")]
-    assert 'declare -x FOO="{}"'.format(ts) in declares
+    assert f'declare -x FOO="{ts}"' in declares
     assert 'declare -x BAR="baz"' in declares
     assert 'declare -x SPAM="eggs"' in declares
     assert "declare -x NO_SPAM" not in declares
@@ -3,7 +3,9 @@ Test if the environment.yml is empty or it constains other data structure than a
 """
 import os
 import sys
+
 import pytest
+
 from repo2docker import buildpacks

@@ -1,5 +1,6 @@
 """Test if assemble scripts from outside of r2d repo are accepted."""
 import time
+
 from repo2docker.app import Repo2Docker
 from repo2docker.buildpacks import PythonBuildPack

@@ -2,12 +2,11 @@ import os
 from tempfile import TemporaryDirectory
 from unittest.mock import patch

+import pytest
 from ruamel.yaml import YAML

 from repo2docker.buildpacks.conda.freeze import set_python

-import pytest
-
 V = "3.7"
 yaml = YAML(typ="rt")
@@ -1,12 +1,13 @@
 """
 Test if labels are supplied correctly to the container
 """
-from repo2docker.app import Repo2Docker
-from repo2docker.buildpacks import BuildPack
-from repo2docker import __version__
-import pytest
 from unittest.mock import Mock

+import pytest
+
+from repo2docker import __version__
+from repo2docker.app import Repo2Docker
+from repo2docker.buildpacks import BuildPack
+
 URL = "https://github.com/binderhub-ci-repos/repo2docker-ci-clone-depth"

@@ -3,16 +3,13 @@ Test that build time memory limits are enforced
 """

 import os
-
 from unittest.mock import MagicMock

+import docker
 import pytest

-import docker
 from repo2docker.buildpacks import BaseImage, DockerBuildPack

 basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -2,18 +2,18 @@
 Test Port mappings work on running non-jupyter workflows
 """
-import requests
-import time
 import os
-import tempfile
 import random
+import tempfile
+import time
 from getpass import getuser
 
-from repo2docker.app import Repo2Docker
-import docker
+import docker
+import pytest
+import requests
+
 from repo2docker.__main__ import make_r2d
+from repo2docker.app import Repo2Docker
 
 
 def read_port_mapping_response(
@@ -82,13 +82,13 @@ def read_port_mapping_response(
     if all_ports:
         port = port_mapping["8888/tcp"][0]["HostPort"]
 
-    url = "http://{}:{}".format(host, port)
+    url = f"http://{host}:{port}"
     for i in range(5):
         try:
             r = requests.get(url)
             r.raise_for_status()
         except Exception as e:
-            print("No response from {}: {}".format(url, e))
+            print(f"No response from {url}: {e}")
             container.reload()
             assert container.status == "running"
             time.sleep(3)
@@ -1,8 +1,8 @@
 from datetime import date
+from unittest.mock import patch
 
 import pytest
 from requests.models import Response
-from unittest.mock import patch
 
 from repo2docker import buildpacks
@@ -87,6 +87,7 @@ def test_snapshot_mran_date(requested, expected):
 
     with patch("requests.head", side_effect=mock_request_head):
         r = buildpacks.RBuildPack()
-        assert r.get_mran_snapshot_url(
-            requested
-        ) == "https://mran.microsoft.com/snapshot/{}".format(expected.isoformat())
+        assert (
+            r.get_mran_snapshot_url(requested)
+            == f"https://mran.microsoft.com/snapshot/{expected.isoformat()}"
+        )
@@ -1,5 +1,6 @@
 import pytest
 from semver import VersionInfo
+
 from repo2docker import semver
@@ -4,8 +4,8 @@ Test if the subdirectory is correctly navigated to
 import os
+
 import escapism
-
 import pytest
 
 from repo2docker.app import Repo2Docker
 
 TEST_REPO = "https://github.com/binderhub-ci-repos/repo2docker-subdir-support"
@@ -21,7 +21,7 @@ def test_subdir(run_repo2docker):
     run_repo2docker(argv)
 
     # check that we restored the current working directory
-    assert cwd == os.getcwd(), "We should be back in %s" % cwd
+    assert cwd == os.getcwd(), f"We should be back in {cwd}"
 
 
 def test_subdir_in_image_name():
@@ -34,26 +34,21 @@ def test_user():
     subprocess.check_call(
         [
             "repo2docker",
-            "-v",
-            "{}:/home/{}".format(tmpdir, username),
-            "--user-id",
-            userid,
-            "--user-name",
-            username,
+            f"--volume={tmpdir}:/home/{username}",
+            f"--user-id={userid}",
+            f"--user-name={username}",
             tmpdir,
             "--",
             "/bin/bash",
             "-c",
-            "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user".format(
-                ts
-            ),
+            "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user",
         ]
     )
 
     with open(os.path.join(tmpdir, "id")) as f:
         assert f.read().strip() == userid
     with open(os.path.join(tmpdir, "pwd")) as f:
-        assert f.read().strip() == "/home/{}".format(username)
+        assert f.read().strip() == f"/home/{username}"
     with open(os.path.join(tmpdir, "name")) as f:
         assert f.read().strip() == username
     with open(os.path.join(tmpdir, "name")) as f:
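Two separate cleanups land in this hunk. The paired CLI tokens (`"-v", value`) collapse into single `--flag=value` tokens, and a stray `.format(ts)` call on a string with no `{}` placeholder, a silent no-op, is dropped. A hedged sketch of the resulting argv construction; the fixture values are placeholders, not taken from the test:

```python
# Placeholder values standing in for the test fixtures.
tmpdir, username, userid = "/tmp/r2d-test", "jovyan", "1000"

# "--flag=value" as one token is equivalent to passing "--flag", "value"
# separately to an argparse-style CLI such as repo2docker's.
argv = [
    "repo2docker",
    f"--volume={tmpdir}:/home/{username}",
    f"--user-id={userid}",
    f"--user-name={username}",
    tmpdir,
    "--",
    "/bin/bash",
    "-c",
    # No {} placeholders here, so the removed ".format(ts)" call changed nothing.
    "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user",
]
```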
@@ -1,13 +1,15 @@
 """
 Tests for repo2docker/utils.py
 """
-import traitlets
 import os
-from repo2docker import utils
-import pytest
 import subprocess
 import tempfile
 
+import pytest
+import traitlets
+
+from repo2docker import utils
+
 
 def test_capture_cmd_no_capture_success():
     # This should succeed
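The import shuffling here and in the hunks above is isort's default layout: standard library first, then third-party packages, then first-party imports, each group alphabetized and separated by one blank line. A sketch of the convention (the module names mirror this test file):

```python
import os            # section 1: standard library
import subprocess
import tempfile

import pytest        # section 2: third-party packages
import traitlets

from repo2docker import utils  # section 3: first-party / local imports
```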
@@ -90,7 +92,7 @@ def test_invalid_port_mapping(port_spec):
     with pytest.raises(ValueError) as e:
         utils.validate_and_generate_port_mapping([port_spec])
 
-    assert 'Port specification "{}"'.format(port_spec) in str(e.value)
+    assert f'Port specification "{port_spec}"' in str(e.value)
 
 
 def test_deep_get():
@@ -22,7 +22,7 @@ def test_volume_abspath():
         [
             "repo2docker",
             "-v",
-            "{}:/home/{}".format(tmpdir, username),
+            f"{tmpdir}:/home/{username}",
             "--user-id",
             str(os.geteuid()),
             "--user-name",
@@ -31,7 +31,7 @@ def test_volume_abspath():
             "--",
             "/bin/bash",
             "-c",
-            "echo -n {} > ts".format(ts),
+            f"echo -n {ts} > ts",
         ]
     )
@@ -61,7 +61,7 @@ def test_volume_relpath():
             "--",
             "/bin/bash",
             "-c",
-            "echo -n {} > ts".format(ts),
+            f"echo -n {ts} > ts",
         ]
     )
@@ -4,4 +4,4 @@ import os
 
 assert os.path.expanduser("~/.local/bin") in os.getenv("PATH"), os.getenv("PATH")
 assert os.getcwd() == os.environ["REPO_DIR"]
-assert "{}/.local/bin".format(os.environ["REPO_DIR"]) in os.getenv("PATH")
+assert f'{os.environ["REPO_DIR"]}/.local/bin' in os.getenv("PATH")
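Note the quote flip in the rewritten assertion: before Python 3.12 an f-string may not reuse its own quote character inside the braces, so the outer string switches to single quotes to make room for `os.environ["REPO_DIR"]`. A small sketch with a placeholder value:

```python
import os

os.environ.setdefault("REPO_DIR", "/srv/repo")  # placeholder for this sketch

# Outer single quotes leave double quotes free for use inside the braces.
path = f'{os.environ["REPO_DIR"]}/.local/bin'
assert path.endswith("/.local/bin")
```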
@@ -275,7 +275,6 @@ https://creativecommons.org/publicdomain/zero/1.0/ .
 
 """
 
-from __future__ import print_function
 
 try:
     import configparser
@@ -344,7 +343,7 @@ def get_config_from_root(root):
     # the top of versioneer.py for instructions on writing your setup.cfg .
     setup_cfg = os.path.join(root, "setup.cfg")
     parser = configparser.SafeConfigParser()
-    with open(setup_cfg, "r") as f:
+    with open(setup_cfg) as f:
        parser.readfp(f)
     VCS = parser.get("versioneer", "VCS")  # mandatory
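Dropping the redundant `"r"` mode is all this PR does here, but the surrounding code still leans on two long-deprecated configparser APIs: `SafeConfigParser` (renamed `ConfigParser` in Python 3.2, removed in 3.12) and `readfp()` (replaced by `read_file()`). A hedged sketch of the modern equivalent, assuming a `setup.cfg` with a `[versioneer]` section is present; this is not part of the PR:

```python
import configparser

parser = configparser.ConfigParser()  # SafeConfigParser is gone as of Python 3.12
with open("setup.cfg") as f:          # "r" is already the default mode
    parser.read_file(f)               # read_file() supersedes the removed readfp()
vcs = parser.get("versioneer", "VCS")
```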
@@ -404,7 +403,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
                 stderr=(subprocess.PIPE if hide_stderr else None),
             )
             break
-        except EnvironmentError:
+        except OSError:
             e = sys.exc_info()[1]
             if e.errno == errno.ENOENT:
                 continue
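`EnvironmentError` has been a plain alias of `OSError` since Python 3.3 (PEP 3151), so every `except EnvironmentError` to `except OSError` rewrite in this file is cosmetic; the caught exceptions are identical. A quick demonstration:

```python
# Since Python 3.3 the legacy names are the very same class object.
assert EnvironmentError is OSError
assert IOError is OSError

try:
    open("/nonexistent/path/for/demo")
except OSError as e:           # catches exactly what EnvironmentError caught
    print(type(e).__name__)    # FileNotFoundError, an OSError subclass
```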
@@ -414,7 +413,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
             return None, None
     else:
         if verbose:
-            print("unable to find command, tried %s" % (commands,))
+            print(f"unable to find command, tried {commands}")
         return None, None
     stdout = p.communicate()[0].strip()
     if sys.version_info[0] >= 3:
@@ -429,7 +428,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
 
 LONG_VERSION_PY[
     "git"
-] = '''
+] = r'''
 # This file helps to compute a version number in source trees obtained from
 # git-archive tarball (such as those provided by githubs download-from-tag
 # feature). Distribution tarballs (built by setup.py sdist) and build
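The added `r` prefix makes the embedded `_version.py` template a raw string, so backslashes in its regexes (`\d`, `\s`, and friends) pass through untouched and newer Pythons stop warning about invalid escape sequences in them. The difference in miniature:

```python
plain = "=\\s*"   # without the prefix, each backslash must be doubled
raw = r"=\s*"     # raw string: the backslash is taken literally
assert plain == raw
```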
@@ -961,7 +960,7 @@ def git_get_keywords(versionfile_abs):
     # _version.py.
     keywords = {}
     try:
-        f = open(versionfile_abs, "r")
+        f = open(versionfile_abs)
         for line in f.readlines():
             if line.strip().startswith("git_refnames ="):
                 mo = re.search(r'=\s*"(.*)"', line)
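`open()` defaults to mode `"rt"` (read, text), so the explicit `"r"` argument that pyupgrade strips here and in the hunks below was pure noise. The two spellings are interchangeable:

```python
# Both calls open the same file in the same mode.
with open(__file__) as f1, open(__file__, "r") as f2:
    assert f1.mode == f2.mode == "r"
```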
@@ -976,7 +975,7 @@ def git_get_keywords(versionfile_abs):
                 if mo:
                     keywords["date"] = mo.group(1)
         f.close()
-    except EnvironmentError:
+    except OSError:
         pass
     return keywords
@@ -1000,11 +999,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         if verbose:
             print("keywords are unexpanded, not using")
         raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
+    refs = {r.strip() for r in refnames.strip("()").split(",")}
     # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
     # just "foo-1.0". If we see a "tag: " prefix, prefer those.
     TAG = "tag: "
-    tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
+    tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
     if not tags:
         # Either we're using git < 1.8.3, or there really are no tags. We use
         # a heuristic: assume all version tags have a digit. The old git %d
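`set([... for ...])` builds a throwaway list and then copies it into a set; the set-comprehension literal `{... for ...}` that pyupgrade substitutes skips the intermediate list and reads as a single operation. Behavior is unchanged:

```python
refnames = "(HEAD -> main, tag: v1.0.0, tag: v2.0.0)"  # sample input for this sketch

# Old form: list comprehension wrapped in set()
refs_old = set([r.strip() for r in refnames.strip("()").split(",")])

# New form: direct set comprehension, no temporary list
refs_new = {r.strip() for r in refnames.strip("()").split(",")}

assert refs_old == refs_new
```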
@@ -1013,7 +1012,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # between branches and tags. By ignoring refnames without digits, we
         # filter out many common branch names like "release" and
         # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r"\d", r)])
+        tags = {r for r in refs if re.search(r"\d", r)}
         if verbose:
             print("discarding '%s', no digits" % ",".join(refs - tags))
     if verbose:
@@ -1107,7 +1106,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
         if not mo:
             # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
+            pieces["error"] = f"unable to parse git-describe output: '{describe_out}'"
             return pieces
 
         # tag
@@ -1116,10 +1115,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
             if verbose:
                 fmt = "tag '%s' doesn't start with prefix '%s'"
                 print(fmt % (full_tag, tag_prefix))
-            pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
-                full_tag,
-                tag_prefix,
-            )
+            pieces[
+                "error"
+            ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
             return pieces
         pieces["closest-tag"] = full_tag[len(tag_prefix) :]
@@ -1166,13 +1164,13 @@ def do_vcs_install(manifest_in, versionfile_source, ipy):
         files.append(versioneer_file)
     present = False
     try:
-        f = open(".gitattributes", "r")
+        f = open(".gitattributes")
         for line in f.readlines():
             if line.strip().startswith(versionfile_source):
                 if "export-subst" in line.strip().split()[1:]:
                     present = True
         f.close()
-    except EnvironmentError:
+    except OSError:
         pass
     if not present:
         f = open(".gitattributes", "a+")
@@ -1236,7 +1234,7 @@ def versions_from_file(filename):
     try:
         with open(filename) as f:
             contents = f.read()
-    except EnvironmentError:
+    except OSError:
         raise NotThisMethod("unable to read _version.py")
     mo = re.search(
         r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S
@@ -1257,7 +1255,7 @@ def write_to_version_file(filename, versions):
     with open(filename, "w") as f:
         f.write(SHORT_VERSION_PY % contents)
 
-    print("set %s to '%s'" % (filename, versions["version"]))
+    print(f"set {filename} to '{versions['version']}'")
 
 
 def plus_or_dot(pieces):
@@ -1482,7 +1480,7 @@ def get_versions(verbose=False):
         try:
             ver = versions_from_file(versionfile_abs)
             if verbose:
-                print("got version from file %s %s" % (versionfile_abs, ver))
+                print(f"got version from file {versionfile_abs} {ver}")
             return ver
         except NotThisMethod:
             pass
@@ -1755,11 +1753,7 @@ def do_setup():
     root = get_root()
     try:
         cfg = get_config_from_root(root)
-    except (
-        EnvironmentError,
-        configparser.NoSectionError,
-        configparser.NoOptionError,
-    ) as e:
+    except (OSError, configparser.NoSectionError, configparser.NoOptionError) as e:
         if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
             print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
             with open(os.path.join(root, "setup.cfg"), "a") as f:
@@ -1784,9 +1778,9 @@ def do_setup():
     ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
     if os.path.exists(ipy):
         try:
-            with open(ipy, "r") as f:
+            with open(ipy) as f:
                 old = f.read()
-        except EnvironmentError:
+        except OSError:
             old = ""
         if INIT_PY_SNIPPET not in old:
             print(" appending to %s" % ipy)
@@ -1805,12 +1799,12 @@ def do_setup():
     manifest_in = os.path.join(root, "MANIFEST.in")
     simple_includes = set()
     try:
-        with open(manifest_in, "r") as f:
+        with open(manifest_in) as f:
             for line in f:
                 if line.startswith("include "):
                     for include in line.split()[1:]:
                         simple_includes.add(include)
-    except EnvironmentError:
+    except OSError:
         pass
     # That doesn't cover everything MANIFEST.in can do
     # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
@@ -1844,7 +1838,7 @@ def scan_setup_py():
     found = set()
     setters = False
     errors = 0
-    with open("setup.py", "r") as f:
+    with open("setup.py") as f:
         for line in f.readlines():
             if "import versioneer" in line:
                 found.add("import")