s3-credentials put-objects command, closes #68

pull/84/head
Simon Willison 2022-09-15 16:38:45 -07:00
parent 51e36ae352
commit c7bb7268c4
5 changed files with 224 additions and 4 deletions

@@ -45,6 +45,7 @@ Commands:
list-users List all users for this account
policy Output generated JSON policy for one or more buckets
put-object Upload an object to an S3 bucket
put-objects Upload multiple objects to an S3 bucket
set-cors-policy Set CORS policy for a bucket
whoami Identify currently authenticated user
```
@@ -374,6 +375,45 @@ Options:
-a, --auth FILENAME Path to JSON/INI file containing credentials
--help Show this message and exit.
```
## s3-credentials put-objects --help
```
Usage: s3-credentials put-objects [OPTIONS] BUCKET OBJECTS...
Upload multiple objects to an S3 bucket
Pass one or more files to upload them:
s3-credentials put-objects my-bucket one.txt two.txt
These will be saved to the root of the bucket. To save to a different location
use the --prefix option:
s3-credentials put-objects my-bucket one.txt two.txt --prefix my-folder
This will upload them to my-folder/one.txt and my-folder/two.txt.
If you pass a directory it will be uploaded recursively:
s3-credentials put-objects my-bucket my-folder
This will create keys in my-folder/... in the S3 bucket.
To upload all files in a folder to the root of the bucket instead, use this:
s3-credentials put-objects my-bucket my-folder/*
Options:
--prefix TEXT Prefix to add to the files within the bucket
-s, --silent Don't show progress bar
--dry-run Show steps without executing them
--access-key TEXT AWS access key ID
--secret-key TEXT AWS secret access key
--session-token TEXT AWS session token
--endpoint-url TEXT Custom endpoint URL
-a, --auth FILENAME Path to JSON/INI file containing credentials
--help Show this message and exit.
```
## s3-credentials set-cors-policy --help
```

@@ -359,6 +359,46 @@ The `Content-Type` on the uploaded object will be automatically set based on the
echo "<h1>Hello World</h1>" | \
s3-credentials put-object my-bucket hello.html - --content-type "text/html"
## put-objects
`s3-credentials put-objects` can be used to upload more than one file at once.
Pass one or more filenames to upload them to the root of your bucket:
s3-credentials put-objects my-bucket one.txt two.txt three.txt
Use `--prefix my-prefix` to upload them to the specified prefix:
s3-credentials put-objects my-bucket one.txt --prefix my-prefix
This will upload the file to `my-prefix/one.txt`.
Pass one or more directories to upload the contents of those directories.
`.` uploads everything in your current directory:
s3-credentials put-objects my-bucket .
Passing directory names will upload the directory and all of its contents:
s3-credentials put-objects my-bucket my-directory
If `my-directory` had files `one.txt` and `two.txt` in it, the result would be:
my-directory/one.txt
my-directory/two.txt
A progress bar will be shown by default. Use `-s` or `--silent` to hide it.
Add `--dry-run` to get a preview of what would be uploaded without uploading anything:
s3-credentials put-objects my-bucket . --dry-run
```
out/IMG_1254.jpeg => s3://my-bucket/out/IMG_1254.jpeg
out/alverstone-mead-2.jpg => s3://my-bucket/out/alverstone-mead-2.jpg
out/alverstone-mead-1.jpg => s3://my-bucket/out/alverstone-mead-1.jpg
```
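Each file is uploaded with a single `upload_file()` call against the S3 client. As a rough sketch (not the command itself), this is approximately what `put-objects my-bucket my-directory` boils down to in plain `boto3` and `pathlib`, with the bucket and directory names as placeholders:
```
# Approximate equivalent of: s3-credentials put-objects my-bucket my-directory
# (names are placeholders; the real command adds --prefix, --dry-run and a
# progress bar on top of this loop)
import pathlib
import boto3

s3 = boto3.client("s3")
root = pathlib.Path("my-directory")
for path in root.glob("**/*"):
    if path.is_file():
        # The key is the path relative to the directory's parent,
        # e.g. my-directory/one.txt
        s3.upload_file(
            Filename=str(path),
            Bucket="my-bucket",
            Key=str(path.relative_to(root.parent)),
        )
```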
## get-object
To download a file from a bucket use `s3-credentials get-object`:

@@ -992,7 +992,7 @@ def put_object(bucket, key, path, content_type, silent, **boto_options):
extra_args["ContentType"] = content_type
if not silent:
# Show progress bar
with click.progressbar(length=size, label="Uploading") as bar:
with click.progressbar(length=size, label="Uploading", file=sys.stderr) as bar:
s3.upload_fileobj(
fp, bucket, key, Callback=bar.update, ExtraArgs=extra_args
)
@@ -1000,6 +1000,91 @@ def put_object(bucket, key, path, content_type, silent, **boto_options):
s3.upload_fileobj(fp, bucket, key, ExtraArgs=extra_args)
@cli.command()
@click.argument("bucket")
@click.argument(
"objects",
nargs=-1,
required=True,
)
@click.option(
"--prefix",
help="Prefix to add to the files within the bucket",
)
@click.option("silent", "-s", "--silent", is_flag=True, help="Don't show progress bar")
@click.option("--dry-run", help="Show steps without executing them", is_flag=True)
@common_boto3_options
def put_objects(bucket, objects, prefix, silent, dry_run, **boto_options):
"""
Upload multiple objects to an S3 bucket
Pass one or more files to upload them:
s3-credentials put-objects my-bucket one.txt two.txt
These will be saved to the root of the bucket. To save to a different location
use the --prefix option:
s3-credentials put-objects my-bucket one.txt two.txt --prefix my-folder
This will upload them to my-folder/one.txt and my-folder/two.txt.
If you pass a directory it will be uploaded recursively:
s3-credentials put-objects my-bucket my-folder
This will create keys in my-folder/... in the S3 bucket.
To upload all files in a folder to the root of the bucket instead, use this:
s3-credentials put-objects my-bucket my-folder/*
"""
s3 = make_client("s3", **boto_options)
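# Ensure a non-empty --prefix ends in a trailing slash before it is added to keys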
if prefix and not prefix.endswith("/"):
prefix = prefix + "/"
total_size = 0
# Figure out files to upload and their keys
paths = [] # (path, key)
for obj in objects:
path = pathlib.Path(obj)
if path.is_file():
# Just use the filename as the key
paths.append((path, path.name))
elif path.is_dir():
# Key is the relative path within the directory
for p in path.glob("**/*"):
if p.is_file():
paths.append((p, str(p.relative_to(path.parent))))
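# Upload one file, prepending --prefix to its key; with --dry-run just
# echo the planned path => s3://bucket/key mapping instead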
def upload(path, key, callback=None):
final_key = key
if prefix:
final_key = prefix + key
if dry_run:
click.echo("{} => s3://{}/{}".format(path, bucket, final_key))
else:
s3.upload_file(
Filename=str(path), Bucket=bucket, Key=final_key, Callback=callback
)
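# Unless --silent or --dry-run was passed, show a single progress bar
# sized by the combined byte count of all the files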
if not silent and not dry_run:
total_size = sum(p[0].stat().st_size for p in paths)
with click.progressbar(
length=total_size,
label="Uploading {} ({} file{})".format(
format_bytes(total_size),
len(paths),
"s" if len(paths) != 1 else "",
),
file=sys.stderr,
) as bar:
for path, key in paths:
upload(path, key, bar.update)
else:
for path, key in paths:
upload(path, key)
@cli.command()
@click.argument("bucket")
@click.argument("key")
@@ -1131,6 +1216,7 @@ def get_objects(bucket, keys, output, patterns, silent, **boto_options):
len(key_sizes),
"s" if len(key_sizes) != 1 else "",
),
file=sys.stderr,
) as bar:
for key in keys_to_download:
download(key, bar.update)

@@ -54,6 +54,11 @@ def moto_s3(aws_credentials):
with mock_s3():
client = boto3.client("s3", region_name="us-east-1")
client.create_bucket(Bucket="my-bucket")
for key in ("one.txt", "directory/two.txt", "directory/three.json"):
client.put_object(Bucket="my-bucket", Key=key, Body=key.encode("utf-8"))
yield client
@pytest.fixture(scope="function")
def moto_s3_populated(moto_s3):
for key in ("one.txt", "directory/two.txt", "directory/three.json"):
moto_s3.put_object(Bucket="my-bucket", Key=key, Body=key.encode("utf-8"))
yield moto_s3

@@ -1,5 +1,6 @@
import botocore
from click.testing import CliRunner
import s3_credentials
from s3_credentials.cli import cli
import json
import os
@@ -1137,7 +1138,7 @@ def test_list_roles_csv(stub_iam_for_list_roles):
),
)
@pytest.mark.parametrize("output", (None, "out"))
def test_get_objects(moto_s3, output, files, patterns, expected, error):
def test_get_objects(moto_s3_populated, output, files, patterns, expected, error):
runner = CliRunner()
with runner.isolated_filesystem():
args = ["get-objects", "my-bucket"] + (files or [])
@@ -1161,3 +1162,51 @@ def test_get_objects(moto_s3, output, files, patterns, expected, error):
assert all_files == expected
if error:
assert error in result.output
@pytest.mark.parametrize(
"args,expected,expected_output",
(
(["."], {"one.txt", "directory/two.txt", "directory/three.json"}, None),
(["one.txt"], {"one.txt"}, None),
(["directory"], {"directory/two.txt", "directory/three.json"}, None),
(
["directory", "--prefix", "o"],
{"o/directory/two.txt", "o/directory/three.json"},
None,
),
# --dry-run tests
(
["directory", "--prefix", "o", "--dry-run"],
None,
(
"directory/two.txt => s3://my-bucket/o/directory/two.txt\n"
"directory/three.json => s3://my-bucket/o/directory/three.json\n"
),
),
(
[".", "--prefix", "p"],
{"p/one.txt", "p/directory/two.txt", "p/directory/three.json"},
None,
),
),
)
def test_put_objects(moto_s3, args, expected, expected_output):
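# mix_stderr=False keeps the progress bar (written to stderr) out of
# result.output, so only stdout is asserted against below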
runner = CliRunner(mix_stderr=False)
with runner.isolated_filesystem():
# Create files
pathlib.Path("one.txt").write_text("one")
pathlib.Path("directory").mkdir()
pathlib.Path("directory/two.txt").write_text("two")
pathlib.Path("directory/three.json").write_text('{"three": 3}')
result = runner.invoke(
cli, ["put-objects", "my-bucket"] + args, catch_exceptions=False
)
assert result.exit_code == 0, result.output
assert result.output == (expected_output or "")
# Check files were uploaded
keys = {
obj["Key"]
for obj in moto_s3.list_objects(Bucket="my-bucket").get("Contents") or []
}
assert keys == (expected or set())