Mirror of https://github.com/simonw/s3-credentials

commit 917d575c73 (parent cc10c7f2b5)

    list-users/list-buckets --nl, --csv, --tsv - refs #48
@@ -202,11 +202,11 @@ To see a list of all users that exist for your AWS account:
 
     s3-credentials list-users
 
-This will return pretty-printed JSON objects by default.
+This will return a pretty-printed array of JSON objects by default.
 
 Add `--nl` to collapse these to single lines as valid newline-delimited JSON.
 
-Add `--array` to output a valid JSON array of objects instead.
+Add `--csv` or `--tsv` to get back CSV or TSV data.
 
 ### list-buckets
 
@@ -230,7 +230,7 @@ With no extra arguments this will show all available buckets - you can also add
         "CreationDate": "2021-11-03 21:46:12+00:00"
     }
 
-This accepts the same `--nl` and `--array` options as `list-users`.
+This accepts the same `--nl`, `--csv` and `--tsv` options as `list-users`.
 
 Add `--details` to include details of the bucket ACL, website configuration and public access block settings. This is useful for running a security audit of your buckets.
 
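For illustration, the documented options combine along these lines (the output shape is inferred from the tests added later in this commit): running

    s3-credentials list-users --csv
    s3-credentials list-buckets --tsv

prints a header row followed by one comma- or tab-separated line per user or bucket, while `--nl` keeps emitting one JSON object per line as before.
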
@@ -480,25 +480,35 @@ def whoami(**boto_options):
 
 
 @cli.command()
-@click.option("--array", help="Output a valid JSON array", is_flag=True)
-@click.option("--nl", help="Output newline-delimited JSON", is_flag=True)
+@common_output_options
 @common_boto3_options
-def list_users(array, nl, **boto_options):
+def list_users(nl, csv, tsv, **boto_options):
     "List all users"
     iam = make_client("iam", **boto_options)
     paginator = iam.get_paginator("list_users")
     gathered = []
-    for response in paginator.paginate():
-        for user in response["Users"]:
-            if array:
-                gathered.append(user)
-            else:
-                if nl:
-                    click.echo(json.dumps(user, default=str))
-                else:
-                    click.echo(json.dumps(user, indent=4, default=str))
-    if gathered:
-        click.echo(json.dumps(gathered, indent=4, default=str))
+
+    def iterate():
+        for response in paginator.paginate():
+            for user in response["Users"]:
+                yield user
+
+    output(
+        iterate(),
+        (
+            "UserName",
+            "UserId",
+            "Arn",
+            "Path",
+            "CreateDate",
+            "PasswordLastUsed",
+            "PermissionsBoundary",
+            "Tags",
+        ),
+        nl,
+        csv,
+        tsv,
+    )
 
 
 @cli.command()
 
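The `@common_output_options` decorator that replaces the two `@click.option` lines above is defined elsewhere in cli.py and does not appear in this diff. A minimal sketch of what such a decorator could look like, assuming it simply stacks the three flags that the new `list_users(nl, csv, tsv, ...)` signature expects, in the same spirit as the existing `common_boto3_options` decorator:

    import click

    def common_output_options(fn):
        # Hypothetical reconstruction: apply --nl, --csv and --tsv to the
        # wrapped command so it receives nl, csv and tsv keyword arguments.
        for decorator in reversed(
            (
                click.option("--nl", help="Output newline-delimited JSON", is_flag=True),
                click.option("--csv", help="Output CSV", is_flag=True),
                click.option("--tsv", help="Output TSV", is_flag=True),
            )
        ):
            fn = decorator(fn)
        return fn
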
@@ -531,52 +541,50 @@ def list_user_policies(usernames, **boto_options):
 @cli.command()
 @click.argument("buckets", nargs=-1)
 @click.option("--details", help="Include extra bucket details (slower)", is_flag=True)
-@click.option("--array", help="Output a valid JSON array", is_flag=True)
-@click.option("--nl", help="Output newline-delimited JSON", is_flag=True)
+@common_output_options
 @common_boto3_options
-def list_buckets(buckets, details, array, nl, **boto_options):
+def list_buckets(buckets, details, nl, csv, tsv, **boto_options):
     "List buckets - defaults to all, or pass one or more bucket names"
     s3 = make_client("s3", **boto_options)
-    gathered = []
-    for bucket in s3.list_buckets()["Buckets"]:
-        if buckets and (bucket["Name"] not in buckets):
-            continue
-        if details:
-            bucket_acl = dict(
-                (key, value)
-                for key, value in s3.get_bucket_acl(
-                    Bucket=bucket["Name"],
-                ).items()
-                if key != "ResponseMetadata"
-            )
-            try:
-                pab = s3.get_public_access_block(
-                    Bucket=bucket["Name"],
-                )["PublicAccessBlockConfiguration"]
-            except s3.exceptions.ClientError:
-                pab = None
-            try:
-                bucket_website = dict(
-                    (key, value)
-                    for key, value in s3.get_bucket_website(
-                        Bucket=bucket["Name"],
-                    ).items()
-                    if key != "ResponseMetadata"
-                )
-            except s3.exceptions.ClientError:
-                bucket_website = None
-            bucket["bucket_acl"] = bucket_acl
-            bucket["public_access_block"] = pab
-            bucket["bucket_website"] = bucket_website
-        if array:
-            gathered.append(bucket)
-        else:
-            if nl:
-                click.echo(json.dumps(bucket, default=str))
-            else:
-                click.echo(json.dumps(bucket, indent=4, default=str))
-    if gathered:
-        click.echo(json.dumps(gathered, indent=4, default=str))
+
+    headers = ["Name", "CreationDate"]
+    if details:
+        headers += ["bucket_acl", "public_access_block", "bucket_website"]
+
+    def iterator():
+        for bucket in s3.list_buckets()["Buckets"]:
+            if buckets and (bucket["Name"] not in buckets):
+                continue
+            if details:
+                bucket_acl = dict(
+                    (key, value)
+                    for key, value in s3.get_bucket_acl(
+                        Bucket=bucket["Name"],
+                    ).items()
+                    if key != "ResponseMetadata"
+                )
+                try:
+                    pab = s3.get_public_access_block(
+                        Bucket=bucket["Name"],
+                    )["PublicAccessBlockConfiguration"]
+                except s3.exceptions.ClientError:
+                    pab = None
+                try:
+                    bucket_website = dict(
+                        (key, value)
+                        for key, value in s3.get_bucket_website(
+                            Bucket=bucket["Name"],
+                        ).items()
+                        if key != "ResponseMetadata"
+                    )
+                except s3.exceptions.ClientError:
+                    bucket_website = None
+                bucket["bucket_acl"] = bucket_acl
+                bucket["public_access_block"] = pab
+                bucket["bucket_website"] = bucket_website
+            yield bucket
+
+    output(iterator(), headers, nl, csv, tsv)
 
 
 @cli.command()
 
@@ -712,7 +720,13 @@ def list_bucket(bucket, prefix, nl, csv, tsv, **boto_options):
         except botocore.exceptions.ClientError as e:
             raise click.ClickException(e)
 
-    output(iterate(), nl, csv, tsv)
+    output(
+        iterate(),
+        ("Key", "LastModified", "ETag", "Size", "StorageClass", "Owner"),
+        nl,
+        csv,
+        tsv,
+    )
 
 
 @cli.command()
 
@@ -779,20 +793,16 @@ def get_object(bucket, key, output, **boto_options):
         s3.download_fileobj(bucket, key, fp)
 
 
-def output(iterator, nl, csv, tsv):
+def output(iterator, headers, nl, csv, tsv):
     if nl:
         for item in iterator:
-            click.echo(json.dumps(item, default=repr))
+            click.echo(json.dumps(item, default=str))
     elif csv or tsv:
-        first = next(iterator, None)
-        if first is None:
-            return
-        headers = first.keys()
         writer = DictWriter(
             sys.stdout, headers, dialect="excel-tab" if tsv else "excel"
         )
         writer.writeheader()
-        writer.writerows(itertools.chain([first], iterator))
+        writer.writerows(iterator)
     else:
         for line in stream_indented_json(iterator):
             click.echo(line)
 
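Because the caller now passes the full header tuple up front, `output()` no longer has to consume the first row just to learn the field names, and rows that lack a column (for example `PasswordLastUsed`, `PermissionsBoundary` and `Tags` are absent for most users) still come out as well-formed CSV with empty cells. A small standalone sketch of that DictWriter behaviour, with made-up data:

    import sys
    from csv import DictWriter

    # Rows may omit columns; DictWriter fills the gap with restval (None by
    # default), which the csv module writes as an empty cell.
    rows = [
        {"UserName": "NameA", "UserId": "AID000000000000000001"},
        {"UserName": "NameB", "UserId": "AID000000000000000000", "Tags": "env=test"},
    ]
    writer = DictWriter(sys.stdout, ("UserName", "UserId", "Tags"), dialect="excel")
    writer.writeheader()
    writer.writerows(rows)
    # UserName,UserId,Tags
    # NameA,AID000000000000000001,
    # NameB,AID000000000000000000,env=test
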
@@ -811,7 +821,7 @@ def stream_indented_json(iterator, indent=2):
         line = "{first}{serialized}{separator}{last}".format(
             first="[\n" if first else "",
             serialized=textwrap.indent(
-                json.dumps(data, indent=indent, default=repr), " " * indent
+                json.dumps(data, indent=indent, default=str), " " * indent
             ),
             separator="," if not is_last else "",
             last="\n]" if is_last else "",
 
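Only the `json.dumps()` default changes here (`repr` becomes `str`, so datetimes render as `2020-01-01 00:00:00+00:00` rather than a `repr()` string, matching the test expectations). For context, a self-contained sketch of the streaming idea this helper implements, reconstructed around the `format()` call shown above; the real body of `stream_indented_json()` may differ:

    import itertools
    import json
    import textwrap

    def stream_indented_json_sketch(iterator, indent=2):
        # Emit an indented JSON array chunk by chunk so the full result set
        # never has to be held in memory. Names and structure are assumptions.
        current, ahead = itertools.tee(iterator, 2)
        next(ahead, None)
        first = True
        for item, next_item in itertools.zip_longest(current, ahead):
            is_last = next_item is None
            yield "{open_}{body}{sep}{close}".format(
                open_="[\n" if first else "",
                body=textwrap.indent(
                    json.dumps(item, indent=indent, default=str), " " * indent
                ),
                sep="," if not is_last else "",
                close="\n]" if is_last else "",
            )
            first = False

    for chunk in stream_indented_json_sketch(
        [{"Name": "bucket-one"}, {"Name": "bucket-two"}]
    ):
        print(chunk)
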
@@ -232,7 +232,7 @@ def read_file(s3, bucket, path):
 
 def cleanup_any_resources():
     # Delete any users beginning s3-credentials-tests.
-    users = json.loads(get_output("list-users", "--array"))
+    users = json.loads(get_output("list-users"))
     users_to_delete = [
         user["UserName"]
         for user in users
 
@@ -243,7 +243,7 @@ def cleanup_any_resources():
         get_output("delete-user", *users_to_delete)
     s3 = boto3.client("s3")
     # Delete any buckets beginning s3-credentials-tests.
-    buckets = json.loads(get_output("list-buckets", "--array"))
+    buckets = json.loads(get_output("list-buckets"))
     buckets_to_delete = [
         bucket["Name"]
         for bucket in buckets
 
@@ -61,46 +61,44 @@ def test_whoami(mocker, stub_sts):
     (
         (
             "",
-            "{\n"
+            "[\n"
+            "  {\n"
             '    "Path": "/",\n'
             '    "UserName": "NameA",\n'
             '    "UserId": "AID000000000000000001",\n'
             '    "Arn": "arn:aws:iam::000000000000:user/NameB",\n'
             '    "CreateDate": "2020-01-01 00:00:00+00:00"\n'
-            "}\n"
-            "{\n"
+            "  },\n"
+            "  {\n"
             '    "Path": "/",\n'
             '    "UserName": "NameA",\n'
             '    "UserId": "AID000000000000000000",\n'
             '    "Arn": "arn:aws:iam::000000000000:user/NameB",\n'
             '    "CreateDate": "2020-01-01 00:00:00+00:00"\n'
-            "}\n",
-        ),
-        (
-            "--array",
-            "[\n"
-            "    {\n"
-            '        "Path": "/",\n'
-            '        "UserName": "NameA",\n'
-            '        "UserId": "AID000000000000000001",\n'
-            '        "Arn": "arn:aws:iam::000000000000:user/NameB",\n'
-            '        "CreateDate": "2020-01-01 00:00:00+00:00"\n'
-            "    },\n"
-            "    {\n"
-            '        "Path": "/",\n'
-            '        "UserName": "NameA",\n'
-            '        "UserId": "AID000000000000000000",\n'
-            '        "Arn": "arn:aws:iam::000000000000:user/NameB",\n'
-            '        "CreateDate": "2020-01-01 00:00:00+00:00"\n'
-            "    }\n"
-            "]\n"
-            "",
+            "  }\n"
+            "]\n",
         ),
         (
             "--nl",
             '{"Path": "/", "UserName": "NameA", "UserId": "AID000000000000000001", "Arn": "arn:aws:iam::000000000000:user/NameB", "CreateDate": "2020-01-01 00:00:00+00:00"}\n'
             '{"Path": "/", "UserName": "NameA", "UserId": "AID000000000000000000", "Arn": "arn:aws:iam::000000000000:user/NameB", "CreateDate": "2020-01-01 00:00:00+00:00"}\n',
         ),
+        (
+            "--csv",
+            (
+                "UserName,UserId,Arn,Path,CreateDate,PasswordLastUsed,PermissionsBoundary,Tags\n"
+                "NameA,AID000000000000000001,arn:aws:iam::000000000000:user/NameB,/,2020-01-01 00:00:00+00:00,,,\n"
+                "NameA,AID000000000000000000,arn:aws:iam::000000000000:user/NameB,/,2020-01-01 00:00:00+00:00,,,\n"
+            ),
+        ),
+        (
+            "--tsv",
+            (
+                "UserName\tUserId\tArn\tPath\tCreateDate\tPasswordLastUsed\tPermissionsBoundary\tTags\n"
+                "NameA\tAID000000000000000001\tarn:aws:iam::000000000000:user/NameB\t/\t2020-01-01 00:00:00+00:00\t\t\t\n"
+                "NameA\tAID000000000000000000\tarn:aws:iam::000000000000:user/NameB\t/\t2020-01-01 00:00:00+00:00\t\t\t\n"
+            ),
+        ),
     ),
 )
 def test_list_users(option, expected, stub_iam):
 
@@ -138,27 +136,18 @@ def test_list_users(option, expected, stub_iam):
     (
         (
             [],
-            "{\n"
-            '    "Name": "bucket-one",\n'
-            '    "CreationDate": "2020-01-01 00:00:00+00:00"\n'
-            "}\n"
-            "{\n"
-            '    "Name": "bucket-two",\n'
-            '    "CreationDate": "2020-02-01 00:00:00+00:00"\n'
-            "}\n",
-        ),
-        (
-            ["--array"],
-            "[\n"
-            "    {\n"
-            '        "Name": "bucket-one",\n'
-            '        "CreationDate": "2020-01-01 00:00:00+00:00"\n'
-            "    },\n"
-            "    {\n"
-            '        "Name": "bucket-two",\n'
-            '        "CreationDate": "2020-02-01 00:00:00+00:00"\n'
-            "    }"
-            "\n]\n",
+            (
+                "[\n"
+                "  {\n"
+                '    "Name": "bucket-one",\n'
+                '    "CreationDate": "2020-01-01 00:00:00+00:00"\n'
+                "  },\n"
+                "  {\n"
+                '    "Name": "bucket-two",\n'
+                '    "CreationDate": "2020-02-01 00:00:00+00:00"\n'
+                "  }\n"
+                "]\n"
+            ),
         ),
         (
             ["--nl"],
 
@@ -249,40 +238,42 @@ def test_list_buckets_details(stub_s3):
     result = runner.invoke(cli, ["list-buckets", "--details"])
     assert result.exit_code == 0
     assert result.output == (
-        "{\n"
+        "[\n"
+        "  {\n"
         '    "Name": "bucket-one",\n'
         '    "CreationDate": "2020-01-01 00:00:00+00:00",\n'
         '    "bucket_acl": {\n'
-        '        "Owner": {\n'
-        '            "DisplayName": "swillison",\n'
-        '            "ID": "36b2eeee501c5952a8ac119f9e5212277a4c01eccfa8d6a9d670bba1e2d5f441"\n'
-        "        },\n"
-        '        "Grants": [\n'
-        "            {\n"
-        '                "Grantee": {\n'
-        '                    "DisplayName": "swillison",\n'
-        '                    "ID": "36b2eeee501c5952a8ac119f9e5212277a4c01eccfa8d6a9d670bba1e2d5f441",\n'
-        '                    "Type": "CanonicalUser"\n'
-        "                },\n"
-        '                "Permission": "FULL_CONTROL"\n'
-        "            }\n"
-        "        ]\n"
+        '      "Owner": {\n'
+        '        "DisplayName": "swillison",\n'
+        '        "ID": "36b2eeee501c5952a8ac119f9e5212277a4c01eccfa8d6a9d670bba1e2d5f441"\n'
+        "      },\n"
+        '      "Grants": [\n'
+        "        {\n"
+        '          "Grantee": {\n'
+        '            "DisplayName": "swillison",\n'
+        '            "ID": "36b2eeee501c5952a8ac119f9e5212277a4c01eccfa8d6a9d670bba1e2d5f441",\n'
+        '            "Type": "CanonicalUser"\n'
+        "          },\n"
+        '          "Permission": "FULL_CONTROL"\n'
+        "        }\n"
+        "      ]\n"
         "    },\n"
         '    "public_access_block": {\n'
-        '        "BlockPublicAcls": true,\n'
-        '        "IgnorePublicAcls": true,\n'
-        '        "BlockPublicPolicy": true,\n'
-        '        "RestrictPublicBuckets": true\n'
+        '      "BlockPublicAcls": true,\n'
+        '      "IgnorePublicAcls": true,\n'
+        '      "BlockPublicPolicy": true,\n'
+        '      "RestrictPublicBuckets": true\n'
         "    },\n"
         '    "bucket_website": {\n'
-        '        "IndexDocument": {\n'
-        '            "Suffix": "index.html"\n'
-        "        },\n"
-        '        "ErrorDocument": {\n'
-        '            "Key": "error.html"\n'
-        "        }\n"
+        '      "IndexDocument": {\n'
+        '        "Suffix": "index.html"\n'
+        "      },\n"
+        '      "ErrorDocument": {\n'
+        '        "Key": "error.html"\n'
+        "      }\n"
         "    }\n"
-        "}\n"
+        "  }\n"
+        "]\n"
     )
 
@@ -771,17 +762,17 @@ def test_policy(options, expected):
         (
             ["--tsv"],
             (
-                "Key\tLastModified\tETag\tSize\tStorageClass\n"
-                'yolo-causeway-1.jpg\t2019-12-26 17:00:22+00:00\t"""87abea888b22089cabe93a0e17cf34a4"""\t5923104\tSTANDARD\n'
-                'yolo-causeway-2.jpg\t2019-12-26 17:00:22+00:00\t"""87abea888b22089cabe93a0e17cf34a4"""\t5923104\tSTANDARD\n'
+                "Key\tLastModified\tETag\tSize\tStorageClass\tOwner\n"
+                'yolo-causeway-1.jpg\t2019-12-26 17:00:22+00:00\t"""87abea888b22089cabe93a0e17cf34a4"""\t5923104\tSTANDARD\t\n'
+                'yolo-causeway-2.jpg\t2019-12-26 17:00:22+00:00\t"""87abea888b22089cabe93a0e17cf34a4"""\t5923104\tSTANDARD\t\n'
             ),
         ),
         (
             ["--csv"],
             (
-                "Key,LastModified,ETag,Size,StorageClass\n"
-                'yolo-causeway-1.jpg,2019-12-26 17:00:22+00:00,"""87abea888b22089cabe93a0e17cf34a4""",5923104,STANDARD\n'
-                'yolo-causeway-2.jpg,2019-12-26 17:00:22+00:00,"""87abea888b22089cabe93a0e17cf34a4""",5923104,STANDARD\n'
+                "Key,LastModified,ETag,Size,StorageClass,Owner\n"
+                'yolo-causeway-1.jpg,2019-12-26 17:00:22+00:00,"""87abea888b22089cabe93a0e17cf34a4""",5923104,STANDARD,\n'
+                'yolo-causeway-2.jpg,2019-12-26 17:00:22+00:00,"""87abea888b22089cabe93a0e17cf34a4""",5923104,STANDARD,\n'
             ),
         ),
     ),