Move tests from S3ProxyTest to S3AwsSdkTest

This makes testing more consistent and gives better coverage of the
more widely used AWS SDK.  Fixes #147.
pull/185/head
Andrew Gaul 2017-02-15 20:02:00 -08:00
parent bbcdf4391f
commit 4295f72be5
2 changed files with 622 additions and 744 deletions
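For context on the migration target: the moved tests drive S3Proxy through the AWS SDK for Java v1 AmazonS3 client instead of the jclouds BlobStore API. Below is a minimal, self-contained sketch of pointing such a client at a locally running S3Proxy; the endpoint, region, credentials, and bucket/key names are illustrative placeholders, not values taken from this commit or from TestUtils.

// Sketch only: connects an AWS SDK v1 client to an assumed local S3Proxy
// endpoint and performs a simple put/get round trip similar to the migrated tests.
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public final class S3ProxyClientSketch {
    public static void main(String[] args) {
        // Placeholder identity/credential pair and endpoint; S3Proxy is assumed
        // to listen on http://127.0.0.1:8080 with these credentials configured.
        AmazonS3 client = AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(
                        new BasicAWSCredentials("local-identity", "local-credential")))
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
                        "http://127.0.0.1:8080", "us-east-1"))
                .enablePathStyleAccess()
                .build();

        client.createBucket("example-bucket");
        client.putObject("example-bucket", "example-key", "hello");
        System.out.println(client.getObjectAsString("example-bucket", "example-key"));
    }
}

The tests in this commit obtain the endpoint, identity, and credential from TestUtils.startS3Proxy() rather than hard-coding them as the sketch does.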

S3AwsSdkTest.java

@@ -27,6 +27,7 @@ import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
@@ -48,32 +49,42 @@ import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
import com.amazonaws.services.s3.model.AccessControlList;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.BucketLoggingConfiguration;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.CompleteMultipartUploadResult;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.CopyPartRequest;
import com.amazonaws.services.s3.model.CopyPartResult;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsResult;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.GroupGrantee;
import com.amazonaws.services.s3.model.HeadBucketRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadResult;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ListPartsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.Permission;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.ResponseHeaderOverrides;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.SetBucketLoggingConfigurationRequest;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteSource;
import org.assertj.core.api.Fail;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.rest.HttpClient;
import org.junit.After;
import org.junit.Before;
@@ -588,6 +599,617 @@ public final class S3AwsSdkTest {
}
}
@Test
public void testHttpClient() throws Exception {
String blobName = "blob-name";
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
client.putObject(containerName, blobName, BYTE_SOURCE.openStream(),
metadata);
if (Quirks.NO_BLOB_ACCESS_CONTROL.contains(blobStoreType)) {
client.setBucketAcl(containerName,
CannedAccessControlList.PublicRead);
} else {
client.setObjectAcl(containerName, blobName,
CannedAccessControlList.PublicRead);
}
HttpClient httpClient = context.utils().http();
URI uri = new URI(s3Endpoint.getScheme(), s3Endpoint.getUserInfo(),
s3Endpoint.getHost(), s3Proxy.getSecurePort(),
"/" + containerName + "/" + blobName,
/*query=*/ null, /*fragment=*/ null);
try (InputStream actual = httpClient.get(uri);
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
}
@Test
public void testListBuckets() throws Exception {
ImmutableList.Builder<String> builder = ImmutableList.builder();
for (Bucket bucket : client.listBuckets()) {
builder.add(bucket.getName());
}
assertThat(builder.build()).contains(containerName);
}
@Test
public void testContainerExists() throws Exception {
client.headBucket(new HeadBucketRequest(containerName));
try {
client.headBucket(new HeadBucketRequest(
createRandomContainerName()));
Fail.failBecauseExceptionWasNotThrown(AmazonS3Exception.class);
} catch (AmazonS3Exception e) {
assertThat(e.getErrorCode()).isEqualTo("404 Not Found");
}
}
@Test
public void testContainerCreateDelete() throws Exception {
String containerName2 = createRandomContainerName();
client.createBucket(containerName2);
try {
// creating a bucket that already exists must fail
client.createBucket(containerName2);
Fail.failBecauseExceptionWasNotThrown(AmazonS3Exception.class);
} catch (AmazonS3Exception e) {
assertThat(e.getErrorCode()).isEqualTo("BucketAlreadyOwnedByYou");
} finally {
// clean up the extra bucket regardless of the outcome
client.deleteBucket(containerName2);
}
}
@Test
public void testContainerDelete() throws Exception {
client.headBucket(new HeadBucketRequest(containerName));
client.deleteBucket(containerName);
try {
client.headBucket(new HeadBucketRequest(containerName));
Fail.failBecauseExceptionWasNotThrown(AmazonS3Exception.class);
} catch (AmazonS3Exception e) {
assertThat(e.getErrorCode()).isEqualTo("404 Not Found");
}
}
private void putBlobAndCheckIt(String blobName) throws Exception {
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
client.putObject(containerName, blobName, BYTE_SOURCE.openStream(),
metadata);
S3Object object = client.getObject(containerName, blobName);
try (InputStream actual = object.getObjectContent();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
}
@Test
public void testBlobPutGet() throws Exception {
putBlobAndCheckIt("blob");
putBlobAndCheckIt("blob%");
putBlobAndCheckIt("blob%%");
}
@Test
public void testBlobEscape() throws Exception {
ObjectListing listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).isEmpty();
putBlobAndCheckIt("blob%");
listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).hasSize(1);
assertThat(listing.getObjectSummaries().iterator().next().getKey())
.isEqualTo("blob%");
}
@Test
public void testBlobList() throws Exception {
ObjectListing listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).isEmpty();
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
ImmutableList.Builder<String> builder = ImmutableList.builder();
client.putObject(containerName, "blob1", BYTE_SOURCE.openStream(),
metadata);
listing = client.listObjects(containerName);
for (S3ObjectSummary summary : listing.getObjectSummaries()) {
builder.add(summary.getKey());
}
assertThat(builder.build()).containsOnly("blob1");
builder = ImmutableList.builder();
client.putObject(containerName, "blob2", BYTE_SOURCE.openStream(),
metadata);
listing = client.listObjects(containerName);
for (S3ObjectSummary summary : listing.getObjectSummaries()) {
builder.add(summary.getKey());
}
assertThat(builder.build()).containsOnly("blob1", "blob2");
}
@Test
public void testBlobListRecursive() throws Exception {
ObjectListing listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).isEmpty();
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
client.putObject(containerName, "prefix/blob1",
BYTE_SOURCE.openStream(), metadata);
client.putObject(containerName, "prefix/blob2",
BYTE_SOURCE.openStream(), metadata);
ImmutableList.Builder<String> builder = ImmutableList.builder();
listing = client.listObjects(new ListObjectsRequest()
.withBucketName(containerName)
.withDelimiter("/"));
assertThat(listing.getObjectSummaries()).isEmpty();
for (String prefix : listing.getCommonPrefixes()) {
builder.add(prefix);
}
assertThat(builder.build()).containsOnly("prefix/");
builder = ImmutableList.builder();
listing = client.listObjects(containerName);
for (S3ObjectSummary summary : listing.getObjectSummaries()) {
builder.add(summary.getKey());
}
assertThat(builder.build()).containsOnly("prefix/blob1",
"prefix/blob2");
assertThat(listing.getCommonPrefixes()).isEmpty();
}
@Test
public void testBlobListRecursiveImplicitMarker() throws Exception {
ObjectListing listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).isEmpty();
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
client.putObject(containerName, "blob1", BYTE_SOURCE.openStream(),
metadata);
client.putObject(containerName, "blob2", BYTE_SOURCE.openStream(),
metadata);
listing = client.listObjects(new ListObjectsRequest()
.withBucketName(containerName)
.withMaxKeys(1));
assertThat(listing.getObjectSummaries()).hasSize(1);
assertThat(listing.getObjectSummaries().iterator().next().getKey())
.isEqualTo("blob1");
listing = client.listObjects(new ListObjectsRequest()
.withBucketName(containerName)
.withMaxKeys(1)
.withMarker("blob1"));
assertThat(listing.getObjectSummaries()).hasSize(1);
assertThat(listing.getObjectSummaries().iterator().next().getKey())
.isEqualTo("blob2");
}
@Test
public void testBlobMetadata() throws Exception {
String blobName = "blob";
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
client.putObject(containerName, blobName, BYTE_SOURCE.openStream(),
metadata);
ObjectMetadata newMetadata = client.getObjectMetadata(containerName,
blobName);
assertThat(newMetadata.getContentLength())
.isEqualTo(BYTE_SOURCE.size());
}
@Test
public void testBlobRemove() throws Exception {
String blobName = "blob";
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
client.putObject(containerName, blobName, BYTE_SOURCE.openStream(),
metadata);
assertThat(client.getObjectMetadata(containerName, blobName))
.isNotNull();
client.deleteObject(containerName, blobName);
try {
client.getObjectMetadata(containerName, blobName);
Fail.failBecauseExceptionWasNotThrown(AmazonS3Exception.class);
} catch (AmazonS3Exception e) {
assertThat(e.getErrorCode()).isEqualTo("404 Not Found");
}
client.deleteObject(containerName, blobName);
}
@Test
public void testSinglepartUpload() throws Exception {
String blobName = "singlepart-upload";
String cacheControl = "max-age=3600";
if (Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
cacheControl = null;
}
String contentDisposition = "attachment; filename=new.jpg";
if (Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
contentDisposition = null;
}
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "fr";
if (Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
contentLanguage = null;
}
String contentType = "audio/mp4";
Map<String, String> userMetadata = ImmutableMap.of(
"key1", "value1",
"key2", "value2");
ObjectMetadata metadata = new ObjectMetadata();
metadata.setCacheControl(cacheControl);
metadata.setContentDisposition(contentDisposition);
metadata.setContentEncoding(contentEncoding);
metadata.setContentLanguage(contentLanguage);
metadata.setContentLength(BYTE_SOURCE.size());
metadata.setContentType(contentType);
// TODO: expires
metadata.setUserMetadata(userMetadata);
client.putObject(containerName, blobName, BYTE_SOURCE.openStream(),
metadata);
S3Object object = client.getObject(containerName, blobName);
try (InputStream actual = object.getObjectContent();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ObjectMetadata newContentMetadata = object.getObjectMetadata();
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(newContentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
if (!Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
}
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(newContentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(newContentMetadata.getUserMetadata()).isEqualTo(
userMetadata);
}
// TODO: fails for GCS (jclouds not implemented)
@Test
public void testMultipartUpload() throws Exception {
String blobName = "multipart-upload";
String cacheControl = "max-age=3600";
if (Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
cacheControl = null;
}
String contentDisposition = "attachment; filename=new.jpg";
if (Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
contentDisposition = null;
}
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "fr";
if (Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
contentLanguage = null;
}
String contentType = "audio/mp4";
Map<String, String> userMetadata = ImmutableMap.of(
"key1", "value1",
"key2", "value2");
ObjectMetadata metadata = new ObjectMetadata();
metadata.setCacheControl(cacheControl);
metadata.setContentDisposition(contentDisposition);
metadata.setContentEncoding(contentEncoding);
metadata.setContentLanguage(contentLanguage);
metadata.setContentType(contentType);
// TODO: expires
metadata.setUserMetadata(userMetadata);
InitiateMultipartUploadResult result = client.initiateMultipartUpload(
new InitiateMultipartUploadRequest(containerName, blobName,
metadata));
ByteSource byteSource = TestUtils.randomByteSource().slice(
0, context.getBlobStore().getMinimumMultipartPartSize() + 1);
ByteSource byteSource1 = byteSource.slice(
0, context.getBlobStore().getMinimumMultipartPartSize());
ByteSource byteSource2 = byteSource.slice(
context.getBlobStore().getMinimumMultipartPartSize(), 1);
UploadPartResult part1 = client.uploadPart(new UploadPartRequest()
.withBucketName(containerName)
.withKey(blobName)
.withUploadId(result.getUploadId())
.withPartNumber(1)
.withPartSize(byteSource1.size())
.withInputStream(byteSource1.openStream()));
UploadPartResult part2 = client.uploadPart(new UploadPartRequest()
.withBucketName(containerName)
.withKey(blobName)
.withUploadId(result.getUploadId())
.withPartNumber(2)
.withPartSize(byteSource2.size())
.withInputStream(byteSource2.openStream()));
client.completeMultipartUpload(new CompleteMultipartUploadRequest(
containerName, blobName, result.getUploadId(),
ImmutableList.of(part1.getPartETag(), part2.getPartETag())));
ObjectListing listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).hasSize(1);
S3Object object = client.getObject(containerName, blobName);
try (InputStream actual = object.getObjectContent();
InputStream expected = byteSource.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ObjectMetadata newContentMetadata = object.getObjectMetadata();
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(newContentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
if (!Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
}
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(newContentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(newContentMetadata.getUserMetadata()).isEqualTo(
userMetadata);
}
// this test runs for several minutes
@Ignore
@Test
public void testMaximumMultipartUpload() throws Exception {
// skip with remote blobstores to avoid excessive run-times
assumeTrue(blobStoreType.equals("filesystem") ||
blobStoreType.equals("transient"));
String blobName = "multipart-upload";
int numParts = 10_000;
ByteSource byteSource = TestUtils.randomByteSource().slice(0, numParts);
InitiateMultipartUploadResult result = client.initiateMultipartUpload(
new InitiateMultipartUploadRequest(containerName, blobName));
ImmutableList.Builder<PartETag> parts = ImmutableList.builder();
for (int i = 0; i < numParts; ++i) {
ByteSource partByteSource = byteSource.slice(i, 1);
UploadPartResult partResult = client.uploadPart(
new UploadPartRequest()
.withBucketName(containerName)
.withKey(blobName)
.withUploadId(result.getUploadId())
.withPartNumber(i + 1)
.withPartSize(partByteSource.size())
.withInputStream(partByteSource.openStream()));
parts.add(partResult.getPartETag());
}
client.completeMultipartUpload(new CompleteMultipartUploadRequest(
containerName, blobName, result.getUploadId(), parts.build()));
ObjectListing listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).hasSize(1);
S3Object object = client.getObject(containerName, blobName);
try (InputStream actual = object.getObjectContent();
InputStream expected = byteSource.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
}
@Test
public void testMultipartUploadAbort() throws Exception {
String blobName = "multipart-upload-abort";
InitiateMultipartUploadResult result = client.initiateMultipartUpload(
new InitiateMultipartUploadRequest(containerName, blobName));
ByteSource byteSource = TestUtils.randomByteSource().slice(
0, context.getBlobStore().getMinimumMultipartPartSize());
client.uploadPart(new UploadPartRequest()
.withBucketName(containerName)
.withKey(blobName)
.withUploadId(result.getUploadId())
.withPartNumber(1)
.withPartSize(byteSource.size())
.withInputStream(byteSource.openStream()));
client.abortMultipartUpload(new AbortMultipartUploadRequest(
containerName, blobName, result.getUploadId()));
ObjectListing listing = client.listObjects(containerName);
assertThat(listing.getObjectSummaries()).isEmpty();
}
@Test
public void testCopyObjectPreserveMetadata() throws Exception {
String fromName = "from-name";
String toName = "to-name";
String cacheControl = "max-age=3600";
String contentDisposition = "attachment; filename=old.jpg";
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "en";
String contentType = "audio/ogg";
Map<String, String> userMetadata = ImmutableMap.of(
"key1", "value1",
"key2", "value2");
ObjectMetadata metadata = new ObjectMetadata();
metadata.setCacheControl(cacheControl);
metadata.setContentLength(BYTE_SOURCE.size());
metadata.setContentDisposition(contentDisposition);
metadata.setContentEncoding(contentEncoding);
metadata.setContentLanguage(contentLanguage);
metadata.setContentType(contentType);
// TODO: expires
metadata.setUserMetadata(userMetadata);
client.putObject(containerName, fromName, BYTE_SOURCE.openStream(),
metadata);
client.copyObject(containerName, fromName, containerName, toName);
S3Object object = client.getObject(containerName, toName);
try (InputStream actual = object.getObjectContent();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ObjectMetadata contentMetadata = object.getObjectMetadata();
assertThat(contentMetadata.getContentLength()).isEqualTo(
BYTE_SOURCE.size());
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(contentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
assertThat(contentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(contentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(contentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(contentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(contentMetadata.getUserMetadata()).isEqualTo(
userMetadata);
}
@Test
public void testCopyObjectReplaceMetadata() throws Exception {
String fromName = "from-name";
String toName = "to-name";
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
metadata.setCacheControl("max-age=3600");
metadata.setContentDisposition("attachment; filename=old.jpg");
metadata.setContentEncoding("compress");
metadata.setContentLanguage("en");
metadata.setContentType("audio/ogg");
// TODO: expires
metadata.setUserMetadata(ImmutableMap.of(
"key1", "value1",
"key2", "value2"));
client.putObject(containerName, fromName, BYTE_SOURCE.openStream(),
metadata);
String cacheControl = "max-age=1800";
String contentDisposition = "attachment; filename=new.jpg";
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "fr";
String contentType = "audio/mp4";
ObjectMetadata contentMetadata = new ObjectMetadata();
contentMetadata.setCacheControl(cacheControl);
contentMetadata.setContentDisposition(contentDisposition);
contentMetadata.setContentEncoding(contentEncoding);
contentMetadata.setContentLanguage(contentLanguage);
contentMetadata.setContentType(contentType);
// TODO: expires
Map<String, String> userMetadata = ImmutableMap.of(
"key3", "value3",
"key4", "value4");
contentMetadata.setUserMetadata(userMetadata);
client.copyObject(new CopyObjectRequest(
containerName, fromName, containerName, toName)
.withNewObjectMetadata(contentMetadata));
S3Object object = client.getObject(containerName, toName);
try (InputStream actual = object.getObjectContent();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ObjectMetadata toContentMetadata = object.getObjectMetadata();
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(toContentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
assertThat(toContentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(toContentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(toContentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(toContentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(toContentMetadata.getUserMetadata()).isEqualTo(
userMetadata);
}
@Test
public void testConditionalGet() throws Exception {
String blobName = "blob-name";
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
PutObjectResult result = client.putObject(containerName, blobName,
BYTE_SOURCE.openStream(), metadata);
S3Object object = client.getObject(
new GetObjectRequest(containerName, blobName)
.withMatchingETagConstraint(result.getETag()));
assertThat(object.getObjectContent()).isNotNull();
object = client.getObject(
new GetObjectRequest(containerName, blobName)
.withNonmatchingETagConstraint(result.getETag()));
assertThat(object).isNull();
}
@Test
public void testUnknownParameter() throws Exception {
try {
client.setBucketLoggingConfiguration(
new SetBucketLoggingConfigurationRequest(
containerName, new BucketLoggingConfiguration()));
Fail.failBecauseExceptionWasNotThrown(AmazonS3Exception.class);
} catch (AmazonS3Exception e) {
assertThat(e.getErrorCode()).isEqualTo("NotImplemented");
}
}
private static final class NullX509TrustManager
implements X509TrustManager {
@Override

S3ProxyTest.java

@@ -1,744 +0,0 @@
/*
* Copyright 2014-2016 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assume.assumeTrue;
import java.io.InputStream;
import java.net.URI;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import javax.servlet.http.HttpServletResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.ByteSource;
import org.assertj.core.api.Fail;
import org.jclouds.Constants;
import org.jclouds.ContextBuilder;
import org.jclouds.aws.AWSResponseException;
import org.jclouds.blobstore.BlobRequestSigner;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobAccess;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.domain.ContainerAccess;
import org.jclouds.blobstore.domain.MultipartPart;
import org.jclouds.blobstore.domain.MultipartUpload;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.CopyOptions;
import org.jclouds.blobstore.options.GetOptions;
import org.jclouds.blobstore.options.ListContainerOptions;
import org.jclouds.blobstore.options.PutOptions;
import org.jclouds.http.HttpRequest;
import org.jclouds.http.HttpResponse;
import org.jclouds.http.HttpResponseException;
import org.jclouds.io.ContentMetadata;
import org.jclouds.io.ContentMetadataBuilder;
import org.jclouds.io.Payload;
import org.jclouds.io.Payloads;
import org.jclouds.rest.HttpClient;
import org.jclouds.s3.S3Client;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
public final class S3ProxyTest {
private static final ByteSource BYTE_SOURCE = ByteSource.wrap(new byte[1]);
private URI s3Endpoint;
private S3Proxy s3Proxy;
private BlobStoreContext context;
private BlobStore blobStore;
private String blobStoreType;
private BlobStoreContext s3Context;
private BlobStore s3BlobStore;
private String containerName;
@Before
public void setUp() throws Exception {
TestUtils.S3ProxyLaunchInfo info = TestUtils.startS3Proxy();
s3Proxy = info.getS3Proxy();
context = info.getBlobStore().getContext();
blobStore = info.getBlobStore();
blobStoreType = context.unwrap().getProviderMetadata().getId();
s3Endpoint = info.getSecureEndpoint();
containerName = createRandomContainerName();
if (blobStoreType.equals("google-cloud-storage")) {
// GCS rate limits create container
Thread.sleep(30 * 1000);
}
blobStore.createContainerInLocation(null, containerName);
Properties s3Properties = new Properties();
s3Properties.setProperty(Constants.PROPERTY_TRUST_ALL_CERTS, "true");
s3Context = ContextBuilder
.newBuilder("s3")
.credentials(info.getS3Identity(), info.getS3Credential())
.endpoint(s3Endpoint.toString())
.overrides(s3Properties)
.build(BlobStoreContext.class);
s3BlobStore = s3Context.getBlobStore();
}
@After
public void tearDown() throws Exception {
if (s3Proxy != null) {
s3Proxy.stop();
}
if (s3Context != null) {
s3Context.close();
}
if (context != null) {
context.getBlobStore().deleteContainer(containerName);
context.close();
}
}
@Test
public void testHttpClient() throws Exception {
String blobName = "blob-name";
Blob blob = blobStore.blobBuilder(blobName)
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
blobStore.putBlob(containerName, blob);
if (Quirks.NO_BLOB_ACCESS_CONTROL.contains(blobStoreType)) {
blobStore.setContainerAccess(containerName,
ContainerAccess.PUBLIC_READ);
} else {
blobStore.setBlobAccess(containerName, blobName,
BlobAccess.PUBLIC_READ);
}
HttpClient httpClient = s3Context.utils().http();
URI uri = new URI(s3Endpoint.getScheme(), s3Endpoint.getUserInfo(),
s3Endpoint.getHost(), s3Proxy.getSecurePort(),
"/" + containerName + "/" + blobName,
/*query=*/ null, /*fragment=*/ null);
try (InputStream actual = httpClient.get(uri);
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
}
@Test
public void testJcloudsClient() throws Exception {
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
for (StorageMetadata metadata : s3BlobStore.list()) {
builder.add(metadata.getName());
}
assertThat(builder.build()).contains(containerName);
}
@Test
public void testContainerExists() throws Exception {
assertThat(s3BlobStore.containerExists(containerName)).isTrue();
assertThat(s3BlobStore.containerExists(createRandomContainerName()))
.isFalse();
}
@Test
public void testContainerCreateDelete() throws Exception {
String containerName2 = createRandomContainerName();
assertThat(s3BlobStore.createContainerInLocation(null,
containerName2)).isTrue();
try {
assertThat(s3BlobStore.createContainerInLocation(null,
containerName2)).isFalse();
} finally {
s3BlobStore.deleteContainer(containerName2);
}
}
@Test
public void testContainerDelete() throws Exception {
assertThat(s3BlobStore.containerExists(containerName)).isTrue();
s3BlobStore.deleteContainerIfEmpty(containerName);
assertThat(s3BlobStore.containerExists(containerName)).isFalse();
}
private void putBlobAndCheckIt(String blobName) throws Exception {
Blob blob = s3BlobStore.blobBuilder(blobName)
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob);
Blob blob2 = s3BlobStore.getBlob(containerName, blobName);
assertThat(blob2.getMetadata().getName()).isEqualTo(blobName);
try (InputStream actual = blob2.getPayload().openStream();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
}
@Test
public void testBlobPutGet() throws Exception {
putBlobAndCheckIt("blob");
putBlobAndCheckIt("blob%");
putBlobAndCheckIt("blob%%");
}
@Test
public void testBlobEscape() throws Exception {
assertThat(s3BlobStore.list(containerName)).isEmpty();
putBlobAndCheckIt("blob%");
PageSet<? extends StorageMetadata> res =
s3BlobStore.list(containerName);
StorageMetadata meta = res.iterator().next();
assertThat(meta.getName()).isEqualTo("blob%");
assertThat(res).hasSize(1);
}
@Test
public void testBlobList() throws Exception {
assertThat(s3BlobStore.list(containerName)).isEmpty();
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
Blob blob1 = s3BlobStore.blobBuilder("blob1")
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob1);
for (StorageMetadata metadata : s3BlobStore.list(containerName)) {
builder.add(metadata.getName());
}
assertThat(builder.build()).containsOnly("blob1");
builder = ImmutableSet.builder();
Blob blob2 = s3BlobStore.blobBuilder("blob2")
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob2);
for (StorageMetadata metadata : s3BlobStore.list(containerName)) {
builder.add(metadata.getName());
}
assertThat(builder.build()).containsOnly("blob1", "blob2");
}
@Test
public void testBlobListRecursive() throws Exception {
assertThat(s3BlobStore.list(containerName)).isEmpty();
Blob blob1 = s3BlobStore.blobBuilder("prefix/blob1")
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob1);
Blob blob2 = s3BlobStore.blobBuilder("prefix/blob2")
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob2);
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
for (StorageMetadata metadata : s3BlobStore.list(containerName)) {
builder.add(metadata.getName());
}
assertThat(builder.build()).containsOnly("prefix/");
builder = ImmutableSet.builder();
for (StorageMetadata metadata : s3BlobStore.list(containerName,
new ListContainerOptions().recursive())) {
builder.add(metadata.getName());
}
assertThat(builder.build()).containsOnly("prefix/blob1",
"prefix/blob2");
}
@Test
public void testBlobListRecursiveImplicitMarker() throws Exception {
assertThat(s3BlobStore.list(containerName)).isEmpty();
Blob blob1 = s3BlobStore.blobBuilder("blob1")
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob1);
Blob blob2 = s3BlobStore.blobBuilder("blob2")
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob2);
PageSet<? extends StorageMetadata> pageSet = s3BlobStore.list(
containerName, new ListContainerOptions().maxResults(1));
String blobName = pageSet.iterator().next().getName();
assertThat(blobName).isEqualTo("blob1");
pageSet = s3BlobStore.list(containerName,
new ListContainerOptions().maxResults(1).afterMarker(blobName));
blobName = pageSet.iterator().next().getName();
assertThat(blobName).isEqualTo("blob2");
}
@Test
public void testBlobMetadata() throws Exception {
String blobName = "blob";
Blob blob1 = s3BlobStore.blobBuilder(blobName)
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob1);
BlobMetadata metadata = s3BlobStore.blobMetadata(containerName,
blobName);
assertThat(metadata.getName()).isEqualTo(blobName);
assertThat(metadata.getContentMetadata().getContentLength())
.isEqualTo(BYTE_SOURCE.size());
assertThat(s3BlobStore.blobMetadata(containerName,
"fake-blob")).isNull();
}
@Test
public void testBlobRemove() throws Exception {
String blobName = "blob";
Blob blob = s3BlobStore.blobBuilder(blobName)
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
s3BlobStore.putBlob(containerName, blob);
assertThat(s3BlobStore.blobExists(containerName, blobName)).isTrue();
s3BlobStore.removeBlob(containerName, blobName);
assertThat(s3BlobStore.blobExists(containerName, blobName)).isFalse();
s3BlobStore.removeBlob(containerName, blobName);
}
// TODO: this test fails since S3BlobRequestSigner does not implement the
// same logic as AWSS3BlobRequestSigner.signForTemporaryAccess.
@Ignore
@Test
public void testUrlSigning() throws Exception {
HttpClient httpClient = s3Context.utils().http();
BlobRequestSigner signer = s3Context.getSigner();
String blobName = "blob";
Blob blob = s3BlobStore.blobBuilder(blobName)
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
HttpRequest putRequest = signer.signPutBlob(containerName, blob, 10);
HttpResponse putResponse = httpClient.invoke(putRequest);
assertThat(putResponse.getStatusCode())
.isEqualTo(HttpServletResponse.SC_OK);
HttpRequest getRequest = signer.signGetBlob(containerName, blobName,
10);
HttpResponse getResponse = httpClient.invoke(getRequest);
assertThat(getResponse.getStatusCode())
.isEqualTo(HttpServletResponse.SC_OK);
}
@Test
public void testSinglepartUpload() throws Exception {
String blobName = "singlepart-upload";
String cacheControl = "max-age=3600";
if (Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
cacheControl = null;
}
String contentDisposition = "attachment; filename=new.jpg";
if (Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
contentDisposition = null;
}
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "fr";
if (Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
contentLanguage = null;
}
String contentType = "audio/mp4";
Map<String, String> userMetadata = ImmutableMap.of(
"key1", "value1",
"key2", "value2");
Blob blob = s3BlobStore.blobBuilder(blobName)
.payload(BYTE_SOURCE)
.cacheControl(cacheControl)
.contentDisposition(contentDisposition)
.contentEncoding(contentEncoding)
.contentLanguage(contentLanguage)
.contentLength(BYTE_SOURCE.size())
.contentType(contentType)
// TODO: expires
.userMetadata(userMetadata)
.build();
s3BlobStore.putBlob(containerName, blob);
Blob newBlob = s3BlobStore.getBlob(containerName, blobName);
try (InputStream actual = newBlob.getPayload().openStream();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ContentMetadata newContentMetadata =
newBlob.getMetadata().getContentMetadata();
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(newContentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
if (!Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
}
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(newContentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(newBlob.getMetadata().getUserMetadata()).isEqualTo(
userMetadata);
}
// TODO: fails for GCS (jclouds not implemented)
@Test
public void testMultipartUpload() throws Exception {
String blobName = "multipart-upload";
String cacheControl = "max-age=3600";
if (Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
cacheControl = null;
}
String contentDisposition = "attachment; filename=new.jpg";
if (Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
contentDisposition = null;
}
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "fr";
if (Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
contentLanguage = null;
}
String contentType = "audio/mp4";
Map<String, String> userMetadata = ImmutableMap.of(
"key1", "value1",
"key2", "value2");
BlobMetadata blobMetadata = s3BlobStore.blobBuilder(blobName)
.payload(new byte[0]) // fake payload to add content metadata
.cacheControl(cacheControl)
.contentDisposition(contentDisposition)
.contentEncoding(contentEncoding)
.contentLanguage(contentLanguage)
.contentType(contentType)
// TODO: expires
.userMetadata(userMetadata)
.build()
.getMetadata();
MultipartUpload mpu = s3BlobStore.initiateMultipartUpload(
containerName, blobMetadata, new PutOptions());
ByteSource byteSource = TestUtils.randomByteSource().slice(
0, s3BlobStore.getMinimumMultipartPartSize() + 1);
ByteSource byteSource1 = byteSource.slice(
0, s3BlobStore.getMinimumMultipartPartSize());
ByteSource byteSource2 = byteSource.slice(
s3BlobStore.getMinimumMultipartPartSize(), 1);
Payload payload1 = Payloads.newByteSourcePayload(byteSource1);
Payload payload2 = Payloads.newByteSourcePayload(byteSource2);
payload1.getContentMetadata().setContentLength(byteSource1.size());
payload2.getContentMetadata().setContentLength(byteSource2.size());
MultipartPart part1 = s3BlobStore.uploadMultipartPart(mpu, 1, payload1);
MultipartPart part2 = s3BlobStore.uploadMultipartPart(mpu, 2, payload2);
s3BlobStore.completeMultipartUpload(mpu, ImmutableList.of(part1,
part2));
assertThat(s3BlobStore.list(containerName)).hasSize(1);
Blob newBlob = s3BlobStore.getBlob(containerName, blobName);
try (InputStream actual = newBlob.getPayload().openStream();
InputStream expected = byteSource.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ContentMetadata newContentMetadata =
newBlob.getMetadata().getContentMetadata();
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(newContentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
if (!Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
}
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(newContentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(newContentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(newBlob.getMetadata().getUserMetadata()).isEqualTo(
userMetadata);
}
// this test runs for several minutes
@Ignore
@Test
public void testMaximumMultipartUpload() throws Exception {
// skip with remote blobstores to avoid excessive run-times
assumeTrue(blobStoreType.equals("filesystem") ||
blobStoreType.equals("transient"));
String blobName = "multipart-upload";
int numParts = 10_000;
ByteSource byteSource = TestUtils.randomByteSource().slice(0, numParts);
BlobMetadata blobMetadata = s3BlobStore.blobBuilder(blobName)
.payload(new byte[0]) // fake payload to add content metadata
.build()
.getMetadata();
MultipartUpload mpu = s3BlobStore.initiateMultipartUpload(
containerName, blobMetadata, new PutOptions());
ImmutableList.Builder<MultipartPart> parts = ImmutableList.builder();
for (int i = 0; i < numParts; ++i) {
ByteSource partByteSource = byteSource.slice(i, 1);
Payload payload = Payloads.newByteSourcePayload(partByteSource);
payload.getContentMetadata().setContentLength(
partByteSource.size());
parts.add(s3BlobStore.uploadMultipartPart(mpu, i + 1, payload));
}
s3BlobStore.completeMultipartUpload(mpu, parts.build());
assertThat(s3BlobStore.list(containerName)).hasSize(1);
Blob newBlob = s3BlobStore.getBlob(containerName, blobName);
try (InputStream actual = newBlob.getPayload().openStream();
InputStream expected = byteSource.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
}
@Test
public void testMultipartUploadAbort() throws Exception {
String blobName = "multipart-upload-abort";
BlobMetadata blobMetadata = s3BlobStore.blobBuilder(blobName)
.payload(new byte[0]) // fake payload to add content metadata
.build()
.getMetadata();
MultipartUpload mpu = s3BlobStore.initiateMultipartUpload(
containerName, blobMetadata, new PutOptions());
ByteSource byteSource = TestUtils.randomByteSource().slice(
0, s3BlobStore.getMinimumMultipartPartSize());
Payload payload = Payloads.newByteSourcePayload(byteSource);
payload.getContentMetadata().setContentLength(byteSource.size());
s3BlobStore.uploadMultipartPart(mpu, 1, payload);
s3BlobStore.abortMultipartUpload(mpu);
assertThat(s3BlobStore.list(containerName)).isEmpty();
}
@Test
public void testCopyObjectPreserveMetadata() throws Exception {
String fromName = "from-name";
String toName = "to-name";
String cacheControl = "max-age=3600";
String contentDisposition = "attachment; filename=old.jpg";
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "en";
String contentType = "audio/ogg";
Map<String, String> userMetadata = ImmutableMap.of(
"key1", "value1",
"key2", "value2");
Blob fromBlob = s3BlobStore.blobBuilder(fromName)
.payload(BYTE_SOURCE)
.cacheControl(cacheControl)
.contentLength(BYTE_SOURCE.size())
.contentDisposition(contentDisposition)
.contentEncoding(contentEncoding)
.contentLanguage(contentLanguage)
.contentType(contentType)
// TODO: expires
.userMetadata(userMetadata)
.build();
s3BlobStore.putBlob(containerName, fromBlob);
s3BlobStore.copyBlob(containerName, fromName, containerName, toName,
CopyOptions.NONE);
Blob toBlob = s3BlobStore.getBlob(containerName, toName);
try (InputStream actual = toBlob.getPayload().openStream();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ContentMetadata contentMetadata =
toBlob.getMetadata().getContentMetadata();
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(contentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
assertThat(contentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(contentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(contentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(contentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(toBlob.getMetadata().getUserMetadata()).isEqualTo(
userMetadata);
}
@Test
public void testCopyObjectReplaceMetadata() throws Exception {
String fromName = "from-name";
String toName = "to-name";
Blob fromBlob = s3BlobStore.blobBuilder(fromName)
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.cacheControl("max-age=3600")
.contentDisposition("attachment; filename=old.jpg")
.contentEncoding("compress")
.contentLanguage("en")
.contentType("audio/ogg")
// TODO: expires
.userMetadata(ImmutableMap.of(
"key1", "value1",
"key2", "value2"))
.build();
s3BlobStore.putBlob(containerName, fromBlob);
String cacheControl = "max-age=1800";
String contentDisposition = "attachment; filename=new.jpg";
String contentEncoding = "gzip";
if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
contentEncoding = null;
}
String contentLanguage = "fr";
String contentType = "audio/mp4";
ContentMetadata contentMetadata = ContentMetadataBuilder.create()
.cacheControl(cacheControl)
.contentDisposition(contentDisposition)
.contentEncoding(contentEncoding)
.contentLanguage(contentLanguage)
.contentType(contentType)
// TODO: expires
.build();
Map<String, String> userMetadata = ImmutableMap.of(
"key3", "value3",
"key4", "value4");
s3BlobStore.copyBlob(containerName, fromName, containerName, toName,
CopyOptions.builder()
.contentMetadata(contentMetadata)
.userMetadata(userMetadata)
.build());
Blob toBlob = s3BlobStore.getBlob(containerName, toName);
try (InputStream actual = toBlob.getPayload().openStream();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ContentMetadata toContentMetadata =
toBlob.getMetadata().getContentMetadata();
if (!Quirks.NO_CACHE_CONTROL_SUPPORT.contains(blobStoreType)) {
assertThat(contentMetadata.getCacheControl()).isEqualTo(
cacheControl);
}
assertThat(toContentMetadata.getContentDisposition()).isEqualTo(
contentDisposition);
if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
assertThat(toContentMetadata.getContentEncoding()).isEqualTo(
contentEncoding);
}
if (!Quirks.NO_CONTENT_LANGUAGE.contains(blobStoreType)) {
assertThat(toContentMetadata.getContentLanguage()).isEqualTo(
contentLanguage);
}
assertThat(toContentMetadata.getContentType()).isEqualTo(
contentType);
// TODO: expires
assertThat(toBlob.getMetadata().getUserMetadata()).isEqualTo(
userMetadata);
}
@Test
public void testConditionalGet() throws Exception {
String blobName = "blob-name";
Blob putBlob = s3BlobStore.blobBuilder(blobName)
.payload(BYTE_SOURCE)
.contentLength(BYTE_SOURCE.size())
.build();
String eTag = s3BlobStore.putBlob(containerName, putBlob);
Blob getBlob = s3BlobStore.getBlob(containerName, blobName,
new GetOptions().ifETagMatches(eTag));
assertThat(getBlob.getPayload()).isNotNull();
try {
s3BlobStore.getBlob(containerName, blobName,
new GetOptions().ifETagDoesntMatch(eTag));
Fail.failBecauseExceptionWasNotThrown(HttpResponseException.class);
} catch (HttpResponseException hre) {
assertThat(hre.getResponse().getStatusCode()).isEqualTo(
HttpServletResponse.SC_NOT_MODIFIED);
}
}
@Test
public void testUnknownParameter() throws Exception {
final S3Client s3Client = s3Context.unwrapApi(S3Client.class);
try {
s3Client.disableBucketLogging(containerName);
Fail.failBecauseExceptionWasNotThrown(AWSResponseException.class);
} catch (AWSResponseException e) {
assertThat(e.getError().getCode()).isEqualTo("NotImplemented");
}
}
private static String createRandomContainerName() {
return "s3proxy-" + new Random().nextInt(Integer.MAX_VALUE);
}
}