Mirror of https://github.com/gaul/s3proxy
parent 1d42879316
commit 17f8fae37b
pom.xml | 5 +++++

@@ -329,6 +329,11 @@
             <artifactId>filesystem</artifactId>
             <version>${jclouds.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.jclouds.labs</groupId>
+            <artifactId>b2</artifactId>
+            <version>${jclouds.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.apache.jclouds.driver</groupId>
             <artifactId>jclouds-slf4j</artifactId>
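(The b2 provider lives in jclouds-labs at this point, hence the org.apache.jclouds.labs groupId rather than the org.apache.jclouds.provider groupId used by graduated providers.)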
Quirks.java

@@ -24,6 +24,7 @@ final class Quirks {
     /** Blobstores which do not support blob-level access control. */
     static final Set<String> NO_BLOB_ACCESS_CONTROL = ImmutableSet.of(
             "azureblob",
+            "b2",
             "rackspace-cloudfiles-uk",
             "rackspace-cloudfiles-us",
             "openstack-swift"
@@ -32,19 +33,27 @@ final class Quirks {
     /** Blobstores which do not support the Cache-Control header. */
     static final Set<String> NO_CACHE_CONTROL_SUPPORT = ImmutableSet.of(
             "atmos",
+            "b2",
             "google-cloud-storage",
             "rackspace-cloudfiles-uk",
             "rackspace-cloudfiles-us",
             "openstack-swift"
     );
 
+    /** Blobstores which do not support the Content-Disposition header. */
+    static final Set<String> NO_CONTENT_DISPOSITION = ImmutableSet.of(
+            "b2"
+    );
+
     /** Blobstores which do not support the Content-Encoding header. */
     static final Set<String> NO_CONTENT_ENCODING = ImmutableSet.of(
+            "b2",
             "google-cloud-storage"
     );
 
     /** Blobstores which do not support the Content-Language header. */
     static final Set<String> NO_CONTENT_LANGUAGE = ImmutableSet.of(
+            "b2",
             "rackspace-cloudfiles-uk",
             "rackspace-cloudfiles-us",
             "openstack-swift"
@@ -80,12 +89,15 @@ final class Quirks {
     /** Blobstores with opaque ETags. */
     static final Set<String> OPAQUE_ETAG = ImmutableSet.of(
             "azureblob",
+            "b2",
             "google-cloud-storage"
     );
 
     /** Blobstores with opaque markers. */
     static final Set<String> OPAQUE_MARKERS = ImmutableSet.of(
             "azureblob",
+            // S3 marker means one past this token while B2 means this token
+            "b2",
             "google-cloud-storage"
     );
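A concrete illustration of the marker comment above, with hypothetical keys: listing keys a, b, c with marker b yields c under S3's exclusive marker semantics, but b, c under B2's inclusive startFileName semantics. Treating B2 markers as opaque lets the proxy hand the provider's own continuation token back unchanged instead of translating between the two conventions.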
S3ProxyHandler.java

@@ -77,6 +77,7 @@ import com.google.common.hash.HashingInputStream;
 import com.google.common.io.BaseEncoding;
 import com.google.common.io.ByteSource;
 import com.google.common.io.ByteStreams;
+import com.google.common.io.FileBackedOutputStream;
 import com.google.common.net.HostAndPort;
 import com.google.common.net.HttpHeaders;
 import com.google.common.net.PercentEscaper;
@@ -183,6 +184,8 @@ final class S3ProxyHandler extends AbstractHandler {
     );
     private static final PercentEscaper AWS_URL_PARAMETER_ESCAPER =
             new PercentEscaper("-_.~", false);
+    // TODO: configurable fileThreshold
+    private static final int B2_PUT_BLOB_BUFFER_SIZE = 1024 * 1024;
 
     private final boolean anonymousIdentity;
     private final Optional<String> virtualHost;
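(Guava's FileBackedOutputStream keeps up to fileThreshold bytes, 1 MB here, in memory and spills anything larger to a temporary file, so the repeatable payloads built on this constant below remain safe for arbitrarily large uploads.)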
@@ -1120,9 +1123,21 @@ final class S3ProxyHandler extends AbstractHandler {
         if (!blobStore.containerExists(containerName)) {
             throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
         }
 
+        String blobStoreType = getBlobStoreType(blobStore);
+        if (blobStoreType.equals("b2")) {
+            // S3 allows deleting a container with in-progress MPU while B2 does
+            // not. Explicitly cancel uploads for B2.
+            for (MultipartUpload mpu : blobStore.listMultipartUploads(
+                    containerName)) {
+                blobStore.abortMultipartUpload(mpu);
+            }
+        }
+
         if (!blobStore.deleteContainerIfEmpty(containerName)) {
             throw new S3Exception(S3ErrorCode.BUCKET_NOT_EMPTY);
         }
 
         response.setStatus(HttpServletResponse.SC_NO_CONTENT);
     }
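Both calls in the b2 branch, listMultipartUploads and abortMultipartUpload, belong to the portable jclouds BlobStore interface, so the cleanup needs no B2-specific client code even though only B2 requires it.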
@@ -1618,15 +1633,6 @@ final class S3ProxyHandler extends AbstractHandler {
             return;
         }
 
-        BlobBuilder.PayloadBlobBuilder builder = blobStore
-                .blobBuilder(blobName)
-                .payload(is)
-                .contentLength(contentLength);
-        addContentMetdataFromHttpRequest(builder, request);
-        if (contentMD5 != null) {
-            builder = builder.contentMD5(contentMD5);
-        }
-
         PutOptions options = new PutOptions().setBlobAccess(access);
 
         String blobStoreType = getBlobStoreType(blobStore);
@@ -1634,8 +1640,30 @@ final class S3ProxyHandler extends AbstractHandler {
                 contentLength > 64 * 1024 * 1024) {
             options.multipart(true);
         }
 
+        FileBackedOutputStream fbos = null;
         String eTag;
         try {
+            BlobBuilder.PayloadBlobBuilder builder;
+            if (blobStoreType.equals("b2")) {
+                // B2 requires a repeatable payload to calculate the SHA1 hash
+                fbos = new FileBackedOutputStream(B2_PUT_BLOB_BUFFER_SIZE);
+                ByteStreams.copy(is, fbos);
+                fbos.close();
+                builder = blobStore.blobBuilder(blobName)
+                        .payload(fbos.asByteSource());
+            } else {
+                builder = blobStore.blobBuilder(blobName)
+                        .payload(is);
+            }
+
+            builder.contentLength(contentLength);
+
+            addContentMetdataFromHttpRequest(builder, request);
+            if (contentMD5 != null) {
+                builder = builder.contentMD5(contentMD5);
+            }
+
             eTag = blobStore.putBlob(containerName, builder.build(),
                     options);
         } catch (HttpResponseException hre) {
@@ -1661,6 +1689,10 @@ final class S3ProxyHandler extends AbstractHandler {
             } else {
                 throw re;
             }
+        } finally {
+            if (fbos != null) {
+                fbos.reset();
+            }
         }
 
         response.addHeader(HttpHeaders.ETAG, maybeQuoteETag(eTag));
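The buffer-then-replay pattern above reappears in every B2 branch of this commit. A minimal standalone sketch of the idea, assuming only Guava (the class and method names here are illustrative, not part of s3proxy):

    import com.google.common.io.ByteSource;
    import com.google.common.io.ByteStreams;
    import com.google.common.io.FileBackedOutputStream;

    import java.io.IOException;
    import java.io.InputStream;

    final class RepeatablePayloadSketch {
        private RepeatablePayloadSketch() { }

        /**
         * Drain a one-shot stream into a replayable ByteSource.  Data up
         * to fileThreshold bytes stays in memory; larger payloads spill
         * to a temporary file.
         */
        static void consumeTwice(InputStream is, int fileThreshold)
                throws IOException {
            FileBackedOutputStream fbos =
                    new FileBackedOutputStream(fileThreshold);
            try {
                ByteStreams.copy(is, fbos);
                fbos.close();
                ByteSource repeatable = fbos.asByteSource();
                // The source can now be opened any number of times,
                // e.g. once to compute the SHA1 that B2 demands up front
                // and once to stream the actual upload.
                repeatable.openStream().close();
                repeatable.openStream().close();
            } finally {
                // Frees the memory buffer and deletes the temporary
                // backing file, if one was created.
                fbos.reset();
            }
        }
    }

The finally { fbos.reset(); } blocks in the handler serve the same purpose: cleanup happens even when the upload fails.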
@@ -2123,8 +2155,10 @@ final class S3ProxyHandler extends AbstractHandler {
         long contentLength =
                 blobMetadata.getContentMetadata().getContentLength();
 
+        String blobStoreType = getBlobStoreType(blobStore);
+        FileBackedOutputStream fbos = null;
         try (InputStream is = blob.getPayload().openStream()) {
-            if (getBlobStoreType(blobStore).equals("azureblob")) {
+            if (blobStoreType.equals("azureblob")) {
                 // Azure has a maximum part size of 4 MB while S3 has a minimum
                 // part size of 5 MB and a maximum of 5 GB. Split a single S3
                 // part into multiple Azure parts.
@@ -2146,13 +2180,29 @@ final class S3ProxyHandler extends AbstractHandler {
                 eTag = BaseEncoding.base16().lowerCase().encode(
                         his.hash().asBytes());
             } else {
-                Payload payload = Payloads.newInputStreamPayload(is);
+                Payload payload;
+                if (blobStoreType.equals("b2")) {
+                    // B2 requires a repeatable payload to calculate the SHA1
+                    // hash
+                    fbos = new FileBackedOutputStream(B2_PUT_BLOB_BUFFER_SIZE);
+                    ByteStreams.copy(is, fbos);
+                    fbos.close();
+                    payload = Payloads.newByteSourcePayload(
+                            fbos.asByteSource());
+                } else {
+                    payload = Payloads.newInputStreamPayload(is);
+                }
+
                 payload.getContentMetadata().setContentLength(contentLength);
 
                 MultipartPart part = blobStore.uploadMultipartPart(mpu,
                         partNumber, payload);
                 eTag = part.partETag();
             }
+        } finally {
+            if (fbos != null) {
+                fbos.reset();
+            }
         }
 
         try (Writer writer = response.getWriter()) {
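Note the trade-off: for B2 the part's data is first drained into the local buffer and then read back for the upload, so each part makes two passes over the data plus temporary local storage. That is the cost of B2 requiring the SHA1 before the upload starts.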
@@ -2276,14 +2326,34 @@ final class S3ProxyHandler extends AbstractHandler {
                     BaseEncoding.base16().lowerCase().encode(
                             his.hash().asBytes())));
         } else {
-            Payload payload = Payloads.newInputStreamPayload(is);
-            payload.getContentMetadata().setContentLength(contentLength);
-            if (contentMD5 != null) {
-                payload.getContentMetadata().setContentMD5(contentMD5);
+            MultipartPart part;
+            Payload payload;
+            FileBackedOutputStream fbos = null;
+            try {
+                String blobStoreType = getBlobStoreType(blobStore);
+                if (blobStoreType.equals("b2")) {
+                    // B2 requires a repeatable payload to calculate the SHA1
+                    // hash
+                    fbos = new FileBackedOutputStream(B2_PUT_BLOB_BUFFER_SIZE);
+                    ByteStreams.copy(is, fbos);
+                    fbos.close();
+                    payload = Payloads.newByteSourcePayload(
+                            fbos.asByteSource());
+                } else {
+                    payload = Payloads.newInputStreamPayload(is);
+                }
+                payload.getContentMetadata().setContentLength(contentLength);
+                if (contentMD5 != null) {
+                    payload.getContentMetadata().setContentMD5(contentMD5);
+                }
+
+                part = blobStore.uploadMultipartPart(mpu, partNumber, payload);
+            } finally {
+                if (fbos != null) {
+                    fbos.reset();
+                }
             }
 
-            MultipartPart part = blobStore.uploadMultipartPart(mpu,
-                    partNumber, payload);
             if (part.partETag() != null) {
                 response.addHeader(HttpHeaders.ETAG,
                         maybeQuoteETag(part.partETag()));
S3ProxyTest.java

@@ -370,6 +370,9 @@ public final class S3ProxyTest {
             cacheControl = null;
         }
         String contentDisposition = "attachment; filename=new.jpg";
+        if (Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
+            contentDisposition = null;
+        }
         String contentEncoding = "gzip";
         if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
             contentEncoding = null;
@@ -407,8 +410,10 @@ public final class S3ProxyTest {
             assertThat(newContentMetadata.getCacheControl()).isEqualTo(
                     cacheControl);
         }
-        assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
-                contentDisposition);
+        if (!Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
+            assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
+                    contentDisposition);
+        }
         if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
             assertThat(newContentMetadata.getContentEncoding()).isEqualTo(
                     contentEncoding);
@@ -433,6 +438,9 @@ public final class S3ProxyTest {
             cacheControl = null;
         }
         String contentDisposition = "attachment; filename=new.jpg";
+        if (Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
+            contentDisposition = null;
+        }
         String contentEncoding = "gzip";
         if (Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
             contentEncoding = null;
@@ -487,8 +495,10 @@ public final class S3ProxyTest {
             assertThat(newContentMetadata.getCacheControl()).isEqualTo(
                     cacheControl);
         }
-        assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
-                contentDisposition);
+        if (!Quirks.NO_CONTENT_DISPOSITION.contains(blobStoreType)) {
+            assertThat(newContentMetadata.getContentDisposition()).isEqualTo(
+                    contentDisposition);
+        }
         if (!Quirks.NO_CONTENT_ENCODING.contains(blobStoreType)) {
             assertThat(newContentMetadata.getContentEncoding()).isEqualTo(
                     contentEncoding);