diff --git a/README.md b/README.md
index a4411b1..3bbb899 100644
--- a/README.md
+++ b/README.md
@@ -106,6 +106,7 @@ A bucket (or a glob) cannot be assigned to multiple backends.
 S3Proxy can modify its behavior based on middlewares:
 
 * [bucket aliasing](https://github.com/gaul/s3proxy/wiki/Middleware-alias-blobstore)
+* [bucket prefix scoping](docs/PrefixBlobStore.md)
 * [bucket locator](https://github.com/gaul/s3proxy/wiki/Middleware-bucket-locator)
 * [eventual consistency modeling](https://github.com/gaul/s3proxy/wiki/Middleware---eventual-consistency)
 * [large object mocking](https://github.com/gaul/s3proxy/wiki/Middleware-large-object-mocking)
diff --git a/docs/PrefixBlobStore.md b/docs/PrefixBlobStore.md
new file mode 100644
index 0000000..e722945
--- /dev/null
+++ b/docs/PrefixBlobStore.md
@@ -0,0 +1,29 @@
+## Bucket Prefix Middleware
+
+Use the prefix middleware when you want a single S3 bucket exposed by S3Proxy
+to map onto a fixed prefix inside a backend bucket. This is useful when an
+upstream consumer cannot specify object prefixes but your storage layout
+requires them.
+
+Enable the middleware by adding one property per bucket that should be scoped
+to a prefix:
+
+```
+s3proxy.prefix-blobstore.<bucket name>=<prefix>
+```
+
+For example, to expose `scoped-data/` objects from your backend storage as if
+they were located at the top of `customer-bucket`:
+
+```
+s3proxy.prefix-blobstore.customer-bucket=scoped-data/
+```
+
+With this configuration all reads, writes, listings, and multipart uploads
+issued to the `customer-bucket` bucket transparently operate under the
+`scoped-data/` prefix on the backend. Objects stored outside the configured
+prefix remain untouched, and deleting the virtual bucket contents only affects
+objects within the scoped prefix.
+
+Multiple buckets can be configured, and each bucket may define at most one
+prefix.
diff --git a/src/main/java/org/gaul/s3proxy/Main.java b/src/main/java/org/gaul/s3proxy/Main.java
index 925b3fb..8d27d0d 100644
--- a/src/main/java/org/gaul/s3proxy/Main.java
+++ b/src/main/java/org/gaul/s3proxy/Main.java
@@ -255,6 +255,13 @@ public final class Main {
             blobStore = AliasBlobStore.newAliasBlobStore(blobStore, aliases);
         }
 
+        Map<String, String> prefixMap = PrefixBlobStore.parsePrefixes(properties);
+        if (!prefixMap.isEmpty()) {
+            System.err.println("Using prefix backend");
+            blobStore = PrefixBlobStore.newPrefixBlobStore(blobStore,
+                    prefixMap);
+        }
+
         List<Map.Entry<Pattern, String>> regexs =
                 RegexBlobStore.parseRegexs(properties);
         if (!regexs.isEmpty()) {
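To show how the new property fits into a complete configuration, here is a
minimal sketch of an s3proxy.conf using a filesystem backend; the endpoint,
basedir, bucket, and prefix values are illustrative and not part of this patch:

```
# illustrative s3proxy.conf: anonymous proxy over a local filesystem backend
s3proxy.endpoint=http://127.0.0.1:8080
s3proxy.authorization=none
jclouds.provider=filesystem
jclouds.filesystem.basedir=/tmp/blobstore
# scope every request for customer-bucket to the scoped-data/ prefix
s3proxy.prefix-blobstore.customer-bucket=scoped-data/
```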
diff --git a/src/main/java/org/gaul/s3proxy/PrefixBlobStore.java b/src/main/java/org/gaul/s3proxy/PrefixBlobStore.java
new file mode 100644
index 0000000..df217d8
--- /dev/null
+++ b/src/main/java/org/gaul/s3proxy/PrefixBlobStore.java
@@ -0,0 +1,407 @@
+/*
+ * Copyright 2014-2025 Andrew Gaul
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.gaul.s3proxy;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static java.util.Objects.requireNonNull;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+
+import org.jclouds.blobstore.BlobStore;
+import org.jclouds.blobstore.domain.Blob;
+import org.jclouds.blobstore.domain.BlobAccess;
+import org.jclouds.blobstore.domain.BlobMetadata;
+import org.jclouds.blobstore.domain.MultipartPart;
+import org.jclouds.blobstore.domain.MultipartUpload;
+import org.jclouds.blobstore.domain.PageSet;
+import org.jclouds.blobstore.domain.StorageMetadata;
+import org.jclouds.blobstore.domain.internal.MutableBlobMetadataImpl;
+import org.jclouds.blobstore.domain.internal.MutableStorageMetadataImpl;
+import org.jclouds.blobstore.domain.internal.PageSetImpl;
+import org.jclouds.blobstore.options.CopyOptions;
+import org.jclouds.blobstore.options.GetOptions;
+import org.jclouds.blobstore.options.ListContainerOptions;
+import org.jclouds.blobstore.options.PutOptions;
+import org.jclouds.blobstore.util.ForwardingBlobStore;
+import org.jclouds.io.Payload;
+
+/**
+ * Middleware that scopes a virtual bucket to a fixed backend prefix.
+ */
+public final class PrefixBlobStore extends ForwardingBlobStore {
+    private final Map<String, String> prefixes;
+
+    private PrefixBlobStore(BlobStore delegate, Map<String, String> prefixes) {
+        super(delegate);
+        this.prefixes = ImmutableMap.copyOf(requireNonNull(prefixes));
+    }
+
+    static BlobStore newPrefixBlobStore(BlobStore delegate,
+            Map<String, String> prefixes) {
+        return new PrefixBlobStore(delegate, prefixes);
+    }
+
+    public static Map<String, String> parsePrefixes(Properties properties) {
+        Map<String, String> prefixMap = new HashMap<>();
+        for (String key : properties.stringPropertyNames()) {
+            if (!key.startsWith(S3ProxyConstants.PROPERTY_PREFIX_BLOBSTORE + ".")) {
+                continue;
+            }
+            String bucket = key.substring(
+                    S3ProxyConstants.PROPERTY_PREFIX_BLOBSTORE.length() + 1);
+            String prefix = properties.getProperty(key);
+            checkArgument(!Strings.isNullOrEmpty(bucket),
+                    "Prefix property %s must specify a bucket", key);
+            checkArgument(!Strings.isNullOrEmpty(prefix),
+                    "Prefix for bucket %s must not be empty", bucket);
+            checkArgument(prefixMap.put(bucket, prefix) == null,
+                    "Multiple prefixes configured for bucket %s", bucket);
+        }
+        return ImmutableMap.copyOf(prefixMap);
+    }
+
+    private boolean hasPrefix(String container) {
+        return this.prefixes.containsKey(container);
+    }
+
+    private String getPrefix(String container) {
+        return this.prefixes.get(container);
+    }
+
+    private String addPrefix(String container, String name) {
+        if (!hasPrefix(container) || Strings.isNullOrEmpty(name)) {
+            return name;
+        }
+        String prefix = getPrefix(container);
+        if (name.startsWith(prefix)) {
+            return name;
+        }
+        if (prefix.endsWith("/") && name.startsWith("/")) {
+            return prefix + name.substring(1);
+        }
+        return prefix + name;
+    }
+
+    private String trimPrefix(String container, String name) {
+        if (!hasPrefix(container) || Strings.isNullOrEmpty(name)) {
+            return name;
+        }
+        String prefix = getPrefix(container);
+        if (name.startsWith(prefix)) {
+            return name.substring(prefix.length());
+        }
+        return name;
+    }
+
+    private BlobMetadata trimBlobMetadata(String container,
+            BlobMetadata metadata) {
+        if (metadata == null || !hasPrefix(container)) {
+            return metadata;
+        }
+        var mutable = new MutableBlobMetadataImpl(metadata);
+        mutable.setName(trimPrefix(container, metadata.getName()));
+        return mutable;
+    }
+
+    private Blob trimBlob(String container, Blob blob) {
+        if (blob == null || !hasPrefix(container)) {
+            return blob;
+        }
+        blob.getMetadata().setName(
+                trimPrefix(container, blob.getMetadata().getName()));
+        return blob;
+    }
+
+    private MultipartUpload toDelegateMultipartUpload(MultipartUpload upload) {
+        if (upload == null || !hasPrefix(upload.containerName())) {
+            return upload;
+        }
+        var metadata = upload.blobMetadata() == null ? null :
+                new MutableBlobMetadataImpl(upload.blobMetadata());
+        if (metadata != null) {
+            metadata.setName(
+                    addPrefix(upload.containerName(), metadata.getName()));
+        }
+        return MultipartUpload.create(upload.containerName(),
+                addPrefix(upload.containerName(), upload.blobName()),
+                upload.id(), metadata, upload.putOptions());
+    }
+
+    private MultipartUpload toClientMultipartUpload(MultipartUpload upload) {
+        if (upload == null || !hasPrefix(upload.containerName())) {
+            return upload;
+        }
+        var metadata = upload.blobMetadata() == null ? null :
+                new MutableBlobMetadataImpl(upload.blobMetadata());
+        if (metadata != null) {
+            metadata.setName(
+                    trimPrefix(upload.containerName(), metadata.getName()));
+        }
+        return MultipartUpload.create(upload.containerName(),
+                trimPrefix(upload.containerName(), upload.blobName()),
+                upload.id(), metadata, upload.putOptions());
+    }
+
+    private ListContainerOptions applyPrefix(String container,
+            ListContainerOptions options) {
+        if (!hasPrefix(container)) {
+            return options;
+        }
+        ListContainerOptions effective = options == null ?
+                new ListContainerOptions() : options.clone();
+        String basePrefix = getPrefix(container);
+        String requestedPrefix = effective.getPrefix();
+        String requestedMarker = effective.getMarker();
+        String requestedDir = effective.getDir();
+
+        if (Strings.isNullOrEmpty(requestedPrefix)) {
+            effective.prefix(basePrefix);
+        } else {
+            effective.prefix(addPrefix(container, requestedPrefix));
+        }
+
+        if (!Strings.isNullOrEmpty(requestedMarker)) {
+            effective.afterMarker(addPrefix(container, requestedMarker));
+        }
+
+        if (!Strings.isNullOrEmpty(requestedDir)) {
+            effective.inDirectory(addPrefix(container, requestedDir));
+        }
+
+        return effective;
+    }
+
+    private PageSet<? extends StorageMetadata> trimListing(String container,
+            PageSet<? extends StorageMetadata> listing) {
+        if (!hasPrefix(container)) {
+            return listing;
+        }
+        var builder = ImmutableList.<StorageMetadata>builder();
+        for (StorageMetadata metadata : listing) {
+            if (metadata instanceof BlobMetadata blobMetadata) {
+                var mutable = new MutableBlobMetadataImpl(blobMetadata);
+                mutable.setName(trimPrefix(container, blobMetadata.getName()));
+                builder.add(mutable);
+            } else {
+                var mutable = new MutableStorageMetadataImpl(metadata);
+                mutable.setName(trimPrefix(container, metadata.getName()));
+                builder.add(mutable);
+            }
+        }
+        String nextMarker = listing.getNextMarker();
+        if (nextMarker != null) {
+            nextMarker = trimPrefix(container, nextMarker);
+        }
+        return new PageSetImpl<>(builder.build(), nextMarker);
+    }
+
+    @Override
+    public boolean directoryExists(String container, String directory) {
+        return super.directoryExists(container,
+                addPrefix(container, directory));
+    }
+
+    @Override
+    public void createDirectory(String container, String directory) {
+        super.createDirectory(container, addPrefix(container, directory));
+    }
+
+    @Override
+    public void deleteDirectory(String container, String directory) {
+        super.deleteDirectory(container, addPrefix(container, directory));
+    }
+
+    @Override
+    public boolean blobExists(String container,
+            String name) {
+        return super.blobExists(container, addPrefix(container, name));
+    }
+
+    @Override
+    public BlobMetadata blobMetadata(String container, String name) {
+        return trimBlobMetadata(container,
+                super.blobMetadata(container, addPrefix(container, name)));
+    }
+
+    @Override
+    public Blob getBlob(String containerName, String blobName) {
+        return trimBlob(containerName,
+                super.getBlob(containerName, addPrefix(containerName,
+                        blobName)));
+    }
+
+    @Override
+    public Blob getBlob(String containerName, String blobName,
+            GetOptions getOptions) {
+        return trimBlob(containerName,
+                super.getBlob(containerName, addPrefix(containerName,
+                        blobName), getOptions));
+    }
+
+    @Override
+    public String putBlob(String containerName, Blob blob) {
+        String originalName = blob.getMetadata().getName();
+        blob.getMetadata().setName(addPrefix(containerName, originalName));
+        try {
+            return super.putBlob(containerName, blob);
+        } finally {
+            blob.getMetadata().setName(originalName);
+        }
+    }
+
+    @Override
+    public String putBlob(String containerName, Blob blob,
+            PutOptions options) {
+        String originalName = blob.getMetadata().getName();
+        blob.getMetadata().setName(addPrefix(containerName, originalName));
+        try {
+            return super.putBlob(containerName, blob, options);
+        } finally {
+            blob.getMetadata().setName(originalName);
+        }
+    }
+
+    @Override
+    public void removeBlob(String container, String name) {
+        super.removeBlob(container, addPrefix(container, name));
+    }
+
+    @Override
+    public void removeBlobs(String container, Iterable<String> names) {
+        if (!hasPrefix(container)) {
+            super.removeBlobs(container, names);
+            return;
+        }
+        var builder = ImmutableList.<String>builder();
+        for (String name : names) {
+            builder.add(addPrefix(container, name));
+        }
+        super.removeBlobs(container, builder.build());
+    }
+
+    @Override
+    public BlobAccess getBlobAccess(String container, String name) {
+        return super.getBlobAccess(container, addPrefix(container, name));
+    }
+
+    @Override
+    public void setBlobAccess(String container, String name,
+            BlobAccess access) {
+        super.setBlobAccess(container, addPrefix(container, name), access);
+    }
+
+    @Override
+    public String copyBlob(String fromContainer, String fromName,
+            String toContainer, String toName, CopyOptions options) {
+        return super.copyBlob(fromContainer, addPrefix(fromContainer, fromName),
+                toContainer, addPrefix(toContainer, toName), options);
+    }
+
+    @Override
+    public PageSet<? extends StorageMetadata> list(String container) {
+        if (!hasPrefix(container)) {
+            return super.list(container);
+        }
+        return list(container, new ListContainerOptions());
+    }
+
+    @Override
+    public PageSet<? extends StorageMetadata> list(String container,
+            ListContainerOptions options) {
+        if (!hasPrefix(container)) {
+            return super.list(container, options);
+        }
+        var effective = applyPrefix(container, options);
+        return trimListing(container, super.list(container, effective));
+    }
+
+    @Override
+    public void clearContainer(String container) {
+        if (!hasPrefix(container)) {
+            super.clearContainer(container);
+            return;
+        }
+        var options = new ListContainerOptions()
+                .prefix(getPrefix(container))
+                .recursive();
+        super.clearContainer(container, options);
+    }
+
+    @Override
+    public void clearContainer(String container, ListContainerOptions options) {
+        if (!hasPrefix(container)) {
+            super.clearContainer(container, options);
+            return;
+        }
+        super.clearContainer(container, applyPrefix(container, options));
+    }
+
+    @Override
+    public MultipartUpload initiateMultipartUpload(String container,
+            BlobMetadata blobMetadata, PutOptions options) {
+        var mutable =
+                new MutableBlobMetadataImpl(blobMetadata);
+        mutable.setName(addPrefix(container, blobMetadata.getName()));
+        MultipartUpload upload = super.initiateMultipartUpload(container,
+                mutable, options);
+        return toClientMultipartUpload(upload);
+    }
+
+    @Override
+    public void abortMultipartUpload(MultipartUpload mpu) {
+        super.abortMultipartUpload(toDelegateMultipartUpload(mpu));
+    }
+
+    @Override
+    public String completeMultipartUpload(MultipartUpload mpu,
+            List<MultipartPart> parts) {
+        return super.completeMultipartUpload(
+                toDelegateMultipartUpload(mpu), parts);
+    }
+
+    @Override
+    public MultipartPart uploadMultipartPart(MultipartUpload mpu,
+            int partNumber, Payload payload) {
+        return super.uploadMultipartPart(
+                toDelegateMultipartUpload(mpu), partNumber, payload);
+    }
+
+    @Override
+    public List<MultipartPart> listMultipartUpload(MultipartUpload mpu) {
+        return super.listMultipartUpload(toDelegateMultipartUpload(mpu));
+    }
+
+    @Override
+    public List<MultipartUpload> listMultipartUploads(String container) {
+        List<MultipartUpload> uploads =
+                super.listMultipartUploads(container);
+        if (!hasPrefix(container)) {
+            return uploads;
+        }
+        var builder = ImmutableList.<MultipartUpload>builder();
+        for (MultipartUpload upload : uploads) {
+            builder.add(toClientMultipartUpload(upload));
+        }
+        return builder.build();
+    }
+}
diff --git a/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java b/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java
index 9867b6f..8af6bff 100644
--- a/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java
+++ b/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java
@@ -97,6 +97,9 @@ public final class S3ProxyConstants {
     /** Alias a backend bucket to an alternate name. */
     public static final String PROPERTY_ALIAS_BLOBSTORE =
             "s3proxy.alias-blobstore";
+    /** Scope bucket operations to a specific object prefix. */
+    public static final String PROPERTY_PREFIX_BLOBSTORE =
+            "s3proxy.prefix-blobstore";
     /** Alias a backend bucket to an alternate name. */
     public static final String PROPERTY_REGEX_BLOBSTORE =
             "s3proxy.regex-blobstore";
diff --git a/src/test/java/org/gaul/s3proxy/PrefixBlobStoreTest.java b/src/test/java/org/gaul/s3proxy/PrefixBlobStoreTest.java
new file mode 100644
index 0000000..245fa5a
--- /dev/null
+++ b/src/test/java/org/gaul/s3proxy/PrefixBlobStoreTest.java
@@ -0,0 +1,181 @@
+/*
+ * Copyright 2014-2025 Andrew Gaul
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.gaul.s3proxy;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.io.ByteSource;
+
+import org.assertj.core.api.Assertions;
+import org.jclouds.ContextBuilder;
+import org.jclouds.blobstore.BlobStore;
+import org.jclouds.blobstore.BlobStoreContext;
+import org.jclouds.blobstore.domain.Blob;
+import org.jclouds.blobstore.domain.MultipartPart;
+import org.jclouds.blobstore.domain.MultipartUpload;
+import org.jclouds.blobstore.domain.PageSet;
+import org.jclouds.blobstore.domain.StorageMetadata;
+import org.jclouds.blobstore.options.PutOptions;
+import org.jclouds.io.Payloads;
+import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public final class PrefixBlobStoreTest {
+    private String containerName;
+    private String prefix;
+    private BlobStoreContext context;
+    private BlobStore blobStore;
+    private BlobStore prefixBlobStore;
+
+    @Before
+    public void setUp() {
+        containerName = TestUtils.createRandomContainerName();
+        prefix = "forward-prefix/";
+        context = ContextBuilder
+                .newBuilder("transient")
+                .credentials("identity", "credential")
+                .modules(List.of(new SLF4JLoggingModule()))
+                .build(BlobStoreContext.class);
+        blobStore = context.getBlobStore();
+        blobStore.createContainerInLocation(null, containerName);
+        prefixBlobStore = PrefixBlobStore.newPrefixBlobStore(
+                blobStore, Map.of(containerName, prefix));
+    }
+
+    @After
+    public void tearDown() {
+        if (context != null) {
+            blobStore.clearContainer(containerName);
+            blobStore.deleteContainer(containerName);
+            context.close();
+        }
+    }
+
+    @Test
+    public void testPutAndGetBlob() throws IOException {
+        ByteSource content = TestUtils.randomByteSource().slice(0, 256);
+        Blob blob = prefixBlobStore.blobBuilder("object.txt")
+                .payload(content)
+                .build();
+        prefixBlobStore.putBlob(containerName, blob);
+
+        assertThat(blobStore.blobExists(containerName,
+                prefix + "object.txt")).isTrue();
+
+        Blob stored = prefixBlobStore.getBlob(containerName, "object.txt");
+        assertThat(stored).isNotNull();
+        assertThat(stored.getMetadata().getName()).isEqualTo("object.txt");
+        try (InputStream expected = content.openStream();
+                InputStream actual = stored.getPayload().openStream()) {
+            assertThat(actual).hasSameContentAs(expected);
+        }
+    }
+
+    @Test
+    public void testListTrimsPrefix() throws IOException {
+        ByteSource content = TestUtils.randomByteSource().slice(0, 64);
+        prefixBlobStore.putBlob(containerName, prefixBlobStore.blobBuilder(
+                "file-one.txt").payload(content).build());
+        blobStore.putBlob(containerName, blobStore.blobBuilder(
+                prefix + "file-two.txt").payload(content).build());
+        blobStore.putBlob(containerName, blobStore.blobBuilder(
+                "outside.txt").payload(content).build());
+
+        PageSet<? extends StorageMetadata> listing =
+                prefixBlobStore.list(containerName);
+        List<String> names = ImmutableList.copyOf(listing).stream()
+                .map(StorageMetadata::getName)
+                .collect(ImmutableList.toImmutableList());
+        assertThat(names).containsExactlyInAnyOrder(
+                "file-one.txt", "file-two.txt");
+        assertThat(listing.getNextMarker()).isNull();
+    }
+
+    @Test
+    public void testClearContainerKeepsOtherObjects() {
+        ByteSource content = TestUtils.randomByteSource().slice(0, 32);
+        prefixBlobStore.putBlob(containerName, prefixBlobStore.blobBuilder(
"inside.txt").payload(content).build()); + blobStore.putBlob(containerName, blobStore.blobBuilder( + "outside.txt").payload(content).build()); + + prefixBlobStore.clearContainer(containerName); + + assertThat(blobStore.blobExists(containerName, + prefix + "inside.txt")).isFalse(); + assertThat(blobStore.blobExists(containerName, + "outside.txt")).isTrue(); + } + + @Test + public void testMultipartUploadUsesPrefix() throws IOException { + ByteSource content = TestUtils.randomByteSource().slice(0, 512); + Blob blob = prefixBlobStore.blobBuilder("archive.bin").build(); + MultipartUpload mpu = prefixBlobStore.initiateMultipartUpload( + containerName, blob.getMetadata(), PutOptions.NONE); + assertThat(mpu.containerName()).isEqualTo(containerName); + assertThat(mpu.blobName()).isEqualTo("archive.bin"); + + MultipartPart part = prefixBlobStore.uploadMultipartPart( + mpu, 1, Payloads.newPayload(content)); + prefixBlobStore.completeMultipartUpload(mpu, List.of(part)); + + assertThat(blobStore.blobExists(containerName, + prefix + "archive.bin")).isTrue(); + } + + @Test + public void testListMultipartUploadsTrimsPrefix() { + Blob blob = prefixBlobStore.blobBuilder("pending.bin").build(); + MultipartUpload mpu = prefixBlobStore.initiateMultipartUpload( + containerName, blob.getMetadata(), PutOptions.NONE); + + try { + List uploads = + prefixBlobStore.listMultipartUploads(containerName); + assertThat(uploads).hasSize(1); + assertThat(uploads.get(0).blobName()).isEqualTo("pending.bin"); + } finally { + prefixBlobStore.abortMultipartUpload(mpu); + } + } + + @Test + public void testParseRejectsEmptyPrefix() { + var properties = new Properties(); + properties.setProperty(String.format("%s.bucket", + S3ProxyConstants.PROPERTY_PREFIX_BLOBSTORE), ""); + + try { + PrefixBlobStore.parsePrefixes(properties); + Assertions.failBecauseExceptionWasNotThrown( + IllegalArgumentException.class); + } catch (IllegalArgumentException exc) { + assertThat(exc.getMessage()).isEqualTo( + "Prefix for bucket bucket must not be empty"); + } + } +}