Porównaj commity

...

180 Commity

Autor SHA1 Wiadomość Data
dependabot[bot] 4175022ff8 Bump org.sonatype.plugins:nexus-staging-maven-plugin
Bumps org.sonatype.plugins:nexus-staging-maven-plugin from 1.6.13 to 1.7.0.

---
updated-dependencies:
- dependency-name: org.sonatype.plugins:nexus-staging-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-02 08:47:42 +09:00
dependabot[bot] 8c5fb41c6c Bump com.github.spotbugs:spotbugs-annotations from 4.8.3 to 4.8.5
Bumps [com.github.spotbugs:spotbugs-annotations](https://github.com/spotbugs/spotbugs) from 4.8.3 to 4.8.5.
- [Release notes](https://github.com/spotbugs/spotbugs/releases)
- [Changelog](https://github.com/spotbugs/spotbugs/blob/master/CHANGELOG.md)
- [Commits](https://github.com/spotbugs/spotbugs/compare/4.8.3...4.8.5)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-annotations
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-02 08:47:34 +09:00
dependabot[bot] 21d42f3768 Bump com.puppycrawl.tools:checkstyle from 10.16.0 to 10.17.0
Bumps [com.puppycrawl.tools:checkstyle](https://github.com/checkstyle/checkstyle) from 10.16.0 to 10.17.0.
- [Release notes](https://github.com/checkstyle/checkstyle/releases)
- [Commits](https://github.com/checkstyle/checkstyle/compare/checkstyle-10.16.0...checkstyle-10.17.0)

---
updated-dependencies:
- dependency-name: com.puppycrawl.tools:checkstyle
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-02 08:47:19 +09:00
dependabot[bot] 6a0182df7f Bump org.gaul:modernizer-maven-plugin from 2.7.0 to 2.9.0
Bumps [org.gaul:modernizer-maven-plugin](https://github.com/gaul/modernizer-maven-plugin) from 2.7.0 to 2.9.0.
- [Release notes](https://github.com/gaul/modernizer-maven-plugin/releases)
- [Commits](https://github.com/gaul/modernizer-maven-plugin/compare/modernizer-maven-plugin-2.7.0...modernizer-maven-plugin-2.9.0)

---
updated-dependencies:
- dependency-name: org.gaul:modernizer-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-02 08:47:07 +09:00
Nagy Vilmos 6c2fa6fbcf #637 use full names for base images 2024-05-23 23:20:37 +09:00
dependabot[bot] f5889e4cd9 Bump slf4j.version from 2.0.12 to 2.0.13
Bumps `slf4j.version` from 2.0.12 to 2.0.13.

Updates `org.slf4j:slf4j-api` from 2.0.12 to 2.0.13

Updates `org.slf4j:jcl-over-slf4j` from 2.0.12 to 2.0.13

---
updated-dependencies:
- dependency-name: org.slf4j:slf4j-api
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.slf4j:jcl-over-slf4j
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-05-06 20:08:13 +09:00
dependabot[bot] 40c7b43731 Bump com.puppycrawl.tools:checkstyle from 10.15.0 to 10.16.0
Bumps [com.puppycrawl.tools:checkstyle](https://github.com/checkstyle/checkstyle) from 10.15.0 to 10.16.0.
- [Release notes](https://github.com/checkstyle/checkstyle/releases)
- [Commits](https://github.com/checkstyle/checkstyle/compare/checkstyle-10.15.0...checkstyle-10.16.0)

---
updated-dependencies:
- dependency-name: com.puppycrawl.tools:checkstyle
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-05-06 20:07:53 +09:00
dependabot[bot] f2ba17586c Bump ch.qos.logback:logback-classic from 1.5.1 to 1.5.6
Bumps [ch.qos.logback:logback-classic](https://github.com/qos-ch/logback) from 1.5.1 to 1.5.6.
- [Commits](https://github.com/qos-ch/logback/compare/v_1.5.1...v_1.5.6)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-05-06 20:07:36 +09:00
Timur Alperovich 468cffaa67
Update README.md 2024-05-02 14:04:55 -07:00
dependabot[bot] abc6d0295d Bump com.fasterxml.jackson.dataformat:jackson-dataformat-xml
Bumps [com.fasterxml.jackson.dataformat:jackson-dataformat-xml](https://github.com/FasterXML/jackson-dataformat-xml) from 2.16.1 to 2.17.0.
- [Commits](https://github.com/FasterXML/jackson-dataformat-xml/compare/jackson-dataformat-xml-2.16.1...jackson-dataformat-xml-2.17.0)

---
updated-dependencies:
- dependency-name: com.fasterxml.jackson.dataformat:jackson-dataformat-xml
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-04-02 08:18:39 +09:00
dependabot[bot] 45843ab330 Bump com.puppycrawl.tools:checkstyle from 10.3.1 to 10.15.0
Bumps [com.puppycrawl.tools:checkstyle](https://github.com/checkstyle/checkstyle) from 10.3.1 to 10.15.0.
- [Release notes](https://github.com/checkstyle/checkstyle/releases)
- [Commits](https://github.com/checkstyle/checkstyle/compare/checkstyle-10.3.1...checkstyle-10.15.0)

---
updated-dependencies:
- dependency-name: com.puppycrawl.tools:checkstyle
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-04-02 08:18:25 +09:00
dependabot[bot] d1ba64077c Bump args4j:args4j from 2.33 to 2.37
Bumps [args4j:args4j](https://github.com/kohsuke/args4j) from 2.33 to 2.37.
- [Changelog](https://github.com/kohsuke/args4j/blob/master/xdocs/changes.xml)
- [Commits](https://github.com/kohsuke/args4j/compare/args4j-site-2.33...args4j-site-2.37)

---
updated-dependencies:
- dependency-name: args4j:args4j
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-04-02 08:18:18 +09:00
sullis 67f913d6f6 maven surefire 3.2.5 2024-04-01 09:31:09 +09:00
sullis 823dcc4e1c setup-java v4 2024-03-31 13:15:35 +09:00
Andrew Gaul 6943bf8f21 Bump to 2.3.0-SNAPSHOT 2024-03-10 16:50:38 +09:00
Andrew Gaul c667eced02 S3Proxy 2.2.0 release
Fixes #614.
2024-03-10 16:41:11 +09:00
Andrew Gaul a78c0df4f7 Skip failing test due to jclouds 2.6.0 upgrade 2024-03-10 15:31:25 +09:00
Andrew Gaul 00bb83b74f Upgrade to jclouds 2.6.0
Fixes #473.  References #614.
2024-03-10 14:43:54 +09:00
Andrew Gaul ec12ae0fe5 Plug leak in handleCompleteMultipartUpload
In jclouds 2.6.0 BlobStore.getBlob returns an InputStream instead of a
ByteSource for the filesystem.  Previously the call site leaked the
former since it only consumed the BlobMetadata.  Instead call
BlobStore.blobMetadata which does not include the payload.
Fixes JCLOUDS-1626.
2024-03-03 13:13:08 +09:00
dependabot[bot] b147909ff3 Bump org.junit.jupiter:junit-jupiter from 5.9.0 to 5.10.2
Bumps [org.junit.jupiter:junit-jupiter](https://github.com/junit-team/junit5) from 5.9.0 to 5.10.2.
- [Release notes](https://github.com/junit-team/junit5/releases)
- [Commits](https://github.com/junit-team/junit5/compare/r5.9.0...r5.10.2)

---
updated-dependencies:
- dependency-name: org.junit.jupiter:junit-jupiter
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-02 09:30:28 +09:00
dependabot[bot] ae7a20f845 Bump com.fasterxml.jackson.dataformat:jackson-dataformat-xml
Bumps [com.fasterxml.jackson.dataformat:jackson-dataformat-xml](https://github.com/FasterXML/jackson-dataformat-xml) from 2.13.3 to 2.16.1.
- [Commits](https://github.com/FasterXML/jackson-dataformat-xml/compare/jackson-dataformat-xml-2.13.3...jackson-dataformat-xml-2.16.1)

---
updated-dependencies:
- dependency-name: com.fasterxml.jackson.dataformat:jackson-dataformat-xml
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-02 09:30:20 +09:00
dependabot[bot] 2bdd10ca88 Bump com.github.spotbugs:spotbugs-maven-plugin from 4.8.3.0 to 4.8.3.1
Bumps [com.github.spotbugs:spotbugs-maven-plugin](https://github.com/spotbugs/spotbugs-maven-plugin) from 4.8.3.0 to 4.8.3.1.
- [Release notes](https://github.com/spotbugs/spotbugs-maven-plugin/releases)
- [Commits](https://github.com/spotbugs/spotbugs-maven-plugin/compare/spotbugs-maven-plugin-4.8.3.0...spotbugs-maven-plugin-4.8.3.1)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-02 09:30:11 +09:00
dependabot[bot] fcda97197a Bump ch.qos.logback:logback-classic from 1.4.14 to 1.5.1
Bumps [ch.qos.logback:logback-classic](https://github.com/qos-ch/logback) from 1.4.14 to 1.5.1.
- [Commits](https://github.com/qos-ch/logback/compare/v_1.4.14...v_1.5.1)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-02 09:30:02 +09:00
dependabot[bot] ede9777a5f Bump slf4j.version from 2.0.11 to 2.0.12
Bumps `slf4j.version` from 2.0.11 to 2.0.12.

Updates `org.slf4j:slf4j-api` from 2.0.11 to 2.0.12

Updates `org.slf4j:jcl-over-slf4j` from 2.0.11 to 2.0.12

---
updated-dependencies:
- dependency-name: org.slf4j:slf4j-api
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.slf4j:jcl-over-slf4j
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-02 09:28:45 +09:00
dependabot[bot] 33e8c55dc8 Bump org.assertj:assertj-core from 3.24.2 to 3.25.3
Bumps [org.assertj:assertj-core](https://github.com/assertj/assertj) from 3.24.2 to 3.25.3.
- [Release notes](https://github.com/assertj/assertj/releases)
- [Commits](https://github.com/assertj/assertj/compare/assertj-build-3.24.2...assertj-build-3.25.3)

---
updated-dependencies:
- dependency-name: org.assertj:assertj-core
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-25 18:04:03 +09:00
dependabot[bot] dcf7624e4c Bump org.testng:testng from 7.5 to 7.5.1
Bumps [org.testng:testng](https://github.com/cbeust/testng) from 7.5 to 7.5.1.
- [Release notes](https://github.com/cbeust/testng/releases)
- [Changelog](https://github.com/testng-team/testng/blob/master/CHANGES.txt)
- [Commits](https://github.com/cbeust/testng/compare/7.5...7.5.1)

---
updated-dependencies:
- dependency-name: org.testng:testng
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-25 18:03:56 +09:00
Andrew Gaul 90d6abc7fb Reapply "Upgrade to jclouds 2.6.0-SNAPSHOT"
This reverts commit e1e1a9e0fd.
References #473.
2024-02-25 17:42:15 +09:00
Andrew Gaul bf879d653e Set Jetty legacy URI compliance
This resolves an issue with "Ambiguous URI empty segment" in s3fs
integration test_time_mountpoint.
2024-02-25 16:04:06 +09:00
Andrew Gaul 8c1fc80e9e Add bucket locator middleware to README
Fixes #521.
2024-02-20 12:22:12 +09:00
dependabot[bot] dd73dff0ac Bump org.skife.maven:really-executable-jar-maven-plugin
Bumps [org.skife.maven:really-executable-jar-maven-plugin](https://github.com/brianm/really-executable-jar-maven-plugin) from 1.5.0 to 2.1.1.
- [Commits](https://github.com/brianm/really-executable-jar-maven-plugin/commits)

---
updated-dependencies:
- dependency-name: org.skife.maven:really-executable-jar-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-07 13:49:26 +09:00
momez 435eb37bf8 Remove tests that expect signature failure on OPTIONS preflight
References #411.
2024-02-07 13:16:53 +09:00
Andrew Gaul b1453bd8f6 Specify inputFile to resolve ambiguity
Required by really-executable-jars 2.1.0 and newer
2024-02-07 13:11:27 +09:00
momez 6bb0250103 Do not perform signature check for OPTIONS verb
Fixes #411.
2024-02-07 13:09:06 +09:00
dependabot[bot] 19897069b4 Bump com.github.spotbugs:spotbugs-maven-plugin from 4.8.2.0 to 4.8.3.0
Bumps [com.github.spotbugs:spotbugs-maven-plugin](https://github.com/spotbugs/spotbugs-maven-plugin) from 4.8.2.0 to 4.8.3.0.
- [Release notes](https://github.com/spotbugs/spotbugs-maven-plugin/releases)
- [Commits](https://github.com/spotbugs/spotbugs-maven-plugin/compare/spotbugs-maven-plugin-4.8.2.0...spotbugs-maven-plugin-4.8.3.0)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-02 15:42:44 +09:00
dependabot[bot] 35c0719383 Bump com.github.spotbugs:spotbugs-annotations from 4.7.1 to 4.8.3
Bumps [com.github.spotbugs:spotbugs-annotations](https://github.com/spotbugs/spotbugs) from 4.7.1 to 4.8.3.
- [Release notes](https://github.com/spotbugs/spotbugs/releases)
- [Changelog](https://github.com/spotbugs/spotbugs/blob/master/CHANGELOG.md)
- [Commits](https://github.com/spotbugs/spotbugs/compare/4.7.1...4.8.3)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-annotations
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-02 15:42:39 +09:00
dependabot[bot] 3ba2dc576b Bump org.eclipse.jetty:jetty-servlet from 11.0.19 to 11.0.20
Bumps org.eclipse.jetty:jetty-servlet from 11.0.19 to 11.0.20.

---
updated-dependencies:
- dependency-name: org.eclipse.jetty:jetty-servlet
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-02 15:42:32 +09:00
Andrew Gaul 10469f40c0 Upgrade Docker JRE to 17
Fixes #568.
2024-01-18 11:24:57 +09:00
Steffen 671a8f539a create armv7 docker images 2024-01-15 07:57:08 +09:00
ali-firat.kilic 8165be6b17 Increase max non chunked request size to 128 MB 2024-01-12 08:19:34 +09:00
Steffen 0af09a557b add missing pipe for images in meta step
new-lines without pipe don't get treated as newlines, see https://yaml-multiline.info/
2024-01-12 08:12:11 +09:00
dependabot[bot] 87d3db457c Bump slf4j.version from 2.0.9 to 2.0.11
Bumps `slf4j.version` from 2.0.9 to 2.0.11.

Updates `org.slf4j:slf4j-api` from 2.0.9 to 2.0.11

Updates `org.slf4j:jcl-over-slf4j` from 2.0.9 to 2.0.11

---
updated-dependencies:
- dependency-name: org.slf4j:slf4j-api
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.slf4j:jcl-over-slf4j
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-01-11 17:13:35 +09:00
Steffen d35eb257b4 use actions cache for docker build 2024-01-11 17:10:37 +09:00
snpz 40bdb6c1f9 fix pushing images to docker.io 2024-01-11 17:10:37 +09:00
dependabot[bot] ea9fb9fd30 Bump com.github.spotbugs:spotbugs-maven-plugin from 4.7.3.0 to 4.8.2.0
Bumps [com.github.spotbugs:spotbugs-maven-plugin](https://github.com/spotbugs/spotbugs-maven-plugin) from 4.7.3.0 to 4.8.2.0.
- [Release notes](https://github.com/spotbugs/spotbugs-maven-plugin/releases)
- [Commits](https://github.com/spotbugs/spotbugs-maven-plugin/compare/spotbugs-maven-plugin-4.7.3.0...spotbugs-maven-plugin-4.8.2.0)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-01-11 17:07:28 +09:00
dependabot[bot] 309e08396d Bump org.eclipse.jetty:jetty-servlet from 11.0.18 to 11.0.19
Bumps [org.eclipse.jetty:jetty-servlet](https://github.com/jetty/jetty.project) from 11.0.18 to 11.0.19.
- [Release notes](https://github.com/jetty/jetty.project/releases)
- [Commits](https://github.com/jetty/jetty.project/compare/jetty-11.0.18...jetty-11.0.19)

---
updated-dependencies:
- dependency-name: org.eclipse.jetty:jetty-servlet
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-01-11 17:07:15 +09:00
dependabot[bot] 7e8462d417 Bump org.gaul:modernizer-maven-plugin from 2.6.0 to 2.7.0
Bumps [org.gaul:modernizer-maven-plugin](https://github.com/gaul/modernizer-maven-plugin) from 2.6.0 to 2.7.0.
- [Release notes](https://github.com/gaul/modernizer-maven-plugin/releases)
- [Commits](https://github.com/gaul/modernizer-maven-plugin/compare/modernizer-maven-plugin-2.6.0...modernizer-maven-plugin-2.7.0)

---
updated-dependencies:
- dependency-name: org.gaul:modernizer-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-01-11 17:07:05 +09:00
Andrew Gaul a7aa9a63ac Re-enable Checkstyle
Also disable UnnecessaryParentheses due to false positive.
2023-12-26 17:40:31 +09:00
Andrew Gaul 4aeff5fb28 Address Checkstyle LineLength violations 2023-12-26 17:38:59 +09:00
Andrew Gaul a732dca4c2 Add jakarta to Checkstyle ImportOrder 2023-12-26 13:51:54 +09:00
Andrew Gaul 1dac9ccd12 Address Checkstyle violations 2023-12-26 13:47:18 +09:00
Andrew Gaul e5fb3619df Remove unneeded UnsupportedEncodingException 2023-12-26 13:29:40 +09:00
dependabot[bot] 4e8b57b227 Bump ch.qos.logback:logback-classic from 1.4.12 to 1.4.14
Bumps [ch.qos.logback:logback-classic](https://github.com/qos-ch/logback) from 1.4.12 to 1.4.14.
- [Commits](https://github.com/qos-ch/logback/compare/v_1.4.12...v_1.4.14)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-12-02 12:30:57 +09:00
dependabot[bot] b571235168 Bump ch.qos.logback:logback-classic from 1.4.11 to 1.4.12
Bumps [ch.qos.logback:logback-classic](https://github.com/qos-ch/logback) from 1.4.11 to 1.4.12.
- [Commits](https://github.com/qos-ch/logback/compare/v_1.4.11...v_1.4.12)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-11-30 10:22:41 +09:00
jixinchi 2a44bcd709
commonPrefixes is only for delimiter request; check size and tier null 2023-11-13 22:38:13 +09:00
dependabot[bot] 608934309b Bump org.eclipse.jetty:jetty-servlet from 11.0.17 to 11.0.18
Bumps [org.eclipse.jetty:jetty-servlet](https://github.com/eclipse/jetty.project) from 11.0.17 to 11.0.18.
- [Release notes](https://github.com/eclipse/jetty.project/releases)
- [Commits](https://github.com/eclipse/jetty.project/compare/jetty-11.0.17...jetty-11.0.18)

---
updated-dependencies:
- dependency-name: org.eclipse.jetty:jetty-servlet
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-11-02 07:50:10 +09:00
Lars Hagen 2281c74150 treat null and empty string equally for endpoint properties
This improves the usability of the docker image, as it defaults
to empty strings for both properties. With this fix, the docker image
will work when only one of the endpoints are set.
2023-10-24 20:45:37 +09:00
jixinchi 60f8366d33
Add DELETE and HEAD methods for CORS and CORS headers for all responses 2023-10-18 11:34:50 +09:00
Steven Sheehy e6955afb43 Bump jetty from 11.0.16 to 11.0.17
Signed-off-by: Steven Sheehy <steven.sheehy@swirldslabs.com>
2023-10-17 09:51:51 +09:00
Lars Hagen 521bc14ab6 change base image to eclipse-temurin
the openjdk images are deprecated, and have critical
vulnerabilities
2023-10-12 20:32:52 +09:00
Raul Sampedro 916af55366 add some test for regex midd 2023-10-03 20:53:45 +09:00
Raul Sampedro 06702abf3f add readme reference to middleware 2023-10-03 20:53:45 +09:00
Raul Sampedro 5682b10c71 add regex middleware 2023-10-03 20:53:45 +09:00
dependabot[bot] 25e79454c2 Bump slf4j.version from 2.0.7 to 2.0.9
Bumps `slf4j.version` from 2.0.7 to 2.0.9.

Updates `org.slf4j:slf4j-api` from 2.0.7 to 2.0.9

Updates `org.slf4j:jcl-over-slf4j` from 2.0.7 to 2.0.9

---
updated-dependencies:
- dependency-name: org.slf4j:slf4j-api
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.slf4j:jcl-over-slf4j
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-10-02 08:18:20 +09:00
Shane St Savage d52ceb134b Support S3PROXY_READ_ONLY_BLOBSTORE Docker env var 2023-09-27 22:18:26 +09:00
Andrew Gaul 1c6a1aca9e Bump to 2.2.0-SNAPSHOT 2023-09-26 20:45:46 +09:00
Andrew Gaul 8794d98d17 S3Proxy 2.1.0 release
Fixes #532.
2023-09-26 20:40:20 +09:00
Andrew Gaul e1e1a9e0fd Revert "Upgrade to jclouds 2.6.0-SNAPSHOT"
This reverts commit 222f4796b1.
References #473.
2023-09-26 20:39:28 +09:00
Steve b134e81406
Add Access-Control-Allow-Credentials header
Fixes #415
2023-09-26 20:38:22 +09:00
Lars Hagen ff347352bf
Clear system property for credentials for file paths
jclouds will override the passed in credentials if the system property is present
2023-09-26 20:37:04 +09:00
dependabot[bot] 1b33acacc5 Bump ch.qos.logback:logback-classic from 1.4.8 to 1.4.11
Bumps [ch.qos.logback:logback-classic](https://github.com/qos-ch/logback) from 1.4.8 to 1.4.11.
- [Commits](https://github.com/qos-ch/logback/compare/v_1.4.8...v_1.4.11)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-01 15:52:57 -07:00
dependabot[bot] c9567cbcca Bump org.eclipse.jetty:jetty-servlet from 11.0.15 to 11.0.16
Bumps [org.eclipse.jetty:jetty-servlet](https://github.com/eclipse/jetty.project) from 11.0.15 to 11.0.16.
- [Release notes](https://github.com/eclipse/jetty.project/releases)
- [Commits](https://github.com/eclipse/jetty.project/compare/jetty-11.0.15...jetty-11.0.16)

---
updated-dependencies:
- dependency-name: org.eclipse.jetty:jetty-servlet
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-01 15:52:51 -07:00
Andrew Gaul 4706b1760f Log headers at trace instead of debug 2023-08-28 20:57:47 -07:00
Andrew Gaul 24fb1dd19a Add mvn verify to CI 2023-08-17 23:08:48 +09:00
Lars Hagen a4cf4dcd1f fix issues reported by modernizer 2.6.0 2023-08-17 23:08:38 +09:00
Lars Hagen b6fce1432d make sure all errors are logged at debug level 2023-08-17 17:21:02 +09:00
Lars Hagen 4e0aca47f2 Update to 2.6.0 2023-08-17 17:19:13 +09:00
Lars Hagen 90717230d4 update modernizer plugin to 2.5.0
This allows the project to work with maven 3.9+, as the modernizer
no longer implicitly depends on plexus-utils.
2023-08-17 17:19:13 +09:00
Szymon Draszkiewicz cf4db284a6 fixed marker being saved under wrong key in marker cache 2023-07-10 21:24:01 +09:00
dependabot[bot] 79e2189184 Bump logback-classic from 1.4.7 to 1.4.8
Bumps [logback-classic](https://github.com/qos-ch/logback) from 1.4.7 to 1.4.8.
- [Commits](https://github.com/qos-ch/logback/compare/v_1.4.7...v_1.4.8)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-02 09:10:53 +09:00
Timur Alperovich e7dc97a1af Cleanup the build workflow 2023-05-19 12:00:11 +09:00
Andrew Gaul 99c1486fd3 Add and ignore x-amz-api-version
Fixes #497.
2023-05-16 21:35:11 +09:00
Andrew Gaul 5996114f8c Update s3-tests
Fixes #479.
2023-05-13 20:52:27 +09:00
dependabot[bot] 26603a5662 Bump logback-classic from 1.4.6 to 1.4.7
Bumps [logback-classic](https://github.com/qos-ch/logback) from 1.4.6 to 1.4.7.
- [Release notes](https://github.com/qos-ch/logback/releases)
- [Commits](https://github.com/qos-ch/logback/compare/v_1.4.6...v_1.4.7)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-05-08 17:14:33 +09:00
dependabot[bot] 6f7613ddba Bump jetty-servlet from 11.0.14 to 11.0.15
Bumps [jetty-servlet](https://github.com/eclipse/jetty.project) from 11.0.14 to 11.0.15.
- [Release notes](https://github.com/eclipse/jetty.project/releases)
- [Commits](https://github.com/eclipse/jetty.project/compare/jetty-11.0.14...jetty-11.0.15)

---
updated-dependencies:
- dependency-name: org.eclipse.jetty:jetty-servlet
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-05-08 17:14:22 +09:00
Andrew Gaul 886eb53f93 Update s3-tests submodule
References #479.
2023-04-22 19:00:09 +09:00
Yann Defretin f84bec3dfb Align SSL/TLS terminology 2023-04-22 16:22:19 +09:00
Yann Defretin 755456e555 Improve TLS Support documentation 2023-04-22 16:22:19 +09:00
Yann Defretin ef94f3984e Expose 443 port for TLS/secure endpoint feature 2023-04-22 16:22:19 +09:00
Yann Defretin d42e42c3f0 Set default non chunked request size value 2023-04-22 16:22:19 +09:00
Yann Defretin bc607d6cf9 Configure Secure Endpoint with environment variable 2023-04-22 16:22:19 +09:00
Yann Defretin b8d285da04 Fix variable typo 2023-04-19 12:42:56 +09:00
Yann Defretin 52d942d3f7 Configure Keystore through environment variables 2023-04-19 12:42:56 +09:00
dependabot[bot] af05534c64 Bump slf4j.version from 2.0.6 to 2.0.7
Bumps `slf4j.version` from 2.0.6 to 2.0.7.

Updates `slf4j-api` from 2.0.6 to 2.0.7
- [Release notes](https://github.com/qos-ch/slf4j/releases)
- [Commits](https://github.com/qos-ch/slf4j/compare/v_2.0.6...v_2.0.7)

Updates `jcl-over-slf4j` from 2.0.6 to 2.0.7
- [Release notes](https://github.com/qos-ch/slf4j/releases)
- [Commits](https://github.com/qos-ch/slf4j/compare/v_2.0.6...v_2.0.7)

---
updated-dependencies:
- dependency-name: org.slf4j:slf4j-api
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.slf4j:jcl-over-slf4j
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-02 09:00:09 +09:00
dependabot[bot] 2d36d099ff Bump surefire.version from 3.0.0-M9 to 3.0.0
Bumps `surefire.version` from 3.0.0-M9 to 3.0.0.

Updates `surefire-junit47` from 3.0.0-M9 to 3.0.0

Updates `surefire-testng` from 3.0.0-M9 to 3.0.0

Updates `surefire-junit-platform` from 3.0.0-M9 to 3.0.0

Updates `maven-surefire-plugin` from 3.0.0-M9 to 3.0.0
- [Release notes](https://github.com/apache/maven-surefire/releases)
- [Commits](https://github.com/apache/maven-surefire/compare/surefire-3.0.0-M9...surefire-3.0.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.surefire:surefire-junit47
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.apache.maven.surefire:surefire-testng
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.apache.maven.surefire:surefire-junit-platform
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.apache.maven.plugins:maven-surefire-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-02 09:00:03 +09:00
dependabot[bot] 3cb90b2be5 Bump maven-javadoc-plugin from 3.4.1 to 3.5.0
Bumps [maven-javadoc-plugin](https://github.com/apache/maven-javadoc-plugin) from 3.4.1 to 3.5.0.
- [Release notes](https://github.com/apache/maven-javadoc-plugin/releases)
- [Commits](https://github.com/apache/maven-javadoc-plugin/compare/maven-javadoc-plugin-3.4.1...maven-javadoc-plugin-3.5.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-javadoc-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-02 08:59:57 +09:00
dependabot[bot] 655cac67f6 Bump maven-install-plugin from 3.1.0 to 3.1.1
Bumps [maven-install-plugin](https://github.com/apache/maven-install-plugin) from 3.1.0 to 3.1.1.
- [Release notes](https://github.com/apache/maven-install-plugin/releases)
- [Commits](https://github.com/apache/maven-install-plugin/compare/maven-install-plugin-3.1.0...maven-install-plugin-3.1.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-install-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-02 08:59:46 +09:00
dependabot[bot] cdbbd44928 Bump logback-classic from 1.4.5 to 1.4.6
Bumps [logback-classic](https://github.com/qos-ch/logback) from 1.4.5 to 1.4.6.
- [Release notes](https://github.com/qos-ch/logback/releases)
- [Commits](https://github.com/qos-ch/logback/compare/v_1.4.5...v_1.4.6)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-02 08:59:39 +09:00
Srujan Deshpande 2f941aa990 Add S3PROXY_JAVA_OPTS environment variable 2023-03-18 03:56:00 +09:00
dependabot[bot] e3857ba6db Bump jetty-servlet from 11.0.13 to 11.0.14
Bumps [jetty-servlet](https://github.com/eclipse/jetty.project) from 11.0.13 to 11.0.14.
- [Release notes](https://github.com/eclipse/jetty.project/releases)
- [Commits](https://github.com/eclipse/jetty.project/compare/jetty-11.0.13...jetty-11.0.14)

---
updated-dependencies:
- dependency-name: org.eclipse.jetty:jetty-servlet
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-02 14:18:26 +09:00
dependabot[bot] 5a1fe3ba38 Bump maven-assembly-plugin from 3.4.2 to 3.5.0
Bumps [maven-assembly-plugin](https://github.com/apache/maven-assembly-plugin) from 3.4.2 to 3.5.0.
- [Release notes](https://github.com/apache/maven-assembly-plugin/releases)
- [Commits](https://github.com/apache/maven-assembly-plugin/compare/maven-assembly-plugin-3.4.2...maven-assembly-plugin-3.5.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-assembly-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-02 14:18:19 +09:00
dependabot[bot] 5bc031dec8 Bump maven-compiler-plugin from 3.10.1 to 3.11.0
Bumps [maven-compiler-plugin](https://github.com/apache/maven-compiler-plugin) from 3.10.1 to 3.11.0.
- [Release notes](https://github.com/apache/maven-compiler-plugin/releases)
- [Commits](https://github.com/apache/maven-compiler-plugin/compare/maven-compiler-plugin-3.10.1...maven-compiler-plugin-3.11.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-compiler-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-02 14:18:11 +09:00
dependabot[bot] df71002052 Bump surefire.version from 3.0.0-M8 to 3.0.0-M9
Bumps `surefire.version` from 3.0.0-M8 to 3.0.0-M9.

Updates `surefire-junit47` from 3.0.0-M8 to 3.0.0-M9

Updates `surefire-testng` from 3.0.0-M8 to 3.0.0-M9

Updates `surefire-junit-platform` from 3.0.0-M8 to 3.0.0-M9

Updates `maven-surefire-plugin` from 3.0.0-M8 to 3.0.0-M9
- [Release notes](https://github.com/apache/maven-surefire/releases)
- [Commits](https://github.com/apache/maven-surefire/compare/surefire-3.0.0-M8...surefire-3.0.0-M9)

---
updated-dependencies:
- dependency-name: org.apache.maven.surefire:surefire-junit47
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.apache.maven.surefire:surefire-testng
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.apache.maven.surefire:surefire-junit-platform
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.apache.maven.plugins:maven-surefire-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-02 14:18:04 +09:00
dependabot[bot] 86c384f2af Bump maven-deploy-plugin from 3.0.0 to 3.1.0
Bumps [maven-deploy-plugin](https://github.com/apache/maven-deploy-plugin) from 3.0.0 to 3.1.0.
- [Release notes](https://github.com/apache/maven-deploy-plugin/releases)
- [Commits](https://github.com/apache/maven-deploy-plugin/compare/maven-deploy-plugin-3.0.0...maven-deploy-plugin-3.1.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-deploy-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-02 14:17:57 +09:00
dependabot[bot] 7624ffad85 Bump commons-fileupload from 1.4 to 1.5
Bumps commons-fileupload from 1.4 to 1.5.

---
updated-dependencies:
- dependency-name: commons-fileupload:commons-fileupload
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-02-23 14:26:19 +09:00
dependabot[bot] 59094ba2a6 Bump maven-enforcer-plugin from 3.1.0 to 3.2.1
Bumps [maven-enforcer-plugin](https://github.com/apache/maven-enforcer) from 3.1.0 to 3.2.1.
- [Release notes](https://github.com/apache/maven-enforcer/releases)
- [Commits](https://github.com/apache/maven-enforcer/compare/enforcer-3.1.0...enforcer-3.2.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-enforcer-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-02-02 16:37:33 +09:00
dependabot[bot] 78f0f67ebc Bump maven-checkstyle-plugin from 3.2.0 to 3.2.1
Bumps [maven-checkstyle-plugin](https://github.com/apache/maven-checkstyle-plugin) from 3.2.0 to 3.2.1.
- [Release notes](https://github.com/apache/maven-checkstyle-plugin/releases)
- [Commits](https://github.com/apache/maven-checkstyle-plugin/compare/maven-checkstyle-plugin-3.2.0...maven-checkstyle-plugin-3.2.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-checkstyle-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-28 23:03:46 +09:00
dependabot[bot] 7ca3b808d7 Bump assertj-core from 3.23.1 to 3.24.2
Bumps assertj-core from 3.23.1 to 3.24.2.

---
updated-dependencies:
- dependency-name: org.assertj:assertj-core
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-28 23:03:36 +09:00
Andrew Gaul 222f4796b1 Upgrade to jclouds 2.6.0-SNAPSHOT
Shuffle some test teardown methods to satisfy newer testng.
References #473.
2023-01-28 22:34:22 +09:00
Andrew Gaul 65725c4c1e Configure Dependabot for monthly notifications 2023-01-14 17:25:06 +09:00
dependabot[bot] 55f779480a Bump maven-install-plugin from 3.0.1 to 3.1.0
Bumps [maven-install-plugin](https://github.com/apache/maven-install-plugin) from 3.0.1 to 3.1.0.
- [Release notes](https://github.com/apache/maven-install-plugin/releases)
- [Commits](https://github.com/apache/maven-install-plugin/compare/maven-install-plugin-3.0.1...maven-install-plugin-3.1.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-install-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-28 08:34:09 +09:00
dependabot[bot] ad6a9947b3 Bump maven-javadoc-plugin from 3.4.0 to 3.4.1
Bumps [maven-javadoc-plugin](https://github.com/apache/maven-javadoc-plugin) from 3.4.0 to 3.4.1.
- [Release notes](https://github.com/apache/maven-javadoc-plugin/releases)
- [Commits](https://github.com/apache/maven-javadoc-plugin/compare/maven-javadoc-plugin-3.4.0...maven-javadoc-plugin-3.4.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-javadoc-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-28 08:34:00 +09:00
dependabot[bot] 47605c9864 Bump logback-classic from 1.2.11 to 1.4.5
Bumps [logback-classic](https://github.com/qos-ch/logback) from 1.2.11 to 1.4.5.
- [Release notes](https://github.com/qos-ch/logback/releases)
- [Commits](https://github.com/qos-ch/logback/compare/v_1.2.11...v_1.4.5)

---
updated-dependencies:
- dependency-name: ch.qos.logback:logback-classic
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-28 08:33:49 +09:00
dependabot[bot] d94c0d81ff Bump slf4j.version from 1.7.36 to 2.0.6
Bumps `slf4j.version` from 1.7.36 to 2.0.6.

Updates `slf4j-api` from 1.7.36 to 2.0.6
- [Release notes](https://github.com/qos-ch/slf4j/releases)
- [Commits](https://github.com/qos-ch/slf4j/compare/v_1.7.36...v_2.0.6)

Updates `jcl-over-slf4j` from 1.7.36 to 2.0.6
- [Release notes](https://github.com/qos-ch/slf4j/releases)
- [Commits](https://github.com/qos-ch/slf4j/compare/v_1.7.36...v_2.0.6)

---
updated-dependencies:
- dependency-name: org.slf4j:slf4j-api
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.slf4j:jcl-over-slf4j
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-27 20:06:22 +09:00
dependabot[bot] 56a63116e8 Bump maven-checkstyle-plugin from 3.1.2 to 3.2.0
Bumps [maven-checkstyle-plugin](https://github.com/apache/maven-checkstyle-plugin) from 3.1.2 to 3.2.0.
- [Release notes](https://github.com/apache/maven-checkstyle-plugin/releases)
- [Commits](https://github.com/apache/maven-checkstyle-plugin/compare/maven-checkstyle-plugin-3.1.2...maven-checkstyle-plugin-3.2.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-checkstyle-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-27 07:41:53 +09:00
dependabot[bot] 69eadc5c5d Bump maven-shade-plugin from 3.3.0 to 3.4.1
Bumps [maven-shade-plugin](https://github.com/apache/maven-shade-plugin) from 3.3.0 to 3.4.1.
- [Release notes](https://github.com/apache/maven-shade-plugin/releases)
- [Commits](https://github.com/apache/maven-shade-plugin/compare/maven-shade-plugin-3.3.0...maven-shade-plugin-3.4.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-shade-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-27 07:41:45 +09:00
dependabot[bot] 9524d032e1 Bump jetty-servlet from 11.0.11 to 11.0.13
Bumps [jetty-servlet](https://github.com/eclipse/jetty.project) from 11.0.11 to 11.0.13.
- [Release notes](https://github.com/eclipse/jetty.project/releases)
- [Commits](https://github.com/eclipse/jetty.project/compare/jetty-11.0.11...jetty-11.0.13)

---
updated-dependencies:
- dependency-name: org.eclipse.jetty:jetty-servlet
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-27 07:41:29 +09:00
Andrew Gaul 302e2050b1 Upgrade to error-prone 2.16
Release notes:

https://github.com/google/error-prone/releases/tag/v2.16
2022-12-26 12:30:30 +09:00
Andrew Gaul b72a6fea9e Remove unneeded AliasBlobStore directory methods
These are not needed and generate deprecation warnings.
2022-12-26 12:24:28 +09:00
Andrew Gaul 81e885422f Suppress spurious md5 deprecation warnings 2022-12-26 12:21:04 +09:00
dependabot[bot] 92c6171bfb Bump maven-assembly-plugin from 3.4.1 to 3.4.2
Bumps [maven-assembly-plugin](https://github.com/apache/maven-assembly-plugin) from 3.4.1 to 3.4.2.
- [Release notes](https://github.com/apache/maven-assembly-plugin/releases)
- [Commits](https://github.com/apache/maven-assembly-plugin/compare/maven-assembly-plugin-3.4.1...maven-assembly-plugin-3.4.2)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-assembly-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-26 12:14:26 +09:00
dependabot[bot] 9408f53cf5 Bump maven-resources-plugin from 3.2.0 to 3.3.0
Bumps [maven-resources-plugin](https://github.com/apache/maven-resources-plugin) from 3.2.0 to 3.3.0.
- [Release notes](https://github.com/apache/maven-resources-plugin/releases)
- [Commits](https://github.com/apache/maven-resources-plugin/compare/maven-resources-plugin-3.2.0...maven-resources-plugin-3.3.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-resources-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-26 12:14:20 +09:00
dependabot[bot] ddd32686f5 Bump maven-jar-plugin from 3.2.2 to 3.3.0
Bumps [maven-jar-plugin](https://github.com/apache/maven-jar-plugin) from 3.2.2 to 3.3.0.
- [Release notes](https://github.com/apache/maven-jar-plugin/releases)
- [Commits](https://github.com/apache/maven-jar-plugin/compare/maven-jar-plugin-3.2.2...maven-jar-plugin-3.3.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-jar-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-12-26 12:14:08 +09:00
Andrew Gaul 68237a1012 Upgrade spotbugs to 4.7.3
Release notes:

https://github.com/spotbugs/spotbugs/blob/master/CHANGELOG.md#473---2022-10-15
2022-12-23 15:03:34 +09:00
dependabot[bot] a856cc7681 Bump junit-jupiter from 5.8.2 to 5.9.0
Bumps [junit-jupiter](https://github.com/junit-team/junit5) from 5.8.2 to 5.9.0.
- [Release notes](https://github.com/junit-team/junit5/releases)
- [Commits](https://github.com/junit-team/junit5/compare/r5.8.2...r5.9.0)

---
updated-dependencies:
- dependency-name: org.junit.jupiter:junit-jupiter
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-09-17 20:55:29 +09:00
Andrew Gaul df57f5453b Upgrade to error-prone 2.15.0
Release notes:

https://github.com/google/error-prone/releases
2022-08-20 19:32:59 +09:00
Andrew Gaul 0245de405c Address error-prone warnings 2022-08-20 19:29:41 +09:00
Andrew Gaul bbbacaa442 Require Java 11
Jetty 11 requires this.  Also address some Modernizer issues.
References #422.
2022-08-08 21:24:58 +09:00
Andrew Gaul d375011388 Upgrade to Jetty 11.0.11
Jetty 9 is EOL.  This requires Java 11 and updating some Java EE
imports.  Fixes #422.
2022-08-08 21:17:40 +09:00
dependabot[bot] 277ef3c9d7 Bump spotbugs-maven-plugin from 4.7.1.0 to 4.7.1.1
Bumps [spotbugs-maven-plugin](https://github.com/spotbugs/spotbugs-maven-plugin) from 4.7.1.0 to 4.7.1.1.
- [Release notes](https://github.com/spotbugs/spotbugs-maven-plugin/releases)
- [Commits](https://github.com/spotbugs/spotbugs-maven-plugin/compare/spotbugs-maven-plugin-4.7.1.0...spotbugs-maven-plugin-4.7.1.1)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-26 14:47:23 +09:00
dependabot[bot] e4666932bb Bump maven-install-plugin from 2.5.2 to 3.0.1
Bumps [maven-install-plugin](https://github.com/apache/maven-install-plugin) from 2.5.2 to 3.0.1.
- [Release notes](https://github.com/apache/maven-install-plugin/releases)
- [Commits](https://github.com/apache/maven-install-plugin/compare/maven-install-plugin-2.5.2...maven-install-plugin-3.0.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-install-plugin
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-26 14:47:13 +09:00
dependabot[bot] e32ca2ca50 Bump maven-deploy-plugin from 2.8.2 to 3.0.0
Bumps [maven-deploy-plugin](https://github.com/apache/maven-deploy-plugin) from 2.8.2 to 3.0.0.
- [Release notes](https://github.com/apache/maven-deploy-plugin/releases)
- [Commits](https://github.com/apache/maven-deploy-plugin/compare/maven-deploy-plugin-2.8.2...maven-deploy-plugin-3.0.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-deploy-plugin
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-21 12:21:17 +09:00
dependabot[bot] 974139e14c Bump maven-jar-plugin from 3.2.0 to 3.2.2
Bumps [maven-jar-plugin](https://github.com/apache/maven-jar-plugin) from 3.2.0 to 3.2.2.
- [Release notes](https://github.com/apache/maven-jar-plugin/releases)
- [Commits](https://github.com/apache/maven-jar-plugin/compare/maven-jar-plugin-3.2.0...maven-jar-plugin-3.2.2)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-jar-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-21 12:20:59 +09:00
dependabot[bot] bbb3bc4aa9 Bump maven-javadoc-plugin from 3.3.1 to 3.4.0
Bumps [maven-javadoc-plugin](https://github.com/apache/maven-javadoc-plugin) from 3.3.1 to 3.4.0.
- [Release notes](https://github.com/apache/maven-javadoc-plugin/releases)
- [Commits](https://github.com/apache/maven-javadoc-plugin/compare/maven-javadoc-plugin-3.3.1...maven-javadoc-plugin-3.4.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-javadoc-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-21 12:20:47 +09:00
Thiago da Silva 00894152cd Allow setting of v4-max-non-chunked-request-size in Docker container 2022-07-20 13:35:50 +09:00
dependabot[bot] d0fec21e93 Bump checkstyle from 9.1 to 10.3.1
Bumps [checkstyle](https://github.com/checkstyle/checkstyle) from 9.1 to 10.3.1.
- [Release notes](https://github.com/checkstyle/checkstyle/releases)
- [Commits](https://github.com/checkstyle/checkstyle/compare/checkstyle-9.1...checkstyle-10.3.1)

---
updated-dependencies:
- dependency-name: com.puppycrawl.tools:checkstyle
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-20 13:22:56 +09:00
dependabot[bot] 3528bf3ae1 Bump maven-compiler-plugin from 3.8.1 to 3.10.1
Bumps [maven-compiler-plugin](https://github.com/apache/maven-compiler-plugin) from 3.8.1 to 3.10.1.
- [Release notes](https://github.com/apache/maven-compiler-plugin/releases)
- [Commits](https://github.com/apache/maven-compiler-plugin/compare/maven-compiler-plugin-3.8.1...maven-compiler-plugin-3.10.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-compiler-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-20 13:21:33 +09:00
dependabot[bot] 9c681e944c Bump spotbugs-annotations from 3.1.12 to 4.7.1
Bumps [spotbugs-annotations](https://github.com/spotbugs/spotbugs) from 3.1.12 to 4.7.1.
- [Release notes](https://github.com/spotbugs/spotbugs/releases)
- [Changelog](https://github.com/spotbugs/spotbugs/blob/master/CHANGELOG.md)
- [Commits](https://github.com/spotbugs/spotbugs/compare/3.1.12...4.7.1)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-annotations
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-20 13:21:02 +09:00
dependabot[bot] d66c669f7c Bump spotbugs-maven-plugin from 4.4.2 to 4.7.1.0
Bumps [spotbugs-maven-plugin](https://github.com/spotbugs/spotbugs-maven-plugin) from 4.4.2 to 4.7.1.0.
- [Release notes](https://github.com/spotbugs/spotbugs-maven-plugin/releases)
- [Commits](https://github.com/spotbugs/spotbugs-maven-plugin/compare/spotbugs-maven-plugin-4.4.2...spotbugs-maven-plugin-4.7.1.0)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-20 13:20:48 +09:00
dependabot[bot] 271b050a52 Bump maven-assembly-plugin from 3.3.0 to 3.4.1
Bumps [maven-assembly-plugin](https://github.com/apache/maven-assembly-plugin) from 3.3.0 to 3.4.1.
- [Release notes](https://github.com/apache/maven-assembly-plugin/releases)
- [Commits](https://github.com/apache/maven-assembly-plugin/compare/maven-assembly-plugin-3.3.0...maven-assembly-plugin-3.4.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-assembly-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-19 20:47:37 +09:00
dependabot[bot] 4fade1f4ed Bump junit-jupiter from 5.8.1 to 5.8.2
Bumps [junit-jupiter](https://github.com/junit-team/junit5) from 5.8.1 to 5.8.2.
- [Release notes](https://github.com/junit-team/junit5/releases)
- [Commits](https://github.com/junit-team/junit5/compare/r5.8.1...r5.8.2)

---
updated-dependencies:
- dependency-name: org.junit.jupiter:junit-jupiter
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-19 20:46:44 +09:00
dependabot[bot] 460a852d66 Bump maven-shade-plugin from 3.2.4 to 3.3.0
Bumps [maven-shade-plugin](https://github.com/apache/maven-shade-plugin) from 3.2.4 to 3.3.0.
- [Release notes](https://github.com/apache/maven-shade-plugin/releases)
- [Commits](https://github.com/apache/maven-shade-plugin/compare/maven-shade-plugin-3.2.4...maven-shade-plugin-3.3.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-shade-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-19 20:46:00 +09:00
dependabot[bot] d177920228 Bump spotbugs from 4.4.0 to 4.7.1
Bumps [spotbugs](https://github.com/spotbugs/spotbugs) from 4.4.0 to 4.7.1.
- [Release notes](https://github.com/spotbugs/spotbugs/releases)
- [Changelog](https://github.com/spotbugs/spotbugs/blob/master/CHANGELOG.md)
- [Commits](https://github.com/spotbugs/spotbugs/compare/4.4.0...4.7.1)

---
updated-dependencies:
- dependency-name: com.github.spotbugs:spotbugs
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-19 20:45:11 +09:00
Andrew Gaul 687973ba78 Rip out useless semantic lint 2022-07-17 23:19:35 +09:00
dependabot[bot] 6fef2aa779 Bump maven-enforcer-plugin from 3.0.0 to 3.1.0
Bumps [maven-enforcer-plugin](https://github.com/apache/maven-enforcer) from 3.0.0 to 3.1.0.
- [Release notes](https://github.com/apache/maven-enforcer/releases)
- [Commits](https://github.com/apache/maven-enforcer/compare/enforcer-3.0.0...enforcer-3.1.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-enforcer-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-17 23:16:10 +09:00
dependabot[bot] ed2a046c09 Bump maven-clean-plugin from 3.1.0 to 3.2.0
Bumps [maven-clean-plugin](https://github.com/apache/maven-clean-plugin) from 3.1.0 to 3.2.0.
- [Release notes](https://github.com/apache/maven-clean-plugin/releases)
- [Commits](https://github.com/apache/maven-clean-plugin/compare/maven-clean-plugin-3.1.0...maven-clean-plugin-3.2.0)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-clean-plugin
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-17 23:15:51 +09:00
dependabot[bot] 7dcf0a5af7 Bump junit from 4.13.1 to 4.13.2
Bumps [junit](https://github.com/junit-team/junit4) from 4.13.1 to 4.13.2.
- [Release notes](https://github.com/junit-team/junit4/releases)
- [Changelog](https://github.com/junit-team/junit4/blob/main/doc/ReleaseNotes4.13.1.md)
- [Commits](https://github.com/junit-team/junit4/compare/r4.13.1...r4.13.2)

---
updated-dependencies:
- dependency-name: junit:junit
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-17 23:15:40 +09:00
dependabot[bot] c38508edf6 Bump nexus-staging-maven-plugin from 1.6.8 to 1.6.13
Bumps nexus-staging-maven-plugin from 1.6.8 to 1.6.13.

---
updated-dependencies:
- dependency-name: org.sonatype.plugins:nexus-staging-maven-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-17 23:15:25 +09:00
Ryan Faircloth d71e1e812a ci: Enhancements to CI and release
fixes #408
fixes #423

This PR makes several changes to the CI:
Adds PR lint to ensure conventional commit syntax is used for all PRs
Uses semantic-release tool to review commit log on branch and generate version numbers
Publishes containers (non pr) to ghcr.io
Publishes release containers to hub.docker.com
Completes common tags (versions, sha-1 and ref) on all branches

Notes Unable to test actually publishing containers to dockerhub however this was taken from a similar working project.
2022-07-17 22:37:01 +09:00
dependabot[bot] ba0fd6dad7 Bump aws-java-sdk-s3 from 1.12.63 to 1.12.261
Bumps [aws-java-sdk-s3](https://github.com/aws/aws-sdk-java) from 1.12.63 to 1.12.261.
- [Release notes](https://github.com/aws/aws-sdk-java/releases)
- [Changelog](https://github.com/aws/aws-sdk-java/blob/master/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-java/compare/1.12.63...1.12.261)

---
updated-dependencies:
- dependency-name: com.amazonaws:aws-java-sdk-s3
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-16 06:43:18 +09:00
Christoph Kreutzer 1d450fa221 Docker entrypoint: authentication for Azure Blob
When using the s3proxy as a proxy for S3 -> Azure Blob Storage, one needs to set the authentication parameters. When running in a container, these can be exposed via environment variables, but are currently not passed through to the jclouds config.
2022-06-21 14:54:19 +09:00
Andrew Gaul 29723040b5 Minio removed its cloud gateway 2022-06-20 22:21:45 +09:00
Adrian Woodhead 7344b0c60e
Upgrade Jackson to 2.13.3
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JAVA-COMFASTERXMLJACKSONCORE-2421244

Co-authored-by: snyk-bot <snyk-bot@snyk.io>
2022-05-26 07:32:19 +09:00
Andrew Gaul 71541ac167 Bump to 2.0.0-SNAPSHOT 2022-04-03 20:05:29 +09:00
Andrew Gaul b5d090d9f8 S3Proxy 2.0.0 release
Fixes #410.
2022-04-03 19:55:32 +09:00
Andrew Gaul 11dc9d3121 Upgrade to Modernizer Maven Plugin 2.4.0
Release notes:

https://github.com/gaul/modernizer-maven-plugin/releases/tag/modernizer-maven-plugin-2.4.0
2022-04-01 21:46:42 +09:00
Andrew Gaul 508a43e110 Upgrade slf4j 1.7.36
Release notes:

https://www.slf4j.org/news.html
2022-04-01 21:45:23 +09:00
Andrew Gaul 058a55d931 Upgrade to Jetty 9.4.45.v20220203
Release notes:

https://www.eclipse.org/lists/jetty-announce/msg00164.html
2022-04-01 21:36:29 +09:00
Andrew Gaul 04e35b1671 Upgrade to jackson-dataformat-xml 2.13.2
Release notes:

https://github.com/FasterXML/jackson-dataformat-xml/blob/2.13/release-notes/VERSION-2.x
2022-04-01 21:34:38 +09:00
Andrew Gaul f1ba56a1a1 Upgrade to logback 1.2.11
Release notes:

https://logback.qos.ch/news.html
2022-04-01 21:33:47 +09:00
xavier.gourmandin1 2e17734dc1 fix: #412 fix NPE when creating S3ProxyExtension or S3ProxyRule with Auth type = NONE 2022-04-01 21:16:09 +09:00
Florin Peter 217308abd7
Support for transparent encryption
Co-authored-by: Florin Peter <Florin-Alexandru.Peter@t-systems.com>
2022-03-26 22:05:08 +09:00
Andrew Gaul f536835aa8 Upgrade to jclouds 2.5.0
Release notes:

https://jclouds.apache.org/releasenotes/2.5.0/

Fixes #396.
2022-03-26 19:52:18 +09:00
Xavier Gourmandin 05a39cf745
Support for Junit 5 Extension mechanism
Fixes #288
2022-03-02 23:36:11 +09:00
Liam 5aec5c132c Support configurable base directory for S3Proxy 2022-02-10 09:55:19 +09:00
Liam 0c3d81feb2 Make S3Proxy compatible with ExAws by checking for parameter presence 2022-02-10 09:54:24 +09:00
Timur Alperovich 11868c22cc Use Java 11 slim image
Fixes #397
2022-02-07 13:43:18 +09:00
Andrew Gaul 2e61c38626 Configure Jetty to use legacy compliance
This prevents changing header value cases which previously caused S3
signature mismatches.  Fixes #392.
References eclipse/jetty.project#7168.
2022-01-22 19:41:04 +09:00
Tamas Cservenak 2839107c6f UT for #392
This UT reproduces issue #392:
attempts a plain simple putObject without
any "trickery", and fails with SignatureDoesNotMatch.

The trick is, to use such Content-Type header that
is cached by Jetty, as it seems all the UTs are using
content types that are not quite common, hence Jetty
cache is not pre-populated with those.
2022-01-22 19:41:04 +09:00
Tamas Cservenak 075ff7f126 Update build
In general update the ancient build plugins
around.

Changes:
* bring all maven plugins to latest versions
* update checkstyle (as pre 8.41 is not supported anymore)
* update checkstyle rules
* make checkstyle plugin not fail the build (as there are issues)
* fix maven-shade-plugin warnings about dupe resource
* fix modernizer issue as it fails the build
* make build reproducible

Added comment about parent POM: the currently used parent is
10 years old, moreover, if you look in Maven Central,
the GA is unmaintained (last release was in 2014). Better introduce
own parent POM, or just copy whatever needed into this POM
(related to staging) and stop using the ancient POM.
2022-01-22 19:07:24 +09:00
Sean C. Sullivan ab6e321964 ci: temurin jdk 2022-01-20 09:18:00 +09:00
Nicklas Ansman Giertz 245fe54aa0 Start publishing arm64
Also switch to the official docker steps in GitHub Actions.
2021-11-16 18:32:20 +09:00
Nicklas Ansman Giertz 5a34d1bc75 Ensure docker login refers to docker.io 2021-11-16 11:08:25 +09:00
Andrew Gaul 28c29bb116 Refer to Docker Hub andrewgaul/s3proxy repo
This differs in organization from the GitHub gaul/s3proxy repo.
References #388.
2021-11-16 09:30:05 +09:00
Andrew Gaul be765854be Set some bash safety options
References #384.
2021-11-16 09:09:06 +09:00
Nicklas Ansman Giertz a2dc43d23e Fix a missing variable when publishing 2021-11-16 08:57:11 +09:00
Nicklas Ansman Giertz 688af7ed00 Replace the travis scripts with GitHub Actions 2021-11-15 22:07:51 +09:00
Andrew Gaul db2cc2a0ff Cache replaceAll regex when normalizing headers 2021-11-01 19:00:28 +09:00
Andrew Gaul cb3845c1ab Reduce allocations in buildCanonicalHeaders 2021-10-31 21:09:48 +09:00
Andrew Gaul b6c2dc9b9c Re-use XmlMapper instances
These can be expensive to create:

https://stackoverflow.com/a/57671444
2021-10-31 19:31:55 +09:00
Andrew Gaul 8e2319de3c Bump to 1.9.1-SNAPSHOT 2021-10-26 20:03:40 +09:00
56 zmienionych plików z 4493 dodań i 469 usunięć

11
.github/dependabot.yml vendored 100644
Wyświetl plik

@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "maven" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "monthly"

135
.github/workflows/ci-main.yml vendored 100644
Wyświetl plik

@@ -0,0 +1,135 @@
name: Main CI
on:
push:
branches:
- "master"
pull_request:
branches:
- "*"
env:
dockerhub_publish: ${{ secrets.DOCKER_PASS != '' }}
jobs:
meta:
runs-on: ubuntu-latest
outputs:
container_tags: ${{ steps.docker_action_meta.outputs.tags }}
container_labels: ${{ steps.docker_action_meta.outputs.labels }}
container_buildtime: ${{ fromJSON(steps.docker_action_meta.outputs.json).labels['org.opencontainers.image.created'] }}
container_version: ${{ fromJSON(steps.docker_action_meta.outputs.json).labels['org.opencontainers.image.version'] }}
container_revision: ${{ fromJSON(steps.docker_action_meta.outputs.json).labels['org.opencontainers.image.revision'] }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
submodules: false
persist-credentials: false
- name: Docker meta
id: docker_action_meta
uses: docker/metadata-action@v4.0.1
with:
images: |
name=ghcr.io/${{ github.repository }}/container
name=andrewgaul/s3proxy,enable=${{ env.dockerhub_publish }}
flavor: |
latest=false
tags: |
type=sha,format=long
type=sha
type=match,pattern=s3proxy-(.*),group=1
type=ref,event=branch
type=ref,event=pr
type=ref,event=tag
labels: |
org.opencontainers.image.licenses=Apache-2.0
runTests:
runs-on: ubuntu-latest
needs: [meta]
steps:
- uses: actions/checkout@v4
with:
submodules: "recursive"
- uses: actions/setup-java@v4
with:
distribution: "temurin"
java-version: "11"
cache: "maven"
- uses: actions/setup-python@v4
with:
python-version: "3.8"
cache: "pip"
#Run tests
- name: Maven Set version
run: |
mvn versions:set -DnewVersion=${{ needs.meta.outputs.version }}
- name: Maven Package
run: |
mvn package verify -DskipTests
- name: Maven Test
run: |
mvn test
- name: Other Test
run: |
./src/test/resources/run-s3-tests.sh
#Store the target
- uses: actions/upload-artifact@v2
with:
name: s3proxy
path: target/s3proxy
- uses: actions/upload-artifact@v2
with:
name: pom
path: pom.xml
Containerize:
runs-on: ubuntu-latest
needs: [runTests, meta]
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v2
with:
name: s3proxy
path: target
- uses: actions/download-artifact@v2
with:
name: pom
path: .
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v3
if: github.event_name != 'pull_request' && env.dockerhub_publish == 'true'
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}
- name: Login to GHCR
uses: docker/login-action@v3
if: github.event_name != 'pull_request'
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ needs.meta.outputs.container_tags }}
labels: ${{ needs.meta.outputs.container_labels }}
build-args: |
BUILDTIME=${{ needs.meta.outputs.container_buildtime }}
VERSION=${{ needs.meta.outputs.container_version }}
REVISION=${{ needs.meta.outputs.container_revision }}
cache-from: type=gha
cache-to: type=gha,mode=max

29
.releaserc 100644
Wyświetl plik

@@ -0,0 +1,29 @@
{
"tagFormat": 's3proxy-${version}',
"branches": [
{
"name": 'master',
prerelease: false
},
{
"name": 'releases\/+([0-9])?(\.\d+)(\.\d+|z|$)',
prerelease: false
},
{
"name": 'next',
prerelease: false
},
{
name: 'next-major',
prerelease: true
},
{
name: 'develop',
prerelease: true
},
{
name: 'develop\/.*',
prerelease: true
}
]
}

Wyświetl plik

@ -1,29 +0,0 @@
dist: bionic
version: ~> 1.0
os: linux
language: java
jdk:
- openjdk11
env:
global:
- secure: "kLL/DJT13SO4WGpqzBTRqzbk4sOg6zeczQXSbC6I2W4hUCrr1hgFZt2bhZAI7AD89Q633Yj0XczOFWAlKIb0h/1jauCHwxmhDKieDI6VibnoauuFlKDT/279768Wg4oVnRM+uSeSd69jrsTnngbCckKAgSqow0XR3BcM6iSITaM=" # DOCKER_USER
- secure: "FykiXU5ZFxIvw28pp0BgLJULRAOs1QSc10Cx2hp8fW1I1U/JUxv1RlawePoyNgpGNCZvLQMZJtaccEZUBzc0ucFnMZD5Cu+powGzdXos0v3W7Fb5YI7lSabIa8Zq6mYYp+toaMJRx3dJcIwAtFiMRzbqM3d9DECKLb/9N2IvLRU=" # DOCKER_PASS
- COMMIT=${TRAVIS_COMMIT::8}
- REPO=andrewgaul/s3proxy
addons:
apt_packages:
- python3.6
- python-pip
- python-virtualenv
- libevent-dev
services:
- docker
script:
- mvn test
# Work around Travis environment failure: https://github.com/travis-ci/travis-ci/issues/7940
- export BOTO_CONFIG=/dev/null
- ./src/test/resources/run-s3-tests.sh
- docker build -f Dockerfile -t $REPO:$COMMIT .
after_success:
- if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then $TRAVIS_BUILD_DIR/publish_img.sh; fi

Wyświetl plik

@ -1,22 +1,11 @@
# Multistage - Builder
FROM maven:3.6.3-jdk-11-slim as s3proxy-builder
LABEL maintainer="Andrew Gaul <andrew@gaul.org>"
WORKDIR /opt/s3proxy
COPY . /opt/s3proxy/
RUN mvn package -DskipTests
# Multistage - Image
FROM openjdk:8-jre-alpine
FROM docker.io/library/eclipse-temurin:17-jre
LABEL maintainer="Andrew Gaul <andrew@gaul.org>"
WORKDIR /opt/s3proxy
COPY \
--from=s3proxy-builder \
/opt/s3proxy/target/s3proxy \
/opt/s3proxy/src/main/resources/run-docker-container.sh \
target/s3proxy \
src/main/resources/run-docker-container.sh \
/opt/s3proxy/
ENV \
@ -26,11 +15,18 @@ ENV \
S3PROXY_IDENTITY="local-identity" \
S3PROXY_CREDENTIAL="local-credential" \
S3PROXY_VIRTUALHOST="" \
S3PROXY_KEYSTORE_PATH="keystore.jks" \
S3PROXY_KEYSTORE_PASSWORD="password" \
S3PROXY_CORS_ALLOW_ALL="false" \
S3PROXY_CORS_ALLOW_ORIGINS="" \
S3PROXY_CORS_ALLOW_METHODS="" \
S3PROXY_CORS_ALLOW_HEADERS="" \
S3PROXY_CORS_ALLOW_CREDENTIAL="" \
S3PROXY_IGNORE_UNKNOWN_HEADERS="false" \
S3PROXY_ENCRYPTED_BLOBSTORE="" \
S3PROXY_ENCRYPTED_BLOBSTORE_PASSWORD="" \
S3PROXY_ENCRYPTED_BLOBSTORE_SALT="" \
S3PROXY_READ_ONLY_BLOBSTORE="false" \
JCLOUDS_PROVIDER="filesystem" \
JCLOUDS_ENDPOINT="" \
JCLOUDS_REGION="" \
@ -39,9 +35,8 @@ ENV \
JCLOUDS_CREDENTIAL="remote-credential" \
JCLOUDS_KEYSTONE_VERSION="" \
JCLOUDS_KEYSTONE_SCOPE="" \
JCLOUDS_KEYSTONE_PROJECT_DOMAIN_NAME=""
EXPOSE 80
VOLUME /data
JCLOUDS_KEYSTONE_PROJECT_DOMAIN_NAME="" \
JCLOUDS_FILESYSTEM_BASEDIR="/data"
EXPOSE 80 443
ENTRYPOINT ["/opt/s3proxy/run-docker-container.sh"]

Wyświetl plik

@ -23,7 +23,8 @@ and has instructions on how to run it.
Users can [download releases](https://github.com/gaul/s3proxy/releases)
from GitHub. Developers can build the project by running `mvn package` which
produces a binary at `target/s3proxy`. S3Proxy requires Java 8 or newer to run.
produces a binary at `target/s3proxy`. S3Proxy requires Java 11 or newer to
run.
Configure S3Proxy via a properties file. An example using the local
file system as the storage backend with anonymous access:
@ -95,17 +96,23 @@ s3proxy.bucket-locator.2=another-bucket
In addition to the explicit names, [glob syntax](https://docs.oracle.com/javase/tutorial/essential/io/fileOps.html#glob) can be used to configure many
buckets for a given backend.
A bucket (or a glob) cannot be assigned cannot be assigned to multiple backends.
A bucket (or a glob) cannot be assigned to multiple backends.
## Middlewares
S3Proxy can modify its behavior based on middlewares:
* [bucket aliasing](https://github.com/gaul/s3proxy/wiki/Middleware-alias-blobstore)
* [bucket locator](https://github.com/gaul/s3proxy/wiki/Middleware-bucket-locator)
* [eventual consistency modeling](https://github.com/gaul/s3proxy/wiki/Middleware---eventual-consistency)
* [large object mocking](https://github.com/gaul/s3proxy/wiki/Middleware-large-object-mocking)
* [read-only](https://github.com/gaul/s3proxy/wiki/Middleware-read-only)
* [sharded backend containers](https://github.com/gaul/s3proxy/wiki/Middleware-sharded-backend)
* [regex rename blobs](https://github.com/gaul/s3proxy/wiki/Middleware-regex)
## SSL Support
S3Proxy can listen on HTTPS by setting the `secure-endpoint` and [configuring a keystore](http://wiki.eclipse.org/Jetty/Howto/Configure_SSL#Generating_Keys_and_Certificates_with_JDK_keytool). You can read more about how configure S3Proxy for SSL Support in [the dedicated wiki page](https://github.com/gaul/s3proxy/wiki/SSL-support) with Docker, Kubernetes or simply Java.
## Limitations
@ -135,10 +142,11 @@ file (and corresponding ENV variables for Docker):
s3proxy.cors-allow-origins=https://example\.com https://.+\.example\.com https://example\.cloud
s3proxy.cors-allow-methods=GET PUT
s3proxy.cors-allow-headers=Accept Content-Type
s3proxy.cors-allow-credential=true
```
CORS cannot be configured per bucket. `s3proxy.cors-allow-all=true` will accept any origin and header.
Actual CORS requests are supported for GET, PUT and POST methods.
Actual CORS requests are supported for GET, PUT, POST, HEAD and DELETE methods.
The wiki collects
[compatibility notes](https://github.com/gaul/s3proxy/wiki/Storage-backend-compatibility)
@ -154,12 +162,12 @@ for specific storage backends.
* [Apache jclouds](https://jclouds.apache.org/) provides storage backend support for S3Proxy
* [Ceph s3-tests](https://github.com/ceph/s3-tests) help maintain and improve compatibility with the S3 API
* [fake-s3](https://github.com/jubos/fake-s3), [gofakes3](https://github.com/johannesboyne/gofakes3), [S3 ninja](https://github.com/scireum/s3ninja), and [s3rver](https://github.com/jamhall/s3rver) provide functionality similar to S3Proxy when using the filesystem backend
* [fake-s3](https://github.com/jubos/fake-s3), [gofakes3](https://github.com/johannesboyne/gofakes3), [minio](https://github.com/minio/minio), [S3 ninja](https://github.com/scireum/s3ninja), and [s3rver](https://github.com/jamhall/s3rver) provide functionality similar to S3Proxy when using the filesystem backend
* [GlacierProxy](https://github.com/bouncestorage/glacier-proxy) and [SwiftProxy](https://github.com/bouncestorage/swiftproxy) provide similar functionality for the Amazon Glacier and OpenStack Swift APIs
* [minio](https://github.com/minio/minio) and [Zenko](https://www.zenko.io/) provide similar multi-cloud functionality
* [s3mock](https://github.com/findify/s3mock) mocks the S3 API for Java/Scala projects
* [sbt-s3](https://github.com/localytics/sbt-s3) runs S3Proxy via the Scala Build Tool
* [swift3](https://github.com/openstack/swift3) provides an S3 middleware for OpenStack Swift
* [Zenko](https://www.zenko.io/) provide similar multi-cloud functionality
## License

76
docs/Encryption.md 100644
Wyświetl plik

@ -0,0 +1,76 @@
S3Proxy
# Encryption
## Motivation
The motivation behind this implementation is to provide a fully transparent and secure encryption to the s3 client while having the ability to write into different clouds.
## Cipher mode
The chosen cipher is ```AES/CFB/NoPadding``` because it provides the ability to read from an offset like in the middle of a ```Blob```.
While reading from an offset the decryption process needs to consider the previous 16 bytes of the AES block.
### Key generation
The encryption uses a 128-bit key that is derived from a given password and salt, in combination with a random initialization vector that is stored in each part padding.
## How a blob is encrypted
Every uploaded part gets a padding of 64 bytes that includes the necessary information for decryption. The input stream from an s3 client is passed through ```CipherInputStream``` and piped to append the 64-byte part padding at the end of the encrypted stream. The encrypted input stream is then processed by the ```BlobStore``` to save the ```Blob```.
| Name | Byte size | Description |
|-----------|-----------|----------------------------------------------------------------|
| Delimiter | 8 byte | The delimiter is used to detect if the ```Blob``` is encrypted |
| IV | 16 byte | AES initialization vector |
| Part | 4 byte | The part number |
| Size | 8 byte | The unencrypted size of the ```Blob``` |
| Version | 2 byte | Version can be used in the future if changes are necessary |
| Reserved | 26 byte | Reserved for future use |
### Multipart handling
A single ```Blob``` can be uploaded by the client into multiple parts. After the completion all parts are concatenated into a single ```Blob```.
This procedure will result in multiple parts and paddings being held by a single ```Blob```.
### Single blob example
```
-------------------------------------
| ENCRYPTED BYTES | PADDING |
-------------------------------------
```
### Multipart blob example
```
-------------------------------------------------------------------------------------
| ENCRYPTED BYTES | PADDING | ENCRYPTED BYTES | PADDING | ENCRYPTED BYTES | PADDING |
-------------------------------------------------------------------------------------
```
## How a blob is decrypted
Decryption is considerably more complex than encryption. The decryption process needs to account for the following circumstances:
- decryption of the entire ```Blob```
- decryption from a specific offset by skipping initial bytes
- decryption of bytes by reading from the end (tail)
- decryption of a specific byte range like middle of the ```Blob```
- decryption of all previous situations by considering an underlying multipart ```Blob```
### Single blob decryption
First the ```BlobMetadata``` is requested to get the encrypted ```Blob``` size. The last 64 bytes of ```PartPadding``` are fetched and inspected to detect if a decryption is necessary.
The cipher is then initialized with the IV and the key.
### Multipart blob decryption
The process is similar to the single ```Blob``` decryption but with the difference that a list of parts is computed by fetching all ```PartPadding``` from end to the beginning.
## Blob suffix
Each stored ```Blob``` will get a suffix named ```.s3enc```; this helps to determine whether a ```Blob``` is encrypted. For the s3 client the ```.s3enc``` suffix is not visible, and the ```Blob``` size will always show the unencrypted size.
## Tested jClouds provider
- S3
- Minio
- OBS from OpenTelekomCloud
- AWS S3
- Azure
- GCP
- Local
## Limitation
- All blobs are encrypted with the same key that is derived from a given password
- No support for re-encryption
- Returned eTag always differs therefore clients should not verify it
- Decryption of a ```Blob``` will always result in multiple calls against the backend for instance a GET will result in a HEAD + GET because the size of the blob needs to be determined

158
pom.xml
Wyświetl plik

@ -1,6 +1,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<!-- TODO: introduce own parent, do not depend on 10 years old unmaintained parent -->
<parent>
<groupId>org.sonatype.oss</groupId>
<artifactId>oss-parent</artifactId>
@ -9,7 +10,7 @@
<groupId>org.gaul</groupId>
<artifactId>s3proxy</artifactId>
<version>1.9.0</version>
<version>2.3.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>S3Proxy</name>
@ -53,7 +54,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.6</version>
<version>3.0.1</version>
<executions>
<execution>
<id>sign-artifacts</id>
@ -71,10 +72,45 @@
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<id>enforce-maven</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<requireMavenVersion>
<version>3.2.5</version>
</requireMavenVersion>
</rules>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-clean-plugin</artifactId>
<version>3.2.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-install-plugin</artifactId>
<version>3.1.1</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>3.1.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.0.0</version>
<version>3.2.1</version>
<executions>
<execution>
<id>check</id>
@ -89,12 +125,25 @@
<headerLocation>src/main/resources/copyright_header.txt</headerLocation>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<violationSeverity>warning</violationSeverity>
<failOnViolation>true</failOnViolation>
</configuration>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>10.17.0</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>3.3.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<version>3.11.0</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
@ -103,27 +152,13 @@
<compilerArgs>
<arg>-Xlint</arg>
<arg>-XDcompilePolicy=simple</arg>
<arg>-Xplugin:ErrorProne
-Xep:DefaultCharset:OFF
-Xep:HidingField:OFF
-Xep:JavaUtilDate:OFF
-Xep:MutableConstantField:OFF
-Xep:ProtectedMembersInFinalClass:OFF
</arg>
</compilerArgs>
<annotationProcessorPaths>
<path>
<groupId>com.google.errorprone</groupId>
<artifactId>error_prone_core</artifactId>
<version>2.9.0</version>
</path>
</annotationProcessorPaths>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
<version>3.3.0</version>
<configuration>
<archive>
<manifest>
@ -136,7 +171,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.0.1</version>
<version>3.5.0</version>
<executions>
<execution>
<id>attach-javadocs</id>
@ -149,7 +184,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.1.0</version>
<version>3.4.1</version>
<executions>
<execution>
<phase>package</phase>
@ -158,6 +193,17 @@
</goals>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
<filters>
<filter>
<artifact>org.eclipse.jetty:*</artifact>
<excludes>
<exclude>META-INF/MANIFEST.MF</exclude>
<exclude>META-INF/LICENSE</exclude>
<exclude>META-INF/NOTICE.txt</exclude>
<exclude>about.html</exclude>
</excludes>
</filter>
</filters>
<artifactSet>
<includes>
<include>org.eclipse.jetty:*</include>
@ -172,11 +218,11 @@
</configuration>
</execution>
</executions>
</plugin>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.0.0</version>
<version>3.5.0</version>
<configuration>
<descriptors>
<descriptor>src/main/assembly/jar-with-dependencies.xml</descriptor>
@ -201,7 +247,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.0.1</version>
<version>3.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
@ -226,9 +272,14 @@
<artifactId>surefire-testng</artifactId>
<version>${surefire.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.surefire</groupId>
<artifactId>surefire-junit-platform</artifactId>
<version>${surefire.version}</version>
</dependency>
</dependencies>
<configuration>
<parallel>all</parallel>
<parallel>classes</parallel>
<threadCount>1</threadCount>
<argLine>-Xmx512m</argLine>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
@ -246,14 +297,7 @@
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>4.2.0</version>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.4.0</version>
</dependency>
</dependencies>
<version>4.8.3.1</version>
<configuration>
<effort>Max</effort>
<omitVisitors>CrossSiteScripting,DefaultEncodingDetector,FindNullDeref</omitVisitors>
@ -269,8 +313,9 @@
<plugin>
<groupId>org.skife.maven</groupId>
<artifactId>really-executable-jar-maven-plugin</artifactId>
<version>1.4.1</version>
<version>2.1.1</version>
<configuration>
<inputFile>target/s3proxy-${version}-jar-with-dependencies.jar</inputFile>
<programFile>s3proxy</programFile>
</configuration>
<executions>
@ -285,7 +330,7 @@
<plugin>
<groupId>org.gaul</groupId>
<artifactId>modernizer-maven-plugin</artifactId>
<version>2.1.0</version>
<version>2.9.0</version>
<executions>
<execution>
<id>modernizer</id>
@ -302,7 +347,7 @@
<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
<version>1.6.5</version>
<version>1.7.0</version>
<extensions>true</extensions>
<configuration>
<serverId>ossrh</serverId>
@ -315,11 +360,12 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.version>1.8</java.version>
<jclouds.version>2.4.0</jclouds.version>
<slf4j.version>1.7.28</slf4j.version>
<java.version>11</java.version>
<jclouds.version>2.6.0</jclouds.version>
<jetty.version>11.0.20</jetty.version>
<slf4j.version>2.0.13</slf4j.version>
<shade.prefix>${project.groupId}.shaded</shade.prefix>
<surefire.version>2.20</surefire.version>
<surefire.version>3.2.5</surefire.version>
</properties>
<repositories>
@ -336,7 +382,7 @@
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.63</version>
<version>1.12.261</version>
<scope>test</scope>
<exclusions>
<exclusion>
@ -348,12 +394,12 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
<version>2.33</version>
<version>2.37</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.3</version>
<version>1.5.6</version>
</dependency>
<dependency>
<groupId>javax.xml.bind</groupId>
@ -363,19 +409,26 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.1</version>
<version>4.13.2</version>
<!-- Required for S3ProxyRule -->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<version>5.10.2</version>
<!-- Required for S3ProxyExtension -->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-xml</artifactId>
<version>2.12.3</version>
<version>2.17.0</version>
</dependency>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
<version>3.1.12</version>
<version>4.8.5</version>
<scope>provided</scope>
</dependency>
<dependency>
@ -386,7 +439,12 @@
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>1.4</version>
<version>1.5</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.15</version>
</dependency>
<dependency>
<groupId>org.apache.jclouds</groupId>
@ -408,12 +466,12 @@
<artifactId>assertj-core</artifactId>
<scope>test</scope>
<!-- we need to use the same version as in jclouds because we pull in their tests -->
<version>1.7.1</version>
<version>3.25.3</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
<version>9.4.41.v20210516</version>
<version>${jetty.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
@ -451,7 +509,7 @@
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.21</version>
<version>7.5.1</version>
<scope>test</scope>
</dependency>
<dependency>

Wyświetl plik

@ -1,7 +0,0 @@
#!/bin/bash
docker login -u $DOCKER_USER -p $DOCKER_PASS
export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
docker tag $REPO:$COMMIT $REPO:$TAG
docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
docker push $REPO

@ -1 +1 @@
Subproject commit f5d5faf49d1efc94cd8e976ec0fac33e6ed04c4e
Subproject commit 7ea471905cad975cced20384ef404d3f26f62df0

Wyświetl plik

@ -17,7 +17,6 @@
package org.gaul.s3proxy;
import static java.util.Objects.requireNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.HashMap;
@ -193,21 +192,6 @@ public final class AliasBlobStore extends ForwardingBlobStore {
return delegate().deleteContainerIfEmpty(getContainer(container));
}
@Override
public boolean directoryExists(String container, String directory) {
return delegate().directoryExists(getContainer(container), directory);
}
@Override
public void createDirectory(String container, String directory) {
delegate().createDirectory(getContainer(container), directory);
}
@Override
public void deleteDirectory(String container, String directory) {
delegate().deleteDirectory(getContainer(container), directory);
}
@Override
public boolean blobExists(String container, String name) {
return delegate().blobExists(getContainer(container), name);

Wyświetl plik

@ -18,6 +18,7 @@ package org.gaul.s3proxy;
final class AwsHttpHeaders {
static final String ACL = "x-amz-acl";
static final String API_VERSION = "x-amz-api-version";
static final String CONTENT_SHA256 = "x-amz-content-sha256";
static final String COPY_SOURCE = "x-amz-copy-source";
static final String COPY_SOURCE_IF_MATCH = "x-amz-copy-source-if-match";

Wyświetl plik

@ -17,7 +17,6 @@
package org.gaul.s3proxy;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
@ -29,11 +28,11 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import javax.servlet.http.HttpServletRequest;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
@ -45,6 +44,8 @@ import com.google.common.io.BaseEncoding;
import com.google.common.net.HttpHeaders;
import com.google.common.net.PercentEscaper;
import jakarta.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -77,6 +78,7 @@ final class AwsSignature {
"versions",
"website"
);
private static final Pattern REPEATING_WHITESPACE = Pattern.compile("\\s+");
private AwsSignature() { }
@ -210,33 +212,44 @@ final class AwsSignature {
private static String buildCanonicalHeaders(HttpServletRequest request,
List<String> signedHeaders) {
List<String> headers = new ArrayList<>();
List<String> headers = new ArrayList<>(
/*initialCapacity=*/ signedHeaders.size());
for (String header : signedHeaders) {
headers.add(header.toLowerCase());
}
Collections.sort(headers);
List<String> headersWithValues = new ArrayList<>();
StringBuilder headersWithValues = new StringBuilder();
boolean firstHeader = true;
for (String header : headers) {
List<String> values = new ArrayList<>();
StringBuilder headerWithValue = new StringBuilder();
headerWithValue.append(header);
headerWithValue.append(":");
if (firstHeader) {
firstHeader = false;
} else {
headersWithValues.append('\n');
}
headersWithValues.append(header);
headersWithValues.append(':');
boolean firstValue = true;
for (String value : Collections.list(request.getHeaders(header))) {
if (firstValue) {
firstValue = false;
} else {
headersWithValues.append(',');
}
value = value.trim();
if (!value.startsWith("\"")) {
value = value.replaceAll("\\s+", " ");
value = REPEATING_WHITESPACE.matcher(value).replaceAll(" ");
}
values.add(value);
headersWithValues.append(value);
}
headerWithValue.append(Joiner.on(",").join(values));
headersWithValues.add(headerWithValue.toString());
}
return Joiner.on("\n").join(headersWithValues);
return headersWithValues.toString();
}
private static String buildCanonicalQueryString(HttpServletRequest request)
throws UnsupportedEncodingException {
private static String buildCanonicalQueryString(
HttpServletRequest request) {
// The parameters are required to be sorted
List<String> parameters = Collections.list(request.getParameterNames());
Collections.sort(parameters);

Wyświetl plik

@ -35,11 +35,12 @@ import org.slf4j.LoggerFactory;
public final class CrossOriginResourceSharing {
protected static final Collection<String> SUPPORTED_METHODS =
ImmutableList.of("GET", "HEAD", "PUT", "POST");
ImmutableList.of("GET", "HEAD", "PUT", "POST", "DELETE");
private static final String HEADER_VALUE_SEPARATOR = ", ";
private static final String ALLOW_ANY_ORIGIN = "*";
private static final String ALLOW_ANY_HEADER = "*";
private static final String ALLOW_CREDENTIALS = "true";
private static final Logger logger = LoggerFactory.getLogger(
CrossOriginResourceSharing.class);
@ -50,16 +51,18 @@ public final class CrossOriginResourceSharing {
private final Set<Pattern> allowedOrigins;
private final Set<String> allowedMethods;
private final Set<String> allowedHeaders;
private final String allowCredentials;
public CrossOriginResourceSharing() {
// CORS Allow all
this(Lists.newArrayList(ALLOW_ANY_ORIGIN), SUPPORTED_METHODS,
Lists.newArrayList(ALLOW_ANY_HEADER));
Lists.newArrayList(ALLOW_ANY_HEADER), "");
}
public CrossOriginResourceSharing(Collection<String> allowedOrigins,
Collection<String> allowedMethods,
Collection<String> allowedHeaders) {
Collection<String> allowedHeaders,
String allowCredentials) {
Set<Pattern> allowedPattern = new HashSet<Pattern>();
boolean anyOriginAllowed = false;
@ -92,9 +95,12 @@ public final class CrossOriginResourceSharing {
this.allowedHeadersRaw = Joiner.on(HEADER_VALUE_SEPARATOR).join(
this.allowedHeaders);
this.allowCredentials = allowCredentials;
logger.info("CORS allowed origins: {}", allowedOrigins);
logger.info("CORS allowed methods: {}", allowedMethods);
logger.info("CORS allowed headers: {}", allowedHeaders);
logger.info("CORS allow credentials: {}", allowCredentials);
}
public String getAllowedMethods() {
@ -166,6 +172,10 @@ public final class CrossOriginResourceSharing {
return result;
}
public boolean isAllowCredentials() {
return ALLOW_CREDENTIALS.equals(allowCredentials);
}
@Override
public boolean equals(Object object) {
if (this == object) {

Wyświetl plik

@ -0,0 +1,773 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy;
import static com.google.common.base.Preconditions.checkArgument;
import java.io.IOException;
import java.io.InputStream;
import java.security.spec.KeySpec;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.common.hash.HashCode;
import org.apache.commons.codec.digest.DigestUtils;
import org.gaul.s3proxy.crypto.Constants;
import org.gaul.s3proxy.crypto.Decryption;
import org.gaul.s3proxy.crypto.Encryption;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobAccess;
import org.jclouds.blobstore.domain.BlobBuilder;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.domain.MultipartPart;
import org.jclouds.blobstore.domain.MultipartUpload;
import org.jclouds.blobstore.domain.MutableBlobMetadata;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.domain.internal.MutableBlobMetadataImpl;
import org.jclouds.blobstore.domain.internal.PageSetImpl;
import org.jclouds.blobstore.options.CopyOptions;
import org.jclouds.blobstore.options.GetOptions;
import org.jclouds.blobstore.options.ListContainerOptions;
import org.jclouds.blobstore.options.PutOptions;
import org.jclouds.blobstore.util.ForwardingBlobStore;
import org.jclouds.io.ContentMetadata;
import org.jclouds.io.MutableContentMetadata;
import org.jclouds.io.Payload;
import org.jclouds.io.Payloads;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings("UnstableApiUsage")
public final class EncryptedBlobStore extends ForwardingBlobStore {
private final Logger logger =
LoggerFactory.getLogger(EncryptedBlobStore.class);
private SecretKeySpec secretKey;
private EncryptedBlobStore(BlobStore blobStore, Properties properties)
throws IllegalArgumentException {
super(blobStore);
String password = properties.getProperty(
S3ProxyConstants.PROPERTY_ENCRYPTED_BLOBSTORE_PASSWORD);
checkArgument(!Strings.isNullOrEmpty(password),
"Password for encrypted blobstore is not set");
String salt = properties.getProperty(
S3ProxyConstants.PROPERTY_ENCRYPTED_BLOBSTORE_SALT);
checkArgument(!Strings.isNullOrEmpty(salt),
"Salt for encrypted blobstore is not set");
initStore(password, salt);
}
static BlobStore newEncryptedBlobStore(BlobStore blobStore,
Properties properties) throws IOException {
return new EncryptedBlobStore(blobStore, properties);
}
private void initStore(String password, String salt)
throws IllegalArgumentException {
try {
SecretKeyFactory factory =
SecretKeyFactory.getInstance("PBKDF2WithHmacSHA256");
KeySpec spec =
new PBEKeySpec(password.toCharArray(), salt.getBytes(), 65536,
128);
SecretKey tmp = factory.generateSecret(spec);
secretKey = new SecretKeySpec(tmp.getEncoded(), "AES");
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
    /**
     * Returns a copy of {@code blob} whose payload is replaced by
     * {@code payload} and whose content length is set to
     * {@code contentLength}; all other blob and content metadata are
     * carried over from the original.
     *
     * @param addEncryptedMetadata true when encrypting (adds the encrypted
     *        suffix to the metadata); false when decrypting (removes it,
     *        except for multipart blobs, which keep it)
     */
    private Blob cipheredBlob(String container, Blob blob, InputStream payload,
            long contentLength,
            boolean addEncryptedMetadata) {

        // make a copy of the blob with the new payload stream
        BlobMetadata blobMeta = blob.getMetadata();
        ContentMetadata contentMeta = blob.getMetadata().getContentMetadata();
        Map<String, String> userMetadata = blobMeta.getUserMetadata();
        String contentType = contentMeta.getContentType();

        // suffix the content type with -s3enc if we need to encrypt
        if (addEncryptedMetadata) {
            blobMeta = setEncryptedSuffix(blobMeta);
        } else {
            // remove the -s3enc suffix while decrypting
            // but not if it contains a multipart meta
            if (!blobMeta.getUserMetadata()
                .containsKey(Constants.METADATA_IS_ENCRYPTED_MULTIPART)) {
                blobMeta = removeEncryptedSuffix(blobMeta);
            }
        }

        // we do not set contentMD5 as it will not match due to the encryption
        Blob cipheredBlob = blobBuilder(container)
            .name(blobMeta.getName())
            .type(blobMeta.getType())
            .tier(blobMeta.getTier())
            .userMetadata(userMetadata)
            .payload(payload)
            .cacheControl(contentMeta.getCacheControl())
            .contentDisposition(contentMeta.getContentDisposition())
            .contentEncoding(contentMeta.getContentEncoding())
            .contentLanguage(contentMeta.getContentLanguage())
            .contentLength(contentLength)
            .contentType(contentType)
            .build();

        // the builder does not cover these attributes, so copy them
        // over explicitly from the source blob's metadata
        cipheredBlob.getMetadata().setUri(blobMeta.getUri());
        cipheredBlob.getMetadata().setETag(blobMeta.getETag());
        cipheredBlob.getMetadata().setLastModified(blobMeta.getLastModified());
        cipheredBlob.getMetadata().setSize(blobMeta.getSize());
        cipheredBlob.getMetadata().setPublicUri(blobMeta.getPublicUri());
        cipheredBlob.getMetadata().setContainer(blobMeta.getContainer());

        return cipheredBlob;
    }
/**
 * Wraps the blob's payload in an encrypting stream and returns a copy
 * whose content length accounts for the trailing padding block.
 */
private Blob encryptBlob(String container, Blob blob) {
    try {
        InputStream plain = blob.getPayload().openStream();
        InputStream ciphered =
            new Encryption(secretKey, plain, 1).openStream();
        // encrypted length = plaintext length + one padding block
        long length = Constants.PADDING_BLOCK_SIZE +
            blob.getMetadata().getContentMetadata().getContentLength();
        return cipheredBlob(container, blob, ciphered, length, true);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Encrypts a single multipart part payload; the part number seeds the
 * per-part encryption state.
 */
private Payload encryptPayload(Payload payload, int partNumber) {
    try {
        InputStream plain = payload.openStream();
        InputStream ciphered =
            new Encryption(secretKey, plain, partNumber).openStream();
        Payload result = Payloads.newInputStreamPayload(ciphered);
        // clear the MD5 on the source metadata before copying it:
        // it can no longer match the encrypted bytes
        payload.getContentMetadata().setContentMD5((HashCode) null);
        result.setContentMetadata(payload.getContentMetadata());
        result.setSensitive(payload.isSensitive());
        // encrypted length = plaintext length + one padding block
        long length = payload.getContentMetadata().getContentLength() +
            Constants.PADDING_BLOCK_SIZE;
        result.getContentMetadata().setContentLength(length);
        return result;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Wraps the blob's payload in a decrypting stream and returns a copy
 * with the plaintext content length. Returns null when the blob does
 * not exist.
 */
private Blob decryptBlob(Decryption decryption, String container,
        Blob blob) {
    if (blob == null) {
        return null;
    }
    try {
        InputStream ciphered = blob.getPayload().openStream();
        InputStream plain = decryption.openStream(ciphered);
        long length =
            blob.getMetadata().getContentMetadata().getContentLength();
        if (decryption.isEncrypted()) {
            // report the plaintext size instead of the stored one
            length = decryption.getContentLength();
        }
        return cipheredBlob(container, blob, plain, length, false);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Rewrites a listing so encrypted blobs appear under their plain names
 * with their unencrypted sizes; the next marker is un-suffixed too.
 */
private PageSet<? extends StorageMetadata> filteredList(
        PageSet<? extends StorageMetadata> pageSet) {
    ImmutableSet.Builder<StorageMetadata> results = ImmutableSet.builder();
    for (StorageMetadata sm : pageSet) {
        if (!(sm instanceof BlobMetadata)) {
            results.add(sm);
            continue;
        }
        MutableBlobMetadata mbm =
            new MutableBlobMetadataImpl((BlobMetadata) sm);
        if (isEncrypted(mbm)) {
            // strip the -s3enc suffix and report the plaintext size
            mbm = removeEncryptedSuffix((BlobMetadata) sm);
            mbm = calculateBlobSize(mbm);
        }
        results.add(mbm);
    }
    // the marker must never expose a .s3enc-suffixed name
    String marker = pageSet.getNextMarker();
    if (marker != null && isEncrypted(marker)) {
        marker = removeEncryptedSuffix(marker);
    }
    return new PageSetImpl<>(results.build(), marker);
}
// True when this metadata's blob name carries the encryption suffix.
private boolean isEncrypted(BlobMetadata blobMeta) {
    String name = blobMeta.getName();
    return isEncrypted(name);
}
// True when the blob name ends with the .s3enc marker suffix.
private boolean isEncrypted(String blobName) {
    String suffix = Constants.S3_ENC_SUFFIX;
    return blobName.endsWith(suffix);
}
/**
 * Returns a metadata copy whose name carries the .s3enc suffix;
 * a null or already-suffixed name is left unchanged.
 */
private MutableBlobMetadata setEncryptedSuffix(BlobMetadata blobMeta) {
    MutableBlobMetadata copy = new MutableBlobMetadataImpl(blobMeta);
    String name = blobMeta.getName();
    if (name != null && !isEncrypted(name)) {
        copy.setName(blobNameWithSuffix(name));
    }
    return copy;
}
// Strips the trailing .s3enc marker from a blob name; the caller
// guarantees the suffix is present.
private String removeEncryptedSuffix(String blobName) {
    int end = blobName.length() - Constants.S3_ENC_SUFFIX.length();
    return blobName.substring(0, end);
}
/**
 * Returns a metadata copy whose name has the .s3enc suffix removed;
 * an unsuffixed name is left unchanged.
 */
private MutableBlobMetadata removeEncryptedSuffix(BlobMetadata blobMeta) {
    MutableBlobMetadata copy = new MutableBlobMetadataImpl(blobMeta);
    String name = copy.getName();
    if (isEncrypted(name)) {
        copy.setName(removeEncryptedSuffix(name));
    }
    return copy;
}
/**
 * Reports the unencrypted blob size by subtracting the per-part padding
 * from the stored size. The part count comes from a user-metadata key on
 * non-s3 backends (azure/gcp), from the "-N" eTag suffix on s3 backends
 * (aws/minio), and defaults to 1 for single-part blobs.
 *
 * Fixes over the previous version: the size assignment was duplicated in
 * three branches; a null eTag caused an NPE instead of falling back to
 * the single-part case; the padding multiplication was done in int.
 */
private MutableBlobMetadata calculateBlobSize(BlobMetadata blobMeta) {
    MutableBlobMetadata mbm = removeEncryptedSuffix(blobMeta);
    int parts = 1;
    // on non-s3 backends like azure or gcp a metadata key carries the
    // part count used to compute the padding to remove
    if (mbm.getUserMetadata()
            .containsKey(Constants.METADATA_ENCRYPTION_PARTS)) {
        parts = Integer.parseInt(
            mbm.getUserMetadata().get(Constants.METADATA_ENCRYPTION_PARTS));
    } else {
        // on s3 backends like aws or minio we rely on the eTag suffix;
        // a missing eTag or suffix means a single-part blob
        String eTag = blobMeta.getETag();
        if (eTag != null) {
            Matcher matcher =
                Constants.MPU_ETAG_SUFFIX_PATTERN.matcher(eTag);
            if (matcher.find()) {
                parts = Integer.parseInt(matcher.group(1));
            }
        }
    }
    long size =
        blobMeta.getSize() - (long) Constants.PADDING_BLOCK_SIZE * parts;
    mbm.setSize(size);
    mbm.getContentMetadata().setContentLength(size);
    return mbm;
}
// Whether the current backend needs multipart stub emulation
// (backends without native S3-style multipart uploads).
private boolean multipartRequiresStub() {
    return Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType());
}
/**
 * Resolves a client-visible name to the stored one: returns the
 * .s3enc-suffixed variant when such a blob exists in the container,
 * otherwise the name unchanged.
 */
private String blobNameWithSuffix(String container, String name) {
    String suffixed = blobNameWithSuffix(name);
    return delegate().blobExists(container, suffixed) ? suffixed : name;
}
// Appends the .s3enc marker suffix to a blob name.
private String blobNameWithSuffix(String name) {
    return name.concat(Constants.S3_ENC_SUFFIX);
}
// Provider id of the underlying backend, e.g. "azureblob" or
// "google-cloud-storage"; used to select backend-specific quirks.
private String getBlobStoreType() {
    return delegate().getContext().unwrap().getProviderMetadata().getId();
}
// Deterministic uploadId for backends without native multipart ids:
// the SHA-256 hex digest of "container/blobName".
private String generateUploadId(String container, String blobName) {
    return DigestUtils.sha256Hex(container + "/" + blobName);
}
/** Fetches a blob with default (unranged) options. */
@Override
public Blob getBlob(String containerName, String blobName) {
    GetOptions defaults = new GetOptions();
    return getBlob(containerName, blobName, defaults);
}
/**
 * Fetches a blob, transparently decrypting it. A client-requested byte
 * range expressed in plaintext offsets is translated into the
 * corresponding ciphertext range before hitting the backend.
 */
@Override
public Blob getBlob(String containerName, String blobName,
    GetOptions getOptions) {
    // adjust the blob name to the stored (.s3enc) variant if present
    blobName = blobNameWithSuffix(blobName);
    // get the metadata to determine the blob size
    BlobMetadata meta = delegate().blobMetadata(containerName, blobName);
    try {
        // we have a blob that ends with .s3enc
        if (meta != null) {
            // init defaults
            long offset = 0;
            long end = 0;
            long length = -1;
            if (getOptions.getRanges().size() > 0) {
                // S3 doesn't allow multiple ranges
                String range = getOptions.getRanges().get(0);
                String[] ranges = range.split("-", 2);
                if (ranges[0].isEmpty()) {
                    // suffix range "-N": read the last N bytes
                    end = Long.parseLong(ranges[1]);
                    length = end;
                } else if (ranges[1].isEmpty()) {
                    // open range "N-": read from an offset to the end
                    offset = Long.parseLong(ranges[0]);
                } else {
                    // bounded range "N-M"
                    offset = Long.parseLong(ranges[0]);
                    end = Long.parseLong(ranges[1]);
                    length = end - offset + 1;
                }
            }
            // init decryption
            Decryption decryption =
                new Decryption(secretKey, delegate(), meta, offset, length);
            if (decryption.isEncrypted() &&
                getOptions.getRanges().size() > 0) {
                // clear current ranges to avoid multiple ranges
                getOptions.getRanges().clear();
                long startAt = decryption.getStartAt();
                long endAt = decryption.getEncryptedSize();
                if (offset == 0 && end > 0 && length == end) {
                    // suffix range: start at the computed tail position
                    startAt = decryption.calculateTail();
                } else if (offset > 0 && end > 0) {
                    // bounded range: translate the plaintext end offset
                    endAt = decryption.calculateEndAt(end);
                }
                getOptions.range(startAt, endAt);
            }
            Blob blob =
                delegate().getBlob(containerName, blobName, getOptions);
            return decryptBlob(decryption, containerName, blob);
        } else {
            // no metadata found: assume the blob was stored unencrypted
            // and fall back to the plain (unsuffixed) name
            blobName = removeEncryptedSuffix(blobName);
            return delegate().getBlob(containerName, blobName, getOptions);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/** Stores a blob, encrypting its payload on the way in. */
@Override
public String putBlob(String containerName, Blob blob) {
    Blob encrypted = encryptBlob(containerName, blob);
    return delegate().putBlob(containerName, encrypted);
}
/** Stores a blob with options, encrypting its payload on the way in. */
@Override
public String putBlob(String containerName, Blob blob,
        PutOptions putOptions) {
    Blob encrypted = encryptBlob(containerName, blob);
    return delegate().putBlob(containerName, encrypted, putOptions);
}
/**
 * Copies a blob. When the source is stored encrypted, copy the
 * suffixed blob and keep the destination name suffixed as well so the
 * ciphertext stays marked.
 */
@Override
public String copyBlob(String fromContainer, String fromName,
        String toContainer, String toName, CopyOptions options) {
    String suffixedSource = blobNameWithSuffix(fromName);
    if (delegate().blobExists(fromContainer, suffixedSource)) {
        fromName = suffixedSource;
        toName = blobNameWithSuffix(toName);
    }
    return delegate().copyBlob(fromContainer, fromName, toContainer, toName,
        options);
}
/** Removes the stored (possibly .s3enc-suffixed) variant of the blob. */
@Override
public void removeBlob(String container, String name) {
    delegate().removeBlob(container, blobNameWithSuffix(container, name));
}
/**
 * Removes multiple blobs, resolving each client-visible name to its
 * stored (possibly .s3enc-suffixed) variant first.
 */
@Override
public void removeBlobs(String container, Iterable<String> names) {
    List<String> resolved = new ArrayList<>();
    for (String name : names) {
        resolved.add(blobNameWithSuffix(container, name));
    }
    delegate().removeBlobs(container, resolved);
}
/** Reads access settings from the stored (suffixed) blob variant. */
@Override
public BlobAccess getBlobAccess(String container, String name) {
    return delegate().getBlobAccess(container,
        blobNameWithSuffix(container, name));
}
/** Checks existence against the stored (suffixed) blob variant. */
@Override
public boolean blobExists(String container, String name) {
    return delegate().blobExists(container,
        blobNameWithSuffix(container, name));
}
/** Applies access settings to the stored (suffixed) blob variant. */
@Override
public void setBlobAccess(String container, String name,
        BlobAccess access) {
    delegate().setBlobAccess(container,
        blobNameWithSuffix(container, name), access);
}
/** Lists containers/blobs with encrypted entries rewritten to plain view. */
@Override
public PageSet<? extends StorageMetadata> list() {
    return filteredList(delegate().list());
}
/** Lists a container with encrypted entries rewritten to plain view. */
@Override
public PageSet<? extends StorageMetadata> list(String container) {
    return filteredList(delegate().list(container));
}
/** Lists with options, rewriting encrypted entries to plain view. */
@Override
public PageSet<? extends StorageMetadata> list(String container,
        ListContainerOptions options) {
    return filteredList(delegate().list(container, options));
}
/**
 * Starts a multipart upload against the .s3enc-suffixed blob name. For
 * backends without native S3-style multipart (azure, gcp) it also marks
 * the upload with a stub metadata key and substitutes a deterministic
 * uploadId derived from the blob path.
 */
@Override
public MultipartUpload initiateMultipartUpload(String container,
    BlobMetadata blobMetadata, PutOptions options) {
    MutableBlobMetadata mbm = new MutableBlobMetadataImpl(blobMetadata);
    mbm = setEncryptedSuffix(mbm);
    MultipartUpload mpu =
        delegate().initiateMultipartUpload(container, mbm, options);
    // handle non-s3 backends
    // by setting a metadata key for multipart stubs
    if (multipartRequiresStub()) {
        mbm.getUserMetadata()
            .put(Constants.METADATA_IS_ENCRYPTED_MULTIPART, "true");
        if (getBlobStoreType().equals("azureblob")) {
            // use part 0 as a placeholder; it is filtered out of part
            // listings later
            delegate().uploadMultipartPart(mpu, 0,
                Payloads.newStringPayload("dummy"));
            // since azure does not have a uploadId
            // we use the sha256 of the path
            String uploadId = generateUploadId(container, mbm.getName());
            mpu = MultipartUpload.create(mpu.containerName(),
                mpu.blobName(), uploadId, mpu.blobMetadata(), options);
        } else if (getBlobStoreType().equals("google-cloud-storage")) {
            mbm.getUserMetadata()
                .put(Constants.METADATA_MULTIPART_KEY, mbm.getName());
            // since gcp does not have a uploadId
            // we use the sha256 of the path
            String uploadId = generateUploadId(container, mbm.getName());
            // to emulate later the list of multipart uploads
            // we create a placeholder blob under the .mpu folder
            BlobBuilder builder =
                blobBuilder(Constants.MPU_FOLDER + uploadId)
                .payload("")
                .userMetadata(mbm.getUserMetadata());
            delegate().putBlob(container, builder.build(), options);
            // final mpu on gcp
            mpu = MultipartUpload.create(mpu.containerName(),
                mpu.blobName(), uploadId, mpu.blobMetadata(), options);
        }
    }
    return mpu;
}
/**
 * Lists in-progress multipart uploads under their client-visible
 * (unsuffixed) blob names. On gcp, which has no native listing, the
 * uploads are reconstructed from the placeholder blobs stored in the
 * .mpu folder.
 */
@Override
public List<MultipartUpload> listMultipartUploads(String container) {
    List<MultipartUpload> mpus = new ArrayList<>();
    // emulate list of multipart uploads on gcp
    if (getBlobStoreType().equals("google-cloud-storage")) {
        ListContainerOptions options = new ListContainerOptions();
        PageSet<? extends StorageMetadata> mpuList =
            delegate().list(container,
                options.prefix(Constants.MPU_FOLDER));
        // find all blobs in .mpu folder and build the list
        for (StorageMetadata blob : mpuList) {
            Map<String, String> meta = blob.getUserMetadata();
            if (meta.containsKey(Constants.METADATA_MULTIPART_KEY)) {
                String blobName =
                    meta.get(Constants.METADATA_MULTIPART_KEY);
                // the uploadId is the placeholder's file name
                String uploadId =
                    blob.getName()
                        .substring(blob.getName().lastIndexOf("/") + 1);
                MultipartUpload mpu =
                    MultipartUpload.create(container,
                        blobName, uploadId, null, null);
                mpus.add(mpu);
            }
        }
    } else {
        mpus = delegate().listMultipartUploads(container);
    }
    List<MultipartUpload> filtered = new ArrayList<>();
    // filter the list uploads by removing the .s3enc suffix
    for (MultipartUpload mpu : mpus) {
        String blobName = mpu.blobName();
        if (isEncrypted(blobName)) {
            blobName = removeEncryptedSuffix(mpu.blobName());
            String uploadId = mpu.id();
            // since azure does not have a uploadId
            // we use the sha256 of the path
            if (getBlobStoreType().equals("azureblob")) {
                uploadId = generateUploadId(container, mpu.blobName());
            }
            MultipartUpload mpuWithoutSuffix =
                MultipartUpload.create(mpu.containerName(),
                    blobName, uploadId, mpu.blobMetadata(),
                    mpu.putOptions());
            filtered.add(mpuWithoutSuffix);
        } else {
            filtered.add(mpu);
        }
    }
    return filtered;
}
/**
 * Lists the parts of a multipart upload, reporting each part's
 * unencrypted size and hiding the azure part-0 placeholder.
 */
@Override
public List<MultipartPart> listMultipartUpload(MultipartUpload mpu) {
    mpu = filterMultipartUpload(mpu);
    boolean azure = getBlobStoreType().equals("azureblob");
    List<MultipartPart> adjusted = new ArrayList<>();
    for (MultipartPart part : delegate().listMultipartUpload(mpu)) {
        // part 0 is an azure-only placeholder; never expose it
        if (azure && part.partNumber() == 0) {
            continue;
        }
        // report the plaintext size by removing the padding block
        adjusted.add(MultipartPart.create(
            part.partNumber(),
            part.partSize() - Constants.PADDING_BLOCK_SIZE,
            part.partETag(),
            part.lastModified()));
    }
    return adjusted;
}
/** Uploads one part, encrypting its payload keyed by the part number. */
@Override
public MultipartPart uploadMultipartPart(MultipartUpload mpu,
        int partNumber, Payload payload) {
    MultipartUpload target = filterMultipartUpload(mpu);
    Payload encrypted = encryptPayload(payload, partNumber);
    return delegate().uploadMultipartPart(target, partNumber, encrypted);
}
/**
 * Normalizes an upload handle so that both its blob name and metadata
 * carry the .s3enc suffix expected by the backend.
 */
private MultipartUpload filterMultipartUpload(MultipartUpload mpu) {
    MutableBlobMetadata metadata = null;
    if (mpu.blobMetadata() != null) {
        metadata = setEncryptedSuffix(
            new MutableBlobMetadataImpl(mpu.blobMetadata()));
    }
    String blobName = mpu.blobName();
    if (!isEncrypted(blobName)) {
        blobName = blobNameWithSuffix(blobName);
    }
    return MultipartUpload.create(mpu.containerName(), blobName, mpu.id(),
        metadata, mpu.putOptions());
}
/**
 * Completes a multipart upload. On stub backends (azure/gcp) the total
 * encrypted part count is recorded in user metadata so the plaintext
 * size can be computed later, and the temporary multipart marker is
 * removed; on gcp the .mpu placeholder blob is cleaned up afterwards.
 */
@Override
public String completeMultipartUpload(MultipartUpload mpu,
    List<MultipartPart> parts) {
    MutableBlobMetadata mbm =
        new MutableBlobMetadataImpl(mpu.blobMetadata());
    String blobName = mpu.blobName();
    // always set .s3enc suffix except on gcp
    // and blob name starts with multipart upload id
    if (getBlobStoreType().equals("google-cloud-storage") &&
        mpu.blobName().startsWith(mpu.id())) {
        logger.debug("skip suffix on gcp");
    } else {
        mbm = setEncryptedSuffix(mbm);
        if (!isEncrypted(mpu.blobName())) {
            blobName = blobNameWithSuffix(blobName);
        }
    }
    MultipartUpload mpuWithSuffix =
        MultipartUpload.create(mpu.containerName(),
            blobName, mpu.id(), mbm, mpu.putOptions());
    // this will only work for non s3 backends like azure and gcp
    if (multipartRequiresStub()) {
        long partCount = parts.size();
        // special handling for GCP to sum up all parts:
        // each intermediate blob may itself be a composite carrying
        // its own part count in user metadata
        if (getBlobStoreType().equals("google-cloud-storage")) {
            partCount = 0;
            for (MultipartPart part : parts) {
                blobName =
                    String.format("%s_%08d",
                        mpu.id(),
                        part.partNumber());
                BlobMetadata metadata =
                    delegate().blobMetadata(mpu.containerName(), blobName);
                if (metadata != null && metadata.getUserMetadata()
                    .containsKey(Constants.METADATA_ENCRYPTION_PARTS)) {
                    String partMetaCount = metadata.getUserMetadata()
                        .get(Constants.METADATA_ENCRYPTION_PARTS);
                    partCount = partCount + Long.parseLong(partMetaCount);
                } else {
                    partCount++;
                }
            }
        }
        mpuWithSuffix.blobMetadata().getUserMetadata()
            .put(Constants.METADATA_ENCRYPTION_PARTS,
                String.valueOf(partCount));
        mpuWithSuffix.blobMetadata().getUserMetadata()
            .remove(Constants.METADATA_IS_ENCRYPTED_MULTIPART);
    }
    String eTag = delegate().completeMultipartUpload(mpuWithSuffix, parts);
    // cleanup mpu placeholder on gcp
    if (getBlobStoreType().equals("google-cloud-storage")) {
        delegate().removeBlob(mpu.containerName(),
            Constants.MPU_FOLDER + mpu.id());
    }
    return eTag;
}
/**
 * Returns blob metadata under the client-visible name, reporting the
 * plaintext size for encrypted blobs (multipart stubs stay untouched).
 */
@Override
public BlobMetadata blobMetadata(String container, String name) {
    BlobMetadata metadata = delegate().blobMetadata(container,
        blobNameWithSuffix(container, name));
    if (metadata == null) {
        return null;
    }
    boolean multipartStub = metadata.getUserMetadata()
        .containsKey(Constants.METADATA_IS_ENCRYPTED_MULTIPART);
    if (isEncrypted(metadata) && !multipartStub) {
        // strip the -s3enc suffix and report the unencrypted size
        metadata = removeEncryptedSuffix(metadata);
        metadata = calculateBlobSize(metadata);
    }
    return metadata;
}
/**
 * Maximum part size visible to clients: the backend's limit minus the
 * padding block every encrypted part gains.
 */
@Override
public long getMaximumMultipartPartSize() {
    return delegate().getMaximumMultipartPartSize() -
        Constants.PADDING_BLOCK_SIZE;
}
}

Wyświetl plik

@ -17,7 +17,6 @@
package org.gaul.s3proxy;
import static java.util.Objects.requireNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.Deque;

Wyświetl plik

@ -35,6 +35,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableBiMap;
@ -247,6 +248,13 @@ public final class Main {
blobStore = AliasBlobStore.newAliasBlobStore(blobStore, aliases);
}
ImmutableList<Map.Entry<Pattern, String>> regexs =
RegexBlobStore.parseRegexs(properties);
if (!regexs.isEmpty()) {
System.err.println("Using regex backend");
blobStore = RegexBlobStore.newRegexBlobStore(blobStore, regexs);
}
ImmutableMap<String, Integer> shards =
ShardedBlobStore.parseBucketShards(properties);
ImmutableMap<String, String> prefixes =
@ -257,6 +265,14 @@ public final class Main {
shards, prefixes);
}
String encryptedBlobStore = properties.getProperty(
S3ProxyConstants.PROPERTY_ENCRYPTED_BLOBSTORE);
if ("true".equalsIgnoreCase(encryptedBlobStore)) {
System.err.println("Using encrypted storage backend");
blobStore = EncryptedBlobStore.newEncryptedBlobStore(blobStore,
properties);
}
return blobStore;
}
@ -317,6 +333,9 @@ public final class Main {
StandardCharsets.UTF_8).read();
}
properties.remove(Constants.PROPERTY_CREDENTIAL);
// We also need to clear the system property, otherwise the
// credential will be overridden by the system property.
System.clearProperty(Constants.PROPERTY_CREDENTIAL);
}
if (identity == null || credential == null) {

Wyświetl plik

@ -18,6 +18,7 @@ package org.gaul.s3proxy;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;
@ -119,7 +120,7 @@ final class NullBlobStore extends ForwardingBlobStore {
PutOptions options) {
long length;
try (InputStream is = blob.getPayload().openStream()) {
length = ByteStreams.copy(is, ByteStreams.nullOutputStream());
length = is.transferTo(OutputStream.nullOutputStream());
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
@ -175,7 +176,7 @@ final class NullBlobStore extends ForwardingBlobStore {
int partNumber, Payload payload) {
long length;
try (InputStream is = payload.openStream()) {
length = ByteStreams.copy(is, ByteStreams.nullOutputStream());
length = is.transferTo(OutputStream.nullOutputStream());
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}

Wyświetl plik

@ -0,0 +1,243 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import java.io.File;
import java.io.InputStream;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.collect.ImmutableList;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobAccess;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.options.CopyOptions;
import org.jclouds.blobstore.options.PutOptions;
import org.jclouds.blobstore.util.ForwardingBlobStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class implements a middleware to apply regex to blob names.
* The regex are configured as:
* s3proxy.regex-blobstore.match.&lt;regex name&gt; = &lt;regex match
* expression&gt;
* s3proxy.regex-blobstore.replace.&lt;regex name&gt; = &lt;regex replace
* expression&gt;
*
* You can add multiple regex, they will be applied from the beginning to the
* end,
* stopping as soon as the first regex matches.
*/
public final class RegexBlobStore extends ForwardingBlobStore {
private static final Logger logger = LoggerFactory.getLogger(
RegexBlobStore.class);
private final ImmutableList<Entry<Pattern, String>> regexs;
private RegexBlobStore(BlobStore blobStore,
ImmutableList<Entry<Pattern, String>> regexs) {
super(blobStore);
this.regexs = requireNonNull(regexs);
}
static BlobStore newRegexBlobStore(BlobStore delegate,
ImmutableList<Entry<Pattern, String>> regexs) {
return new RegexBlobStore(delegate, regexs);
}
public static ImmutableList<Map.Entry<Pattern, String>> parseRegexs(
Properties properties) {
List<Entry<String, String>> configRegex = new ArrayList<>();
List<Entry<Pattern, String>> regexs = new ArrayList<>();
for (String key : properties.stringPropertyNames()) {
if (key.startsWith(S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE)) {
String propKey = key.substring(
S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE.length() + 1);
String value = properties.getProperty(key);
configRegex.add(new SimpleEntry<>(propKey, value));
}
}
for (Entry<String, String> entry : configRegex) {
String key = entry.getKey();
if (key.startsWith(
S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE_MATCH)) {
String regexName = key.substring(S3ProxyConstants
.PROPERTY_REGEX_BLOBSTORE_MATCH.length() + 1);
String regex = entry.getValue();
Pattern pattern = Pattern.compile(regex);
String replace = properties.getProperty(String.join(
".", S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE,
S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE_REPLACE,
regexName));
checkArgument(
replace != null,
"Regex %s has no replace property associated",
regexName);
logger.info(
"Adding new regex with name {} replaces with {} to {}",
regexName, regex, replace);
regexs.add(new SimpleEntry<>(pattern, replace));
}
}
return ImmutableList.copyOf(regexs);
}
@Override
public boolean directoryExists(String container, String directory) {
return super.directoryExists(container, replaceBlobName(directory));
}
@Override
public void createDirectory(String container, String directory) {
super.createDirectory(container, replaceBlobName(directory));
}
@Override
public void deleteDirectory(String container, String directory) {
super.deleteDirectory(container, replaceBlobName(directory));
}
@Override
public boolean blobExists(String container, String name) {
return super.blobExists(container, replaceBlobName(name));
}
@Override
public String putBlob(String containerName, Blob blob) {
String name = blob.getMetadata().getName();
String newName = replaceBlobName(name);
blob.getMetadata().setName(newName);
logger.debug("Renaming blob name from {} to {}", name, newName);
return super.putBlob(containerName, blob);
}
@Override
public String putBlob(String containerName, Blob blob,
PutOptions putOptions) {
String name = blob.getMetadata().getName();
String newName = replaceBlobName(name);
blob.getMetadata().setName(newName);
logger.debug("Renaming blob name from {} to {}", name, newName);
return super.putBlob(containerName, blob, putOptions);
}
@Override
public String copyBlob(String fromContainer, String fromName,
String toContainer, String toName, CopyOptions options) {
return super.copyBlob(fromContainer, replaceBlobName(fromName),
toContainer, replaceBlobName(toName), options);
}
@Override
public BlobMetadata blobMetadata(String container, String name) {
return super.blobMetadata(container, replaceBlobName(name));
}
@Override
public Blob getBlob(String containerName, String name) {
return super.getBlob(containerName, replaceBlobName(name));
}
@Override
public void removeBlob(String container, String name) {
super.removeBlob(container, replaceBlobName(name));
}
@Override
public void removeBlobs(String container, Iterable<String> iterable) {
List<String> blobs = new ArrayList<>();
for (String name : iterable) {
blobs.add(replaceBlobName(name));
}
super.removeBlobs(container, blobs);
}
@Override
public BlobAccess getBlobAccess(String container, String name) {
return super.getBlobAccess(container, replaceBlobName(name));
}
@Override
public void setBlobAccess(String container, String name,
BlobAccess access) {
super.setBlobAccess(container, replaceBlobName(name), access);
}
@Override
public void downloadBlob(String container, String name, File destination) {
super.downloadBlob(container, replaceBlobName(name), destination);
}
@Override
public void downloadBlob(String container, String name, File destination,
ExecutorService executor) {
super.downloadBlob(container, replaceBlobName(name), destination,
executor);
}
@Override
public InputStream streamBlob(String container, String name) {
return super.streamBlob(container, replaceBlobName(name));
}
@Override
public InputStream streamBlob(String container, String name,
ExecutorService executor) {
return super.streamBlob(container, replaceBlobName(name), executor);
}
private String replaceBlobName(String name) {
String newName = name;
for (Map.Entry<Pattern, String> entry : this.regexs) {
Pattern pattern = entry.getKey();
Matcher match = pattern.matcher(name);
if (match.find()) {
return match.replaceAll(entry.getValue());
}
}
return newName;
}
}

Wyświetl plik

@ -18,10 +18,10 @@ package org.gaul.s3proxy;
import static java.util.Objects.requireNonNull;
import javax.servlet.http.HttpServletResponse;
import com.google.common.base.CaseFormat;
import jakarta.servlet.http.HttpServletResponse;
/**
* List of S3 error codes. Reference:
* http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html

Wyświetl plik

@ -17,7 +17,6 @@
package org.gaul.s3proxy;
import static java.util.Objects.requireNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import java.net.URI;
@ -31,7 +30,11 @@ import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.eclipse.jetty.http.HttpCompliance;
import org.eclipse.jetty.http.UriCompliance;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.handler.ContextHandler;
@ -51,12 +54,6 @@ public final class S3Proxy {
private final boolean listenHTTP;
private final boolean listenHTTPS;
static {
// Prevent Jetty from rewriting headers:
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=414449
System.setProperty("org.eclipse.jetty.http.HttpParser.STRICT", "true");
}
S3Proxy(Builder builder) {
checkArgument(builder.endpoint != null ||
builder.secureEndpoint != null,
@ -88,8 +85,14 @@ public final class S3Proxy {
context.setContextPath(builder.servicePath);
}
HttpConfiguration httpConfiguration = new HttpConfiguration();
httpConfiguration.setHttpCompliance(HttpCompliance.LEGACY);
httpConfiguration.setUriCompliance(UriCompliance.LEGACY);
SecureRequestCustomizer src = new SecureRequestCustomizer();
src.setSniHostCheck(false);
httpConfiguration.addCustomizer(src);
HttpConnectionFactory httpConnectionFactory =
new HttpConnectionFactory();
new HttpConnectionFactory(httpConfiguration);
ServerConnector connector;
if (builder.endpoint != null) {
connector = new ServerConnector(server, httpConnectionFactory);
@ -102,7 +105,8 @@ public final class S3Proxy {
}
if (builder.secureEndpoint != null) {
SslContextFactory sslContextFactory = new SslContextFactory();
SslContextFactory.Server sslContextFactory =
new SslContextFactory.Server();
sslContextFactory.setKeyStorePath(builder.keyStorePath);
sslContextFactory.setKeyStorePassword(builder.keyStorePassword);
connector = new ServerConnector(server, sslContextFactory,
@ -137,7 +141,7 @@ public final class S3Proxy {
private String keyStorePassword;
private String virtualHost;
private long maxSinglePartObjectSize = 5L * 1024 * 1024 * 1024;
private long v4MaxNonChunkedRequestSize = 32 * 1024 * 1024;
private long v4MaxNonChunkedRequestSize = 128 * 1024 * 1024;
private boolean ignoreUnknownHeaders;
private CrossOriginResourceSharing corsRules;
private int jettyMaxThreads = 200; // sourced from QueuedThreadPool()
@ -158,16 +162,18 @@ public final class S3Proxy {
S3ProxyConstants.PROPERTY_ENDPOINT);
String secureEndpoint = properties.getProperty(
S3ProxyConstants.PROPERTY_SECURE_ENDPOINT);
if (endpoint == null && secureEndpoint == null) {
boolean hasEndpoint = !Strings.isNullOrEmpty(endpoint);
boolean hasSecureEndpoint = !Strings.isNullOrEmpty(secureEndpoint);
if (!hasEndpoint && !hasSecureEndpoint) {
throw new IllegalArgumentException(
"Properties file must contain: " +
S3ProxyConstants.PROPERTY_ENDPOINT + " or " +
S3ProxyConstants.PROPERTY_SECURE_ENDPOINT);
}
if (endpoint != null) {
if (hasEndpoint) {
builder.endpoint(new URI(endpoint));
}
if (secureEndpoint != null) {
if (hasSecureEndpoint) {
builder.secureEndpoint(new URI(secureEndpoint));
}
@ -264,6 +270,9 @@ public final class S3Proxy {
S3ProxyConstants.PROPERTY_CORS_ALLOW_METHODS, "");
String corsAllowHeaders = properties.getProperty(
S3ProxyConstants.PROPERTY_CORS_ALLOW_HEADERS, "");
String allowCredentials = properties.getProperty(
S3ProxyConstants.PROPERTY_CORS_ALLOW_CREDENTIAL, "");
Splitter splitter = Splitter.on(" ").trimResults()
.omitEmptyStrings();
@ -282,7 +291,8 @@ public final class S3Proxy {
builder.corsRules(new CrossOriginResourceSharing(
Lists.newArrayList(splitter.split(corsAllowOrigins)),
Lists.newArrayList(splitter.split(corsAllowMethods)),
Lists.newArrayList(splitter.split(corsAllowHeaders))));
Lists.newArrayList(splitter.split(corsAllowHeaders)),
allowCredentials));
}
String jettyMaxThreads = properties.getProperty(
@ -318,8 +328,10 @@ public final class S3Proxy {
public Builder awsAuthentication(AuthenticationType authenticationType,
String identity, String credential) {
this.authenticationType = authenticationType;
this.identity = requireNonNull(identity);
this.credential = requireNonNull(credential);
if (!AuthenticationType.NONE.equals(authenticationType)) {
this.identity = requireNonNull(identity);
this.credential = requireNonNull(credential);
}
return this;
}

Wyświetl plik

@ -40,6 +40,8 @@ public final class S3ProxyConstants {
"s3proxy.cors-allow-methods";
public static final String PROPERTY_CORS_ALLOW_HEADERS =
"s3proxy.cors-allow-headers";
public static final String PROPERTY_CORS_ALLOW_CREDENTIAL =
"s3proxy.cors-allow-credential";
public static final String PROPERTY_CREDENTIAL =
"s3proxy.credential";
public static final String PROPERTY_IGNORE_UNKNOWN_HEADERS =
@ -93,6 +95,13 @@ public final class S3ProxyConstants {
/** Alias a backend bucket to an alternate name. */
public static final String PROPERTY_ALIAS_BLOBSTORE =
"s3proxy.alias-blobstore";
/** Alias a backend bucket to an alternate name. */
public static final String PROPERTY_REGEX_BLOBSTORE =
"s3proxy.regex-blobstore";
public static final String PROPERTY_REGEX_BLOBSTORE_MATCH =
"match";
public static final String PROPERTY_REGEX_BLOBSTORE_REPLACE =
"replace";
/** Discard object data. */
public static final String PROPERTY_NULL_BLOBSTORE =
"s3proxy.null-blobstore";
@ -107,6 +116,13 @@ public final class S3ProxyConstants {
public static final String PROPERTY_MAXIMUM_TIME_SKEW =
"s3proxy.maximum-timeskew";
public static final String PROPERTY_ENCRYPTED_BLOBSTORE =
"s3proxy.encrypted-blobstore";
public static final String PROPERTY_ENCRYPTED_BLOBSTORE_PASSWORD =
"s3proxy.encrypted-blobstore-password";
public static final String PROPERTY_ENCRYPTED_BLOBSTORE_SALT =
"s3proxy.encrypted-blobstore-salt";
static final String PROPERTY_ALT_JCLOUDS_PREFIX = "alt.";
private S3ProxyConstants() {

Wyświetl plik

@ -53,8 +53,6 @@ import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
@ -68,8 +66,8 @@ import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Streams;
import com.google.common.escape.Escaper;
import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
@ -82,6 +80,9 @@ import com.google.common.net.HostAndPort;
import com.google.common.net.HttpHeaders;
import com.google.common.net.PercentEscaper;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.MultipartStream;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.KeyNotFoundException;
@ -162,6 +163,7 @@ public class S3ProxyHandler {
/** All supported x-amz- headers, except for x-amz-meta- user metadata. */
private static final Set<String> SUPPORTED_X_AMZ_HEADERS = ImmutableSet.of(
AwsHttpHeaders.ACL,
AwsHttpHeaders.API_VERSION,
AwsHttpHeaders.CONTENT_SHA256,
AwsHttpHeaders.COPY_SOURCE,
AwsHttpHeaders.COPY_SOURCE_IF_MATCH,
@ -200,6 +202,7 @@ public class S3ProxyHandler {
private final CrossOriginResourceSharing corsRules;
private final String servicePath;
private final int maximumTimeSkew;
private final XmlMapper mapper = new XmlMapper();
private final XMLOutputFactory xmlOutputFactory =
XMLOutputFactory.newInstance();
private BlobStoreLocator blobStoreLocator;
@ -299,8 +302,8 @@ public class S3ProxyHandler {
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHost();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
String virtualHostSuffix = "." + virtualHost.orElseThrow();
if (!hostHeader.equals(virtualHost.orElseThrow())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
@ -320,7 +323,7 @@ public class S3ProxyHandler {
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
logger.trace("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
@ -469,7 +472,7 @@ public class S3ProxyHandler {
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], UTF_8);
path[i] = URLDecoder.decode(path[i], StandardCharsets.UTF_8);
}
Map.Entry<String, BlobStore> provider =
@ -609,8 +612,9 @@ public class S3ProxyHandler {
}
}
if (!constantTimeEquals(expectedSignature,
authHeader.getSignature())) {
// AWS does not check signatures with OPTIONS verb
if (!method.equals("OPTIONS") && !constantTimeEquals(
expectedSignature, authHeader.getSignature())) {
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
@ -657,31 +661,33 @@ public class S3ProxyHandler {
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
handleContainerDelete(request, response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(request, response, blobStore,
path[1], path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
handleBlobRemove(request, response, blobStore, path[1],
path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
handleContainerList(request, response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
if (request.getParameter("acl") != null) {
handleGetContainerAcl(request, response, blobStore,
path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response);
} else if (request.getParameter("location") != null) {
handleContainerLocation(request, response);
return;
} else if ("".equals(request.getParameter("policy"))) {
} else if (request.getParameter("policy") != null) {
handleBucketPolicy(blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
} else if (request.getParameter("uploads") != null) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
@ -689,8 +695,8 @@ public class S3ProxyHandler {
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
if (request.getParameter("acl") != null) {
handleGetBlobAcl(request, response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
@ -704,7 +710,7 @@ public class S3ProxyHandler {
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
handleContainerExists(request, response, blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
@ -712,10 +718,11 @@ public class S3ProxyHandler {
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
if (request.getParameter("delete") != null) {
handleMultiBlobRemove(request, response, is, blobStore,
path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
} else if (request.getParameter("uploads") != null) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
@ -728,7 +735,7 @@ public class S3ProxyHandler {
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
if (request.getParameter("acl") != null) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
@ -750,7 +757,7 @@ public class S3ProxyHandler {
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
if (request.getParameter("acl") != null) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
@ -845,15 +852,6 @@ public class S3ProxyHandler {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
String containerName = path[1];
/*
* Only check access on bucket level. The preflight request
* might be for a PUT, so the object is not yet there.
*/
ContainerAccess access = blobStore.getContainerAccess(
containerName);
if (access == ContainerAccess.PRIVATE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
handleOptionsBlob(request, response, blobStore, containerName);
return;
}
@ -865,15 +863,16 @@ public class S3ProxyHandler {
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
private void handleGetContainerAcl(HttpServletResponse response,
BlobStore blobStore, String containerName)
throws IOException, S3Exception {
private void handleGetContainerAcl(HttpServletRequest request,
HttpServletResponse response, BlobStore blobStore,
String containerName) throws IOException, S3Exception {
if (!blobStore.containerExists(containerName)) {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
ContainerAccess access = blobStore.getContainerAccess(containerName);
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -930,7 +929,7 @@ public class S3ProxyHandler {
}
}
private static void handleSetContainerAcl(HttpServletRequest request,
private void handleSetContainerAcl(HttpServletRequest request,
HttpServletResponse response, InputStream is, BlobStore blobStore,
String containerName) throws IOException, S3Exception {
ContainerAccess access;
@ -951,7 +950,7 @@ public class S3ProxyHandler {
int ch = pis.read();
if (ch != -1) {
pis.unread(ch);
AccessControlPolicy policy = new XmlMapper().readValue(
AccessControlPolicy policy = mapper.readValue(
pis, AccessControlPolicy.class);
String accessString = mapXmlAclsToCannedPolicy(policy);
if (accessString.equals("private")) {
@ -964,14 +963,16 @@ public class S3ProxyHandler {
}
blobStore.setContainerAccess(containerName, access);
addCorsResponseHeader(request, response);
}
private void handleGetBlobAcl(HttpServletResponse response,
BlobStore blobStore, String containerName,
String blobName) throws IOException {
private void handleGetBlobAcl(HttpServletRequest request,
HttpServletResponse response, BlobStore blobStore,
String containerName, String blobName) throws IOException {
BlobAccess access = blobStore.getBlobAccess(containerName, blobName);
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -1028,7 +1029,7 @@ public class S3ProxyHandler {
}
}
private static void handleSetBlobAcl(HttpServletRequest request,
private void handleSetBlobAcl(HttpServletRequest request,
HttpServletResponse response, InputStream is, BlobStore blobStore,
String containerName, String blobName)
throws IOException, S3Exception {
@ -1050,7 +1051,7 @@ public class S3ProxyHandler {
int ch = pis.read();
if (ch != -1) {
pis.unread(ch);
AccessControlPolicy policy = new XmlMapper().readValue(
AccessControlPolicy policy = mapper.readValue(
pis, AccessControlPolicy.class);
String accessString = mapXmlAclsToCannedPolicy(policy);
if (accessString.equals("private")) {
@ -1063,6 +1064,7 @@ public class S3ProxyHandler {
}
blobStore.setBlobAccess(containerName, blobName, access);
addCorsResponseHeader(request, response);
}
/** Map XML ACLs to a canned policy if an exact tranformation exists. */
@ -1102,11 +1104,13 @@ public class S3ProxyHandler {
}
}
private void handleContainerList(HttpServletResponse response,
BlobStore blobStore) throws IOException {
private void handleContainerList(HttpServletRequest request,
HttpServletResponse response, BlobStore blobStore)
throws IOException {
PageSet<? extends StorageMetadata> buckets = blobStore.list();
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -1145,9 +1149,10 @@ public class S3ProxyHandler {
}
}
private void handleContainerLocation(HttpServletResponse response)
throws IOException {
private void handleContainerLocation(HttpServletRequest request,
HttpServletResponse response) throws IOException {
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -1175,17 +1180,20 @@ public class S3ProxyHandler {
HttpServletResponse response, BlobStore blobStore,
String container) throws IOException, S3Exception {
if (request.getParameter("delimiter") != null ||
request.getParameter("prefix") != null ||
request.getParameter("max-uploads") != null ||
request.getParameter("key-marker") != null ||
request.getParameter("upload-id-marker") != null) {
throw new UnsupportedOperationException();
}
String encodingType = request.getParameter("encoding-type");
String prefix = request.getParameter("prefix");
List<MultipartUpload> uploads = blobStore.listMultipartUploads(
container);
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -1202,11 +1210,23 @@ public class S3ProxyHandler {
xml.writeEmptyElement("NextKeyMarker");
xml.writeEmptyElement("NextUploadIdMarker");
xml.writeEmptyElement("Delimiter");
xml.writeEmptyElement("Prefix");
if (Strings.isNullOrEmpty(prefix)) {
xml.writeEmptyElement("Prefix");
} else {
writeSimpleElement(xml, "Prefix", encodeBlob(
encodingType, prefix));
}
writeSimpleElement(xml, "MaxUploads", "1000");
writeSimpleElement(xml, "IsTruncated", "false");
for (MultipartUpload upload : uploads) {
if (prefix != null &&
!upload.blobName().startsWith(prefix)) {
continue;
}
xml.writeStartElement("Upload");
writeSimpleElement(xml, "Key", upload.blobName());
@ -1234,14 +1254,16 @@ public class S3ProxyHandler {
}
}
private static void handleContainerExists(BlobStore blobStore,
private void handleContainerExists(HttpServletRequest request,
HttpServletResponse response, BlobStore blobStore,
String containerName) throws IOException, S3Exception {
if (!blobStore.containerExists(containerName)) {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
addCorsResponseHeader(request, response);
}
private static void handleContainerCreate(HttpServletRequest request,
private void handleContainerCreate(HttpServletRequest request,
HttpServletResponse response, InputStream is, BlobStore blobStore,
String containerName) throws IOException, S3Exception {
if (containerName.isEmpty()) {
@ -1270,7 +1292,7 @@ public class S3ProxyHandler {
locationString = null;
} else {
pis.unread(ch);
CreateBucketRequest cbr = new XmlMapper().readValue(
CreateBucketRequest cbr = mapper.readValue(
pis, CreateBucketRequest.class);
locationString = cbr.locationConstraint;
}
@ -1314,11 +1336,12 @@ public class S3ProxyHandler {
}
response.addHeader(HttpHeaders.LOCATION, "/" + containerName);
addCorsResponseHeader(request, response);
}
private static void handleContainerDelete(HttpServletResponse response,
BlobStore blobStore, String containerName)
throws IOException, S3Exception {
private void handleContainerDelete(HttpServletRequest request,
HttpServletResponse response, BlobStore blobStore,
String containerName) throws IOException, S3Exception {
if (!blobStore.containerExists(containerName)) {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
@ -1337,6 +1360,7 @@ public class S3ProxyHandler {
throw new S3Exception(S3ErrorCode.BUCKET_NOT_EMPTY);
}
addCorsResponseHeader(request, response);
response.setStatus(HttpServletResponse.SC_NO_CONTENT);
}
@ -1471,10 +1495,13 @@ public class S3ProxyHandler {
isListV2 ? "NextContinuationToken" : "NextMarker",
encodeBlob(encodingType, nextMarker));
if (Quirks.OPAQUE_MARKERS.contains(blobStoreType)) {
StorageMetadata sm = Iterables.getLast(set, null);
StorageMetadata sm = Streams.findLast(
set.stream()).orElse(null);
if (sm != null) {
lastKeyToMarker.put(Maps.immutableEntry(containerName,
sm.getName()), nextMarker);
lastKeyToMarker.put(Maps.immutableEntry(
containerName,
encodeBlob(encodingType, nextMarker)),
nextMarker);
}
}
} else {
@ -1487,8 +1514,11 @@ public class S3ProxyHandler {
case FOLDER:
// fallthrough
case RELATIVE_PATH:
commonPrefixes.add(metadata.getName());
continue;
if (delimiter != null) {
commonPrefixes.add(metadata.getName());
continue;
}
break;
default:
break;
}
@ -1509,10 +1539,16 @@ public class S3ProxyHandler {
writeSimpleElement(xml, "ETag", maybeQuoteETag(eTag));
}
writeSimpleElement(xml, "Size",
String.valueOf(metadata.getSize()));
writeSimpleElement(xml, "StorageClass",
StorageClass.fromTier(metadata.getTier()).toString());
Long size = metadata.getSize();
if (size != null) {
writeSimpleElement(xml, "Size", String.valueOf(size));
}
Tier tier = metadata.getTier();
if (tier != null) {
writeSimpleElement(xml, "StorageClass",
StorageClass.fromTier(tier).toString());
}
if (fetchOwner) {
writeOwnerStanza(xml);
@ -1537,17 +1573,20 @@ public class S3ProxyHandler {
}
}
private static void handleBlobRemove(HttpServletResponse response,
BlobStore blobStore, String containerName,
String blobName) throws IOException, S3Exception {
private void handleBlobRemove(HttpServletRequest request,
HttpServletResponse response, BlobStore blobStore,
String containerName, String blobName)
throws IOException, S3Exception {
blobStore.removeBlob(containerName, blobName);
addCorsResponseHeader(request, response);
response.sendError(HttpServletResponse.SC_NO_CONTENT);
}
private void handleMultiBlobRemove(HttpServletResponse response,
InputStream is, BlobStore blobStore, String containerName)
private void handleMultiBlobRemove(HttpServletRequest request,
HttpServletResponse response, InputStream is,
BlobStore blobStore, String containerName)
throws IOException, S3Exception {
DeleteMultipleObjectsRequest dmor = new XmlMapper().readValue(
DeleteMultipleObjectsRequest dmor = mapper.readValue(
is, DeleteMultipleObjectsRequest.class);
if (dmor.objects == null) {
throw new S3Exception(S3ErrorCode.MALFORMED_X_M_L);
@ -1562,6 +1601,7 @@ public class S3ProxyHandler {
blobStore.removeBlobs(containerName, blobNames);
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -1588,7 +1628,7 @@ public class S3ProxyHandler {
}
}
private static void handleBlobMetadata(HttpServletRequest request,
private void handleBlobMetadata(HttpServletRequest request,
HttpServletResponse response,
BlobStore blobStore, String containerName,
String blobName) throws IOException, S3Exception {
@ -1633,6 +1673,7 @@ public class S3ProxyHandler {
response.setStatus(HttpServletResponse.SC_OK);
addMetadataToResponse(request, response, metadata);
addCorsResponseHeader(request, response);
}
private void handleOptionsBlob(HttpServletRequest request,
@ -1746,7 +1787,7 @@ public class S3ProxyHandler {
try (InputStream is = blob.getPayload().openStream();
OutputStream os = response.getOutputStream()) {
ByteStreams.copy(is, os);
is.transferTo(os);
os.flush();
}
}
@ -1756,7 +1797,8 @@ public class S3ProxyHandler {
String destContainerName, String destBlobName)
throws IOException, S3Exception {
String copySourceHeader = request.getHeader(AwsHttpHeaders.COPY_SOURCE);
copySourceHeader = URLDecoder.decode(copySourceHeader, UTF_8);
copySourceHeader = URLDecoder.decode(
copySourceHeader, StandardCharsets.UTF_8);
if (copySourceHeader.startsWith("/")) {
// Some clients like boto do not include the leading slash
copySourceHeader = copySourceHeader.substring(1);
@ -1853,6 +1895,7 @@ public class S3ProxyHandler {
BlobMetadata blobMetadata = blobStore.blobMetadata(destContainerName,
destBlobName);
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -2182,6 +2225,7 @@ public class S3ProxyHandler {
}
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -2199,8 +2243,6 @@ public class S3ProxyHandler {
} catch (XMLStreamException xse) {
throw new IOException(xse);
}
addCorsResponseHeader(request, response);
}
private void handleCompleteMultipartUpload(HttpServletRequest request,
@ -2211,7 +2253,7 @@ public class S3ProxyHandler {
PutOptions options;
if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(
blobStore))) {
metadata = blobStore.getBlob(containerName, uploadId).getMetadata();
metadata = blobStore.blobMetadata(containerName, uploadId);
BlobAccess access = blobStore.getBlobAccess(containerName,
uploadId);
options = new PutOptions().setBlobAccess(access);
@ -2262,7 +2304,7 @@ public class S3ProxyHandler {
} else {
CompleteMultipartUploadRequest cmu;
try {
cmu = new XmlMapper().readValue(
cmu = mapper.readValue(
is, CompleteMultipartUploadRequest.class);
} catch (JsonParseException jpe) {
throw new S3Exception(S3ErrorCode.MALFORMED_X_M_L, jpe);
@ -2307,6 +2349,7 @@ public class S3ProxyHandler {
}
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (PrintWriter writer = response.getWriter()) {
response.setStatus(HttpServletResponse.SC_OK);
response.setContentType(XML_CONTENT_TYPE);
@ -2370,8 +2413,6 @@ public class S3ProxyHandler {
} catch (XMLStreamException xse) {
throw new IOException(xse);
}
addCorsResponseHeader(request, response);
}
private void handleAbortMultipartUpload(HttpServletRequest request,
@ -2437,6 +2478,7 @@ public class S3ProxyHandler {
String encodingType = request.getParameter("encoding-type");
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -2494,8 +2536,6 @@ public class S3ProxyHandler {
} catch (XMLStreamException xse) {
throw new IOException(xse);
}
addCorsResponseHeader(request, response);
}
private void handleCopyPart(HttpServletRequest request,
@ -2504,7 +2544,8 @@ public class S3ProxyHandler {
throws IOException, S3Exception {
// TODO: duplicated from handlePutBlob
String copySourceHeader = request.getHeader(AwsHttpHeaders.COPY_SOURCE);
copySourceHeader = URLDecoder.decode(copySourceHeader, UTF_8);
copySourceHeader = URLDecoder.decode(
copySourceHeader, StandardCharsets.UTF_8);
if (copySourceHeader.startsWith("/")) {
// Some clients like boto do not include the leading slash
copySourceHeader = copySourceHeader.substring(1);
@ -2577,11 +2618,21 @@ public class S3ProxyHandler {
"ArgumentValue", partNumberString));
}
// GCS only supports 32 parts so partition MPU into 32-part chunks.
String blobStoreType = getBlobStoreType(blobStore);
if (blobStoreType.equals("google-cloud-storage")) {
// fix up 1-based part numbers
uploadId = String.format(
"%s_%08d", uploadId, ((partNumber - 1) / 32) + 1);
partNumber = ((partNumber - 1) % 32) + 1;
}
// TODO: how to reconstruct original mpu?
MultipartUpload mpu = MultipartUpload.create(containerName,
blobName, uploadId, createFakeBlobMetadata(blobStore),
new PutOptions());
// TODO: Blob can leak on precondition failures.
Blob blob = blobStore.getBlob(sourceContainerName, sourceBlobName,
options);
if (blob == null) {
@ -2628,7 +2679,6 @@ public class S3ProxyHandler {
long contentLength =
blobMetadata.getContentMetadata().getContentLength();
String blobStoreType = getBlobStoreType(blobStore);
try (InputStream is = blob.getPayload().openStream()) {
if (blobStoreType.equals("azureblob")) {
// Azure has a smaller maximum part size than S3. Split a
@ -2662,6 +2712,7 @@ public class S3ProxyHandler {
}
response.setCharacterEncoding(UTF_8);
addCorsResponseHeader(request, response);
try (Writer writer = response.getWriter()) {
response.setContentType(XML_CONTENT_TYPE);
XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(
@ -2680,8 +2731,6 @@ public class S3ProxyHandler {
} catch (XMLStreamException xse) {
throw new IOException(xse);
}
addCorsResponseHeader(request, response);
}
private void handleUploadPart(HttpServletRequest request,
@ -2967,6 +3016,10 @@ public class S3ProxyHandler {
corsRules.getAllowedOrigin(corsOrigin));
response.addHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS,
corsRules.getAllowedMethods());
if (corsRules.isAllowCredentials()) {
response.addHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS,
"true");
}
}
}

Wyświetl plik

@ -21,11 +21,12 @@ import java.io.InputStream;
import java.util.concurrent.TimeoutException;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.common.collect.ImmutableMap;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.jclouds.blobstore.BlobStore;
@ -38,6 +39,7 @@ import org.jclouds.util.Throwables2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Jetty-specific handler for S3 requests. */
final class S3ProxyHandlerJetty extends AbstractHandler {
private static final Logger logger = LoggerFactory.getLogger(
@ -108,12 +110,14 @@ final class S3ProxyHandlerJetty extends AbstractHandler {
new S3Exception(S3ErrorCode.BAD_DIGEST));
break;
default:
logger.debug("HttpResponseException:", hre);
response.sendError(status, hre.getContent());
break;
}
baseRequest.setHandled(true);
return;
} catch (IllegalArgumentException iae) {
logger.debug("IllegalArgumentException:", iae);
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
iae.getMessage());
baseRequest.setHandled(true);
@ -129,6 +133,7 @@ final class S3ProxyHandlerJetty extends AbstractHandler {
baseRequest.setHandled(true);
return;
} catch (UnsupportedOperationException uoe) {
logger.debug("UnsupportedOperationException:", uoe);
response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED,
uoe.getMessage());
baseRequest.setHandled(true);

Wyświetl plik

@ -0,0 +1,48 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.crypto;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
/**
 * Shared constants for S3Proxy's encrypted-blobstore layer.
 *
 * <p>Defines the on-disk encryption format: the cipher transformation, the
 * layout of the fixed-size "part padding" structure appended to each
 * encrypted part, and the metadata keys used to mark encrypted blobs.
 */
public final class Constants {
// Format version written into each part-padding structure.
public static final short VERSION = 1;
// JCE cipher transformation used for blob encryption/decryption.
public static final String AES_CIPHER = "AES/CFB/NoPadding";
// Suffix marking encrypted blobs; presumably appended to backend blob
// names — TODO confirm against the encrypter code.
public static final String S3_ENC_SUFFIX = ".s3enc";
// Folder prefix for in-progress multipart-upload state — assumption;
// verify against the multipart handling code.
public static final String MPU_FOLDER = ".mpu/";
// Matches ETags ending in "-<digits>", capturing the trailing number
// (the multipart part-count suffix of an S3-style multipart ETag).
public static final Pattern MPU_ETAG_SUFFIX_PATTERN =
Pattern.compile(".*-([0-9]+)");
// Blob-metadata key recording the number of encrypted parts.
public static final String METADATA_ENCRYPTION_PARTS =
"s3proxy_encryption_parts";
// Blob-metadata key flagging an encrypted multipart blob.
public static final String METADATA_IS_ENCRYPTED_MULTIPART =
"s3proxy_encryption_multipart";
// Blob-metadata key storing the multipart upload's key name.
public static final String METADATA_MULTIPART_KEY =
"s3proxy_mpu_key";
// AES block size in bytes (relevant for CFB-mode range reads).
public static final int AES_BLOCK_SIZE = 16;
// Total size in bytes of the part-padding structure read from the end
// of each encrypted part.
public static final int PADDING_BLOCK_SIZE = 64;
// Magic delimiter bytes identifying an encrypted part's padding block.
public static final byte[] DELIMITER =
"-S3-ENC-".getBytes(StandardCharsets.UTF_8);
// Byte lengths of the individual fields inside the padding structure.
public static final int PADDING_DELIMITER_LENGTH = DELIMITER.length;
public static final int PADDING_IV_LENGTH = 16;
public static final int PADDING_PART_LENGTH = 4;
public static final int PADDING_SIZE_LENGTH = 8;
public static final int PADDING_VERSION_LENGTH = 2;
// Utility class: not instantiable.
private Constants() {
throw new AssertionError("Cannot instantiate utility constructor");
}
}

Wyświetl plik

@ -0,0 +1,319 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.crypto;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
import javax.annotation.concurrent.ThreadSafe;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.BoundedInputStream;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.options.GetOptions;
@ThreadSafe
public class Decryption {
private final SecretKey encryptionKey;
private TreeMap<Integer, PartPadding> partList;
private long outputOffset;
private long outputLength;
private boolean skipFirstBlock;
private long unencryptedSize;
private long encryptedSize;
private long startAt;
private int skipParts;
private long skipPartBytes;
private boolean isEncrypted;
/**
 * Inspects the blob's trailing part paddings to decide whether it is
 * encrypted and, if so, maps the requested plaintext range onto the
 * encrypted byte range (including multipart blobs).
 *
 * @param key the AES decryption key
 * @param blobStore blob store used to read the padding blocks
 * @param meta metadata of the target blob; may be null if it does not exist
 * @param offset requested plaintext offset
 * @param length requested plaintext length; 0 means "to the end"
 * @throws IOException if a padding block cannot be read
 */
public Decryption(SecretKeySpec key, BlobStore blobStore,
    BlobMetadata meta,
    long offset, long length) throws IOException {
    encryptionKey = key;
    outputLength = length;
    isEncrypted = true;
    // if blob does not exist or size is not larger than the part padding
    // then the file is considered not encrypted
    if (meta == null || meta.getSize() <= Constants.PADDING_BLOCK_SIZE) {
        blobIsNotEncrypted(offset);
        return;
    }
    // get the 64 byte of part padding from the end of the blob
    GetOptions options = new GetOptions();
    options.range(meta.getSize() - Constants.PADDING_BLOCK_SIZE,
        meta.getSize());
    Blob blob =
        blobStore.getBlob(meta.getContainer(), meta.getName(), options);
    // read the padding structure
    PartPadding lastPartPadding = PartPadding.readPartPaddingFromBlob(blob);
    // no delimiter match means the tail is plain data, not a padding
    if (!Arrays.equals(
        lastPartPadding.getDelimiter().getBytes(StandardCharsets.UTF_8),
        Constants.DELIMITER)) {
        blobIsNotEncrypted(offset);
        return;
    }
    partList = new TreeMap<>();
    // detect multipart
    if (lastPartPadding.getPart() > 1 &&
        meta.getSize() >
            (lastPartPadding.getSize() + Constants.PADDING_BLOCK_SIZE)) {
        unencryptedSize = lastPartPadding.getSize();
        encryptedSize =
            lastPartPadding.getSize() + Constants.PADDING_BLOCK_SIZE;
        // note that parts are in reversed order
        int part = 1;
        // add the last part to the list
        partList.put(part, lastPartPadding);
        // loop part by part from end to the beginning
        // to build a list of all blocks
        while (encryptedSize < meta.getSize()) {
            // get the next block
            // rewind by the current encrypted block size
            // minus the encryption padding
            options = new GetOptions();
            long startAt = (meta.getSize() - encryptedSize) -
                Constants.PADDING_BLOCK_SIZE;
            long endAt = meta.getSize() - encryptedSize - 1;
            options.range(startAt, endAt);
            blob = blobStore.getBlob(meta.getContainer(), meta.getName(),
                options);
            part++;
            // read the padding structure
            PartPadding partPadding =
                PartPadding.readPartPaddingFromBlob(blob);
            // add the part to the list
            this.partList.put(part, partPadding);
            // update the encrypted size
            encryptedSize = encryptedSize +
                (partPadding.getSize() + Constants.PADDING_BLOCK_SIZE);
            unencryptedSize = this.unencryptedSize + partPadding.getSize();
        }
    } else {
        // add the single part to the list
        partList.put(1, lastPartPadding);
        // update the unencrypted size
        unencryptedSize = meta.getSize() - Constants.PADDING_BLOCK_SIZE;
        // update the encrypted size
        encryptedSize = meta.getSize();
    }
    // calculate the offset
    calculateOffset(offset);
    // if there is a offset and a length set the output length
    if (offset > 0 && length == 0) {
        outputLength = unencryptedSize - offset;
    }
}
// Marks the blob as plain (unencrypted) data: reads then start directly
// at the caller-requested byte offset with no decryption applied.
private void blobIsNotEncrypted(long offset) {
    isEncrypted = false;
    startAt = offset;
}
/**
 * Positions the read window over the last {@code outputLength}
 * plaintext bytes. Because {@code unencryptedSize} is already known,
 * the tail offset can be derived directly and fed through the regular
 * offset calculation.
 *
 * @return the encrypted byte position to start reading from
 */
public final long calculateTail() {
    calculateOffset(unencryptedSize - outputLength);
    return startAt;
}
/** @return total size of the encrypted blob, padding blocks included. */
public final long getEncryptedSize() {
    return encryptedSize;
}
/**
 * Translates an inclusive plaintext end position into the
 * corresponding position in the encrypted blob, adding the padding
 * blocks of every part the range spans (multipart) or one extra AES
 * block needed for CFB decryption (single part).
 *
 * @param endAt last plaintext byte position (inclusive)
 * @return the encrypted byte position to read up to
 */
public final long calculateEndAt(long endAt) {
    // need to have always one more
    endAt++;
    // handle multipart
    if (partList.size() > 1) {
        long plaintextSize = 0;
        // always skip 1 part at the end
        int partCounter = 1;
        // we need the map in reversed order
        for (Map.Entry<Integer, PartPadding> part : partList.descendingMap()
            .entrySet()) {
            // check the parts that are between offset and end
            plaintextSize = plaintextSize + part.getValue().getSize();
            if (endAt > plaintextSize) {
                partCounter++;
            } else {
                break;
            }
        }
        // add the paddings of all parts
        endAt = endAt + ((long) Constants.PADDING_BLOCK_SIZE * partCounter);
    } else {
        // we need to read one AES block more in AES CFB mode
        long rest = endAt % Constants.AES_BLOCK_SIZE;
        if (rest > 0) {
            endAt = endAt + Constants.AES_BLOCK_SIZE;
        }
    }
    return endAt;
}
// open the streams and pipes
/**
 * Wraps the raw blob stream with decryption, skips to the requested
 * plaintext offset and bounds the result to the requested length.
 *
 * @param is the raw input stream (expected to begin at the position
 *           returned by {@link #getStartAt()} — supplied by the caller)
 * @return the plaintext stream, or {@code is} unchanged when the blob
 *         is not encrypted
 * @throws IOException if skipping the leading bytes fails
 */
public final InputStream openStream(InputStream is) throws IOException {
    // if the blob is not encrypted return the unencrypted stream
    if (!isEncrypted) {
        return is;
    }
    // pass input stream through decryption
    InputStream dis = new DecryptionInputStream(is, encryptionKey, partList,
        skipParts, skipPartBytes);
    // skip some bytes if necessary
    long offset = outputOffset;
    if (this.skipFirstBlock) {
        // one extra AES block was read to prime CFB decryption; drop it
        offset = offset + Constants.AES_BLOCK_SIZE;
    }
    IOUtils.skipFully(dis, offset);
    // trim the stream to a specific length if needed
    return new BoundedInputStream(dis, outputLength);
}
/**
 * Maps a plaintext offset onto the encrypted layout: computes the
 * encrypted start position ({@code startAt}), how many whole parts to
 * skip ({@code skipParts}), the bytes to skip inside the first read
 * part ({@code skipPartBytes}) and the residual plaintext bytes to
 * discard after decryption ({@code outputOffset}/{@code skipFirstBlock}).
 *
 * @param offset the requested plaintext offset
 */
private void calculateOffset(long offset) {
    startAt = 0;
    skipParts = 0;
    // handle multipart
    if (partList.size() > 1) {
        // init counters
        long plaintextSize = 0;
        long encryptedSize = 0;
        long partOffset;
        long partStartAt = 0;
        // we need the map in reversed order
        for (Map.Entry<Integer, PartPadding> part : partList.descendingMap()
            .entrySet()) {
            // compute the plaintext size of the current part
            plaintextSize = plaintextSize + part.getValue().getSize();
            // check if the offset is located in another part
            if (offset > plaintextSize) {
                // compute the encrypted size of the skipped part
                encryptedSize = encryptedSize + part.getValue().getSize() +
                    Constants.PADDING_BLOCK_SIZE;
                // compute offset in this part
                partOffset = offset - plaintextSize;
                // skip the first block in CFB mode
                skipFirstBlock = partOffset >= Constants.AES_BLOCK_SIZE;
                // compute the offset of the output
                outputOffset = partOffset % Constants.AES_BLOCK_SIZE;
                // skip this part
                skipParts++;
                // we always need to read one previous AES block in CFB mode
                // if we read from offset
                if (partOffset > Constants.AES_BLOCK_SIZE) {
                    long rest = partOffset % Constants.AES_BLOCK_SIZE;
                    partStartAt =
                        (partOffset - Constants.AES_BLOCK_SIZE) - rest;
                } else {
                    partStartAt = 0;
                }
            } else {
                // start at a specific byte position
                // while respecting other parts
                startAt = encryptedSize + partStartAt;
                // skip part bytes if we are not starting
                // from the beginning of a part
                skipPartBytes = partStartAt;
                break;
            }
        }
    }
    // handle single part
    if (skipParts == 0) {
        // skip the first block in CFB mode
        skipFirstBlock = offset >= Constants.AES_BLOCK_SIZE;
        // compute the offset of the output
        outputOffset = offset % Constants.AES_BLOCK_SIZE;
        // we always need to read one previous AES block in CFB mode
        // if we read from offset
        if (offset > Constants.AES_BLOCK_SIZE) {
            long rest = offset % Constants.AES_BLOCK_SIZE;
            startAt = (offset - Constants.AES_BLOCK_SIZE) - rest;
        }
        // skip part bytes if we are not starting
        // from the beginning of a part
        skipPartBytes = startAt;
    }
}
/** @return the byte position in the encrypted blob to start reading at. */
public final long getStartAt() {
    return startAt;
}
/** @return whether the blob was detected as encrypted. */
public final boolean isEncrypted() {
    return isEncrypted;
}
/**
 * Returns the plaintext content length: the requested output length
 * when one was set or computed, otherwise the full unencrypted size.
 */
public final long getContentLength() {
    if (outputLength > 0) {
        return outputLength;
    } else {
        return unencryptedSize;
    }
}
}

Wyświetl plik

@ -0,0 +1,381 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.crypto;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.TreeMap;
import javax.annotation.concurrent.ThreadSafe;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.ShortBufferException;
import org.apache.commons.io.IOUtils;
@ThreadSafe
public class DecryptionInputStream extends FilterInputStream {
    // the cipher engine to use to process stream data
    private final Cipher cipher;
    // the secret key
    private final SecretKey key;
    // the list of parts we expect in the stream
    private final TreeMap<Integer, PartPadding> parts;
    /* the buffer holding data that have been read in from the
       underlying stream, but have not been processed by the cipher
       engine. */
    private final byte[] ibuffer = new byte[4096];
    // having reached the end of the underlying input stream
    private boolean done;
    /* the buffer holding data that have been processed by the cipher
       engine, but have not been read out */
    private byte[] obuffer;
    // the offset pointing to the next "new" byte
    private int ostart;
    // the offset pointing to the last "new" byte
    private int ofinish;
    // stream status
    private boolean closed;
    // the current part
    private int part;
    // the remaining bytes of the current part
    private long partBytesRemain;

    /**
     * Constructs a CipherInputStream from an InputStream and a
     * Cipher.
     * <br>Note: if the specified input stream or cipher is
     * null, a NullPointerException may be thrown later when
     * they are used.
     *
     * @param is the to-be-processed input stream
     * @param key the decryption key
     * @param parts the list of parts
     * @param skipParts the amount of parts to skip
     * @param skipPartBytes the amount of part bytes to skip
     * @throws IOException if cipher fails
     */
    public DecryptionInputStream(InputStream is, SecretKey key,
        TreeMap<Integer, PartPadding> parts, int skipParts,
        long skipPartBytes) throws IOException {
        super(is);
        in = is;
        this.parts = parts;
        this.key = key;
        // parts are keyed in reverse stream order: the highest key is the
        // first part of the blob, so skipping parts lowers the start key
        PartPadding partPadding = parts.get(parts.size() - skipParts);
        try {
            // init the cipher with the IV of the part we start in
            cipher = Cipher.getInstance(Constants.AES_CIPHER);
            cipher.init(Cipher.DECRYPT_MODE, key, partPadding.getIv());
        } catch (Exception e) {
            throw new IOException(e);
        }
        // set the part to begin with
        part = parts.size() - skipParts;
        // adjust part size due to offset
        partBytesRemain = parts.get(part).getSize() - skipPartBytes;
    }

    /**
     * Ensure obuffer is big enough for the next update or doFinal
     * operation, given the input length <code>inLen</code> (in bytes)
     * The ostart and ofinish indices are reset to 0.
     *
     * @param inLen the input length (in bytes)
     */
    private void ensureCapacity(int inLen) {
        int minLen = cipher.getOutputSize(inLen);
        if (obuffer == null || obuffer.length < minLen) {
            obuffer = new byte[minLen];
        }
        ostart = 0;
        ofinish = 0;
    }

    /**
     * Private convenience function, read in data from the underlying
     * input stream and process them with cipher. This method is called
     * when the processed bytes inside obuffer has been exhausted.
     * <p>
     * Entry condition: ostart = ofinish
     * <p>
     * Exit condition: ostart = 0 AND ostart <= ofinish
     * <p>
     * return (ofinish-ostart) (we have this many bytes for you)
     * return 0 (no data now, but could have more later)
     * return -1 (absolutely no more data)
     * <p>
     * Note: Exceptions are only thrown after the stream is completely read.
     * For AEAD ciphers a read() of any length will internally cause the
     * whole stream to be read fully and verify the authentication tag before
     * returning decrypted data or exceptions.
     */
    private int getMoreData() throws IOException {
        if (done) {
            return -1;
        }
        // never read across a part boundary; clamp to the bytes left
        int readLimit = ibuffer.length;
        if (partBytesRemain < ibuffer.length) {
            readLimit = (int) partBytesRemain;
        }
        int readin;
        if (partBytesRemain == 0) {
            readin = -1;
        } else {
            readin = in.read(ibuffer, 0, readLimit);
        }
        if (readin == -1) {
            // current part is exhausted; finish the cipher for this part
            ensureCapacity(0);
            try {
                ofinish = cipher.doFinal(obuffer, 0);
            } catch (Exception e) {
                throw new IOException(e);
            }
            int nextPart = part - 1;
            if (parts.containsKey(nextPart)) {
                // reset cipher with the IV of the next part
                PartPadding partPadding = parts.get(nextPart);
                try {
                    cipher.init(Cipher.DECRYPT_MODE, key, partPadding.getIv());
                } catch (Exception e) {
                    throw new IOException(e);
                }
                // update to the next part
                part = nextPart;
                // update the remaining bytes of the next part
                partBytesRemain = parts.get(nextPart).getSize();
                // the padding block between parts must be skipped entirely;
                // a short skip would desynchronize the cipher stream
                IOUtils.skipFully(in, Constants.PADDING_BLOCK_SIZE);
                return ofinish;
            } else {
                done = true;
                if (ofinish == 0) {
                    return -1;
                } else {
                    return ofinish;
                }
            }
        }
        ensureCapacity(readin);
        try {
            ofinish = cipher.update(ibuffer, 0, readin, obuffer, ostart);
        } catch (ShortBufferException e) {
            throw new IOException(e);
        }
        partBytesRemain = partBytesRemain - readin;
        return ofinish;
    }

    /**
     * Reads the next byte of data from this input stream. The value
     * byte is returned as an <code>int</code> in the range
     * <code>0</code> to <code>255</code>. If no byte is available
     * because the end of the stream has been reached, the value
     * <code>-1</code> is returned. This method blocks until input data
     * is available, the end of the stream is detected, or an exception
     * is thrown.
     *
     * @return the next byte of data, or <code>-1</code> if the end of the
     * stream is reached.
     * @throws IOException if an I/O error occurs.
     */
    @Override
    public final int read() throws IOException {
        if (ostart >= ofinish) {
            // we loop for new data as the spec says we are blocking
            int i = 0;
            while (i == 0) {
                i = getMoreData();
            }
            if (i == -1) {
                return -1;
            }
        }
        return (int) obuffer[ostart++] & 0xff;
    }

    /**
     * Reads up to <code>b.length</code> bytes of data from this input
     * stream into an array of bytes.
     * <p>
     * The <code>read</code> method of <code>InputStream</code> calls
     * the <code>read</code> method of three arguments with the arguments
     * <code>b</code>, <code>0</code>, and <code>b.length</code>.
     *
     * @param b the buffer into which the data is read.
     * @return the total number of bytes read into the buffer, or
     * <code>-1</code> is there is no more data because the end of
     * the stream has been reached.
     * @throws IOException if an I/O error occurs.
     * @see java.io.InputStream#read(byte[], int, int)
     */
    @Override
    public final int read(byte[] b) throws IOException {
        return read(b, 0, b.length);
    }

    /**
     * Reads up to <code>len</code> bytes of data from this input stream
     * into an array of bytes. This method blocks until some input is
     * available. If the first argument is <code>null,</code> up to
     * <code>len</code> bytes are read and discarded.
     *
     * @param b the buffer into which the data is read.
     * @param off the start offset in the destination array
     * <code>buf</code>
     * @param len the maximum number of bytes read.
     * @return the total number of bytes read into the buffer, or
     * <code>-1</code> if there is no more data because the end of
     * the stream has been reached.
     * @throws IOException if an I/O error occurs.
     * @see java.io.InputStream#read()
     */
    @Override
    public final int read(byte[] b, int off, int len) throws IOException {
        if (ostart >= ofinish) {
            // we loop for new data as the spec says we are blocking
            int i = 0;
            while (i == 0) {
                i = getMoreData();
            }
            if (i == -1) {
                return -1;
            }
        }
        if (len <= 0) {
            return 0;
        }
        int available = ofinish - ostart;
        if (len < available) {
            available = len;
        }
        if (b != null) {
            System.arraycopy(obuffer, ostart, b, off, available);
        }
        ostart = ostart + available;
        return available;
    }

    /**
     * Skips <code>n</code> bytes of input from the bytes that can be read
     * from this input stream without blocking.
     *
     * <p>Fewer bytes than requested might be skipped.
     * The actual number of bytes skipped is equal to <code>n</code> or
     * the result of a call to
     * {@link #available() available},
     * whichever is smaller.
     * If <code>n</code> is less than zero, no bytes are skipped.
     *
     * <p>The actual number of bytes skipped is returned.
     *
     * @param n the number of bytes to be skipped.
     * @return the actual number of bytes skipped.
     * @throws IOException if an I/O error occurs.
     */
    @Override
    public final long skip(long n) throws IOException {
        int available = ofinish - ostart;
        if (n > available) {
            n = available;
        }
        if (n < 0) {
            return 0;
        }
        ostart += (int) n;
        return n;
    }

    /**
     * Returns the number of bytes that can be read from this input
     * stream without blocking. The <code>available</code> method of
     * <code>InputStream</code> returns <code>0</code>. This method
     * <B>should</B> be overridden by subclasses.
     *
     * @return the number of bytes that can be read from this input stream
     * without blocking.
     */
    @Override
    public final int available() {
        return ofinish - ostart;
    }

    /**
     * Closes this input stream and releases any system resources
     * associated with the stream.
     * <p>
     * The <code>close</code> method of <code>CipherInputStream</code>
     * calls the <code>close</code> method of its underlying input
     * stream.
     *
     * @throws IOException if an I/O error occurs.
     */
    @Override
    public final void close() throws IOException {
        if (closed) {
            return;
        }
        closed = true;
        in.close();
        // Throw away the unprocessed data and throw no crypto exceptions.
        // AEAD ciphers are read fully before closing. Any authentication
        // exceptions would occur while reading.
        if (!done) {
            ensureCapacity(0);
            try {
                cipher.doFinal(obuffer, 0);
            } catch (Exception ignored) {
                // Ignored: the rest of the stream is unused.
            }
        }
        obuffer = null;
    }

    /**
     * Tests if this input stream supports the <code>mark</code>
     * and <code>reset</code> methods, which it does not.
     *
     * @return <code>false</code>, since this class does not support the
     * <code>mark</code> and <code>reset</code> methods.
     * @see java.io.InputStream#mark(int)
     * @see java.io.InputStream#reset()
     */
    @Override
    public final boolean markSupported() {
        return false;
    }
}

Wyświetl plik

@ -0,0 +1,56 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.crypto;
import java.io.IOException;
import java.io.InputStream;
import java.security.SecureRandom;
import javax.annotation.concurrent.ThreadSafe;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
@ThreadSafe
public class Encryption {
    // AES-encrypting view over the raw payload stream
    private final InputStream cis;
    // random IV for this part, later written into the part padding
    private final IvParameterSpec iv;
    // part number this encryption belongs to
    private final int part;

    /**
     * Prepares AES encryption of the given raw stream with a freshly
     * generated random IV.
     *
     * @param key the AES encryption key
     * @param isRaw the plaintext input stream
     * @param partNumber the part number recorded in the padding
     * @throws Exception if the cipher cannot be created or initialized
     */
    public Encryption(SecretKeySpec key, InputStream isRaw, int partNumber)
        throws Exception {
        iv = generateIV();
        Cipher encryptionCipher = Cipher.getInstance(Constants.AES_CIPHER);
        encryptionCipher.init(Cipher.ENCRYPT_MODE, key, iv);
        cis = new CipherInputStream(isRaw, encryptionCipher);
        part = partNumber;
    }

    /**
     * Returns the encrypted stream with the trailing part padding
     * (delimiter, IV, part number, size, version) appended.
     */
    public final InputStream openStream() throws IOException {
        return new EncryptionInputStream(cis, part, iv);
    }

    // Creates one AES-block worth of cryptographically random IV bytes.
    private IvParameterSpec generateIV() {
        byte[] ivBytes = new byte[Constants.AES_BLOCK_SIZE];
        new SecureRandom().nextBytes(ivBytes);
        return new IvParameterSpec(ivBytes);
    }
}

Wyświetl plik

@ -0,0 +1,130 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.crypto;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import javax.crypto.spec.IvParameterSpec;
/**
 * Appends the encryption part padding to the end of a wrapped stream.
 * While the delegate stream yields data, bytes are counted into
 * {@code size}; once it is exhausted, the padding block (containing the
 * IV, part number and counted size) is served, after which the stream
 * reports end-of-file.
 */
public class EncryptionInputStream extends InputStream {
    // part number written into the padding block
    private final int part;
    // IV written into the padding block
    private final IvParameterSpec iv;
    // true once the padding block has been installed as the delegate
    private boolean hasPadding;
    // number of payload bytes streamed so far (recorded in the padding)
    private long size;
    // current delegate: payload stream, then padding buffer, then null (EOF)
    private InputStream in;
    public EncryptionInputStream(InputStream in, int part,
        IvParameterSpec iv) {
        this.part = part;
        this.iv = iv;
        this.in = in;
    }
    // Padding (64 byte)
    // Delimiter (8 byte)
    // IV (16 byte)
    // Part (4 byte)
    // Size (8 byte)
    // Version (2 byte)
    // Reserved (26 byte)
    // Advances the delegate: closes the current stream, then serves the
    // padding block exactly once; afterwards sets in = null to signal EOF.
    final void padding() throws IOException {
        if (in != null) {
            in.close();
        }
        if (!hasPadding) {
            ByteBuffer bb = ByteBuffer.allocate(Constants.PADDING_BLOCK_SIZE);
            bb.put(Constants.DELIMITER);
            bb.put(iv.getIV());
            bb.putInt(part);
            bb.putLong(size);
            bb.putShort(Constants.VERSION);
            in = new ByteArrayInputStream(bb.array());
            hasPadding = true;
        } else {
            in = null;
        }
    }
    @Override
    public final int available() throws IOException {
        if (in == null) {
            return 0; // no way to signal EOF from available()
        }
        return in.available();
    }
    @Override
    public final int read() throws IOException {
        // fall through to the padding (and then EOF) as delegates drain
        while (in != null) {
            int c = in.read();
            if (c != -1) {
                size++;
                return c;
            }
            padding();
        }
        return -1;
    }
    @Override
    public final int read(byte[] b, int off, int len) throws IOException {
        if (in == null) {
            return -1;
        } else if (b == null) {
            throw new NullPointerException();
        } else if (off < 0 || len < 0 || len > b.length - off) {
            throw new IndexOutOfBoundsException();
        } else if (len == 0) {
            return 0;
        }
        do {
            int n = in.read(b, off, len);
            if (n > 0) {
                size = size + n;
                return n;
            }
            padding();
        } while (in != null);
        return -1;
    }
    @Override
    public final void close() throws IOException {
        // close every remaining delegate, preserving the first failure
        // and suppressing the rest
        IOException ioe = null;
        while (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                if (ioe == null) {
                    ioe = e;
                } else {
                    ioe.addSuppressed(e);
                }
            }
            padding();
        }
        if (ioe != null) {
            throw ioe;
        }
    }
}

Wyświetl plik

@ -0,0 +1,88 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.crypto;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import javax.crypto.spec.IvParameterSpec;
import org.apache.commons.io.IOUtils;
import org.jclouds.blobstore.domain.Blob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The fixed-size padding block appended to each encrypted part,
 * carrying the delimiter, IV, part number, plaintext size and version.
 */
public class PartPadding {
    private static final Logger logger =
        LoggerFactory.getLogger(PartPadding.class);
    private String delimiter;
    private IvParameterSpec iv;
    private int part;
    private long size;
    private short version;

    /**
     * Parses a part padding structure from the payload of a blob.
     *
     * @param blob blob whose payload contains exactly the padding bytes
     * @return the decoded padding
     * @throws IOException if the payload cannot be read
     */
    public static PartPadding readPartPaddingFromBlob(Blob blob)
        throws IOException {
        PartPadding partPadding = new PartPadding();
        // close the payload stream even when reading fails
        byte[] paddingBytes;
        try (InputStream is = blob.getPayload().openStream()) {
            paddingBytes = IOUtils.toByteArray(is);
        }
        ByteBuffer bb = ByteBuffer.wrap(paddingBytes);
        byte[] delimiterBytes = new byte[Constants.PADDING_DELIMITER_LENGTH];
        bb.get(delimiterBytes);
        partPadding.delimiter =
            new String(delimiterBytes, StandardCharsets.UTF_8);
        byte[] ivBytes = new byte[Constants.PADDING_IV_LENGTH];
        bb.get(ivBytes);
        partPadding.iv = new IvParameterSpec(ivBytes);
        partPadding.part = bb.getInt();
        partPadding.size = bb.getLong();
        partPadding.version = bb.getShort();
        logger.debug("delimiter {}", partPadding.delimiter);
        logger.debug("iv {}", Arrays.toString(ivBytes));
        logger.debug("part {}", partPadding.part);
        logger.debug("size {}", partPadding.size);
        logger.debug("version {}", partPadding.version);
        return partPadding;
    }
    public final String getDelimiter() {
        return delimiter;
    }
    public final IvParameterSpec getIv() {
        return iv;
    }
    public final int getPart() {
        return part;
    }
    public final long getSize() {
        return size;
    }
}

Wyświetl plik

@ -0,0 +1,108 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.junit;
import java.net.URI;
import org.gaul.s3proxy.AuthenticationType;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
/**
 * A JUnit 5 Extension that manages an S3Proxy instance which tests
 * can use as an S3 API endpoint. All configuration and lifecycle work
 * is delegated to {@link S3ProxyJunitCore}.
 */
public final class S3ProxyExtension
    implements AfterEachCallback, BeforeEachCallback {
    // shared implementation also used by the JUnit 4 rule
    private final S3ProxyJunitCore core;
    /** Fluent builder that mirrors {@link S3ProxyJunitCore.Builder}. */
    public static final class Builder {
        private final S3ProxyJunitCore.Builder builder;
        private Builder() {
            builder = new S3ProxyJunitCore.Builder();
        }
        public Builder withCredentials(AuthenticationType authType,
            String accessKey, String secretKey) {
            builder.withCredentials(authType, accessKey, secretKey);
            return this;
        }
        public Builder withCredentials(String accessKey, String secretKey) {
            builder.withCredentials(accessKey, secretKey);
            return this;
        }
        public Builder withSecretStore(String path, String password) {
            builder.withSecretStore(path, password);
            return this;
        }
        public Builder withPort(int port) {
            builder.withPort(port);
            return this;
        }
        public Builder withBlobStoreProvider(String blobStoreProvider) {
            builder.withBlobStoreProvider(blobStoreProvider);
            return this;
        }
        public Builder ignoreUnknownHeaders() {
            builder.ignoreUnknownHeaders();
            return this;
        }
        public S3ProxyExtension build() {
            return new S3ProxyExtension(this);
        }
    }
    private S3ProxyExtension(Builder builder) {
        core = new S3ProxyJunitCore(builder.builder);
    }
    public static Builder builder() {
        return new Builder();
    }
    /** Starts the proxy before each test. */
    @Override
    public void beforeEach(ExtensionContext extensionContext) throws Exception {
        core.beforeEach();
    }
    /** Stops the proxy and cleans up storage after each test. */
    @Override
    public void afterEach(ExtensionContext extensionContext) {
        core.afterEach();
    }
    /** @return the endpoint URI; valid only while a test is running. */
    public URI getUri() {
        return core.getUri();
    }
    public String getAccessKey() {
        return core.getAccessKey();
    }
    public String getSecretKey() {
        return core.getSecretKey();
    }
}

Wyświetl plik

@ -0,0 +1,182 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.junit;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.gaul.s3proxy.AuthenticationType;
import org.gaul.s3proxy.S3Proxy;
import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Shared implementation behind the JUnit 4 rule and JUnit 5 extension:
 * builds a filesystem-backed (by default) blob store in a temp
 * directory and runs an S3Proxy in front of it for the duration of
 * each test.
 */
public class S3ProxyJunitCore {
    private static final Logger logger = LoggerFactory.getLogger(
        S3ProxyJunitCore.class);
    private static final String LOCALHOST = "127.0.0.1";
    private final String accessKey;
    private final String secretKey;
    private final String endpointFormat;
    private final S3Proxy s3Proxy;
    private final BlobStoreContext blobStoreContext;
    // assigned in beforeEach once the actual listening port is known
    private URI endpointUri;
    // temp directory backing the filesystem blob store; deleted in afterEach
    private final File blobStoreLocation;
    /** Collects configuration; defaults: no auth, random port, filesystem. */
    public static final class Builder {
        private AuthenticationType authType = AuthenticationType.NONE;
        private String accessKey;
        private String secretKey;
        private String secretStorePath;
        private String secretStorePassword;
        private int port = -1;
        private boolean ignoreUnknownHeaders;
        private String blobStoreProvider = "filesystem";
        public Builder withCredentials(AuthenticationType authType,
            String accessKey, String secretKey) {
            this.authType = authType;
            this.accessKey = accessKey;
            this.secretKey = secretKey;
            return this;
        }
        public Builder withCredentials(String accessKey, String secretKey) {
            return withCredentials(AuthenticationType.AWS_V2_OR_V4, accessKey,
                secretKey);
        }
        public Builder withSecretStore(String path, String password) {
            secretStorePath = path;
            secretStorePassword = password;
            return this;
        }
        public Builder withPort(int port) {
            this.port = port;
            return this;
        }
        public Builder withBlobStoreProvider(String blobStoreProvider) {
            this.blobStoreProvider = blobStoreProvider;
            return this;
        }
        public Builder ignoreUnknownHeaders() {
            ignoreUnknownHeaders = true;
            return this;
        }
        public S3ProxyJunitCore build() {
            return new S3ProxyJunitCore(this);
        }
    }
    S3ProxyJunitCore(Builder builder) {
        accessKey = builder.accessKey;
        secretKey = builder.secretKey;
        Properties properties = new Properties();
        try {
            blobStoreLocation = Files.createTempDirectory("S3Proxy")
                .toFile();
            properties.setProperty("jclouds.filesystem.basedir",
                blobStoreLocation.getCanonicalPath());
        } catch (IOException e) {
            throw new RuntimeException("Unable to initialize Blob Store", e);
        }
        ContextBuilder blobStoreContextBuilder = ContextBuilder.newBuilder(
            builder.blobStoreProvider)
            .overrides(properties);
        // only pass credentials to the backing store when auth is enabled
        if (!AuthenticationType.NONE.equals(builder.authType)) {
            blobStoreContextBuilder = blobStoreContextBuilder.credentials(
                accessKey, secretKey);
        }
        blobStoreContext = blobStoreContextBuilder.build(
            BlobStoreContext.class);
        S3Proxy.Builder s3ProxyBuilder = S3Proxy.builder()
            .blobStore(blobStoreContext.getBlobStore())
            .awsAuthentication(builder.authType, accessKey, secretKey)
            .ignoreUnknownHeaders(builder.ignoreUnknownHeaders);
        if (builder.secretStorePath != null ||
            builder.secretStorePassword != null) {
            s3ProxyBuilder.keyStore(builder.secretStorePath,
                builder.secretStorePassword);
        }
        // port 0 lets the server pick a free ephemeral port
        int port = Math.max(builder.port, 0);
        endpointFormat = "http://%s:%d";
        String endpoint = String.format(endpointFormat, LOCALHOST, port);
        s3ProxyBuilder.endpoint(URI.create(endpoint));
        s3Proxy = s3ProxyBuilder.build();
    }
    /** Starts the proxy and waits until it is fully up. */
    public final void beforeEach() throws Exception {
        logger.debug("S3 proxy is starting");
        s3Proxy.start();
        while (!s3Proxy.getState().equals(AbstractLifeCycle.STARTED)) {
            Thread.sleep(10);
        }
        // re-resolve the URI: the actual port may have been chosen at start
        endpointUri = URI.create(String.format(endpointFormat, LOCALHOST,
            s3Proxy.getPort()));
        logger.debug("S3 proxy is running");
    }
    /** Stops the proxy, deletes all containers and the temp directory. */
    public final void afterEach() {
        logger.debug("S3 proxy is stopping");
        try {
            s3Proxy.stop();
            BlobStore blobStore = blobStoreContext.getBlobStore();
            for (StorageMetadata metadata : blobStore.list()) {
                blobStore.deleteContainer(metadata.getName());
            }
            blobStoreContext.close();
        } catch (Exception e) {
            throw new RuntimeException("Unable to stop S3 proxy", e);
        }
        FileUtils.deleteQuietly(blobStoreLocation);
        logger.debug("S3 proxy has stopped");
    }
    /** @return the endpoint URI; valid only between beforeEach/afterEach. */
    public final URI getUri() {
        return endpointUri;
    }
    public final String getAccessKey() {
        return accessKey;
    }
    public final String getSecretKey() {
        return secretKey;
    }
}

Wyświetl plik

@ -16,25 +16,12 @@
package org.gaul.s3proxy.junit;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.util.Properties;
import com.google.common.annotations.Beta;
import org.apache.commons.io.FileUtils;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.gaul.s3proxy.AuthenticationType;
import org.gaul.s3proxy.S3Proxy;
import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.junit.rules.ExternalResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A JUnit Rule that manages an S3Proxy instance which tests can use as an S3
@ -42,63 +29,45 @@ import org.slf4j.LoggerFactory;
*/
@Beta
public final class S3ProxyRule extends ExternalResource {
private static final Logger logger = LoggerFactory.getLogger(
S3ProxyRule.class);
private static final String LOCALHOST = "127.0.0.1";
private final String accessKey;
private final String secretKey;
private final String endpointFormat;
private final S3Proxy s3Proxy;
private final BlobStoreContext blobStoreContext;
private URI endpointUri;
private final File blobStoreLocation;
private final S3ProxyJunitCore core;
public static final class Builder {
private AuthenticationType authType = AuthenticationType.NONE;
private String accessKey;
private String secretKey;
private String secretStorePath;
private String secretStorePassword;
private int port = -1;
private boolean ignoreUnknownHeaders;
private String blobStoreProvider = "filesystem";
private Builder() { }
private final S3ProxyJunitCore.Builder builder;
private Builder() {
builder = new S3ProxyJunitCore.Builder();
}
public Builder withCredentials(AuthenticationType authType,
String accessKey, String secretKey) {
this.authType = authType;
this.accessKey = accessKey;
this.secretKey = secretKey;
String accessKey, String secretKey) {
builder.withCredentials(authType, accessKey, secretKey);
return this;
}
public Builder withCredentials(String accessKey, String secretKey) {
return withCredentials(AuthenticationType.AWS_V2_OR_V4, accessKey,
secretKey);
builder.withCredentials(accessKey, secretKey);
return this;
}
public Builder withSecretStore(String path, String password) {
secretStorePath = path;
secretStorePassword = password;
builder.withSecretStore(path, password);
return this;
}
public Builder withPort(int port) {
this.port = port;
builder.withPort(port);
return this;
}
public Builder withBlobStoreProvider(String blobStoreProvider) {
this.blobStoreProvider = blobStoreProvider;
builder.withBlobStoreProvider(blobStoreProvider);
return this;
}
public Builder ignoreUnknownHeaders() {
ignoreUnknownHeaders = true;
builder.ignoreUnknownHeaders();
return this;
}
@ -108,41 +77,7 @@ public final class S3ProxyRule extends ExternalResource {
}
private S3ProxyRule(Builder builder) {
accessKey = builder.accessKey;
secretKey = builder.secretKey;
Properties properties = new Properties();
try {
blobStoreLocation = Files.createTempDirectory("S3ProxyRule")
.toFile();
properties.setProperty("jclouds.filesystem.basedir",
blobStoreLocation.getCanonicalPath());
} catch (IOException e) {
throw new RuntimeException("Unable to initialize Blob Store", e);
}
blobStoreContext = ContextBuilder.newBuilder(
builder.blobStoreProvider)
.credentials(accessKey, secretKey)
.overrides(properties).build(BlobStoreContext.class);
S3Proxy.Builder s3ProxyBuilder = S3Proxy.builder()
.blobStore(blobStoreContext.getBlobStore())
.awsAuthentication(builder.authType, accessKey, secretKey)
.ignoreUnknownHeaders(builder.ignoreUnknownHeaders);
if (builder.secretStorePath != null ||
builder.secretStorePassword != null) {
s3ProxyBuilder.keyStore(builder.secretStorePath,
builder.secretStorePassword);
}
int port = builder.port < 0 ? 0 : builder.port;
endpointFormat = "http://%s:%d";
String endpoint = String.format(endpointFormat, LOCALHOST, port);
s3ProxyBuilder.endpoint(URI.create(endpoint));
s3Proxy = s3ProxyBuilder.build();
core = new S3ProxyJunitCore(builder.builder);
}
public static Builder builder() {
@ -151,43 +86,23 @@ public final class S3ProxyRule extends ExternalResource {
@Override
protected void before() throws Throwable {
logger.debug("S3 proxy is starting");
s3Proxy.start();
while (!s3Proxy.getState().equals(AbstractLifeCycle.STARTED)) {
Thread.sleep(10);
}
endpointUri = URI.create(String.format(endpointFormat, LOCALHOST,
s3Proxy.getPort()));
logger.debug("S3 proxy is running");
core.beforeEach();
}
@Override
protected void after() {
logger.debug("S3 proxy is stopping");
try {
s3Proxy.stop();
BlobStore blobStore = blobStoreContext.getBlobStore();
for (StorageMetadata metadata : blobStore.list()) {
blobStore.deleteContainer(metadata.getName());
}
blobStoreContext.close();
} catch (Exception e) {
throw new RuntimeException("Unable to stop S3 proxy", e);
}
FileUtils.deleteQuietly(blobStoreLocation);
logger.debug("S3 proxy has stopped");
core.afterEach();
}
public URI getUri() {
return endpointUri;
return core.getUri();
}
public String getAccessKey() {
return accessKey;
return core.getAccessKey();
}
public String getSecretKey() {
return secretKey;
return core.getSecretKey();
}
}

Wyświetl plik

@ -9,10 +9,10 @@
<metadata name="net.sf.eclipsecs.core.lastEnabledSeverity" value="inherit"/>
</module>
<module name="NewlineAtEndOfFile"/>
<module name="SuppressionCommentFilter"/>
<module name="Translation"/>
<module name="LineLength"/>
<module name="TreeWalker">
<property name="cacheFile" value="target/cachefile"/>
<module name="SuppressionCommentFilter"/>
<module name="AbbreviationAsWordInName"/>
<module name="AbstractClassName"/>
<module name="AnnotationLocation"/>
@ -38,7 +38,6 @@
<module name="EqualsHashCode"/>
<module name="ExplicitInitialization"/>
<module name="FallThrough"/>
<module name="FileContentsHolder"/>
<module name="FinalClass"/>
<module name="GenericWhitespace"/>
<!--
@ -55,7 +54,7 @@
<property name="classes" value="java.lang.Boolean,java.lang.Short,java.lang.Integer,java.lang.Long"/>
</module>
<module name="ImportOrder">
<property name="groups" value="java,javax,com,org"/>
<property name="groups" value="java,javax,com,jakarta,org"/>
<property name="separated" value="true"/>
<property name="option" value="top"/>
</module>
@ -67,7 +66,6 @@
<module name="InterfaceIsType"/>
<module name="JavadocStyle"/>
<module name="LeftCurly"/>
<module name="LineLength"/>
<module name="LocalFinalVariableName"/>
<module name="LocalVariableName"/>
<module name="MagicNumber">
@ -120,7 +118,6 @@
</module>
<module name="TypecastParenPad"/>
<module name="TypeName"/>
<module name="UnnecessaryParentheses"/>
<module name="UnusedImports"/>
<module name="UpperEll"/>
<module name="VisibilityModifier">

Wyświetl plik

@ -1,9 +1,13 @@
#!/bin/sh
exec java \
$S3PROXY_JAVA_OPTS \
-DLOG_LEVEL="${LOG_LEVEL}" \
-Ds3proxy.endpoint="${S3PROXY_ENDPOINT}" \
-Ds3proxy.secure-endpoint="${S3PROXY_SECURE_ENDPOINT}" \
-Ds3proxy.virtual-host="${S3PROXY_VIRTUALHOST}" \
-Ds3proxy.keystore-path="${S3PROXY_KEYSTORE_PATH}" \
-Ds3proxy.keystore-password="${S3PROXY_KEYSTORE_PASSWORD}" \
-Ds3proxy.authorization="${S3PROXY_AUTHORIZATION}" \
-Ds3proxy.identity="${S3PROXY_IDENTITY}" \
-Ds3proxy.credential="${S3PROXY_CREDENTIAL}" \
@ -11,7 +15,13 @@ exec java \
-Ds3proxy.cors-allow-origins="${S3PROXY_CORS_ALLOW_ORIGINS}" \
-Ds3proxy.cors-allow-methods="${S3PROXY_CORS_ALLOW_METHODS}" \
-Ds3proxy.cors-allow-headers="${S3PROXY_CORS_ALLOW_HEADERS}" \
-Ds3proxy.cors-allow-credential="${S3PROXY_CORS_ALLOW_CREDENTIAL}" \
-Ds3proxy.ignore-unknown-headers="${S3PROXY_IGNORE_UNKNOWN_HEADERS}" \
-Ds3proxy.encrypted-blobstore="${S3PROXY_ENCRYPTED_BLOBSTORE}" \
-Ds3proxy.encrypted-blobstore-password="${S3PROXY_ENCRYPTED_BLOBSTORE_PASSWORD}" \
-Ds3proxy.encrypted-blobstore-salt="${S3PROXY_ENCRYPTED_BLOBSTORE_SALT}" \
-Ds3proxy.v4-max-non-chunked-request-size="${S3PROXY_V4_MAX_NON_CHUNKED_REQ_SIZE:-33554432}" \
-Ds3proxy.read-only-blobstore="${S3PROXY_READ_ONLY_BLOBSTORE:-false}" \
-Djclouds.provider="${JCLOUDS_PROVIDER}" \
-Djclouds.identity="${JCLOUDS_IDENTITY}" \
-Djclouds.credential="${JCLOUDS_CREDENTIAL}" \
@ -21,6 +31,9 @@ exec java \
-Djclouds.keystone.version="${JCLOUDS_KEYSTONE_VERSION}" \
-Djclouds.keystone.scope="${JCLOUDS_KEYSTONE_SCOPE}" \
-Djclouds.keystone.project-domain-name="${JCLOUDS_KEYSTONE_PROJECT_DOMAIN_NAME}" \
-Djclouds.filesystem.basedir="/data" \
-Djclouds.filesystem.basedir="${JCLOUDS_FILESYSTEM_BASEDIR}" \
-Djclouds.azureblob.tenantId="${JCLOUDS_AZUREBLOB_TENANTID}" \
-Djclouds.azureblob.auth="${JCLOUDS_AZUREBLOB_AUTH}" \
-Djclouds.azureblob.account="${JCLOUDS_AZUREBLOB_ACCOUNT}" \
-jar /opt/s3proxy/s3proxy \
--properties /dev/null

Wyświetl plik

@ -44,7 +44,6 @@ import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.PutOptions;
import org.jclouds.io.Payloads;
import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -123,6 +122,7 @@ public final class AliasBlobStoreTest {
createContainer(aliasContainerName);
String blobName = TestUtils.createRandomBlobName();
ByteSource content = TestUtils.randomByteSource().slice(0, 1024);
@SuppressWarnings("deprecation")
String contentMD5 = Hashing.md5().hashBytes(content.read()).toString();
Blob blob = aliasBlobStore.blobBuilder(blobName).payload(content)
.build();
@ -143,6 +143,7 @@ public final class AliasBlobStoreTest {
createContainer(aliasContainerName);
String blobName = TestUtils.createRandomBlobName();
ByteSource content = TestUtils.randomByteSource().slice(0, 1024);
@SuppressWarnings("deprecation")
HashCode contentHash = Hashing.md5().hashBytes(content.read());
Blob blob = aliasBlobStore.blobBuilder(blobName).build();
MultipartUpload mpu = aliasBlobStore.initiateMultipartUpload(
@ -156,9 +157,10 @@ public final class AliasBlobStoreTest {
parts.add(part);
String mpuETag = aliasBlobStore.completeMultipartUpload(mpu,
parts.build());
@SuppressWarnings("deprecation")
HashCode contentHash2 = Hashing.md5().hashBytes(contentHash.asBytes());
assertThat(mpuETag).isEqualTo(
String.format("\"%s-1\"",
Hashing.md5().hashBytes(contentHash.asBytes())));
String.format("\"%s-1\"", contentHash2));
blob = aliasBlobStore.getBlob(aliasContainerName, blobName);
try (InputStream actual = blob.getPayload().openStream();
InputStream expected = content.openStream()) {

Wyświetl plik

@ -35,7 +35,6 @@ import com.amazonaws.services.s3.model.S3Object;
import com.google.common.io.ByteSource;
import org.jclouds.blobstore.BlobStoreContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

Wyświetl plik

@ -20,6 +20,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assume.assumeTrue;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
@ -93,15 +94,12 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.io.ByteSource;
import com.google.common.io.ByteStreams;
import org.assertj.core.api.Fail;
import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.rest.HttpClient;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
@ -653,7 +651,7 @@ public final class AwsSdkTest {
S3Object object = client.getObject(getObjectRequest);
try (InputStream is = object.getObjectContent()) {
assertThat(is).isNotNull();
ByteStreams.copy(is, ByteStreams.nullOutputStream());
is.transferTo(OutputStream.nullOutputStream());
}
ObjectMetadata reponseMetadata = object.getObjectMetadata();
@ -1003,6 +1001,27 @@ public final class AwsSdkTest {
client.deleteObject(containerName, blobName);
}
@Test
public void testSinglepartUploadJettyCachedHeader() throws Exception {
String blobName = "singlepart-upload-jetty-cached";
String contentType = "text/plain";
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
metadata.setContentType(contentType);
client.putObject(containerName, blobName, BYTE_SOURCE.openStream(),
metadata);
S3Object object = client.getObject(containerName, blobName);
try (InputStream actual = object.getObjectContent();
InputStream expected = BYTE_SOURCE.openStream()) {
assertThat(actual).hasContentEqualTo(expected);
}
ObjectMetadata newContentMetadata = object.getObjectMetadata();
assertThat(newContentMetadata.getContentType()).isEqualTo(
contentType);
}
@Test
public void testSinglepartUpload() throws Exception {
String blobName = "singlepart-upload";
@ -1425,7 +1444,7 @@ public final class AwsSdkTest {
.withMatchingETagConstraint(result.getETag()));
try (InputStream is = object.getObjectContent()) {
assertThat(is).isNotNull();
ByteStreams.copy(is, ByteStreams.nullOutputStream());
is.transferTo(OutputStream.nullOutputStream());
}
object = client.getObject(

Wyświetl plik

@ -40,7 +40,6 @@ import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.google.common.io.ByteSource;
import com.google.common.net.HttpHeaders;
@ -59,10 +58,8 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.ssl.SSLContextBuilder;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.Blob;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -157,7 +154,7 @@ public final class CrossOriginResourceSharingAllowAllResponseTest {
HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS)).isTrue();
assertThat(response.getFirstHeader(
HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS).getValue())
.isEqualTo("GET, HEAD, PUT, POST");
.isEqualTo("GET, HEAD, PUT, POST, DELETE");
assertThat(response.containsHeader(
HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS)).isTrue();
assertThat(response.getFirstHeader(
@ -181,7 +178,7 @@ public final class CrossOriginResourceSharingAllowAllResponseTest {
HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS)).isTrue();
assertThat(response.getFirstHeader(
HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS).getValue())
.isEqualTo("GET, HEAD, PUT, POST");
.isEqualTo("GET, HEAD, PUT, POST, DELETE");
}
@Test

Wyświetl plik

@ -40,7 +40,6 @@ import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.google.common.io.ByteSource;
import com.google.common.net.HttpHeaders;
@ -59,10 +58,8 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.ssl.SSLContextBuilder;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.Blob;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -140,58 +137,6 @@ public final class CrossOriginResourceSharingResponseTest {
}
}
@Test
public void testCorsPreflightNegative() throws Exception {
// No CORS headers
HttpOptions request = new HttpOptions(presignedGET);
HttpResponse response = httpClient.execute(request);
/*
* For non presigned URLs that should give a 400, but the
* Access-Control-Request-Method header is needed for presigned URLs
* to calculate the same signature. If this is missing it fails already
* with 403 - Signature mismatch before processing the OPTIONS request
* See testCorsPreflightPublicRead for that cases
*/
assertThat(response.getStatusLine().getStatusCode())
.isEqualTo(HttpStatus.SC_FORBIDDEN);
// Not allowed origin
request.reset();
request.setHeader(HttpHeaders.ORIGIN, "https://example.org");
request.setHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
response = httpClient.execute(request);
assertThat(response.getStatusLine().getStatusCode())
.isEqualTo(HttpStatus.SC_FORBIDDEN);
// Not allowed method
request.reset();
request.setHeader(HttpHeaders.ORIGIN, "https://example.com");
request.setHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "PATCH");
response = httpClient.execute(request);
assertThat(response.getStatusLine().getStatusCode())
.isEqualTo(HttpStatus.SC_FORBIDDEN);
// Not allowed header
request.reset();
request.setHeader(HttpHeaders.ORIGIN, "https://example.com");
request.setHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
request.setHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS,
"Accept-Encoding");
response = httpClient.execute(request);
assertThat(response.getStatusLine().getStatusCode())
.isEqualTo(HttpStatus.SC_FORBIDDEN);
// Not allowed header combination
request.reset();
request.setHeader(HttpHeaders.ORIGIN, "https://example.com");
request.setHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
request.setHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS,
"Accept, Accept-Encoding");
response = httpClient.execute(request);
assertThat(response.getStatusLine().getStatusCode())
.isEqualTo(HttpStatus.SC_FORBIDDEN);
}
@Test
public void testCorsPreflight() throws Exception {
// Allowed origin and method
@ -303,6 +248,9 @@ public final class CrossOriginResourceSharingResponseTest {
assertThat(response.getFirstHeader(
HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS).getValue())
.isEqualTo("Accept, Content-Type");
assertThat(response.getFirstHeader(
HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS))
.isNull();
}
@Test

Wyświetl plik

@ -38,9 +38,10 @@ public final class CrossOriginResourceSharingRuleTest {
"https://.+\\.example\\.com",
"https://example\\.cloud"),
Lists.newArrayList("GET", "PUT"),
Lists.newArrayList("Accept", "Content-Type"));
Lists.newArrayList("Accept", "Content-Type"),
"true");
// CORS disabled
corsOff = new CrossOriginResourceSharing(null, null, null);
corsOff = new CrossOriginResourceSharing(null, null, null, null);
}
@Test
@ -106,6 +107,12 @@ public final class CrossOriginResourceSharingRuleTest {
probe = "POST";
assertThat(corsAll.isMethodAllowed(probe))
.as("check '%s' as method", probe).isTrue();
probe = "HEAD";
assertThat(corsAll.isMethodAllowed(probe))
.as("check '%s' as method", probe).isTrue();
probe = "DELETE";
assertThat(corsAll.isMethodAllowed(probe))
.as("check '%s' as method", probe).isTrue();
}
@Test
@ -174,4 +181,10 @@ public final class CrossOriginResourceSharingRuleTest {
assertThat(corsCfg.isEveryHeaderAllowed(probe))
.as("check '%s' as header", probe).isTrue();
}
@Test
public void testAllowCredentials() {
assertThat(corsOff.isAllowCredentials()).isFalse();
assertThat(corsCfg.isAllowCredentials()).isTrue();
}
}

Wyświetl plik

@ -0,0 +1,283 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteSource;
import com.google.common.util.concurrent.Uninterruptibles;
import org.assertj.core.api.Fail;
import org.gaul.s3proxy.crypto.Constants;
import org.jclouds.aws.AWSResponseException;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.ListContainerOptions;
import org.jclouds.http.options.GetOptions;
import org.jclouds.io.Payload;
import org.jclouds.io.Payloads;
import org.jclouds.s3.S3ClientLiveTest;
import org.jclouds.s3.domain.ListMultipartUploadsResponse;
import org.jclouds.s3.domain.ObjectMetadataBuilder;
import org.jclouds.s3.domain.S3Object;
import org.jclouds.s3.reference.S3Constants;
import org.testng.SkipException;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.Test;
/**
 * Live test for the encrypted blob store: runs the stock jclouds
 * {@link S3ClientLiveTest} suite against an S3Proxy launched with
 * {@code s3proxy-encryption.conf}, plus encryption-specific cases that
 * verify plaintext sizes/content are observed through the S3 API.
 */
@SuppressWarnings("UnstableApiUsage")
@Test(testName = "EncryptedBlobStoreLiveTest")
public final class EncryptedBlobStoreLiveTest extends S3ClientLiveTest {
    // Sleep used by awaitConsistency(); defaults to 0 and is overridable
    // via the test.blobstore.await-consistency-timeout-seconds property.
    private static final int AWAIT_CONSISTENCY_TIMEOUT_SECONDS =
            Integer.parseInt(
                    System.getProperty(
                            "test.blobstore.await-consistency-timeout-seconds",
                            "0"));
    // 5 MiB — used below to size multipart test data (matches S3's
    // conventional minimum part size for non-final parts).
    private static final long MINIMUM_MULTIPART_SIZE = 5 * 1024 * 1024;

    // Proxy and backing context created in setupProperties(), torn down
    // once per suite in destroyResources().
    private S3Proxy s3Proxy;
    private BlobStoreContext context;

    /** Stops the proxy and closes the blob store context after the suite. */
    @AfterSuite
    @Override
    public void destroyResources() throws Exception {
        context.close();
        s3Proxy.stop();
    }

    /** Sleeps the configured timeout between dependent operations. */
    @Override
    protected void awaitConsistency() {
        Uninterruptibles.sleepUninterruptibly(
                AWAIT_CONSISTENCY_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    }

    /**
     * Launches S3Proxy with the encryption config and points the parent
     * suite's client properties (identity, credential, endpoint, service
     * path) at that instance.
     */
    @Override
    protected Properties setupProperties() {
        TestUtils.S3ProxyLaunchInfo info;
        try {
            info = TestUtils.startS3Proxy("s3proxy-encryption.conf");
            s3Proxy = info.getS3Proxy();
            context = info.getBlobStore().getContext();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        Properties props = super.setupProperties();
        props.setProperty(org.jclouds.Constants.PROPERTY_IDENTITY,
                info.getS3Identity());
        props.setProperty(org.jclouds.Constants.PROPERTY_CREDENTIAL,
                info.getS3Credential());
        props.setProperty(org.jclouds.Constants.PROPERTY_ENDPOINT,
                info.getEndpoint().toString() + info.getServicePath());
        props.setProperty(org.jclouds.Constants.PROPERTY_STRIP_EXPECT_HEADER,
                "true");
        props.setProperty(S3Constants.PROPERTY_S3_SERVICE_PATH,
                info.getServicePath());
        // Parent class field: full endpoint including the service path.
        endpoint = info.getEndpoint().toString() + info.getServicePath();
        return props;
    }

    /**
     * Stores a 1-byte object, then verifies that GET, listings, list
     * pagination markers, and server-side copy all report plaintext
     * sizes/names with no internal encryption suffix leaking through.
     */
    @Test
    public void testOneCharAndCopy() throws InterruptedException {
        String blobName = TestUtils.createRandomBlobName();
        String containerName = this.getContainerName();
        S3Object object = this.getApi().newS3Object();
        object.getMetadata().setKey(blobName);
        object.setPayload("1");
        this.getApi().putObject(containerName, object);
        object = this.getApi().getObject(containerName, blobName);
        // Content length must be the plaintext length, not ciphertext.
        assertThat(object.getMetadata().getContentMetadata()
                .getContentLength()).isEqualTo(1L);
        PageSet<? extends StorageMetadata>
                list = view.getBlobStore().list(containerName);
        assertThat(list).hasSize(1);
        StorageMetadata md = list.iterator().next();
        assertThat(md.getName()).isEqualTo(blobName);
        assertThat(md.getSize()).isEqualTo(1L);
        this.getApi().copyObject(containerName, blobName, containerName,
                blobName + "-copy");
        list = view.getBlobStore().list(containerName);
        assertThat(list).hasSize(2);
        for (StorageMetadata sm : list) {
            assertThat(sm.getSize()).isEqualTo(1L);
            // Internal encrypted-blob suffix must never be visible.
            assertThat(sm.getName()).doesNotContain(
                    Constants.S3_ENC_SUFFIX);
        }
        // Pagination marker must also be suffix-free.
        ListContainerOptions lco = new ListContainerOptions();
        lco.maxResults(1);
        list = view.getBlobStore().list(containerName, lco);
        assertThat(list).hasSize(1);
        assertThat(list.getNextMarker()).doesNotContain(
                Constants.S3_ENC_SUFFIX);
    }

    /**
     * Verifies a ranged GET decrypts only the requested byte range
     * (bytes 0-19 of a 26-byte object).
     */
    @Test
    public void testPartialContent() throws InterruptedException, IOException {
        String blobName = TestUtils.createRandomBlobName();
        String containerName = this.getContainerName();
        String content = "123456789A123456789B123456";
        S3Object object = this.getApi().newS3Object();
        object.getMetadata().setKey(blobName);
        object.setPayload(content);
        this.getApi().putObject(containerName, object);
        // get only 20 bytes
        GetOptions options = new GetOptions();
        options.range(0, 19);
        object = this.getApi().getObject(containerName, blobName, options);
        // NOTE(review): reader is not closed here — harmless for an
        // in-memory payload but worth confirming/cleaning up.
        InputStreamReader r =
                new InputStreamReader(object.getPayload().openStream());
        BufferedReader reader = new BufferedReader(r);
        String partialContent = reader.lines().collect(Collectors.joining());
        assertThat(partialContent).isEqualTo(content.substring(0, 20));
    }

    /**
     * Exercises the full multipart lifecycle (initiate, list, upload
     * three parts, complete) through encryption, then verifies both the
     * whole object and a range that straddles a part boundary.
     */
    @Test
    public void testMultipart() throws InterruptedException, IOException {
        String blobName = TestUtils.createRandomBlobName();
        String containerName = this.getContainerName();
        // 15mb of data
        ByteSource byteSource = TestUtils.randomByteSource().slice(
                0, MINIMUM_MULTIPART_SIZE * 3);
        // first 2 parts with 6mb and last part with 3mb
        long partSize = 6 * 1024 * 1024;
        long lastPartSize = 3 * 1024 * 1024;
        ByteSource byteSource1 = byteSource.slice(0, partSize);
        ByteSource byteSource2 = byteSource.slice(partSize, partSize);
        ByteSource byteSource3 = byteSource.slice(partSize * 2,
                lastPartSize);
        String uploadId = this.getApi().initiateMultipartUpload(containerName,
                ObjectMetadataBuilder.create().key(blobName).build());
        // No parts uploaded yet, but the upload itself must be listed.
        assertThat(this.getApi().listMultipartPartsFull(containerName,
                blobName, uploadId)).isEmpty();
        ListMultipartUploadsResponse
                response = this.getApi()
                .listMultipartUploads(containerName, null, null, null, blobName,
                        null);
        assertThat(response.uploads()).hasSize(1);
        Payload part1 =
                Payloads.newInputStreamPayload(byteSource1.openStream());
        part1.getContentMetadata().setContentLength(byteSource1.size());
        Payload part2 =
                Payloads.newInputStreamPayload(byteSource2.openStream());
        part2.getContentMetadata().setContentLength(byteSource2.size());
        Payload part3 =
                Payloads.newInputStreamPayload(byteSource3.openStream());
        part3.getContentMetadata().setContentLength(byteSource3.size());
        String eTagOf1 = this.getApi()
                .uploadPart(containerName, blobName, 1, uploadId, part1);
        String eTagOf2 = this.getApi()
                .uploadPart(containerName, blobName, 2, uploadId, part2);
        String eTagOf3 = this.getApi()
                .uploadPart(containerName, blobName, 3, uploadId, part3);
        this.getApi().completeMultipartUpload(containerName, blobName, uploadId,
                ImmutableMap.of(1, eTagOf1, 2, eTagOf2, 3, eTagOf3));
        S3Object object = this.getApi().getObject(containerName, blobName);
        try (InputStream actual = object.getPayload().openStream();
             InputStream expected = byteSource.openStream()) {
            assertThat(actual).hasContentEqualTo(expected);
        }
        // get a 5mb slice that overlap parts
        long partialStart = 5 * 1024 * 1024;
        ByteSource partialContent =
                byteSource.slice(partialStart, partialStart);
        GetOptions options = new GetOptions();
        options.range(partialStart, (partialStart * 2) - 1);
        object = this.getApi().getObject(containerName, blobName, options);
        try (InputStream actual = object.getPayload().openStream();
             InputStream expected = partialContent.openStream()) {
            assertThat(actual).hasContentEqualTo(expected);
        }
    }

    /** Parent test not applicable to this backend. */
    @Override
    public void testMultipartSynchronously() {
        throw new SkipException("list multipart synchronously not supported");
    }

    /**
     * XML ACL updates are not implemented by S3Proxy; expect
     * NotImplemented and skip.
     */
    @Override
    @Test
    public void testUpdateObjectACL() throws InterruptedException,
            ExecutionException, TimeoutException, IOException {
        try {
            super.testUpdateObjectACL();
            Fail.failBecauseExceptionWasNotThrown(AWSResponseException.class);
        } catch (AWSResponseException are) {
            assertThat(are.getError().getCode()).isEqualTo("NotImplemented");
            throw new SkipException("XML ACLs not supported", are);
        }
    }

    /**
     * public-read-write ACLs are not implemented; expect NotImplemented
     * and skip.
     */
    @Override
    @Test
    public void testPublicWriteOnObject() throws InterruptedException,
            ExecutionException, TimeoutException, IOException {
        try {
            super.testPublicWriteOnObject();
            Fail.failBecauseExceptionWasNotThrown(AWSResponseException.class);
        } catch (AWSResponseException are) {
            assertThat(are.getError().getCode()).isEqualTo("NotImplemented");
            throw new SkipException("public-read-write-acl not supported", are);
        }
    }

    /** Blob-level ACLs are unsupported — skip. */
    @Override
    public void testCopyCannedAccessPolicyPublic() {
        throw new SkipException("blob access control not supported");
    }

    /** Blob-level ACLs are unsupported — skip. */
    @Override
    public void testPutCannedAccessPolicyPublic() {
        throw new SkipException("blob access control not supported");
    }

    /** Blob-level ACLs are unsupported — skip. */
    @Override
    public void testUpdateObjectCannedACL() {
        throw new SkipException("blob access control not supported");
    }
}

Wyświetl plik

@ -0,0 +1,835 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableList;
import com.google.inject.Module;
import org.gaul.s3proxy.crypto.Constants;
import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobAccess;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.domain.MultipartPart;
import org.jclouds.blobstore.domain.MultipartUpload;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.domain.StorageType;
import org.jclouds.blobstore.options.CopyOptions;
import org.jclouds.blobstore.options.GetOptions;
import org.jclouds.blobstore.options.ListContainerOptions;
import org.jclouds.blobstore.options.PutOptions;
import org.jclouds.io.Payload;
import org.jclouds.io.Payloads;
import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings("UnstableApiUsage")
public final class EncryptedBlobStoreTest {
private static final Logger logger =
LoggerFactory.getLogger(EncryptedBlobStoreTest.class);
private BlobStoreContext context;
private BlobStore blobStore;
private String containerName;
private BlobStore encryptedBlobStore;
/** Creates a blob named {@code name} whose payload streams from {@code in}. */
private static Blob makeBlob(BlobStore store, String name,
        InputStream in, long length) {
    return store
            .blobBuilder(name)
            .payload(in)
            .contentLength(length)
            .build();
}
/** Creates a blob named {@code name} backed by the given byte array. */
private static Blob makeBlob(BlobStore store, String name,
        byte[] bytes, long length) {
    return store
            .blobBuilder(name)
            .payload(bytes)
            .contentLength(length)
            .build();
}
/**
 * Creates a blob with a streaming payload and an explicit Content-Type.
 */
private static Blob makeBlobWithContentType(BlobStore store,
                                            String name,
                                            long length,
                                            InputStream in,
                                            String type) {
    return store
            .blobBuilder(name)
            .payload(in)
            .contentLength(length)
            .contentType(type)
            .build();
}
    // Builds a transient (in-memory) backing blob store plus an encrypting
    // wrapper over it, and creates a fresh random container for each test.
    @Before
    public void setUp() throws Exception {
        // Fixed test credentials for the encryption key derivation.
        String password = "Password1234567!";
        String salt = "12345678";
        containerName = TestUtils.createRandomContainerName();
        //noinspection UnstableApiUsage
        context = ContextBuilder
            .newBuilder("transient")
            .credentials("identity", "credential")
            .modules(ImmutableList.<Module>of(new SLF4JLoggingModule()))
            .build(BlobStoreContext.class);
        blobStore = context.getBlobStore();
        blobStore.createContainerInLocation(null, containerName);
        // Enable encryption and wrap the plain store; tests exercise both
        // blobStore (raw) and encryptedBlobStore (wrapped) views.
        Properties properties = new Properties();
        properties.put(S3ProxyConstants.PROPERTY_ENCRYPTED_BLOBSTORE, "true");
        properties.put(S3ProxyConstants.PROPERTY_ENCRYPTED_BLOBSTORE_PASSWORD,
            password);
        properties.put(S3ProxyConstants.PROPERTY_ENCRYPTED_BLOBSTORE_SALT,
            salt);
        encryptedBlobStore =
            EncryptedBlobStore.newEncryptedBlobStore(blobStore, properties);
    }
/** Deletes the test container and closes the context after each test. */
@After
public void tearDown() throws Exception {
    if (context == null) {
        return;
    }
    blobStore.deleteContainer(containerName);
    context.close();
}
// Fetching a never-stored blob must yield null, both with and without
// explicit GetOptions.
@Test
public void testBlobNotExists() {
    String missingName = TestUtils.createRandomBlobName();
    assertThat(encryptedBlobStore.getBlob(containerName, missingName))
            .isNull();
    assertThat(encryptedBlobStore.getBlob(containerName, missingName,
            new GetOptions())).isNull();
}
/**
 * Verifies that blobs written through the RAW store are read back
 * unchanged through the encrypted store (the wrapper must pass
 * unencrypted data through), and that listings report the original
 * sizes.
 *
 * <p>Fix: the original opened payload streams/readers and never closed
 * them; reads now go through a try-with-resources helper, which also
 * decodes with UTF-8 to match the bytes written above.
 */
@Test
public void testBlobNotEncrypted() throws Exception {
    String[] tests = new String[] {
        "1", // only 1 char
        "123456789A12345", // lower then the AES block
        "123456789A1234567", // one byte bigger then the AES block
        "123456789A123456123456789B123456123456789C" +
            "1234123456789A123456123456789B123456123456789C1234"
    };
    Map<String, Long> contentLengths = new HashMap<>();
    for (String content : tests) {
        String blobName = TestUtils.createRandomBlobName();
        InputStream is = new ByteArrayInputStream(
            content.getBytes(StandardCharsets.UTF_8));
        contentLengths.put(blobName, (long) content.length());
        // Store via the raw (unencrypted) store.
        Blob blob = makeBlob(blobStore, blobName, is, content.length());
        blobStore.putBlob(containerName, blob);
        // Read back via the encrypted wrapper, with and without options.
        blob = encryptedBlobStore.getBlob(containerName, blobName);
        String plaintext = readPayloadAsString(blob);
        logger.debug("plaintext {}", plaintext);
        assertThat(content).isEqualTo(plaintext);
        GetOptions options = new GetOptions();
        blob = encryptedBlobStore.getBlob(containerName, blobName, options);
        plaintext = readPayloadAsString(blob);
        logger.debug("plaintext {} with empty options ", plaintext);
        assertThat(content).isEqualTo(plaintext);
    }
    // Listings through the wrapper must report the plaintext sizes.
    PageSet<? extends StorageMetadata> blobs =
        encryptedBlobStore.list(containerName, new ListContainerOptions());
    for (StorageMetadata blob : blobs) {
        assertThat(blob.getSize()).isEqualTo(
            contentLengths.get(blob.getName()));
    }
    // Top-level list() returns containers, not blobs.
    blobs = encryptedBlobStore.list();
    StorageMetadata metadata = blobs.iterator().next();
    assertThat(StorageType.CONTAINER).isEqualTo(metadata.getType());
}

/** Reads a blob's payload fully as a UTF-8 string, closing all streams. */
private static String readPayloadAsString(Blob blob) throws Exception {
    try (InputStream is = blob.getPayload().openStream();
         BufferedReader reader = new BufferedReader(
             new InputStreamReader(is, StandardCharsets.UTF_8))) {
        return reader.lines().collect(Collectors.joining());
    }
}
// Stores several blobs of block-boundary-sensitive lengths through the
// encrypting store, checks that listings report the plaintext sizes,
// then removes everything and checks the container lists empty.
@Test
public void testListEncrypted() {
    String[] contents = new String[] {
        "1", // single character
        "123456789A12345", // shorter than one AES block
        "123456789A1234567", // one byte longer than one AES block
        "123456789A123456123456789B123456123456789C1234"
    };
    Map<String, Long> expectedSizes = new HashMap<>();
    for (String content : contents) {
        String name = TestUtils.createRandomBlobName();
        InputStream payload = new ByteArrayInputStream(
                content.getBytes(StandardCharsets.UTF_8));
        expectedSizes.put(name, (long) content.length());
        encryptedBlobStore.putBlob(containerName,
                makeBlob(encryptedBlobStore, name, payload,
                        content.length()));
    }
    // Default listing must report decrypted (plaintext) sizes.
    PageSet<? extends StorageMetadata> listing =
            encryptedBlobStore.list(containerName);
    for (StorageMetadata entry : listing) {
        assertThat(entry.getSize())
                .isEqualTo(expectedSizes.get(entry.getName()));
    }
    // Listing with explicit options must agree; delete each entry.
    listing = encryptedBlobStore.list(containerName,
            new ListContainerOptions());
    for (StorageMetadata entry : listing) {
        assertThat(entry.getSize())
                .isEqualTo(expectedSizes.get(entry.getName()));
        encryptedBlobStore.removeBlob(containerName, entry.getName());
    }
    listing = encryptedBlobStore.list(containerName,
            new ListContainerOptions());
    assertThat(listing.size()).isEqualTo(0);
}
/**
 * Uploads a blob in three parts through the encrypted store and checks that
 * part listings and container listings report plaintext sizes, then removes
 * the blob via removeBlobs().
 */
@Test
public void testListEncryptedMultipart() {
    String blobName = TestUtils.createRandomBlobName();
    String[] contentParts = new String[] {
        "123456789A123456123456789B123456123456789C1234",
        "123456789D123456123456789E123456123456789F123456",
        "123456789G123456123456789H123456123456789I123"
    };
    String content = contentParts[0] + contentParts[1] + contentParts[2];
    BlobMetadata blobMetadata = makeBlob(encryptedBlobStore, blobName,
        content.getBytes(StandardCharsets.UTF_8),
        content.length()).getMetadata();
    MultipartUpload mpu = encryptedBlobStore.initiateMultipartUpload(
        containerName, blobMetadata, new PutOptions());
    // Part numbers are 1-based.
    for (int i = 0; i < contentParts.length; i++) {
        Payload partPayload = Payloads.newByteArrayPayload(
            contentParts[i].getBytes(StandardCharsets.UTF_8));
        encryptedBlobStore.uploadMultipartPart(mpu, i + 1, partPayload);
    }
    // Listed part sizes must match the plaintext part lengths.
    List<MultipartPart> parts = encryptedBlobStore.listMultipartUpload(mpu);
    int index = 0;
    for (MultipartPart part : parts) {
        assertThat(part.partSize()).isEqualTo(
            (long) contentParts[index].length());
        index++;
    }
    encryptedBlobStore.completeMultipartUpload(mpu, parts);

    // Container listings must report the full plaintext length.
    PageSet<? extends StorageMetadata> listing =
        encryptedBlobStore.list(containerName);
    StorageMetadata metadata = listing.iterator().next();
    assertThat(metadata.getSize()).isEqualTo((long) content.length());
    ListContainerOptions options = new ListContainerOptions();
    listing = encryptedBlobStore.list(containerName, options.withDetails());
    metadata = listing.iterator().next();
    assertThat(metadata.getSize()).isEqualTo((long) content.length());

    // Top-level list() yields the container itself.
    listing = encryptedBlobStore.list();
    metadata = listing.iterator().next();
    assertThat(metadata.getType()).isEqualTo(StorageType.CONTAINER);

    // Bulk removal by name empties the container.
    List<String> singleList = new ArrayList<>();
    singleList.add(blobName);
    encryptedBlobStore.removeBlobs(containerName, singleList);
    listing = encryptedBlobStore.list(containerName);
    assertThat(listing.size()).isEqualTo(0);
}
/**
 * Verifies ranged reads (startAt, tail, range) of blobs that were stored
 * unencrypted, across 100 randomized runs.
 */
@Test
public void testBlobNotEncryptedRanges() throws Exception {
    // Hoisted: one PRNG for all runs instead of re-seeding per blob.
    Random rand = new Random();
    // Lengths chosen around the 16-byte AES block boundary.
    String[] tests = new String[] {
        "123456789A12345", // shorter than the AES block
        "123456789A1234567", // one byte bigger than the AES block
        "123456789A123456123456789B123456123456789C" +
        "1234123456789A123456123456789B123456123456789C1234"
    };
    for (int run = 0; run < 100; run++) {
        for (String content : tests) {
            String blobName = TestUtils.createRandomBlobName();
            InputStream is = new ByteArrayInputStream(
                content.getBytes(StandardCharsets.UTF_8));
            // Write through the *unencrypted* delegate store.
            Blob blob = makeBlob(blobStore, blobName, is, content.length());
            blobStore.putBlob(containerName, blob);

            // startAt: random offset into the content.
            GetOptions options = new GetOptions();
            int offset = rand.nextInt(content.length() - 1);
            logger.debug("content {} with offset {}", content, offset);
            options.startAt(offset);
            blob = encryptedBlobStore.getBlob(containerName, blobName,
                options);
            // Fix: close streams (previously leaked) and decode as UTF-8
            // rather than the platform-default charset.
            String plaintext;
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(blob.getPayload().openStream(),
                        StandardCharsets.UTF_8))) {
                plaintext = reader.lines().collect(Collectors.joining());
            }
            logger.debug("plaintext {} with offset {}", plaintext, offset);
            assertThat(plaintext).isEqualTo(content.substring(offset));

            // tail: random non-zero suffix length.
            options = new GetOptions();
            int tail = rand.nextInt(content.length());
            if (tail == 0) {
                tail++;
            }
            logger.debug("content {} with tail {}", content, tail);
            options.tail(tail);
            blob = encryptedBlobStore.getBlob(containerName, blobName,
                options);
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(blob.getPayload().openStream(),
                        StandardCharsets.UTF_8))) {
                plaintext = reader.lines().collect(Collectors.joining());
            }
            logger.debug("plaintext {} with tail {}", plaintext, tail);
            assertThat(plaintext).isEqualTo(
                content.substring(content.length() - tail));

            // range: fixed interior slice [1, len - 2] (inclusive).
            options = new GetOptions();
            offset = 1;
            int end = content.length() - 2;
            logger.debug("content {} with range {}-{}", content, offset,
                end);
            options.range(offset, end);
            blob = encryptedBlobStore.getBlob(containerName, blobName,
                options);
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(blob.getPayload().openStream(),
                        StandardCharsets.UTF_8))) {
                plaintext = reader.lines().collect(Collectors.joining());
            }
            logger.debug("plaintext {} with range {}-{}", plaintext, offset,
                end);
            assertThat(plaintext).isEqualTo(
                content.substring(offset, end + 1));
        }
    }
}
/**
 * Verifies the encrypt/decrypt round trip: a blob written through the
 * encrypted store decrypts back to the plaintext, the raw stored bytes
 * differ from the plaintext, and existence/ACL operations work on the
 * logical blob name.
 */
@Test
public void testEncryptContent() throws Exception {
    // Lengths chosen around the 16-byte AES block boundary.
    String[] tests = new String[] {
        "1", // only 1 char
        "123456789A12345", // shorter than the AES block
        "123456789A1234567", // one byte bigger than the AES block
        "123456789A123456123456789B123456123456789C1234"
    };
    for (String content : tests) {
        String blobName = TestUtils.createRandomBlobName();
        String contentType = "plain/text";
        InputStream is = new ByteArrayInputStream(
            content.getBytes(StandardCharsets.UTF_8));
        Blob blob = makeBlobWithContentType(encryptedBlobStore, blobName,
            content.length(), is, contentType);
        encryptedBlobStore.putBlob(containerName, blob);

        // Decrypted read returns the plaintext.  Fix: close the streams
        // (previously leaked) and decode explicitly as UTF-8 rather than
        // the platform-default charset.
        blob = encryptedBlobStore.getBlob(containerName, blobName);
        String plaintext;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            plaintext = reader.lines().collect(Collectors.joining());
        }
        logger.debug("plaintext {}", plaintext);
        assertThat(plaintext).isEqualTo(content);

        // The raw blob (stored under the encryption suffix) must not match
        // the plaintext.
        blob = blobStore.getBlob(containerName,
            blobName + Constants.S3_ENC_SUFFIX);
        String encrypted;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            encrypted = reader.lines().collect(Collectors.joining());
        }
        logger.debug("encrypted {}", encrypted);
        assertThat(encrypted).isNotEqualTo(content);

        // Existence and ACL round trip on the logical (unsuffixed) name.
        assertThat(encryptedBlobStore.blobExists(containerName,
            blobName)).isTrue();
        BlobAccess access =
            encryptedBlobStore.getBlobAccess(containerName, blobName);
        assertThat(access).isEqualTo(BlobAccess.PRIVATE);
        encryptedBlobStore.setBlobAccess(containerName, blobName,
            BlobAccess.PUBLIC_READ);
        access = encryptedBlobStore.getBlobAccess(containerName, blobName);
        assertThat(access).isEqualTo(BlobAccess.PUBLIC_READ);
    }
}
/**
 * Verifies encryption with explicit PutOptions: plaintext round trip,
 * ciphertext at rest, content-type preservation, and server-side copy of an
 * encrypted blob.
 */
@Test
public void testEncryptContentWithOptions() throws Exception {
    // Lengths chosen around the 16-byte AES block boundary.
    String[] tests = new String[] {
        "1", // only 1 char
        "123456789A12345", // shorter than the AES block
        "123456789A1234567", // one byte bigger than the AES block
        "123456789A123456123456789B123456123456789C1234"
    };
    for (String content : tests) {
        String blobName = TestUtils.createRandomBlobName();
        String contentType = "plain/text; charset=utf-8";
        InputStream is = new ByteArrayInputStream(
            content.getBytes(StandardCharsets.UTF_8));
        Blob blob = makeBlobWithContentType(encryptedBlobStore, blobName,
            content.length(), is, contentType);
        encryptedBlobStore.putBlob(containerName, blob, new PutOptions());

        // Decrypted read returns the plaintext.  Fix: close the streams
        // (previously leaked) and decode explicitly as UTF-8 rather than
        // the platform-default charset.
        blob = encryptedBlobStore.getBlob(containerName, blobName);
        String plaintext;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            plaintext = reader.lines().collect(Collectors.joining());
        }
        logger.debug("plaintext {}", plaintext);
        assertThat(plaintext).isEqualTo(content);

        // Raw stored bytes must differ from the plaintext.
        blob = blobStore.getBlob(containerName,
            blobName + Constants.S3_ENC_SUFFIX);
        String encrypted;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            encrypted = reader.lines().collect(Collectors.joining());
        }
        logger.debug("encrypted {}", encrypted);
        assertThat(encrypted).isNotEqualTo(content);

        // The original content type survives encryption.
        BlobMetadata metadata =
            encryptedBlobStore.blobMetadata(containerName,
                blobName + Constants.S3_ENC_SUFFIX);
        assertThat(metadata.getContentMetadata().getContentType())
            .isEqualTo(contentType);

        // Copy within the encrypted store; the copy must still decrypt.
        encryptedBlobStore.copyBlob(containerName, blobName,
            containerName, blobName + "-copy", CopyOptions.NONE);
        // NOTE(review): this re-reads the *original* blob's ciphertext;
        // checking the copy's ciphertext would need the "-copy" suffix
        // name — confirm intent before changing.
        blob = blobStore.getBlob(containerName,
            blobName + Constants.S3_ENC_SUFFIX);
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            encrypted = reader.lines().collect(Collectors.joining());
        }
        logger.debug("encrypted {}", encrypted);
        assertThat(encrypted).isNotEqualTo(content);
        blob =
            encryptedBlobStore.getBlob(containerName, blobName + "-copy");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            plaintext = reader.lines().collect(Collectors.joining());
        }
        logger.debug("plaintext {}", plaintext);
        assertThat(plaintext).isEqualTo(content);
    }
}
/**
 * Verifies a multipart upload through the encrypted store: the in-flight
 * upload is listed, the completed blob decrypts to the concatenated parts,
 * and the raw stored bytes differ from the plaintext.
 */
@Test
public void testEncryptMultipartContent() throws Exception {
    String blobName = TestUtils.createRandomBlobName();
    String[] contentParts = new String[] {
        "123456789A123456123456789B123456123456789C1234",
        "123456789D123456123456789E123456123456789F123456",
        "123456789G123456123456789H123456123456789I123"
    };
    String content = contentParts[0] + contentParts[1] + contentParts[2];
    BlobMetadata blobMetadata = makeBlob(encryptedBlobStore, blobName,
        content.getBytes(StandardCharsets.UTF_8),
        content.length()).getMetadata();
    MultipartUpload mpu =
        encryptedBlobStore.initiateMultipartUpload(containerName,
            blobMetadata, new PutOptions());
    // Part numbers are 1-based; upload in a loop instead of three copies
    // of the same payload/upload code.
    for (int i = 0; i < contentParts.length; i++) {
        Payload payload = Payloads.newByteArrayPayload(
            contentParts[i].getBytes(StandardCharsets.UTF_8));
        encryptedBlobStore.uploadMultipartPart(mpu, i + 1, payload);
    }

    // Exactly one in-flight upload, with the id we initiated.
    List<MultipartUpload> mpus =
        encryptedBlobStore.listMultipartUploads(containerName);
    assertThat(mpus).hasSize(1);
    List<MultipartPart> parts = encryptedBlobStore.listMultipartUpload(mpu);
    assertThat(mpus.get(0).id()).isEqualTo(mpu.id());
    encryptedBlobStore.completeMultipartUpload(mpu, parts);

    // Decrypted read returns the full plaintext.  Fix: close streams
    // (previously leaked) and decode explicitly as UTF-8.
    Blob blob = encryptedBlobStore.getBlob(containerName, blobName);
    String plaintext;
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(blob.getPayload().openStream(),
                StandardCharsets.UTF_8))) {
        plaintext = reader.lines().collect(Collectors.joining());
    }
    logger.debug("plaintext {}", plaintext);
    assertThat(plaintext).isEqualTo(content);

    // Raw stored bytes must differ from the plaintext.
    blob = blobStore.getBlob(containerName,
        blobName + Constants.S3_ENC_SUFFIX);
    String encrypted;
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(blob.getPayload().openStream(),
                StandardCharsets.UTF_8))) {
        encrypted = reader.lines().collect(Collectors.joining());
    }
    logger.debug("encrypted {}", encrypted);
    assertThat(encrypted).isNotEqualTo(content);
}
/**
 * Verifies startAt reads at every offset 0..59 across AES block boundaries.
 */
@Test
public void testReadPartial() throws Exception {
    // Hoisted: the content is identical for every offset.
    String content =
        "123456789A123456123456789B123456123456789" +
        "C123456789D123456789E12345";
    for (int offset = 0; offset < 60; offset++) {
        logger.debug("Test with offset {}", offset);
        String blobName = TestUtils.createRandomBlobName();
        InputStream is = new ByteArrayInputStream(
            content.getBytes(StandardCharsets.UTF_8));
        Blob blob =
            makeBlob(encryptedBlobStore, blobName, is, content.length());
        encryptedBlobStore.putBlob(containerName, blob);
        GetOptions options = new GetOptions();
        options.startAt(offset);
        blob = encryptedBlobStore.getBlob(containerName, blobName, options);
        // Fix: close the streams (previously leaked) and decode explicitly
        // as UTF-8 rather than the platform-default charset.
        String plaintext;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            plaintext = reader.lines().collect(Collectors.joining());
        }
        logger.debug("plaintext {}", plaintext);
        assertThat(plaintext).isEqualTo(content.substring(offset));
    }
}
/**
 * Verifies tail reads for every suffix length 1..59 across AES block
 * boundaries.
 */
@Test
public void testReadTail() throws Exception {
    // Hoisted: the content is identical for every length.
    String content =
        "123456789A123456123456789B123456123456789C" +
        "123456789D123456789E12345";
    for (int length = 1; length < 60; length++) {
        logger.debug("Test with length {}", length);
        String blobName = TestUtils.createRandomBlobName();
        InputStream is = new ByteArrayInputStream(
            content.getBytes(StandardCharsets.UTF_8));
        Blob blob =
            makeBlob(encryptedBlobStore, blobName, is, content.length());
        encryptedBlobStore.putBlob(containerName, blob);
        GetOptions options = new GetOptions();
        options.tail(length);
        blob = encryptedBlobStore.getBlob(containerName, blobName, options);
        // Fix: close the streams (previously leaked) and decode explicitly
        // as UTF-8 rather than the platform-default charset.
        String plaintext;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            plaintext = reader.lines().collect(Collectors.joining());
        }
        logger.debug("plaintext {}", plaintext);
        assertThat(plaintext).isEqualTo(
            content.substring(content.length() - length));
    }
}
/**
 * Verifies inclusive range reads [offset, end] with randomized end points,
 * 100 runs over offsets 0..49.
 */
@Test
public void testReadPartialWithRandomEnd() throws Exception {
    // Hoisted: one PRNG and one content string for all iterations instead
    // of re-seeding/rebuilding inside the nested loops.
    Random rand = new Random();
    String content =
        "123456789A123456-123456789B123456-123456789C123456-" +
        "123456789D123456-123456789E123456";
    for (int run = 0; run < 100; run++) {
        for (int offset = 0; offset < 50; offset++) {
            int end = offset + rand.nextInt(20) + 2;
            int size = end - offset + 1;
            logger.debug("Test with offset {} and end {} size {}",
                offset, end, size);
            String blobName = TestUtils.createRandomBlobName();
            InputStream is = new ByteArrayInputStream(
                content.getBytes(StandardCharsets.UTF_8));
            Blob blob = makeBlob(encryptedBlobStore, blobName, is,
                content.length());
            encryptedBlobStore.putBlob(containerName, blob);
            GetOptions options = new GetOptions();
            options.range(offset, end);
            blob = encryptedBlobStore.getBlob(containerName, blobName,
                options);
            // Fix: close the streams (previously leaked) and decode
            // explicitly as UTF-8.
            String plaintext;
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(blob.getPayload().openStream(),
                        StandardCharsets.UTF_8))) {
                plaintext = reader.lines().collect(Collectors.joining());
            }
            logger.debug("plaintext {}", plaintext);
            assertThat(plaintext).hasSize(size);
            assertThat(plaintext).isEqualTo(
                content.substring(offset, end + 1));
        }
    }
}
/**
 * Verifies startAt reads of a multipart-uploaded blob at every offset
 * 0..129, crossing part and AES block boundaries.
 */
@Test
public void testMultipartReadPartial() throws Exception {
    // Hoisted: the parts are identical for every offset.
    String[] contentParts = new String[] {
        "PART1-789A123456123456789B123456123456789C1234",
        "PART2-789D123456123456789E123456123456789F123456",
        "PART3-789G123456123456789H123456123456789I123"
    };
    String content = contentParts[0] + contentParts[1] + contentParts[2];
    for (int offset = 0; offset < 130; offset++) {
        logger.debug("Test with offset {}", offset);
        String blobName = TestUtils.createRandomBlobName();
        BlobMetadata blobMetadata = makeBlob(encryptedBlobStore, blobName,
            content.getBytes(StandardCharsets.UTF_8),
            content.length()).getMetadata();
        MultipartUpload mpu =
            encryptedBlobStore.initiateMultipartUpload(containerName,
                blobMetadata, new PutOptions());
        // Part numbers are 1-based; upload in a loop instead of three
        // copies of the same payload/upload code.
        for (int i = 0; i < contentParts.length; i++) {
            Payload payload = Payloads.newByteArrayPayload(
                contentParts[i].getBytes(StandardCharsets.UTF_8));
            encryptedBlobStore.uploadMultipartPart(mpu, i + 1, payload);
        }
        List<MultipartPart> parts =
            encryptedBlobStore.listMultipartUpload(mpu);
        encryptedBlobStore.completeMultipartUpload(mpu, parts);
        GetOptions options = new GetOptions();
        options.startAt(offset);
        Blob blob =
            encryptedBlobStore.getBlob(containerName, blobName, options);
        // Fix: close the streams (previously leaked) and decode explicitly
        // as UTF-8 rather than the platform-default charset.
        String plaintext;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            plaintext = reader.lines().collect(Collectors.joining());
        }
        logger.debug("plaintext {}", plaintext);
        assertThat(plaintext).isEqualTo(content.substring(offset));
    }
}
/**
 * Verifies tail reads of a multipart-uploaded blob for every suffix length
 * 1..129, crossing part and AES block boundaries.
 */
@Test
public void testMultipartReadTail() throws Exception {
    // Hoisted: the parts are identical for every length.
    String[] contentParts = new String[] {
        "PART1-789A123456123456789B123456123456789C1234",
        "PART2-789D123456123456789E123456123456789F123456",
        "PART3-789G123456123456789H123456123456789I123"
    };
    String content = contentParts[0] + contentParts[1] + contentParts[2];
    for (int length = 1; length < 130; length++) {
        logger.debug("Test with length {}", length);
        String blobName = TestUtils.createRandomBlobName();
        BlobMetadata blobMetadata = makeBlob(encryptedBlobStore, blobName,
            content.getBytes(StandardCharsets.UTF_8),
            content.length()).getMetadata();
        MultipartUpload mpu =
            encryptedBlobStore.initiateMultipartUpload(containerName,
                blobMetadata, new PutOptions());
        // Part numbers are 1-based; upload in a loop instead of three
        // copies of the same payload/upload code.
        for (int i = 0; i < contentParts.length; i++) {
            Payload payload = Payloads.newByteArrayPayload(
                contentParts[i].getBytes(StandardCharsets.UTF_8));
            encryptedBlobStore.uploadMultipartPart(mpu, i + 1, payload);
        }
        List<MultipartPart> parts =
            encryptedBlobStore.listMultipartUpload(mpu);
        encryptedBlobStore.completeMultipartUpload(mpu, parts);
        GetOptions options = new GetOptions();
        options.tail(length);
        Blob blob =
            encryptedBlobStore.getBlob(containerName, blobName, options);
        // Fix: close the streams (previously leaked) and decode explicitly
        // as UTF-8 rather than the platform-default charset.
        String plaintext;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(blob.getPayload().openStream(),
                    StandardCharsets.UTF_8))) {
            plaintext = reader.lines().collect(Collectors.joining());
        }
        logger.debug("plaintext {}", plaintext);
        assertThat(plaintext).isEqualTo(
            content.substring(content.length() - length));
    }
}
/**
 * Verifies inclusive range reads [offset, end] of a multipart-uploaded blob
 * with randomized end points, 100 runs over offsets 0..69.
 */
@Test
public void testMultipartReadPartialWithRandomEnd() throws Exception {
    // Hoisted: one PRNG and one set of parts for all iterations instead of
    // re-seeding/rebuilding inside the nested loops.
    Random rand = new Random();
    String[] contentParts = new String[] {
        "PART1-789A123456123456789B123456123456789C1234",
        "PART2-789D123456123456789E123456123456789F123456",
        "PART3-789G123456123456789H123456123456789I123"
    };
    // total len = 139
    String content = contentParts[0] + contentParts[1] + contentParts[2];
    for (int run = 0; run < 100; run++) {
        for (int offset = 0; offset < 70; offset++) {
            int end = offset + rand.nextInt(60) + 2;
            int size = end - offset + 1;
            logger.debug("Test with offset {} and end {} size {}",
                offset, end, size);
            String blobName = TestUtils.createRandomBlobName();
            BlobMetadata blobMetadata =
                makeBlob(encryptedBlobStore, blobName,
                    content.getBytes(StandardCharsets.UTF_8),
                    content.length()).getMetadata();
            MultipartUpload mpu =
                encryptedBlobStore.initiateMultipartUpload(containerName,
                    blobMetadata, new PutOptions());
            // Part numbers are 1-based; upload in a loop instead of three
            // copies of the same payload/upload code.
            for (int i = 0; i < contentParts.length; i++) {
                Payload payload = Payloads.newByteArrayPayload(
                    contentParts[i].getBytes(StandardCharsets.UTF_8));
                encryptedBlobStore.uploadMultipartPart(mpu, i + 1, payload);
            }
            List<MultipartPart> parts =
                encryptedBlobStore.listMultipartUpload(mpu);
            encryptedBlobStore.completeMultipartUpload(mpu, parts);
            GetOptions options = new GetOptions();
            options.range(offset, end);
            Blob blob = encryptedBlobStore.getBlob(containerName, blobName,
                options);
            // Fix: close the streams (previously leaked) and decode
            // explicitly as UTF-8.
            String plaintext;
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(blob.getPayload().openStream(),
                        StandardCharsets.UTF_8))) {
                plaintext = reader.lines().collect(Collectors.joining());
            }
            logger.debug("plaintext {}", plaintext);
            assertThat(plaintext).isEqualTo(
                content.substring(offset, end + 1));
        }
    }
}
}

Wyświetl plik

@ -32,7 +32,6 @@ import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
import org.junit.Before;
import org.junit.Test;

Wyświetl plik

@ -32,7 +32,7 @@ import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.s3.reference.S3Constants;
import org.jclouds.s3.services.BucketsLiveTest;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.Test;
@Test(testName = "JcloudsBucketsLiveTest")
@ -46,10 +46,11 @@ public final class JcloudsBucketsLiveTest extends BucketsLiveTest {
private BlobStoreContext context;
private String blobStoreType;
@AfterClass
public void tearDown() throws Exception {
s3Proxy.stop();
@AfterSuite
@Override
public void destroyResources() throws Exception {
context.close();
s3Proxy.stop();
}
@Override

Wyświetl plik

@ -27,7 +27,7 @@ import org.jclouds.blobstore.domain.Blob;
import org.jclouds.s3.blobstore.integration.S3BlobIntegrationLiveTest;
import org.jclouds.s3.reference.S3Constants;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.Test;
@Test(testName = "JcloudsS3BlobIntegrationLiveTest")
@ -42,10 +42,11 @@ public final class JcloudsS3BlobIntegrationLiveTest
private BlobStoreContext context;
private String blobStoreType;
@AfterClass
public void tearDown() throws Exception {
s3Proxy.stop();
@AfterSuite
@Override
public void destroyResources() throws Exception {
context.close();
s3Proxy.stop();
}
@Override

Wyświetl plik

@ -25,7 +25,7 @@ import org.jclouds.Constants;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.s3.blobstore.integration.S3BlobSignerLiveTest;
import org.jclouds.s3.reference.S3Constants;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.Test;
@Test(testName = "JcloudsS3BlobSignerLiveTest")
@ -38,10 +38,11 @@ public final class JcloudsS3BlobSignerLiveTest extends S3BlobSignerLiveTest {
private S3Proxy s3Proxy;
private BlobStoreContext context;
@AfterClass
public void tearDown() throws Exception {
s3Proxy.stop();
@AfterSuite
@Override
public void destroyResources() throws Exception {
context.close();
s3Proxy.stop();
}
@Override

Wyświetl plik

@ -36,7 +36,7 @@ import org.jclouds.s3.S3ClientLiveTest;
import org.jclouds.s3.domain.S3Object;
import org.jclouds.s3.reference.S3Constants;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.Test;
@Test(testName = "JcloudsS3ClientLiveTest")
@ -50,10 +50,11 @@ public final class JcloudsS3ClientLiveTest extends S3ClientLiveTest {
private BlobStoreContext context;
private String blobStoreType;
@AfterClass
public void tearDown() throws Exception {
s3Proxy.stop();
@AfterSuite
@Override
public void destroyResources() throws Exception {
context.close();
s3Proxy.stop();
}
@Override

Wyświetl plik

@ -26,7 +26,7 @@ import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.s3.blobstore.integration.S3ContainerIntegrationLiveTest;
import org.jclouds.s3.reference.S3Constants;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.Test;
@Test(testName = "JcloudsS3ContainerIntegrationLiveTest")
@ -41,10 +41,11 @@ public final class JcloudsS3ContainerIntegrationLiveTest
private BlobStoreContext context;
private String blobStoreType;
@AfterClass
public void tearDown() throws Exception {
s3Proxy.stop();
@AfterSuite
@Override
public void destroyResources() throws Exception {
context.close();
s3Proxy.stop();
}
@Override

Wyświetl plik

@ -20,13 +20,13 @@ import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.Random;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteSource;
import com.google.common.io.ByteStreams;
import com.google.common.net.MediaType;
import com.google.inject.Module;
@ -91,10 +91,10 @@ public final class NullBlobStoreTest {
// content differs, only compare length
try (InputStream actual = blob.getPayload().openStream();
InputStream expected = BYTE_SOURCE.openStream()) {
long actualLength = ByteStreams.copy(actual,
ByteStreams.nullOutputStream());
long expectedLength = ByteStreams.copy(expected,
ByteStreams.nullOutputStream());
long actualLength = actual.transferTo(
OutputStream.nullOutputStream());
long expectedLength = expected.transferTo(
OutputStream.nullOutputStream());
assertThat(actualLength).isEqualTo(expectedLength);
}
@ -157,10 +157,10 @@ public final class NullBlobStoreTest {
// content differs, only compare length
try (InputStream actual = newBlob.getPayload().openStream();
InputStream expected = byteSource.openStream()) {
long actualLength = ByteStreams.copy(actual,
ByteStreams.nullOutputStream());
long expectedLength = ByteStreams.copy(expected,
ByteStreams.nullOutputStream());
long actualLength = actual.transferTo(
OutputStream.nullOutputStream());
long expectedLength = expected.transferTo(
OutputStream.nullOutputStream());
assertThat(actualLength).isEqualTo(expectedLength);
}

Wyświetl plik

@ -0,0 +1,141 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.io.InputStream;
import java.util.AbstractMap.SimpleEntry;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.regex.Pattern;
import com.google.common.collect.ImmutableList;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteSource;
import com.google.inject.Module;
import org.assertj.core.api.Assertions;
import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@code RegexBlobStore}: blob names are rewritten by configured
 * regular-expression rules before delegating to the backing store.
 */
public final class RegexBlobStoreTest {
    private BlobStoreContext context;
    private BlobStore delegate;
    private String containerName;

    @Before
    public void setUp() throws Exception {
        containerName = createRandomContainerName();
        context = ContextBuilder
            .newBuilder("transient")
            .credentials("identity", "credential")
            .modules(ImmutableList.<Module>of(new SLF4JLoggingModule()))
            .build(BlobStoreContext.class);
        delegate = context.getBlobStore();
        delegate.createContainerInLocation(null, containerName);
    }

    @After
    public void tearDown() throws Exception {
        if (context != null) {
            delegate.deleteContainer(containerName);
            context.close();
        }
    }

    @Test
    public void testRemoveSomeCharsFromName() throws IOException {
        // Single rule: any character outside [a-zA-Z0-9/_.] becomes "_".
        ImmutableList.Builder<Map.Entry<Pattern, String>> rules =
            new ImmutableList.Builder<>();
        rules.add(new SimpleEntry<Pattern, String>(
            Pattern.compile("[^a-zA-Z0-9/_.]"), "_"));
        BlobStore regexBlobStore = RegexBlobStore.newRegexBlobStore(
            delegate, rules.build());

        String initialBlobName = "test/remove:badchars-folder/blob.txt";
        String targetBlobName = "test/remove_badchars_folder/blob.txt";
        ByteSource payload = TestUtils.randomByteSource().slice(0, 1024);
        @SuppressWarnings("deprecation")
        String contentHash =
            Hashing.md5().hashBytes(payload.read()).toString();

        Blob blob = regexBlobStore.blobBuilder(initialBlobName)
            .payload(payload).build();
        String eTag = regexBlobStore.putBlob(containerName, blob);
        assertThat(eTag).isEqualTo(contentHash);

        // The blob is stored under the rewritten name...
        BlobMetadata blobMetadata = regexBlobStore.blobMetadata(
            containerName, targetBlobName);
        assertThat(blobMetadata.getETag()).isEqualTo(contentHash);
        blob = regexBlobStore.getBlob(containerName, targetBlobName);
        try (InputStream actual = blob.getPayload().openStream();
                InputStream expected = payload.openStream()) {
            assertThat(actual).hasContentEqualTo(expected);
        }
        // ...and stays reachable via the original name, which is rewritten
        // again on read.
        blob = regexBlobStore.getBlob(containerName, initialBlobName);
        try (InputStream actual = blob.getPayload().openStream();
                InputStream expected = payload.openStream()) {
            assertThat(actual).hasContentEqualTo(expected);
        }
    }

    @Test
    public void testParseMatchWithoutReplace() {
        // Two "match" entries but only one "replace": parsing must reject
        // the dangling sample2 rule.
        Properties properties = new Properties();
        properties.put(
            String.format("%s.%s.sample1",
                S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE,
                S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE_MATCH),
            "test");
        properties.put(
            String.format("%s.%s.sample2",
                S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE,
                S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE_MATCH),
            "test");
        properties.put(
            String.format("%s.%s.sample1",
                S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE,
                S3ProxyConstants.PROPERTY_REGEX_BLOBSTORE_REPLACE),
            "test");
        try {
            RegexBlobStore.parseRegexs(properties);
            Assertions.failBecauseExceptionWasNotThrown(
                IllegalArgumentException.class);
        } catch (IllegalArgumentException exc) {
            assertThat(exc.getMessage()).isEqualTo(
                "Regex sample2 has no replace property associated");
        }
    }

    private static String createRandomContainerName() {
        return "container-" + new Random().nextInt(Integer.MAX_VALUE);
    }
}

Wyświetl plik

@ -35,7 +35,6 @@ import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.CopyOptions;
import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

Wyświetl plik

@ -34,7 +34,6 @@ import com.google.common.io.Resources;
import com.google.inject.Module;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.jclouds.Constants;
import org.jclouds.ContextBuilder;
import org.jclouds.JcloudsVersion;
@ -188,6 +187,14 @@ final class TestUtils {
BlobStoreContext context = builder.build(BlobStoreContext.class);
info.blobStore = context.getBlobStore();
String encrypted = info.getProperties().getProperty(
S3ProxyConstants.PROPERTY_ENCRYPTED_BLOBSTORE);
if (encrypted != null && encrypted.equals("true")) {
info.blobStore =
EncryptedBlobStore.newEncryptedBlobStore(info.blobStore,
info.getProperties());
}
S3Proxy.Builder s3ProxyBuilder = S3Proxy.Builder.fromProperties(
info.getProperties());
s3ProxyBuilder.blobStore(info.blobStore);

Wyświetl plik

@ -0,0 +1,106 @@
/*
* Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gaul.s3proxy.junit;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.List;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
/**
 * This is an example of how one would use the S3Proxy JUnit extension in a unit
 * test as opposed to a proper test of the S3ProxyExtension class.
 */
public class S3ProxyExtensionTest {
    /** Shared S3Proxy instance, started once before all tests in this class. */
    @RegisterExtension
    static final S3ProxyExtension EXTENSION = S3ProxyExtension
            .builder()
            .withCredentials("access", "secret")
            .build();

    private static final String MY_TEST_BUCKET = "my-test-bucket";

    private AmazonS3 s3Client;

    /**
     * Builds an AWS SDK client pointed at the local S3Proxy endpoint and
     * creates the bucket used by the tests.
     */
    @BeforeEach
    public final void setUp() throws Exception {
        s3Client = AmazonS3ClientBuilder
                .standard()
                .withCredentials(
                        new AWSStaticCredentialsProvider(
                                new BasicAWSCredentials(
                                        EXTENSION.getAccessKey(), EXTENSION.getSecretKey())))
                .withEndpointConfiguration(
                        new AwsClientBuilder.EndpointConfiguration(
                                EXTENSION.getUri().toString(), Regions.US_EAST_1.getName()))
                .build();
        s3Client.createBucket(MY_TEST_BUCKET);
    }

    /** The only bucket visible should be the one created in setUp(). */
    @Test
    public final void listBucket() {
        List<Bucket> buckets = s3Client.listBuckets();
        assertThat(buckets).hasSize(1);
        assertThat(buckets.get(0).getName())
                .isEqualTo(MY_TEST_BUCKET);
    }

    /** Uploading a string object should be listed back with matching key and size. */
    @Test
    public final void uploadFile() throws Exception {
        String testInput = "content";
        s3Client.putObject(MY_TEST_BUCKET, "file.txt", testInput);

        List<S3ObjectSummary> summaries = s3Client
                .listObjects(MY_TEST_BUCKET)
                .getObjectSummaries();
        assertThat(summaries).hasSize(1);
        assertThat(summaries.get(0).getKey()).isEqualTo("file.txt");
        assertThat(summaries.get(0).getSize()).isEqualTo(testInput.length());
    }

    @Test
    public final void doesBucketExistV2() {
        assertThat(s3Client.doesBucketExistV2(MY_TEST_BUCKET)).isTrue();
        // Issue #299
        assertThat(s3Client.doesBucketExistV2("nonexistingbucket")).isFalse();
    }

    /**
     * An extension built without credentials exposes null accessors rather
     * than throwing.  (Method name fixed: "Extention" -> "Extension".)
     */
    @Test
    public final void createExtensionWithoutCredentials() {
        S3ProxyExtension extension = S3ProxyExtension
                .builder()
                .build();
        assertThat(extension.getAccessKey()).isNull();
        assertThat(extension.getSecretKey()).isNull();
        assertThat(extension.getUri()).isNull();
    }
}

Wyświetl plik

@ -95,4 +95,13 @@ public class S3ProxyRuleTest {
assertThat(s3Client.doesBucketExistV2("nonexistingbucket")).isFalse();
}
/**
 * A rule built without credentials exposes null accessors rather than
 * throwing.  (Method name fixed: "Extention" -> "Extension"; the local was
 * also renamed from "extension" since this is an S3ProxyRule, not an
 * extension.)
 */
@Test
public final void createExtensionWithoutCredentials() {
    S3ProxyRule rule = S3ProxyRule
            .builder()
            .build();
    assertThat(rule.getAccessKey()).isNull();
    assertThat(rule.getSecretKey()).isNull();
    assertThat(rule.getUri()).isNull();
}
}

Wyświetl plik

@ -0,0 +1,20 @@
# S3Proxy test configuration.
# Port 0 lets the OS assign an ephemeral port for each endpoint.
s3proxy.endpoint=http://127.0.0.1:0
s3proxy.secure-endpoint=https://127.0.0.1:0
#s3proxy.service-path=s3proxy
# authorization must be aws-v2, aws-v4, aws-v2-or-v4, or none
s3proxy.authorization=aws-v2-or-v4
# Credentials clients must present to the proxy (test values only).
s3proxy.identity=local-identity
s3proxy.credential=local-credential
# Keystore backing the secure endpoint above (test keystore).
s3proxy.keystore-path=keystore.jks
s3proxy.keystore-password=password
# Backing blobstore: in-memory transient provider, so no endpoint is needed.
jclouds.provider=transient
jclouds.identity=remote-identity
jclouds.credential=remote-credential
# endpoint is optional for some providers
#jclouds.endpoint=http://127.0.0.1:8081
# basedir only applies when jclouds.provider=filesystem.
jclouds.filesystem.basedir=/tmp/blobstore
# Enable transparent encryption of blobs; password/salt are test fixtures —
# do not reuse in production.
s3proxy.encrypted-blobstore=true
s3proxy.encrypted-blobstore-password=1234567890123456
s3proxy.encrypted-blobstore-salt=12345678