Initial commit; quite sloppy, but it works!

trunk
Una Thompson 2019-09-05 01:55:17 -07:00
commit 464cfc8ec8
11 changed files with 765 additions and 0 deletions

12
.gitignore vendored 100644
View file

@@ -0,0 +1,12 @@
.classpath
.gradle
.project
.metadata
.settings
/bin
/build
hs_err_pid*.log
*.iml
*.DS_Store
.idea/
data.mv

9
README.md 100644
View file

@@ -0,0 +1,9 @@
# Jortage Proxy
A miniature S3-to-S3 proxy that accepts incoming requests, hashes each uploaded
file, stores the file's requested path along with its hash in an MVStore
key-value store, and then uploads the original file to a backing S3 server under
a key derived from the hash, providing limited deduplication.

It rejects any attempt to delete files, as it is designed for Project Jortage, a
fediverse storage pool: Mastodon never attempts to delete media, and allowing
deletions would let users yank media that isn't theirs.
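
As a rough sketch of the scheme (the access key, container, and file name below are hypothetical; the two helpers mirror `buildKey` and `hashToPath` from `JortageProxy`), an upload and where it ends up can be traced like this:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class SchemeSketch {
	// Mirrors JortageProxy.buildKey: the key recorded in the MVStore for a user's upload.
	static String buildKey(String identity, String container, String name) {
		return identity + ":" + container + ":" + name;
	}

	// Mirrors JortageProxy.hashToPath: where the deduplicated blob lives in the backing bucket.
	static String hashToPath(String hash) {
		return "blobs/" + hash.substring(0, 1) + "/" + hash.substring(1, 4) + "/" + hash;
	}

	public static void main(String[] args) throws Exception {
		// Hypothetical upload: access key "AKIAEXAMPLE" puts "avatar.png" into container "media".
		byte[] digest = MessageDigest.getInstance("SHA-512")
				.digest("example file contents".getBytes(StandardCharsets.UTF_8));
		StringBuilder hash = new StringBuilder();
		for (byte b : digest) hash.append(String.format("%02x", b));

		System.out.println(buildKey("AKIAEXAMPLE", "media", "avatar.png") + " -> " + hash);
		System.out.println("stored in the backing bucket at: " + hashToPath(hash.toString()));
	}
}

Two users uploading byte-identical files thus map different keys to the same blobs/ path, which is the whole of the deduplication.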

18
build.gradle 100644
View file

@@ -0,0 +1,18 @@
plugins {
	id 'java'
	id 'us.kirchmeier.capsule' version '1.0.2'
}

repositories {
	mavenCentral()
}

dependencies {
	implementation 'blue.endless:jankson:1.1.2'
	implementation 'com.h2database:h2-mvstore:1.4.199'
	implementation 'org.gaul:s3proxy:1.6.1'
}

task capsule(type: FatCapsule) {
	applicationClass 'com.jortage.proxy.JortageProxy'
}

13
config.jkson 100644
View file

@@ -0,0 +1,13 @@
{
	backend: {
		endpoint: "https://sfo2.digitaloceanspaces.com"
		accessKeyId: "ACCESS_KEY_ID"
		secretAccessKey: "SECRET_ACCESS_KEY"
		bucket: "mybucket"
		publicHost: "https://sfo2.digitaloceanspaces.com/mybucket"
	}
	users: {
		// ACCESS_KEY_ID: "SECRET_ACCESS_KEY"
		test: "test"
	}
}

BIN
gradle/wrapper/gradle-wrapper.jar vendored 100644

Binary file not shown.

5
gradle/wrapper/gradle-wrapper.properties vendored 100644
View file

@@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.6-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

172
gradlew vendored 100755
View file

@@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

84
gradlew.bat vendored 100644
View file

@@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

280
src/main/java/com/jortage/proxy/JortageBlobStore.java 100644
View file

@@ -0,0 +1,280 @@
package com.jortage.proxy;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.blobstore.domain.BlobAccess;
import org.jclouds.blobstore.domain.BlobBuilder;
import org.jclouds.blobstore.domain.BlobMetadata;
import org.jclouds.blobstore.domain.ContainerAccess;
import org.jclouds.blobstore.domain.MultipartPart;
import org.jclouds.blobstore.domain.MultipartUpload;
import org.jclouds.blobstore.domain.PageSet;
import org.jclouds.blobstore.domain.StorageMetadata;
import org.jclouds.blobstore.options.CopyOptions;
import org.jclouds.blobstore.options.CreateContainerOptions;
import org.jclouds.blobstore.options.GetOptions;
import org.jclouds.blobstore.options.ListContainerOptions;
import org.jclouds.blobstore.options.PutOptions;
import org.jclouds.blobstore.util.ForwardingBlobStore;
import org.jclouds.domain.Location;
import org.jclouds.io.Payload;
import org.jclouds.io.payloads.FilePayload;
import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import com.google.common.io.ByteStreams;
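// A per-identity view over the single backing bucket: reads are remapped through the shared
// paths table, uploads are content-addressed by SHA-512 hash, and deletes, listings, and
// container management are refused.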
public class JortageBlobStore extends ForwardingBlobStore {
private final String identity;
private final String bucket;
private final Map<String, String> paths;
public JortageBlobStore(BlobStore blobStore, String bucket, String identity, Map<String, String> paths) {
super(blobStore);
this.bucket = bucket;
this.identity = identity;
this.paths = paths;
}
private String buildKey(String container, String name) {
return JortageProxy.buildKey(identity, container, name);
}
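// Resolve a caller-visible (container, name) pair to the blobs/<hash> path recorded for this identity.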
private String map(String container, String name) {
String hash = paths.get(buildKey(container, name));
if (hash == null) throw new IllegalArgumentException("Not found");
return JortageProxy.hashToPath(hash);
}
@Override
public BlobStoreContext getContext() {
return delegate().getContext();
}
@Override
public BlobBuilder blobBuilder(String name) {
return delegate().blobBuilder(name);
}
@Override
public Blob getBlob(String container, String name) {
return delegate().getBlob(bucket, map(container, name));
}
@Override
public Blob getBlob(String container, String name, GetOptions getOptions) {
return delegate().getBlob(bucket, map(container, name), getOptions);
}
@Override
public BlobAccess getBlobAccess(String container, String name) {
return BlobAccess.PUBLIC_READ;
}
@Override
public PageSet<? extends StorageMetadata> list() {
throw new UnsupportedOperationException();
}
@Override
public PageSet<? extends StorageMetadata> list(String container) {
throw new UnsupportedOperationException();
}
@Override
public PageSet<? extends StorageMetadata> list(String container,
ListContainerOptions options) {
throw new UnsupportedOperationException();
}
@Override
public ContainerAccess getContainerAccess(String container) {
return ContainerAccess.PUBLIC_READ;
}
@Override
public boolean blobExists(String container, String name) {
return delegate().blobExists(bucket, map(container, name));
}
@Override
public BlobMetadata blobMetadata(String container, String name) {
return delegate().blobMetadata(bucket, map(container, name));
}
@Override
public boolean directoryExists(String container, String directory) {
throw new UnsupportedOperationException();
}
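// Spool the payload to a temp file while hashing it with SHA-512. If a blob with that hash
// already exists in the backing bucket the upload is skipped and its ETag returned; otherwise
// the file is uploaded publicly readable under the hash-derived path and the
// identity:container:name -> hash mapping is recorded.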
@Override
public String putBlob(String container, Blob blob) {
File tempFile = null;
try {
File f = File.createTempFile("jortage-proxy-", ".dat");
tempFile = f;
String contentType = blob.getPayload().getContentMetadata().getContentType();
String hash;
try (InputStream is = blob.getPayload().openStream();
FileOutputStream fos = new FileOutputStream(f)) {
HashingOutputStream hos = new HashingOutputStream(Hashing.sha512(), fos);
ByteStreams.copy(is, hos);
hash = hos.hash().toString();
}
try (Payload payload = new FilePayload(f)) {
payload.getContentMetadata().setContentType(contentType);
if (delegate().blobExists(bucket, JortageProxy.hashToPath(hash))) {
return delegate().blobMetadata(bucket, JortageProxy.hashToPath(hash)).getETag();
}
Blob blob2 = blobBuilder(JortageProxy.hashToPath(hash))
.payload(payload)
.userMetadata(blob.getMetadata().getUserMetadata())
.build();
String etag = delegate().putBlob(bucket, blob2, new PutOptions().setBlobAccess(BlobAccess.PUBLIC_READ));
paths.put(buildKey(container, blob.getMetadata().getName()), hash);
return etag;
}
} catch (IOException e) {
throw new UncheckedIOException(e);
} finally {
if (tempFile != null) tempFile.delete();
}
}
@Override
public String putBlob(String containerName, Blob blob, PutOptions putOptions) {
if (putOptions.isMultipart()) throw new UnsupportedOperationException("multipart blobs not supported");
return putBlob(containerName, blob);
}
@Override
public boolean createContainerInLocation(Location location,
String container) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public boolean createContainerInLocation(Location location,
String container, CreateContainerOptions createContainerOptions) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void setContainerAccess(String container, ContainerAccess
containerAccess) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void clearContainer(String container) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void clearContainer(String container, ListContainerOptions options) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void deleteContainer(String container) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public boolean deleteContainerIfEmpty(String container) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void createDirectory(String container, String directory) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void deleteDirectory(String container, String directory) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public String copyBlob(String fromContainer, String fromName, String toContainer, String toName,
CopyOptions options) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void removeBlob(String container, String name) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void removeBlobs(String container, Iterable<String> iterable) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void setBlobAccess(String container, String name,
BlobAccess access) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public MultipartUpload initiateMultipartUpload(String container, BlobMetadata blobMetadata, PutOptions options) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void abortMultipartUpload(MultipartUpload mpu) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public String completeMultipartUpload(MultipartUpload mpu, List<MultipartPart> parts) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public MultipartPart uploadMultipartPart(MultipartUpload mpu, int partNumber, Payload payload) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public List<MultipartPart> listMultipartUpload(MultipartUpload mpu) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
// TODO: should ReadOnlyBlobStore allow listing parts and uploads?
@Override
public List<MultipartUpload> listMultipartUploads(String container) {
throw new UnsupportedOperationException("Read-only BlobStore");
}
@Override
public void downloadBlob(String container, String name, File destination) {
throw new UnsupportedOperationException();
}
@Override
public void downloadBlob(String container, String name, File destination, ExecutorService executor) {
throw new UnsupportedOperationException();
}
@Override
public InputStream streamBlob(String container, String name) {
throw new UnsupportedOperationException();
}
@Override
public InputStream streamBlob(String container, String name, ExecutorService executor) {
throw new UnsupportedOperationException();
}
}

155
src/main/java/com/jortage/proxy/JortageProxy.java 100644
View file

@@ -0,0 +1,155 @@
package com.jortage.proxy;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Map.Entry;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.gaul.s3proxy.AuthenticationType;
import org.gaul.s3proxy.BlobStoreLocator;
import org.gaul.s3proxy.S3Proxy;
import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;
import org.h2.mvstore.type.StringDataType;
import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import blue.endless.jankson.Jankson;
import blue.endless.jankson.JsonObject;
import blue.endless.jankson.JsonPrimitive;
public class JortageProxy {
private static final Splitter SPLITTER = Splitter.on('/').limit(3).omitEmptyStrings();
private static final File configFile = new File("config.jkson");
private static JsonObject config;
private static long configFileLastLoaded;
private static BlobStore backingBlobStore;
private static String bucket;
private static String publicHost;
public static void main(String[] args) throws Exception {
reloadConfig();
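// data.mv persistently maps identity:container:name keys to the SHA-512 hash of the stored blob.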
MVStore store = new MVStore.Builder()
.fileName("data.mv")
.compress()
.open();
MVMap<String, String> paths = store.openMap("paths", new MVMap.Builder<String, String>()
.keyType(new StringDataType())
.valueType(new StringDataType()));
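// s3proxy fronts the backing store with an S3-compatible endpoint on localhost:23278. The DUMMY
// credentials are placeholders; the locator below supplies each configured user's real secret
// and hands back a JortageBlobStore scoped to that identity.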
S3Proxy s3Proxy = S3Proxy.builder()
.awsAuthentication(AuthenticationType.AWS_V4, "DUMMY", "DUMMY")
.endpoint(URI.create("http://localhost:23278"))
.build();
s3Proxy.setBlobStoreLocator(new BlobStoreLocator() {
@Override
public Entry<String, BlobStore> locateBlobStore(String identity, String container, String blob) {
System.out.println("identity: "+identity);
if (System.currentTimeMillis()-configFileLastLoaded > 500 && configFile.lastModified() > configFileLastLoaded) reloadConfig();
if (config.containsKey("users") && config.getObject("users").containsKey(identity)) {
return Maps.immutableEntry(((JsonPrimitive)config.getObject("users").get(identity)).asString(), new JortageBlobStore(backingBlobStore, bucket, identity, paths));
} else {
throw new RuntimeException("Access denied");
}
}
});
s3Proxy.start();
System.err.println("S3 listening on localhost:23278");
Server redir = new Server(new InetSocketAddress("localhost", 23279));
redir.setHandler(new AbstractHandler() {
@Override
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
baseRequest.setHandled(true);
if ("/".equals(target) || "/index.html".equals(target) || "".equals(target)) {
response.setHeader("Location", "https://jortage.com");
response.setStatus(301);
return;
}
List<String> split = SPLITTER.splitToList(target);
if (split.size() != 3) {
response.sendError(400);
return;
} else {
for (Map.Entry<String, String> en : paths.entrySet()) {
System.out.println(en);
}
String identity = split.get(0);
String container = split.get(1);
String name = split.get(2);
String key = buildKey(identity, container, name);
if (paths.containsKey(key)) {
response.setHeader("Location", publicHost+"/"+hashToPath(paths.get(key)));
response.setStatus(301);
return;
} else {
response.sendError(404);
return;
}
}
}
});
redir.start();
System.err.println("Redirector listening on localhost:23279");
while (true) {
Thread.sleep(15000);
store.commit();
}
}
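// (Re)load config.jkson and rebuild the jclouds S3 client for the configured backend;
// if loading fails, the previous config stays in effect.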
private static void reloadConfig() {
try {
config = Jankson.builder().build().load(configFile);
configFileLastLoaded = System.currentTimeMillis();
bucket = ((JsonPrimitive)config.getObject("backend").get("bucket")).asString();
publicHost = ((JsonPrimitive)config.getObject("backend").get("publicHost")).asString();
Properties props = new Properties();
props.put("jclouds.wire", "debug");
backingBlobStore = ContextBuilder.newBuilder("s3")
.credentials(((JsonPrimitive)config.getObject("backend").get("accessKeyId")).asString(), ((JsonPrimitive)config.getObject("backend").get("secretAccessKey")).asString())
.modules(ImmutableList.of(new SLF4JLoggingModule()))
.endpoint(((JsonPrimitive)config.getObject("backend").get("endpoint")).asString())
.overrides(props)
.build(BlobStoreContext.class)
.getBlobStore();
System.err.println("Config file reloaded.");
} catch (Exception e) {
e.printStackTrace();
System.err.println("Failed to reload config. The config is not changed.");
}
}
public static String buildKey(String identity, String container, String name) {
return identity+":"+container+":"+name;
}
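// Fan blobs out by hash prefix: blobs/<first hex char>/<next three chars>/<full hash>.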
public static String hashToPath(String hash) {
return "blobs/"+hash.substring(0, 1)+"/"+hash.substring(1, 4)+"/"+hash;
}
}

17
src/main/resources/logback.xml 100644
View file

@@ -0,0 +1,17 @@
<configuration>
	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
		<encoder>
			<pattern>%d{MM-dd HH:mm:ss.SSS} %p %t %c{30}:%L %X{clientId}|%X{sessionId}:%X{messageId}:%X{fileId}] %m%n</pattern>
		</encoder>
		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
			<level>${LOG_LEVEL:-info}</level>
		</filter>
	</appender>

	<logger name="org.eclipse.jetty" level="${JETTY_LOG_LEVEL:-info}" />
	<logger name="org.gaul.shaded.org.eclipse.jetty" level="${JETTY_LOG_LEVEL:-info}" />

	<root level="${LOG_LEVEL:-info}">
		<appender-ref ref="STDOUT" />
	</root>
</configuration>