Mirror of https://gitlab.com/mangadex-pub/mangadex_at_home.git
Rc4 release
- use OpenSSL to reduce memory footprint
- invalidate everyone's caches again (in order to fix 128-bit SSL on older JREs)
- add more logging
- hide secret printouts
- rebrand
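For context, a rough Java sketch of the per-image cache-key and RC4 scheme that the Kotlin changes at the bottom of this diff introduce (method names mirror md5Bytes/printHexString/getRc4 from the diff; the wrapper class is illustrative only, not part of this commit):

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import javax.crypto.Cipher;
    import javax.crypto.spec.SecretKeySpec;

    final class CacheKeySketch {
        // MD5 of the image path; a "saver" prefix distinguishes data-saver images.
        static byte[] md5Bytes(String stringToHash) throws NoSuchAlgorithmException {
            return MessageDigest.getInstance("MD5").digest(stringToHash.getBytes(StandardCharsets.UTF_8));
        }

        // Hex-encode the digest; this string becomes the DiskLruCache key.
        static String printHexString(byte[] bytes) {
            StringBuilder sb = new StringBuilder();
            for (byte b : bytes) {
                sb.append(String.format("%02x", b));
            }
            return sb.toString();
        }

        // RC4 cipher keyed with the raw 16-byte digest, used to encrypt cached images at rest.
        static Cipher getRc4(byte[] key) throws Exception {
            Cipher rc4 = Cipher.getInstance("RC4");
            rc4.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(key, "RC4"));
            return rc4;
        }
    }

The DiskLruCache.open(new File("cache"), 2, 3, ...) to (..., 3, 3, ...) change further down bumps the cache's version argument, which appears to be what forces the cache invalidation mentioned above.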
parent a92ee85dd7
commit d9fb96c08d
build.gradle (20 changes)
@@ -1,14 +1,14 @@
 plugins {
-    id 'java'
-    id 'org.jetbrains.kotlin.jvm' version '1.3.72'
-    id 'application'
-    id 'com.github.johnrengelman.shadow' version '5.2.0'
+    id "java"
+    id "org.jetbrains.kotlin.jvm" version "1.3.72"
+    id "application"
+    id "com.github.johnrengelman.shadow" version "5.2.0"
+    id "com.diffplug.gradle.spotless" version "3.18.0"
 }

-group = 'com.mangadex'
-version = '1.0.0-rc3'
-mainClassName = 'mdnet.base.MangadexClient'
+group = "com.mangadex"
+version = "1.0.0-rc4"
+mainClassName = "mdnet.base.MangaDexClient"

 repositories {
     mavenCentral()
@@ -16,16 +16,16 @@ repositories {
 }

 dependencies {
-    implementation group: 'com.konghq', name: 'unirest-java', version: '3.7.02'
     implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8"
+    implementation group: "com.konghq", name: "unirest-java", version: "3.7.02"

     implementation group: "org.http4k", name: "http4k-core", version: "$http_4k_version"
     implementation group: "org.http4k", name: "http4k-server-netty", version: "$http_4k_version"
     implementation group: "org.http4k", name: "http4k-client-apache", version: "$http_4k_version"
-    implementation group: 'commons-io', name: 'commons-io', version: '2.7'
+    implementation group: "commons-io", name: "commons-io", version: "2.7"

     implementation "ch.qos.logback:logback-classic:$logback_version"
     runtimeOnly 'io.netty:netty-tcnative-boringssl-static:2.0.30.Final'
 }

 java {
@@ -50,7 +50,7 @@ public final class ClientSettings {
     public String toString() {
         return "ClientSettings{" + "maxCacheSizeMib=" + maxCacheSizeMib + ", maxBandwidthMibPerHour="
                 + maxBandwidthMibPerHour + ", maxBurstRateKibPerSecond=" + maxBurstRateKibPerSecond + ", clientPort="
-                + clientPort + ", clientSecret='" + clientSecret + '\'' + '}';
+                + clientPort + ", clientSecret='" + "<hidden>" + '\'' + '}';
     }

     public static boolean isSecretValid(String clientSecret) {
@@ -16,8 +16,8 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;

-public class MangadexClient {
-    private final static Logger LOGGER = LoggerFactory.getLogger(MangadexClient.class);
+public class MangaDexClient {
+    private final static Logger LOGGER = LoggerFactory.getLogger(MangaDexClient.class);

     // This lock protects the Http4kServer from concurrent restart attempts
     private final Object shutdownLock = new Object();
@@ -31,16 +31,16 @@ public class MangadexClient {
     private Http4kServer engine;
     private DiskLruCache cache;

-    public MangadexClient(ClientSettings clientSettings) {
+    public MangaDexClient(ClientSettings clientSettings) {
         this.clientSettings = clientSettings;
         this.serverHandler = new ServerHandler(clientSettings);
         this.statistics = new AtomicReference<>();

         try {
-            cache = DiskLruCache.open(new File("cache"), 2, 3,
+            cache = DiskLruCache.open(new File("cache"), 3, 3,
                     clientSettings.getMaxCacheSizeMib() * 1024 * 1024 /* MiB to bytes */);
         } catch (IOException e) {
-            MangadexClient.dieWithError(e);
+            MangaDexClient.dieWithError(e);
         }
     }

@@ -73,10 +73,11 @@ public class MangadexClient {
             }
             statistics.set(new Statistics());

-            if (LOGGER.isInfoEnabled()) {
-                LOGGER.info("Restarting server stopped due to hourly bandwidth limit");
-            }
+            if (engine == null) {
+                if (LOGGER.isInfoEnabled()) {
+                    LOGGER.info("Restarting server stopped due to hourly bandwidth limit");
+                }

                 loginAndStartServer();
             }
         } else {
@@ -132,7 +133,7 @@ public class MangadexClient {
     private void loginAndStartServer() {
         serverSettings = serverHandler.loginToControl();
         if (serverSettings == null) {
-            MangadexClient.dieWithError("Failed to get a login response from server - check API secret for validity");
+            MangaDexClient.dieWithError("Failed to get a login response from server - check API secret for validity");
         }
         engine = ApplicationKt.getServer(cache, serverSettings, clientSettings, statistics);
         engine.start();
@@ -168,38 +169,38 @@ public class MangadexClient {
     public static void main(String[] args) {
         System.out.println("Mangadex@Home Client " + Constants.CLIENT_VERSION + " (Build " + Constants.CLIENT_BUILD
                 + ") initializing\n");
-        System.out.println("Copyright (c) 2020, Mangadex");
+        System.out.println("Copyright (c) 2020, MangaDex Network");

         try {
             String file = "settings.json";
             if (args.length == 1) {
                 file = args[0];
             } else if (args.length != 0) {
-                MangadexClient.dieWithError("Expected one argument: path to config file, or nothing");
+                MangaDexClient.dieWithError("Expected one argument: path to config file, or nothing");
             }

             ClientSettings settings = new Gson().fromJson(new FileReader(file), ClientSettings.class);

             if (!ClientSettings.isSecretValid(settings.getClientSecret()))
-                MangadexClient.dieWithError("Config Error: API Secret is invalid, must be 52 alphanumeric characters");
+                MangaDexClient.dieWithError("Config Error: API Secret is invalid, must be 52 alphanumeric characters");

             if (settings.getClientPort() == 0) {
-                MangadexClient.dieWithError("Config Error: Invalid port number");
+                MangaDexClient.dieWithError("Config Error: Invalid port number");
             }

             if (settings.getMaxCacheSizeMib() < 1024) {
-                MangadexClient.dieWithError("Config Error: Invalid max cache size, must be >= 1024 MiB (1GiB)");
+                MangaDexClient.dieWithError("Config Error: Invalid max cache size, must be >= 1024 MiB (1GiB)");
             }

             if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Client settings loaded: {}", settings);
             }

-            MangadexClient client = new MangadexClient(settings);
+            MangaDexClient client = new MangaDexClient(settings);
             Runtime.getRuntime().addShutdownHook(new Thread(client::shutdown));
             client.runLoop();
         } catch (FileNotFoundException e) {
-            MangadexClient.dieWithError(e);
+            MangaDexClient.dieWithError(e);
         }
     }

@@ -83,8 +83,8 @@ public final class ServerSettings {

         @Override
         public String toString() {
-            return "TlsCert{" + "createdAt='" + createdAt + '\'' + ", privateKey='" + privateKey + '\''
-                    + ", certificate='" + certificate + '\'' + '}';
+            return "TlsCert{" + "createdAt='" + createdAt + '\'' + ", privateKey='" + "<hidden>" + '\''
+                    + ", certificate='" + "<hidden>" + '\'' + '}';
         }

         @Override
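The two toString() changes above (ClientSettings and TlsCert) apply the same masking pattern; a minimal stand-alone sketch, with purely illustrative class and field names:

    public final class SecretHolderSketch {
        private final String name;
        private final String secret;

        public SecretHolderSketch(String name, String secret) {
            this.name = name;
            this.secret = secret;
        }

        @Override
        public String toString() {
            // Keep the field in the output for readability, but never print the real value.
            return "SecretHolderSketch{name='" + name + "', secret='<hidden>'}";
        }
    }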
src/main/java/mdnet/cache/DiskLruCache.java (vendored, 44 changes)
@@ -244,8 +244,7 @@ public final class DiskLruCache implements Closeable {
     }

     private void readJournal() throws IOException {
-        StrictLineReader reader = new StrictLineReader(new FileInputStream(journalFile), StandardCharsets.UTF_8);
-        try {
+        try (StrictLineReader reader = new StrictLineReader(new FileInputStream(journalFile), StandardCharsets.UTF_8)) {
             String magic = reader.readLine();
             String version = reader.readLine();
             String appVersionString = reader.readLine();
@@ -276,8 +275,6 @@ public final class DiskLruCache implements Closeable {
                 journalWriter = new BufferedWriter(
                         new OutputStreamWriter(new FileOutputStream(journalFile, true), StandardCharsets.UTF_8));
             }
-        } finally {
-            Util.closeQuietly(reader);
-        }
     }

@@ -352,9 +349,8 @@ public final class DiskLruCache implements Closeable {
             journalWriter.close();
         }

-        Writer writer = new BufferedWriter(
-                new OutputStreamWriter(new FileOutputStream(journalFileTmp), StandardCharsets.UTF_8));
-        try {
+        try (Writer writer = new BufferedWriter(
+                new OutputStreamWriter(new FileOutputStream(journalFileTmp), StandardCharsets.UTF_8))) {
             writer.write(MAGIC);
             writer.write("\n");
             writer.write(VERSION_1);
@@ -372,8 +368,6 @@ public final class DiskLruCache implements Closeable {
                     writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
                 }
             }
-        } finally {
-            Util.closeQuietly(writer);
-        }

         if (journalFile.exists()) {
@@ -430,7 +424,10 @@ public final class DiskLruCache implements Closeable {
                 // A file must have been deleted manually!
                 for (int i = 0; i < valueCount; i++) {
                     if (ins[i] != null) {
-                        Util.closeQuietly(ins[i]);
+                        try {
+                            ins[i].close();
+                        } catch (IOException ignored) {
+                        }
                     } else {
                         break;
                     }
@@ -698,11 +695,8 @@ public final class DiskLruCache implements Closeable {
         * Returns the string value for {@code index}. This consumes the InputStream!
         */
        public String getString(int index) throws IOException {
-            InputStream in = getInputStream(index);
-            try {
+            try (InputStream in = getInputStream(index)) {
                return IOUtils.toString(in, StandardCharsets.UTF_8);
-            } finally {
-                Util.closeQuietly(in);
-            }
        }

@@ -713,7 +707,10 @@ public final class DiskLruCache implements Closeable {

        public void close() {
            for (InputStream in : ins) {
-                Util.closeQuietly(in);
+                try {
+                    in.close();
+                } catch (IOException ignored) {
+                }
            }
        }
    }
@@ -741,7 +738,7 @@ public final class DiskLruCache implements Closeable {
         * Returns an unbuffered input stream to read the last committed value, or null
         * if no value has been committed.
         */
-        public InputStream newInputStream(int index) throws IOException {
+        public InputStream newInputStream(int index) {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
@@ -762,11 +759,8 @@ public final class DiskLruCache implements Closeable {
         * committed.
         */
        public String getString(int index) throws IOException {
-            InputStream in = newInputStream(index);
-            try {
+            try (InputStream in = newInputStream(index)) {
                return in != null ? IOUtils.toString(in, StandardCharsets.UTF_8) : null;
-            } finally {
-                Util.closeQuietly(in);
-            }
        }

@@ -774,11 +768,8 @@ public final class DiskLruCache implements Closeable {
         * Write a string to the specified index.
         */
        public void setString(int index, String value) throws IOException {
-            OutputStream out = newOutputStream(index);
-            try {
+            try (OutputStream out = newOutputStream(index)) {
                IOUtils.write(value, out, StandardCharsets.UTF_8);
-            } finally {
-                Util.closeQuietly(out);
-            }
        }

@@ -811,6 +802,7 @@ public final class DiskLruCache implements Closeable {
                    outputStream = new FileOutputStream(dirtyFile);
                } catch (FileNotFoundException e2) {
                    // We are unable to recover. Silently eat the writes.
+                    LOGGER.warn("Returning NULL_OUTPUT_STREAM", e2);
                    return NULL_OUTPUT_STREAM;
                }
            }
@@ -863,6 +855,7 @@ public final class DiskLruCache implements Closeable {
                try {
                    out.write(oneByte);
                } catch (IOException e) {
+                    LOGGER.warn("FaultHidingOutputStream exception in write()", e);
                    hasErrors = true;
                }
            }
@@ -872,6 +865,7 @@ public final class DiskLruCache implements Closeable {
                try {
                    out.write(buffer, offset, length);
                } catch (IOException e) {
+                    LOGGER.warn("FaultHidingOutputStream exception in write()", e);
                    hasErrors = true;
                }
            }
@@ -881,6 +875,7 @@ public final class DiskLruCache implements Closeable {
                try {
                    out.close();
                } catch (IOException e) {
+                    LOGGER.warn("FaultHidingOutputStream exception in close()", e);
                    hasErrors = true;
                }
            }
@@ -890,6 +885,7 @@ public final class DiskLruCache implements Closeable {
                try {
                    out.flush();
                } catch (IOException e) {
+                    LOGGER.warn("FaultHidingOutputStream exception in flush()", e);
                    hasErrors = true;
                }
            }
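The DiskLruCache changes above repeatedly apply the same two ideas: replace manual try/finally plus Util.closeQuietly with try-with-resources, and log the IOExceptions that FaultHidingOutputStream used to swallow silently. A minimal sketch of both patterns (BufferedReader/FileReader and the class name are illustrative stand-ins, not the project's own types):

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileReader;
    import java.io.IOException;
    import java.io.OutputStream;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    final class ResourceHandlingSketch {
        private static final Logger LOGGER = LoggerFactory.getLogger(ResourceHandlingSketch.class);

        // try-with-resources closes the reader even if readLine() throws,
        // replacing the old "close quietly in finally" idiom.
        static String readFirstLine(File journal) throws IOException {
            try (BufferedReader reader = new BufferedReader(new FileReader(journal))) {
                return reader.readLine();
            }
        }

        // Errors are still hidden from the caller, but now leave a trace in the log.
        static void writeQuietly(OutputStream out, byte[] data) {
            try {
                out.write(data);
            } catch (IOException e) {
                LOGGER.warn("write failed", e);
            }
        }
    }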
src/main/java/mdnet/cache/Util.java (vendored, 12 changes)
@@ -16,7 +16,6 @@

 package mdnet.cache;

-import java.io.Closeable;
 import java.io.File;
 import java.io.IOException;

@@ -43,15 +42,4 @@ final class Util {
             }
         }
     }

-    static void closeQuietly(/* Auto */Closeable closeable) {
-        if (closeable != null) {
-            try {
-                closeable.close();
-            } catch (RuntimeException rethrown) {
-                throw rethrown;
-            } catch (Exception ignored) {
-            }
-        }
-    }
 }
@@ -47,102 +47,120 @@ fun getServer(cache: DiskLruCache, serverSettings: ServerSettings, clientSetting
             .build())
         .build())

-    val app = { request: Request ->
+    val app = { dataSaver: Boolean ->
+        { request: Request ->

-        val chapterHash = Path.of("chapterHash")(request)
-        val fileName = Path.of("fileName")(request)
-        val cacheId = md5String("$chapterHash.$fileName")
+            val chapterHash = Path.of("chapterHash")(request)
+            val fileName = Path.of("fileName")(request)

-        statistics.get().requestsServed.incrementAndGet()
+            val rc4Bytes = if (dataSaver) {
+                md5Bytes("saver$chapterHash.$fileName")
+            } else {
+                md5Bytes("$chapterHash.$fileName")
+            }
+            val cacheId = printHexString(rc4Bytes)

-        // Netty doesn't do Content-Length or Content-Type, so we have the pleasure of doing that ourselves
-        fun respondWithImage(input: InputStream, length: String, type: String, lastModified: String): Response =
-            Response(Status.OK).header("Content-Length", length)
-                .header("Content-Type", type)
-                .header("X-Content-Type-Options", "nosniff")
-                .header("Last-Modified", lastModified)
-                .header("Cache-Control", listOf("public", MaxAgeTtl(Constants.MAX_AGE_CACHE).toHeaderValue()).joinToString(", "))
-                .header("Timing-Allow-Origin", "https://mangadex.org")
-                .body(input, length.toLong())
+            statistics.get().requestsServed.incrementAndGet()

-        val snapshot = cache.get(cacheId)
-        if (snapshot != null) {
-            statistics.get().cacheHits.incrementAndGet()

-            // our files never change, so it's safe to use the browser cache
-            if (request.header("If-Modified-Since") != null) {
-                if (LOGGER.isTraceEnabled) {
-                    LOGGER.trace("Request for $chapterHash/$fileName cached by browser")
-                }

-                val lastModified = snapshot.getString(2)
-                snapshot.close()

-                Response(Status.NOT_MODIFIED)
+            // Netty doesn't do Content-Length or Content-Type, so we have the pleasure of doing that ourselves
+            fun respondWithImage(input: InputStream, length: String, type: String, lastModified: String): Response =
+                Response(Status.OK).header("Content-Length", length)
+                    .header("Content-Type", type)
+                    .header("X-Content-Type-Options", "nosniff")
+                    .header("Last-Modified", lastModified)
-            } else {
-                if (LOGGER.isTraceEnabled) {
-                    LOGGER.trace("Request for $chapterHash/$fileName hit cache")
-                }
+                    .header(
+                        "Cache-Control",
+                        listOf("public", MaxAgeTtl(Constants.MAX_AGE_CACHE).toHeaderValue()).joinToString(", ")
+                    )
+                    .header("Timing-Allow-Origin", "https://mangadex.org")
+                    .body(input, length.toLong())

-                respondWithImage(CipherInputStream(snapshot.getInputStream(0), getRc4(cacheId)),
-                    snapshot.getLength(0).toString(), snapshot.getString(1), snapshot.getString(2))
-            }
-        } else {
-            statistics.get().cacheMisses.incrementAndGet()
-            if (LOGGER.isTraceEnabled) {
-                LOGGER.trace("Request for $chapterHash/$fileName missed cache")
-            }
-            val mdResponse = client(Request(Method.GET, "${serverSettings.imageServer}${request.uri}"))
+            val snapshot = cache.get(cacheId)
+            if (snapshot != null) {
+                statistics.get().cacheHits.incrementAndGet()

-            if (mdResponse.status != Status.OK) {
-                if (LOGGER.isTraceEnabled) {
-                    LOGGER.trace("Request for $chapterHash/$fileName errored with status {}", mdResponse.status)
-                }
-                mdResponse.close()
-                Response(mdResponse.status)
-            } else {
-                val contentLength = mdResponse.header("Content-Length")!!
-                val contentType = mdResponse.header("Content-Type")!!

-                val editor = cache.edit(cacheId)

-                val lastModified = mdResponse.header("Last-Modified")!!

-                // A null editor means that this file is being written to
-                // concurrently so we skip the cache process
-                if (editor != null) {
+                // our files never change, so it's safe to use the browser cache
+                if (request.header("If-Modified-Since") != null) {
                    if (LOGGER.isTraceEnabled) {
-                        LOGGER.trace("Request for $chapterHash/$fileName is being cached and served")
+                        LOGGER.trace("Request for $chapterHash/$fileName cached by browser")
                    }
-                    editor.setString(1, contentType)
-                    editor.setString(2, lastModified)

-                    val tee = CachingInputStream(mdResponse.body.stream,
-                        executor, CipherOutputStream(editor.newOutputStream(0), getRc4(cacheId))) {
-                        // Note: if neither of the options get called/are in the log
-                        // check that tee gets closed and for exceptions in this lambda
-                        if (editor.getLength(0) == contentLength.toLong()) {
-                            if (LOGGER.isTraceEnabled) {
-                                LOGGER.trace("Cache download $chapterHash/$fileName committed")
-                            }
+                    val lastModified = snapshot.getString(2)
+                    snapshot.close()

-                            editor.commit()
-                        } else {
-                            if (LOGGER.isTraceEnabled) {
-                                LOGGER.trace("Cache download $chapterHash/$fileName aborted")
-                            }

-                            editor.abort()
-                        }
-                    }
-                    respondWithImage(tee, contentLength, contentType, lastModified)
+                    Response(Status.NOT_MODIFIED)
+                        .header("Last-Modified", lastModified)
                } else {
                    if (LOGGER.isTraceEnabled) {
-                        LOGGER.trace("Request for $chapterHash/$fileName is being served")
+                        LOGGER.trace("Request for $chapterHash/$fileName hit cache")
                    }

-                    respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified)
+                    respondWithImage(
+                        CipherInputStream(snapshot.getInputStream(0), getRc4(rc4Bytes)),
+                        snapshot.getLength(0).toString(), snapshot.getString(1), snapshot.getString(2)
+                    )
+                }
+            } else {
+                statistics.get().cacheMisses.incrementAndGet()
+                if (LOGGER.isTraceEnabled) {
+                    LOGGER.trace("Request for $chapterHash/$fileName missed cache")
+                }
+                val mdResponse = client(Request(Method.GET, "${serverSettings.imageServer}${request.uri}"))

+                if (mdResponse.status != Status.OK) {
+                    if (LOGGER.isTraceEnabled) {
+                        LOGGER.trace("Request for $chapterHash/$fileName errored with status {}", mdResponse.status)
+                    }
+                    mdResponse.close()
+                    Response(mdResponse.status)
+                } else {
+                    val contentLength = mdResponse.header("Content-Length")!!
+                    val contentType = mdResponse.header("Content-Type")!!

+                    if (LOGGER.isTraceEnabled) {
+                        LOGGER.trace("Grabbing DiskLruCache editor instance")
+                    }
+                    val editor = cache.edit(cacheId)

+                    val lastModified = mdResponse.header("Last-Modified")!!

+                    // A null editor means that this file is being written to
+                    // concurrently so we skip the cache process
+                    if (editor != null) {
+                        if (LOGGER.isTraceEnabled) {
+                            LOGGER.trace("Request for $chapterHash/$fileName is being cached and served")
+                        }
+                        editor.setString(1, contentType)
+                        editor.setString(2, lastModified)

+                        val tee = CachingInputStream(
+                            mdResponse.body.stream,
+                            executor, CipherOutputStream(editor.newOutputStream(0), getRc4(rc4Bytes))
+                        ) {
+                            // Note: if neither of the options get called/are in the log
+                            // check that tee gets closed and for exceptions in this lambda
+                            if (editor.getLength(0) == contentLength.toLong()) {
+                                if (LOGGER.isTraceEnabled) {
+                                    LOGGER.trace("Cache download $chapterHash/$fileName committed")
+                                }

+                                editor.commit()
+                            } else {
+                                if (LOGGER.isTraceEnabled) {
+                                    LOGGER.trace("Cache download $chapterHash/$fileName aborted")
+                                }

+                                editor.abort()
+                            }
+                        }
+                        respondWithImage(tee, contentLength, contentType, lastModified)
+                    } else {
+                        if (LOGGER.isTraceEnabled) {
+                            LOGGER.trace("Request for $chapterHash/$fileName is being served")
+                        }

+                        respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified)
+                    }
+                }
+            }
+        }
@@ -155,15 +173,16 @@ fun getServer(cache: DiskLruCache, serverSettings: ServerSettings, clientSetting
         .then(addCommonHeaders())
         .then(
             routes(
-                "/data/{chapterHash}/{fileName}" bind Method.GET to app
+                "/data/{chapterHash}/{fileName}" bind Method.GET to app(false)
+                // "/data-saver/{chapterHash}/{fileName}" bind Method.GET to app(true)
             )
         )
         .asServer(Netty(serverSettings.tls, clientSettings, statistics))
 }

-private fun getRc4(key: String): Cipher {
+private fun getRc4(key: ByteArray): Cipher {
     val rc4 = Cipher.getInstance("RC4")
-    rc4.init(Cipher.ENCRYPT_MODE, SecretKeySpec(key.toByteArray(), "RC4"))
+    rc4.init(Cipher.ENCRYPT_MODE, SecretKeySpec(key, "RC4"))
     return rc4
 }

@@ -185,17 +204,23 @@ private fun catchAllHideDetails(): Filter {
             try {
                 next(request)
             } catch (e: Exception) {
+                if (LOGGER.isWarnEnabled) {
+                    LOGGER.warn("Request error detected", e)
+                }
                 Response(Status.INTERNAL_SERVER_ERROR)
             }
         }
     }
 }

-private fun md5String(stringToHash: String): String {
+private fun md5Bytes(stringToHash: String): ByteArray {
     val digest = MessageDigest.getInstance("MD5")
+    return digest.digest(stringToHash.toByteArray())
+}
+
+private fun printHexString(bytes: ByteArray): String {
     val sb = StringBuilder()
-    for (b in digest.digest(stringToHash.toByteArray())) {
+    for (b in bytes) {
         sb.append(String.format("%02x", b))
     }
     return sb.toString()