/*
Mangadex@Home
Copyright (c) 2020, MangaDex Network
This file is part of MangaDex@Home.

MangaDex@Home is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

MangaDex@Home is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this MangaDex@Home. If not, see <https://www.gnu.org/licenses/>.
*/
package mdnet.server

import io.micrometer.core.instrument.Timer
import io.micrometer.prometheus.PrometheusMeterRegistry
import mdnet.Constants
import mdnet.cache.CachingInputStream
import mdnet.cache.Image
import mdnet.cache.ImageMetadata
import mdnet.cache.ImageStorage
import mdnet.logging.info
import mdnet.logging.trace
import mdnet.logging.warn
import org.http4k.core.*
import org.http4k.filter.CachingFilters
import org.http4k.lens.Path
import org.slf4j.LoggerFactory
import java.io.BufferedInputStream
import java.io.BufferedOutputStream
import java.io.InputStream
import java.time.Clock
import java.util.concurrent.Executors

class ImageServer(
    private val storage: ImageStorage,
    private val upstream: HttpHandler,
    registry: PrometheusMeterRegistry
) {
    // used by CachingInputStream to copy upstream responses into the cache in the background
    private val executor = Executors.newCachedThreadPool()

    private val cacheLookupTimer = Timer.builder("cache.lookup")
        .publishPercentiles(0.5, 0.75, 0.9, 0.99)
        .register(registry)

    // This is part of the ImageServer, and it expects `chapterHash` and `fileName` path segments.
    fun handler(dataSaver: Boolean): HttpHandler = baseHandler().then { request ->
        val chapterHash = Path.of("chapterHash")(request)
        val fileName = Path.of("fileName")(request)

        val sanitizedUri = if (dataSaver) {
            "/data-saver"
        } else {
            "/data"
        } + "/$chapterHash/$fileName"

        // the cache key is the hex-encoded MD5 of the chapter hash and file name,
        // prefixed with "saver" for data-saver images
        val imageId = if (dataSaver) {
            md5Bytes("saver$chapterHash.$fileName")
        } else {
            md5Bytes("$chapterHash.$fileName")
        }.let {
            printHexString(it)
        }

        val image: Image? = cacheLookupTimer.recordCallable {
            storage.loadImage(imageId)
        }

        if (image != null) {
            request.handleCacheHit(sanitizedUri, image)
        } else {
            request.handleCacheMiss(sanitizedUri, imageId)
        }
    }

    private fun Request.handleCacheHit(sanitizedUri: String, image: Image): Response {
        // our files never change, so it's safe to use the browser cache
        return if (this.header("If-Modified-Since") != null) {
            LOGGER.info { "Request for $sanitizedUri cached by browser" }

            val lastModified = image.data.lastModified

            Response(Status.NOT_MODIFIED)
                .header("Last-Modified", lastModified)
        } else {
            LOGGER.info { "Request for $sanitizedUri is being served from cache" }

            respondWithImage(
                BufferedInputStream(image.stream),
                image.data.size, image.data.contentType, image.data.lastModified,
                true
            )
        }
    }

    private fun Request.handleCacheMiss(sanitizedUri: String, imageId: String): Response {
        val mdResponse = upstream(Request(Method.GET, sanitizedUri))

        if (mdResponse.status != Status.OK) {
            LOGGER.warn { "Upstream query for $sanitizedUri errored with status ${mdResponse.status}" }

            mdResponse.close()
            return Response(mdResponse.status)
        }

        val contentType = mdResponse.header("Content-Type")!!
        val contentLength = mdResponse.header("Content-Length")?.toInt()
        val lastModified = mdResponse.header("Last-Modified")

        if (!contentType.isImageMimetype()) {
            LOGGER.warn { "Upstream query for $sanitizedUri returned bad mimetype $contentType" }

            mdResponse.close()
            return Response(Status.INTERNAL_SERVER_ERROR)
        }

        // bad upstream responses mean we can't cache, so bail
        if (contentLength == null || lastModified == null) {
            LOGGER.info { "Request for $sanitizedUri is being served due to upstream issues" }

            return respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
        }

        LOGGER.trace { "Upstream query for $sanitizedUri succeeded" }

        val writer = storage.storeImage(imageId, ImageMetadata(contentType, lastModified, contentLength))

        // A null writer means that this file is being written to
        // concurrently so we skip the cache process
        return if (writer != null) {
            LOGGER.info { "Request for $sanitizedUri is being cached and served" }

            val tee = CachingInputStream(
                mdResponse.body.stream,
                executor,
                BufferedOutputStream(writer.stream),
            ) {
                try {
                    if (writer.commit(contentLength)) {
                        LOGGER.info { "Cache download for $sanitizedUri committed" }
                    } else {
                        LOGGER.info { "Cache download for $sanitizedUri aborted" }
                    }
                } catch (e: Exception) {
                    LOGGER.warn(e) { "Cache go/no go for $sanitizedUri failed" }
                }
            }
            respondWithImage(tee, contentLength, contentType, lastModified, false)
        } else {
            LOGGER.info { "Request for $sanitizedUri is being served due to write errors" }

            respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
        }
    }

    private fun respondWithImage(input: InputStream, length: Int?, type: String, lastModified: String?, cached: Boolean): Response =
        Response(Status.OK)
            .header("Content-Type", type)
            .header("X-Content-Type-Options", "nosniff")
            .let {
                if (length != null) {
                    it.body(input, length.toLong())
                        .header("Content-Length", length.toString())
                } else {
                    it.body(input)
                        .header("Transfer-Encoding", "chunked")
                }
            }
            .let {
                if (lastModified != null) {
                    it.header("Last-Modified", lastModified)
                } else {
                    it
                }
            }
            .header("X-Cache", if (cached) "HIT" else "MISS")

    companion object {
        private val LOGGER = LoggerFactory.getLogger(ImageServer::class.java)

        private fun String.isImageMimetype() = this.toLowerCase().startsWith("image/")

        private fun baseHandler(): Filter =
            CachingFilters.Response.MaxAge(Clock.systemUTC(), Constants.MAX_AGE_CACHE)
                .then { next: HttpHandler ->
                    { request: Request ->
                        val response = next(request)
                        response.header("access-control-allow-origin", "https://mangadex.org")
                            .header("access-control-expose-headers", "*")
                            .header("access-control-allow-methods", "GET")
                            .header("timing-allow-origin", "https://mangadex.org")
                    }
                }
    }
}
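
// Illustrative only: a minimal sketch of how this handler might be mounted with
// http4k's routing DSL. The route templates, the port, the Netty backend, and the
// way `storage`, `upstream`, and `registry` are obtained are assumptions for the
// sake of the example, not part of this file. Kept as a comment so it does not
// alter the compiled code.
//
//   val imageServer = ImageServer(storage, upstream, registry)
//   val app = routes(
//       "/data/{chapterHash}/{fileName}" bind Method.GET to imageServer.handler(dataSaver = false),
//       "/data-saver/{chapterHash}/{fileName}" bind Method.GET to imageServer.handler(dataSaver = true)
//   )
//   app.asServer(Netty(8080)).start()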