
Merge branch 'circuit' into 'master'

Add circuit breaker

See merge request mangadex-pub/mangadex_at_home!74
carbotaniuman 2021-02-05 18:22:02 +00:00
commit d4d528fcdc
9 changed files with 426 additions and 296 deletions
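
In essence, this MR wraps the upstream image-server client in an http4k `ResilienceFilters.CircuitBreak` filter backed by a resilience4j `CircuitBreaker`, registers breaker metrics via `TaggedCircuitBreakerMetrics`, and logs breaker state transitions. Below is a condensed sketch of that wiring, distilled from the `getServer` changes further down in the diff; the standalone `wrapUpstream` helper is illustrative only and not part of the change itself.

// Sketch only: in the actual diff these calls live inline in getServer.
import io.github.resilience4j.circuitbreaker.CircuitBreakerConfig
import io.github.resilience4j.circuitbreaker.CircuitBreakerRegistry
import org.http4k.core.HttpHandler
import org.http4k.core.Response
import org.http4k.core.then
import org.http4k.filter.ResilienceFilters
import java.time.Duration

fun wrapUpstream(client: HttpHandler): HttpHandler {
    // Count-based window of 50 calls, evaluated once at least 20 calls were made.
    val breaker = CircuitBreakerRegistry.ofDefaults().circuitBreaker(
        "upstream",
        CircuitBreakerConfig.custom()
            .slidingWindow(50, 20, CircuitBreakerConfig.SlidingWindowType.COUNT_BASED)
            .permittedNumberOfCallsInHalfOpenState(10)
            .slowCallDurationThreshold(Duration.ofSeconds(20))
            .waitDurationInOpenState(Duration.ofMinutes(2))
            .build()
    )
    // Any non-successful upstream response counts as a failure; once the breaker
    // opens, requests fail fast instead of hammering the upstream image server.
    return ResilienceFilters.CircuitBreak(breaker, isError = { r: Response -> !r.status.successful })
        .then(client)
}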

@@ -13,6 +13,7 @@ build:
stage: build
script:
- ./gradlew build
- "ls -lah build/libs"
publish:
image: alpine

@@ -17,16 +17,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Security
## [2.0.0-rc8] - 2021-02-04
### Added
- [2021-02-04] Installed a circuit breaker in order to save upstream [@carbotaniuman].
## [2.0.0-rc7] - 2021-01-31
### Added
- [2021-01-28] Add distinct timerange and total egress stats to dashboard [@_tde9]
- [2021-01-28] Add average cache hitrate panel [@_tde9]
- [2021-01-31] Add distinct timerange and total egress stats to dashboard [@_tde9].
- [2021-01-31] Add average cache hitrate panel [@_tde9].
### Changed
- [2021-01-29] Add HikariCP connection pool [@carbotaniuman].
- [2021-01-31] Add HikariCP connection pool [@carbotaniuman].
### Fixed
- [2021-01-28] Fix client reqs dashboard panel [@_tde9]
- [2021-01-31] Fix client reqs dashboard panel [@_tde9].
## [2.0.0-rc6] - 2021-01-28
### Fixed
@@ -328,7 +332,8 @@ This release contains many breaking changes! Of note are the changes to the cach
### Fixed
- [2020-06-11] Tweaked logging configuration to reduce log file sizes by [@carbotaniuman].
[Unreleased]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0-rc7...HEAD
[Unreleased]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0-rc8...HEAD
[2.0.0-rc8]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0-rc7...2.0.0-rc8
[2.0.0-rc7]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0-rc6...2.0.0-rc7
[2.0.0-rc6]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0-rc5...2.0.0-rc6
[2.0.0-rc5]: https://gitlab.com/mangadex/mangadex_at_home/-/compare/2.0.0-rc4...2.0.0-rc5

@@ -33,6 +33,8 @@ dependencies {
implementation group: "com.maxmind.geoip2", name: "geoip2", version: "2.15.0"
implementation group: "org.http4k", name: "http4k-core", version: "$http_4k_version"
implementation group: "org.http4k", name: "http4k-resilience4j", version: "$http_4k_version"
implementation group: "io.github.resilience4j", name: "resilience4j-micrometer", version: "1.6.1"
implementation group: "org.http4k", name: "http4k-format-jackson", version: "$http_4k_version"
implementation group: "com.fasterxml.jackson.dataformat", name: "jackson-dataformat-yaml", version: "2.12.1"
implementation group: "com.fasterxml.jackson.datatype", name: "jackson-datatype-jsr310", version: "2.12.1"

@@ -21,7 +21,7 @@ package mdnet
import java.time.Duration
object Constants {
const val CLIENT_BUILD = 25
const val CLIENT_BUILD = 26
@JvmField val MAX_AGE_CACHE: Duration = Duration.ofDays(14)

@@ -0,0 +1,191 @@
/*
Mangadex@Home
Copyright (c) 2020, MangaDex Network
This file is part of MangaDex@Home.
MangaDex@Home is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MangaDex@Home is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this MangaDex@Home. If not, see <http://www.gnu.org/licenses/>.
*/
package mdnet.server
import io.micrometer.core.instrument.Timer
import io.micrometer.prometheus.PrometheusMeterRegistry
import mdnet.Constants
import mdnet.cache.CachingInputStream
import mdnet.cache.Image
import mdnet.cache.ImageMetadata
import mdnet.cache.ImageStorage
import mdnet.logging.info
import mdnet.logging.trace
import mdnet.logging.warn
import org.http4k.core.*
import org.http4k.filter.CachingFilters
import org.http4k.lens.Path
import org.slf4j.LoggerFactory
import java.io.BufferedInputStream
import java.io.BufferedOutputStream
import java.io.InputStream
import java.time.Clock
import java.util.concurrent.Executors
class ImageServer(
private val storage: ImageStorage,
private val upstream: HttpHandler,
registry: PrometheusMeterRegistry
) {
private val executor = Executors.newCachedThreadPool()
private val cacheLookupTimer = Timer.builder("cache_lookup")
.publishPercentiles(0.5, 0.75, 0.9, 0.99)
.register(registry)
// This is part of the ImageServer, and it expects `chapterHash` and `fileName` path segments.
fun handler(dataSaver: Boolean): HttpHandler = baseHandler().then { request ->
val chapterHash = Path.of("chapterHash")(request)
val fileName = Path.of("fileName")(request)
val sanitizedUri = if (dataSaver) {
"/data-saver"
} else {
"/data"
} + "/$chapterHash/$fileName"
val imageId = if (dataSaver) {
md5Bytes("saver$chapterHash.$fileName")
} else {
md5Bytes("$chapterHash.$fileName")
}.let {
printHexString(it)
}
val image: Image? = cacheLookupTimer.recordCallable { storage.loadImage(imageId) }
if (image != null) {
request.handleCacheHit(sanitizedUri, image)
} else {
request.handleCacheMiss(sanitizedUri, imageId)
}
}
private fun Request.handleCacheHit(sanitizedUri: String, image: Image): Response {
// our files never change, so it's safe to use the browser cache
return if (this.header("If-Modified-Since") != null) {
LOGGER.info { "Request for $sanitizedUri cached by browser" }
val lastModified = image.data.lastModified
Response(Status.NOT_MODIFIED)
.header("Last-Modified", lastModified)
} else {
LOGGER.info { "Request for $sanitizedUri is being served" }
respondWithImage(
BufferedInputStream(image.stream),
image.data.size, image.data.contentType, image.data.lastModified,
true
)
}
}
private fun Request.handleCacheMiss(sanitizedUri: String, imageId: String): Response {
val mdResponse = upstream(Request(Method.GET, sanitizedUri))
if (mdResponse.status != Status.OK) {
LOGGER.warn { "Upstream query for $sanitizedUri errored with status ${mdResponse.status}" }
mdResponse.close()
return Response(mdResponse.status)
}
val contentType = mdResponse.header("Content-Type")!!
val contentLength = mdResponse.header("Content-Length")?.toInt()
val lastModified = mdResponse.header("Last-Modified")
if (!contentType.isImageMimetype()) {
LOGGER.warn { "Upstream query for $sanitizedUri returned bad mimetype $contentType" }
mdResponse.close()
return Response(Status.INTERNAL_SERVER_ERROR)
}
// bad upstream responses mean we can't cache, so bail
if (contentLength == null || lastModified == null) {
LOGGER.info { "Request for $sanitizedUri is being served due to upstream issues" }
return respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
}
LOGGER.trace { "Upstream query for $sanitizedUri succeeded" }
val writer = storage.storeImage(imageId, ImageMetadata(contentType, lastModified, contentLength))
// A null writer means that this file is being written to
// concurrently so we skip the cache process
return if (writer != null) {
LOGGER.info { "Request for $sanitizedUri is being cached and served" }
val tee = CachingInputStream(
mdResponse.body.stream,
executor, BufferedOutputStream(writer.stream),
) {
try {
if (writer.commit(contentLength)) {
LOGGER.info { "Cache download for $sanitizedUri committed" }
} else {
LOGGER.info { "Cache download for $sanitizedUri aborted" }
}
} catch (e: Exception) {
LOGGER.warn(e) { "Cache go/no go for $sanitizedUri failed" }
}
}
respondWithImage(tee, contentLength, contentType, lastModified, false)
} else {
LOGGER.info { "Request for $sanitizedUri is being served" }
respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
}
}
private fun respondWithImage(input: InputStream, length: Int?, type: String, lastModified: String?, cached: Boolean): Response =
Response(Status.OK)
.header("Content-Type", type)
.header("X-Content-Type-Options", "nosniff")
.let {
if (length != null) {
it.body(input, length.toLong()).header("Content-Length", length.toString())
} else {
it.body(input).header("Transfer-Encoding", "chunked")
}
}
.let {
if (lastModified != null) {
it.header("Last-Modified", lastModified)
} else {
it
}
}
.header("X-Cache", if (cached) "HIT" else "MISS")
companion object {
private val LOGGER = LoggerFactory.getLogger(ImageServer::class.java)
private fun String.isImageMimetype() = this.toLowerCase().startsWith("image/")
private fun baseHandler(): Filter =
CachingFilters.Response.MaxAge(Clock.systemUTC(), Constants.MAX_AGE_CACHE)
.then { next: HttpHandler ->
{ request: Request ->
val response = next(request)
response.header("access-control-allow-origin", "https://mangadex.org")
.header("access-control-allow-headers", "*")
.header("access-control-allow-methods", "GET")
.header("timing-allow-origin", "https://mangadex.org")
}
}
}
}

@@ -18,208 +18,28 @@ along with this MangaDex@Home. If not, see <http://www.gnu.org/licenses/>.
*/
package mdnet.server
import com.fasterxml.jackson.core.JsonProcessingException
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.fasterxml.jackson.module.kotlin.readValue
import io.github.resilience4j.circuitbreaker.CircuitBreakerConfig
import io.github.resilience4j.circuitbreaker.CircuitBreakerRegistry
import io.github.resilience4j.micrometer.tagged.TaggedCircuitBreakerMetrics
import io.micrometer.core.instrument.FunctionCounter
import io.micrometer.core.instrument.Timer
import io.micrometer.prometheus.PrometheusMeterRegistry
import mdnet.Constants
import mdnet.cache.CachingInputStream
import mdnet.cache.Image
import mdnet.cache.ImageMetadata
import mdnet.cache.ImageStorage
import mdnet.data.Statistics
import mdnet.data.Token
import mdnet.logging.info
import mdnet.logging.trace
import mdnet.logging.warn
import mdnet.metrics.GeoIpMetricsFilterBuilder
import mdnet.metrics.PostTransactionLabeler
import mdnet.netty.Netty
import mdnet.security.TweetNaclFast
import mdnet.settings.MetricsSettings
import mdnet.settings.RemoteSettings
import mdnet.settings.ServerSettings
import org.http4k.core.*
import org.http4k.filter.CachingFilters
import org.http4k.filter.ClientFilters
import org.http4k.filter.MicrometerMetrics
import org.http4k.filter.ServerFilters
import org.http4k.lens.LensFailure
import org.http4k.lens.Path
import org.http4k.routing.bind
import org.http4k.routing.routes
import org.http4k.filter.*
import org.http4k.routing.*
import org.http4k.server.Http4kServer
import org.http4k.server.asServer
import org.slf4j.LoggerFactory
import java.io.BufferedInputStream
import java.io.BufferedOutputStream
import java.io.InputStream
import java.time.Clock
import java.time.OffsetDateTime
import java.util.*
import java.util.concurrent.Executors
private val LOGGER = LoggerFactory.getLogger(ImageServer::class.java)
private val JACKSON: ObjectMapper = jacksonObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.registerModule(JavaTimeModule())
class ImageServer(
private val storage: ImageStorage,
private val upstream: HttpHandler,
registry: PrometheusMeterRegistry
) {
private val executor = Executors.newCachedThreadPool()
private val cacheLookupTimer = Timer
.builder("cache_lookup")
.publishPercentiles(0.5, 0.75, 0.9, 0.99)
.register(registry)
// This is part of the ImageServer, and it expects `chapterHash` and `fileName` path segments.
fun handler(dataSaver: Boolean): HttpHandler = baseHandler().then { request ->
val chapterHash = Path.of("chapterHash")(request)
val fileName = Path.of("fileName")(request)
val sanitizedUri = if (dataSaver) {
"/data-saver"
} else {
"/data"
} + "/$chapterHash/$fileName"
val imageId = if (dataSaver) {
md5Bytes("saver$chapterHash.$fileName")
} else {
md5Bytes("$chapterHash.$fileName")
}.let {
printHexString(it)
}
val image: Image? = cacheLookupTimer.recordCallable { storage.loadImage(imageId) }
if (image != null) {
request.handleCacheHit(sanitizedUri, image)
} else {
request.handleCacheMiss(sanitizedUri, imageId)
}
}
private fun Request.handleCacheHit(sanitizedUri: String, image: Image): Response {
// our files never change, so it's safe to use the browser cache
return if (this.header("If-Modified-Since") != null) {
LOGGER.info { "Request for $sanitizedUri cached by browser" }
val lastModified = image.data.lastModified
Response(Status.NOT_MODIFIED)
.header("Last-Modified", lastModified)
} else {
LOGGER.info { "Request for $sanitizedUri is being served" }
respondWithImage(
BufferedInputStream(image.stream),
image.data.size, image.data.contentType, image.data.lastModified,
true
)
}
}
private fun Request.handleCacheMiss(sanitizedUri: String, imageId: String): Response {
val mdResponse = upstream(Request(Method.GET, sanitizedUri))
if (mdResponse.status != Status.OK) {
LOGGER.warn { "Upstream query for $sanitizedUri errored with status ${mdResponse.status}" }
mdResponse.close()
return Response(mdResponse.status)
}
val contentType = mdResponse.header("Content-Type")!!
val contentLength = mdResponse.header("Content-Length")?.toInt()
val lastModified = mdResponse.header("Last-Modified")
if (!contentType.isImageMimetype()) {
LOGGER.warn { "Upstream query for $sanitizedUri returned bad mimetype $contentType" }
mdResponse.close()
return Response(Status.INTERNAL_SERVER_ERROR)
}
// bad upstream responses mean we can't cache, so bail
if (contentLength == null || lastModified == null) {
LOGGER.info { "Request for $sanitizedUri is being served due to upstream issues" }
return respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
}
LOGGER.trace { "Upstream query for $sanitizedUri succeeded" }
val writer = storage.storeImage(imageId, ImageMetadata(contentType, lastModified, contentLength))
// A null writer means that this file is being written to
// concurrently so we skip the cache process
return if (writer != null) {
LOGGER.info { "Request for $sanitizedUri is being cached and served" }
val tee = CachingInputStream(
mdResponse.body.stream,
executor, BufferedOutputStream(writer.stream),
) {
try {
if (writer.commit(contentLength)) {
LOGGER.info { "Cache download for $sanitizedUri committed" }
} else {
LOGGER.info { "Cache download for $sanitizedUri aborted" }
}
} catch (e: Exception) {
LOGGER.warn(e) { "Cache go/no go for $sanitizedUri failed" }
}
}
respondWithImage(tee, contentLength, contentType, lastModified, false)
} else {
LOGGER.info { "Request for $sanitizedUri is being served" }
respondWithImage(mdResponse.body.stream, contentLength, contentType, lastModified, false)
}
}
private fun respondWithImage(input: InputStream, length: Int?, type: String, lastModified: String?, cached: Boolean): Response =
Response(Status.OK)
.header("Content-Type", type)
.header("X-Content-Type-Options", "nosniff")
.let {
if (length != null) {
it.body(input, length.toLong()).header("Content-Length", length.toString())
} else {
it.body(input).header("Transfer-Encoding", "chunked")
}
}
.let {
if (lastModified != null) {
it.header("Last-Modified", lastModified)
} else {
it
}
}
.header("X-Cache", if (cached) "HIT" else "MISS")
companion object {
private fun baseHandler(): Filter =
CachingFilters.Response.MaxAge(Clock.systemUTC(), Constants.MAX_AGE_CACHE)
.then { next: HttpHandler ->
{ request: Request ->
val response = next(request)
response.header("access-control-allow-origin", "https://mangadex.org")
.header("access-control-allow-headers", "*")
.header("access-control-allow-methods", "GET")
.header("timing-allow-origin", "https://mangadex.org")
}
}
}
}
private fun String.isImageMimetype() = this.toLowerCase().startsWith("image/")
import java.time.Duration
fun getServer(
storage: ImageStorage,
@@ -230,10 +50,46 @@ fun getServer(
registry: PrometheusMeterRegistry,
client: HttpHandler
): Http4kServer {
val upstream =
ClientFilters.SetBaseUriFrom(remoteSettings.imageServer)
.then(ClientFilters.MicrometerMetrics.RequestTimer(registry))
.then(client)
val circuitBreakerRegistry = CircuitBreakerRegistry.ofDefaults()
val circuitBreaker = circuitBreakerRegistry.circuitBreaker(
"upstream",
CircuitBreakerConfig.custom()
.slidingWindow(50, 20, CircuitBreakerConfig.SlidingWindowType.COUNT_BASED)
.permittedNumberOfCallsInHalfOpenState(10)
.slowCallDurationThreshold(Duration.ofSeconds(20))
.waitDurationInOpenState(Duration.ofMinutes(2))
.build()
)
circuitBreaker.eventPublisher.onFailureRateExceeded {
LOGGER.warn { "Circuit breaker has exceeded failure rate" }
}
circuitBreaker.eventPublisher.onSlowCallRateExceeded {
LOGGER.warn { "Circuit breaker has exceeded slow call rate" }
}
circuitBreaker.eventPublisher.onReset {
LOGGER.warn { "Circuit breaker has rest" }
}
circuitBreaker.eventPublisher.onStateTransition {
LOGGER.warn { "Circuit breaker has moved from ${it.stateTransition.fromState} to ${it.stateTransition.toState}" }
}
TaggedCircuitBreakerMetrics
.ofCircuitBreakerRegistry(circuitBreakerRegistry)
.bindTo(registry)
val circuited = ResilienceFilters.CircuitBreak(
circuitBreaker,
isError = { r: Response -> !r.status.successful }
)
val upstream = ClientFilters.MicrometerMetrics.RequestTimer(registry)
.then(ClientFilters.SetBaseUriFrom(remoteSettings.imageServer))
.then(circuited)
.then(client)
val imageServer = ImageServer(
storage = storage,
@@ -247,7 +103,7 @@ fun getServer(
{ it.bytesSent.get().toDouble() }
).register(registry)
val verifier = tokenVerifier(
val verifier = TokenVerifier(
tokenKey = remoteSettings.tokenKey,
shouldVerify = { chapter, _ ->
!remoteSettings.disableTokens && !(chapter == "1b682e7b24ae7dbdc5064eeeb8e8e353" || chapter == "8172a46adc798f4f4ace6663322a383e")
@@ -281,7 +137,7 @@ fun getServer(
),
"/prometheus" bind Method.GET to {
Response(Status.OK).body(registry.scrape())
}
},
).withFilter(
ServerFilters.MicrometerMetrics.RequestTimer(registry, labeler = PostTransactionLabeler())
).withFilter(
@@ -291,6 +147,8 @@ fun getServer(
.asServer(Netty(remoteSettings.tls!!, serverSettings, statistics))
}
private val LOGGER = LoggerFactory.getLogger(ImageServer::class.java)
fun timeRequest(): Filter {
return Filter { next: HttpHandler ->
{ request: Request ->
@@ -308,60 +166,3 @@ fun timeRequest(): Filter {
}
}
}
fun tokenVerifier(tokenKey: ByteArray, shouldVerify: (String, String) -> Boolean): Filter {
val box = TweetNaclFast.SecretBox(tokenKey)
return Filter { next ->
then@{
val chapterHash = Path.of("chapterHash")(it)
val fileName = Path.of("fileName")(it)
if (shouldVerify(chapterHash, fileName)) {
val cleanedUri = it.uri.path.replaceBefore("/data", "/{token}")
val tokenArr = try {
val toDecode = try {
Path.of("token")(it)
} catch (e: LensFailure) {
LOGGER.info(e) { "Request for $cleanedUri rejected for missing token" }
return@then Response(Status.FORBIDDEN).body("Token is missing")
}
Base64.getUrlDecoder().decode(toDecode)
} catch (e: IllegalArgumentException) {
LOGGER.info(e) { "Request for $cleanedUri rejected for non-base64 token" }
return@then Response(Status.FORBIDDEN).body("Token is invalid base64")
}
if (tokenArr.size < 24) {
LOGGER.info { "Request for $cleanedUri rejected for invalid token" }
return@then Response(Status.FORBIDDEN)
}
val token = try {
JACKSON.readValue<Token>(
box.open(tokenArr.sliceArray(24 until tokenArr.size), tokenArr.sliceArray(0 until 24)).apply {
if (this == null) {
LOGGER.info { "Request for $cleanedUri rejected for invalid token" }
return@then Response(Status.FORBIDDEN)
}
}
)
} catch (e: JsonProcessingException) {
LOGGER.info(e) { "Request for $cleanedUri rejected for invalid token" }
return@then Response(Status.FORBIDDEN).body("Token is invalid")
}
if (OffsetDateTime.now().isAfter(token.expires)) {
LOGGER.info { "Request for $cleanedUri rejected for expired token" }
return@then Response(Status.GONE).body("Token has expired")
}
if (token.hash != chapterHash) {
LOGGER.info { "Request for $cleanedUri rejected for inapplicable token" }
return@then Response(Status.FORBIDDEN).body("Token is inapplicable for the image")
}
}
return@then next(it)
}
}
}

@@ -0,0 +1,102 @@
/*
Mangadex@Home
Copyright (c) 2020, MangaDex Network
This file is part of MangaDex@Home.
MangaDex@Home is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MangaDex@Home is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this MangaDex@Home. If not, see <http://www.gnu.org/licenses/>.
*/
package mdnet.server
import com.fasterxml.jackson.core.JsonProcessingException
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.fasterxml.jackson.module.kotlin.readValue
import mdnet.data.Token
import mdnet.logging.info
import mdnet.security.TweetNaclFast
import org.http4k.core.Filter
import org.http4k.core.HttpHandler
import org.http4k.core.Response
import org.http4k.core.Status
import org.http4k.lens.LensFailure
import org.http4k.lens.Path
import org.slf4j.LoggerFactory
import java.time.OffsetDateTime
import java.util.Base64
class TokenVerifier(tokenKey: ByteArray, private val shouldVerify: (String, String) -> Boolean) : Filter {
private val box = TweetNaclFast.SecretBox(tokenKey)
override fun invoke(next: HttpHandler): HttpHandler {
return then@{
val chapterHash = Path.of("chapterHash")(it)
val fileName = Path.of("fileName")(it)
if (shouldVerify(chapterHash, fileName)) {
val cleanedUri = it.uri.path.replaceBefore("/data", "/{token}")
val tokenArr = try {
val toDecode = try {
Path.of("token")(it)
} catch (e: LensFailure) {
LOGGER.info(e) { "Request for $cleanedUri rejected for missing token" }
return@then Response(Status.FORBIDDEN).body("Token is missing")
}
Base64.getUrlDecoder().decode(toDecode)
} catch (e: IllegalArgumentException) {
LOGGER.info(e) { "Request for $cleanedUri rejected for non-base64 token" }
return@then Response(Status.FORBIDDEN).body("Token is invalid base64")
}
if (tokenArr.size < 24) {
LOGGER.info { "Request for $cleanedUri rejected for invalid token" }
return@then Response(Status.FORBIDDEN)
}
val token = try {
JACKSON.readValue<Token>(
box.open(tokenArr.sliceArray(24 until tokenArr.size), tokenArr.sliceArray(0 until 24)).apply {
if (this == null) {
LOGGER.info { "Request for $cleanedUri rejected for invalid token" }
return@then Response(Status.FORBIDDEN)
}
}
)
} catch (e: JsonProcessingException) {
LOGGER.info(e) { "Request for $cleanedUri rejected for invalid token" }
return@then Response(Status.FORBIDDEN).body("Token is invalid")
}
if (OffsetDateTime.now().isAfter(token.expires)) {
LOGGER.info { "Request for $cleanedUri rejected for expired token" }
return@then Response(Status.GONE).body("Token has expired")
}
if (token.hash != chapterHash) {
LOGGER.info { "Request for $cleanedUri rejected for inapplicable token" }
return@then Response(Status.FORBIDDEN).body("Token is inapplicable for the image")
}
}
return@then next(it)
}
}
companion object {
private val LOGGER = LoggerFactory.getLogger(TokenVerifier::class.java)
private val JACKSON: ObjectMapper = jacksonObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.registerModule(JavaTimeModule())
}
}

@@ -31,17 +31,14 @@ import io.mockk.every
import io.mockk.mockk
import io.mockk.verify
import mdnet.cache.ImageStorage
import mdnet.security.TweetNaclFast
import org.apache.commons.io.IOUtils
import org.http4k.core.HttpHandler
import org.http4k.core.Method
import org.http4k.core.Request
import org.http4k.core.Response
import org.http4k.core.Status
import org.http4k.core.then
import org.http4k.kotest.shouldHaveHeader
import org.http4k.kotest.shouldHaveStatus
import org.http4k.kotest.shouldNotHaveStatus
import org.http4k.routing.bind
import org.http4k.routing.routes
import org.ktorm.database.Database
@@ -249,41 +246,3 @@ class ImageServerTest : FreeSpec() {
.header("Content-Length", "${data.size}")
.header("Last-Modified", "Wed, 21 Oct 2015 07:28:00 GMT")
}
class TokenVerifierTest : FreeSpec() {
init {
val remoteKeys = TweetNaclFast.Box.keyPair()
val clientKeys = TweetNaclFast.Box.keyPair()
val box = TweetNaclFast.Box(clientKeys.publicKey, remoteKeys.secretKey)
val backend = tokenVerifier(box.before()) { _, _ ->
true
}.then {
Response(Status.OK)
}
val handler = routes(
"/data/{chapterHash}/{fileName}" bind Method.GET to backend,
"/data-saver/{chapterHash}/{fileName}" bind Method.GET to backend,
"/{token}/data/{chapterHash}/{fileName}" bind Method.GET to backend,
"/{token}/data-saver/{chapterHash}/{fileName}" bind Method.GET to backend,
)
"invalid" - {
"missing token should fail" {
val response = handler(Request(Method.GET, "/data/02181a8f5fe8cd408720a771dd129fd8/T2.png"))
response.shouldNotHaveStatus(Status.OK)
}
"too short token should fail" {
val response = handler(Request(Method.GET, "/a/data/02181a8f5fe8cd408720a771dd129fd8/T2.png"))
response.shouldNotHaveStatus(Status.OK)
}
"invalid token should fail" {
val response = handler(Request(Method.GET, "/MTIzM2Vhd2Z3YWVmbG1pbzJuM29pNG4yaXAzNG1wMSwyWzMscHdxZWVlZWVlZXBscWFkcVt3ZGwxWzJsM3BbMWwycFsxZSxwMVssZmRbcGF3LGZwW2F3ZnBbLA==/data/02181a8f5fe8cd408720a771dd129fd8/T2.png"))
response.shouldNotHaveStatus(Status.OK)
}
}
}
}

@@ -0,0 +1,69 @@
/*
Mangadex@Home
Copyright (c) 2020, MangaDex Network
This file is part of MangaDex@Home.
MangaDex@Home is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MangaDex@Home is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this MangaDex@Home. If not, see <http://www.gnu.org/licenses/>.
*/
package mdnet.server
import io.kotest.core.spec.style.FreeSpec
import mdnet.security.TweetNaclFast
import org.http4k.core.*
import org.http4k.kotest.shouldNotHaveStatus
import org.http4k.routing.bind
import org.http4k.routing.routes
class TokenVerifierTest : FreeSpec() {
init {
val remoteKeys = TweetNaclFast.Box.keyPair()
val clientKeys = TweetNaclFast.Box.keyPair()
val box = TweetNaclFast.Box(clientKeys.publicKey, remoteKeys.secretKey)
val backend = TokenVerifier(box.before()) { _, _ ->
true
}.then {
Response(Status.OK)
}
val handler = routes(
"/data/{chapterHash}/{fileName}" bind Method.GET to backend,
"/data-saver/{chapterHash}/{fileName}" bind Method.GET to backend,
"/{token}/data/{chapterHash}/{fileName}" bind Method.GET to backend,
"/{token}/data-saver/{chapterHash}/{fileName}" bind Method.GET to backend,
)
"invalid" - {
"missing token should fail" {
val response = handler(Request(Method.GET, "/data/02181a8f5fe8cd408720a771dd129fd8/T2.png"))
response.shouldNotHaveStatus(Status.OK)
}
"too short token should fail" {
val response = handler(Request(Method.GET, "/a/data/02181a8f5fe8cd408720a771dd129fd8/T2.png"))
response.shouldNotHaveStatus(Status.OK)
}
"invalid token should fail" {
val response = handler(
Request(
Method.GET,
"/MTIzM2Vhd2Z3YWVmbG1pbzJuM29pNG4yaXAzNG1wMSwyWzMscHdxZWVlZWVlZXBscWFkcVt3ZGwxWzJsM3BbMWwycFsxZSxwMVssZmRbcGF3LGZwW2F3ZnBbLA==/data/02181a8f5fe8cd408720a771dd129fd8/T2.png"
)
)
response.shouldNotHaveStatus(Status.OK)
}
}
}
}