From d8c4d9fc5a62741f0c4c2b692a3a94874714bbcc Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 28 Apr 2025 20:12:27 +0000 Subject: [feature] proof of work scraper deterrence (#4043) This adds a proof-of-work based scraper deterrence to GoToSocial's middleware stack on profile and status web pages. Heavily inspired by https://github.com/TecharoHQ/anubis, but massively stripped back for our own usecase. Todo: - ~~add configuration option so this is disabled by default~~ - ~~fix whatever weirdness is preventing this working with CSP (even in debug)~~ - ~~use our standard templating mechanism going through apiutil helper func~~ - ~~probably some absurdly small performance improvements to be made in pooling re-used hex encode / hash encode buffers~~ the web endpoints aren't as hot a path as API / ActivityPub, will leave as-is for now as it is already very minimal and well optimized - ~~verify the cryptographic assumptions re: using a portion of token as challenge data~~ this isn't a serious application of cryptography, if it turns out to be a problem we'll fix it, but it definitely should not be easily possible to guess a SHA256 hash from the first 1/4 of it even if mathematically it might make it a bit easier - ~~theme / make look nice??~~ - ~~add a spinner~~ - ~~add entry in example configuration~~ - ~~add documentation~~ Verification page originally based on https://github.com/LucienV1/powtect Co-authored-by: tobi Reviewed-on: https://codeberg.org/superseriousbusiness/gotosocial/pulls/4043 Reviewed-by: tobi Co-authored-by: kim Co-committed-by: kim --- example/config.yaml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) (limited to 'example') diff --git a/example/config.yaml b/example/config.yaml index 2b3a873fb..c7504ce56 100644 --- a/example/config.yaml +++ b/example/config.yaml @@ -1261,3 +1261,22 @@ advanced-csp-extra-uris: [] # Options: ["block", "allow", ""] # Default: "" advanced-header-filter-mode: "" + +# Bool. 
Enables a proof-of-work based deterrence against scrapers +# on profile and status web pages. This will generate a unique but +# deterministic challenge for each HTTP client to complete before +# accessing the above mentioned endpoints, on success being given +# a cookie that permits challenge-less access within a 1hr window. +# +# The outcome of this is that it should make scraping of these +# endpoints economically unfeasible, while having a negligible +# performance impact on your own instance. +# +# The downside is that it requires javascript to be enabled. +# +# For more details please check the documentation at: +# https://docs.gotosocial.org/en/latest/admin/scraper_deterrence +# +# Options: [true, false] +# Default: false +advanced-scraper-deterrence: false -- cgit v1.2.3