From 6801ce299a3a0016bae08ee8f64602aeb0274659 Mon Sep 17 00:00:00 2001
From: kim
Date: Wed, 17 Sep 2025 14:16:53 +0200
Subject: [chore] remove nollamas middleware for now (after discussions with a security advisor) (#4433)

i'll keep this on a separate branch for now while i experiment with other
possible alternatives, but for now both our hacky implementation especially,
and more popular ones (like anubis) aren't looking too great on the
deterrent front: https://github.com/eternal-flame-AD/pow-buster

Co-authored-by: tobi
Reviewed-on: https://codeberg.org/superseriousbusiness/gotosocial/pulls/4433
Co-authored-by: kim
Co-committed-by: kim
---
 example/config.yaml | 37 -------------------------------------
 1 file changed, 37 deletions(-)

diff --git a/example/config.yaml b/example/config.yaml
index a3b9ab5cd..3e7375627 100644
--- a/example/config.yaml
+++ b/example/config.yaml
@@ -1338,40 +1338,3 @@ advanced-csp-extra-uris: []
 # Options: ["block", "allow", ""]
 # Default: ""
 advanced-header-filter-mode: ""
-
-# Bool. Enables a proof-of-work based deterrence against scrapers
-# on profile and status web pages. This will generate a unique but
-# deterministic challenge for each HTTP client to complete before
-# accessing the above mentioned endpoints, on success being given
-# a cookie that permits challenge-less access within a 1hr window.
-#
-# The outcome of this is that it should make scraping of these
-# endpoints economically unfeasible, while having a negligible
-# performance impact on your own instance.
-#
-# The downside is that it requires javascript to be enabled.
-#
-# For more details please check the documentation at:
-# https://docs.gotosocial.org/en/latest/advanced/scraper_deterrence
-#
-# Options: [true, false]
-# Default: true
-advanced-scraper-deterrence-enabled: false
-
-# Uint. Allows tweaking the difficulty of the proof-of-work algorithm
-# used in the scraper deterrence. This determines roughly how many hash
-# encode rounds we require the client to complete to find a solution.
-# Higher values will take longer to find solutions for, and vice-versa.
-#
-# The downside is that if your deterrence takes too long to solve,
-# it may deter some users from viewing your web status / profile page.
-# And conversely, the longer it takes for a solution to be found, the
-# more you'll be incurring increased CPU usage for scrapers, and possibly
-# even cause their operation to time out before completion.
-#
-# For more details please check the documentation at:
-# https://docs.gotosocial.org/en/latest/advanced/scraper_deterrence
-#
-# Examples: [50000, 100000, 500000]
-# Default: 100000
-advanced-scraper-deterrence-difficulty: 100000
--
cgit v1.2.3
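
Note on the removed options: the comments above describe a hash-based proof-of-work
scheme in which "difficulty" roughly sets how many hash rounds a client must grind
through before it is given access. The sketch below is NOT the removed nollamas
middleware's actual algorithm; the seed layout, SHA-256 choice, and "hash value below
a threshold" success condition are all assumptions, chosen only to illustrate why a
higher difficulty value costs clients more CPU while the server still verifies a
submitted solution with a single hash.

// pow_sketch.go: minimal, hypothetical proof-of-work round, assuming a
// server-issued seed and a "first 8 bytes of SHA-256(seed || nonce) below
// maxUint64/difficulty" success condition.
package main

import (
	"crypto/sha256"
	"encoding/binary"
	"fmt"
)

// solves reports whether nonce satisfies the (assumed) challenge condition.
// A client is expected to try roughly `difficulty` nonces before one passes.
func solves(seed []byte, nonce uint64, difficulty uint64) bool {
	buf := make([]byte, len(seed)+8)
	copy(buf, seed)
	binary.BigEndian.PutUint64(buf[len(seed):], nonce)
	sum := sha256.Sum256(buf)
	return binary.BigEndian.Uint64(sum[:8]) < (^uint64(0) / difficulty)
}

func main() {
	seed := []byte("per-client-challenge-seed") // hypothetical server-issued seed
	const difficulty = 100000                   // mirrors the removed default

	// Client side: brute-force nonces until one satisfies the condition.
	var nonce uint64
	for !solves(seed, nonce, difficulty) {
		nonce++
	}
	fmt.Printf("solved after %d hash rounds\n", nonce+1)

	// Server side: verifying the submitted nonce costs a single hash.
	fmt.Println("server accepts:", solves(seed, nonce, difficulty))
}

The asymmetry (many hashes to solve, one hash to verify) is what makes the cost land
on scrapers rather than the instance; the linked pow-buster project shows that
optimized solvers can shrink that client-side cost enough to undermine the deterrent,
which is the motivation given for removing the option here.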