From fab897ca7613afedec1da1e2280caeb800fc2953 Mon Sep 17 00:00:00 2001
From: Franck Cuny
Date: Thu, 23 Jun 2022 19:13:53 -0700
Subject: fix(modules/cgit): exclude all web crawlers

Change-Id: I96db1763dcc85d43ca5913a95d702cf96830c7b0
Reviewed-on: https://cl.fcuny.net/c/world/+/488
Tested-by: CI
Reviewed-by: Franck Cuny
---
 modules/services/cgit/default.nix | 6 ++++++
 1 file changed, 6 insertions(+)

(limited to 'modules/services/cgit')

diff --git a/modules/services/cgit/default.nix b/modules/services/cgit/default.nix
index b4e82ab..39e1633 100644
--- a/modules/services/cgit/default.nix
+++ b/modules/services/cgit/default.nix
@@ -1,6 +1,11 @@
 { config, pkgs, lib, ... }:
 let
   cfg = config.my.services.cgit;
+  # there's no need for web crawlers on that site
+  robots-deny = pkgs.writeText "robots.txt" ''
+    User-agent: *
+    Disallow: /
+  '';
   cgitrc = ''
     # Global configuration
     virtual-root=/
@@ -104,6 +109,7 @@ in
       enableACME = true;
       locations = {
         "~* ^.+.(css|png|ico)$" = { root = "${pkgs.cgit}/cgit"; };
+        "/robots.txt".alias = robots-deny;
         "/".extraConfig = ''
           include ${pkgs.nginx}/conf/fastcgi_params;
           fastcgi_param CGIT_CONFIG ${pkgs.writeText "cgitrc" cgitrc};
--
cgit 1.4.1
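
For reference, the same pattern as a minimal standalone NixOS module sketch: pkgs.writeText places robots.txt in the Nix store, and an nginx location serves it via alias. The virtual host name below is a placeholder for illustration and is not taken from this change.

{ pkgs, ... }:
let
  # deny all paths to every crawler (same robots.txt as in the change above)
  robots-deny = pkgs.writeText "robots.txt" ''
    User-agent: *
    Disallow: /
  '';
in
{
  # "git.example.com" is an assumed placeholder hostname
  services.nginx.virtualHosts."git.example.com" = {
    # serve the store file directly for /robots.txt
    locations."/robots.txt".alias = robots-deny;
  };
}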