Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions changelogs.opensafely.org.conf.template.disabled
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,11 @@ server {
root /var/www/html;
listen ${PORT};

location = /robots.txt {
add_header 'Content-Type' 'text/plain; charset=UTF-8' always;
return 200 "User-agent: *\nDisallow: /\n";
}

location / {
limit_except GET { deny all; }
proxy_pass https://changelogs.ubuntu.com;
Expand Down
19 changes: 18 additions & 1 deletion ci-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,12 @@ assert-header() {

### github-proxy.opensafely.org ###

# test robots is disallowed
try github-proxy.opensafely.org/robots.txt 200
assert-in-body 'User-agent: *'
assert-in-body 'Disallow: /'
assert-header 'Content-Type: text/plain; charset=UTF-8'

# test we can query the clone metadata endpoint
try github-proxy.opensafely.org/opensafely/documentation/info/refs?service=git-upload-pack 200
assert-header 'X-GitHub-Request-Id:'
Expand Down Expand Up @@ -160,6 +166,12 @@ assert-in-body ed25519

### docker-proxy.opensafely.org ###

# test robots is disallowed
try docker-proxy.opensafely.org/robots.txt 200
assert-in-body 'User-agent: *'
assert-in-body 'Disallow: /'
assert-header 'Content-Type: text/plain; charset=UTF-8'

# test the initial docker request is rewritten correctly
try docker-proxy.opensafely.org/v2/ 401
assert-in-body '{"errors":[{"code":"UNAUTHORIZED","message":"authentication required"}]}'
Expand All @@ -185,9 +197,14 @@ digest=$(jq -r .config.digest < "$body")
try "docker-proxy.opensafely.org/v2/opensafely-core/busybox/blobs/$digest?" 200 "$token"

### changelogs.opensafely.org ###

# This allows us to use the do-release-upgrade tool to perform major backend OS upgrades.
# Disabled as it is not typically needed unless we are running do-release-upgrade

#try changelogs.opensafely.org/meta-release-lts 200
# test robots is disallowed
# try changelogs.opensafely.org/robots.txt 200
# assert-in-body 'User-agent: *'
# assert-in-body 'Disallow: /'
# assert-header 'Content-Type: text/plain; charset=UTF-8'

exit $return_code
5 changes: 5 additions & 0 deletions ghcr.io.conf.template
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,11 @@ server {
root /var/www/html;
listen ${PORT};

location = /robots.txt {
add_header 'Content-Type' 'text/plain; charset=UTF-8' always;
return 200 "User-agent: *\nDisallow: /\n";
}

# no buffering
proxy_buffering off;
proxy_request_buffering off;
Expand Down
5 changes: 5 additions & 0 deletions github.com.conf.template
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,11 @@ server {
root /var/www/html/;
listen ${PORT};

location = /robots.txt {
add_header 'Content-Type' 'text/plain; charset=UTF-8' always;
return 200 "User-agent: *\nDisallow: /\n";
}

# We `git fetch` commits from a persistant bare repo, which over time can
# mean sending a lot of local state up to github as part of fetching. So
# this needs to be larger than you'd think
Expand Down