Use cache to accelerate static file serving with nginx

This lets whitenoise handle the cache headers, while nginx serves the files quickly from its cache.
Jake Howard 2024-01-13 22:52:37 +00:00
parent 8ce25dcf2d
commit f5a18fdca0
Signed by: jake
GPG key ID: 57AFB45680EDD477
3 changed files with 33 additions and 28 deletions
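
The headers in question are set inside Django by WhiteNoise rather than by nginx. For background, here is a minimal sketch of the kind of settings involved, assuming WhiteNoise's documented middleware and storage backend (the project's actual settings file is not part of this diff):

    # settings.py -- illustrative sketch only, not part of this commit.
    MIDDLEWARE = [
        "django.middleware.security.SecurityMiddleware",
        # WhiteNoise serves collected static files from the app process
        # and attaches Cache-Control headers to them.
        "whitenoise.middleware.WhiteNoiseMiddleware",
        # ...
    ]

    STORAGES = {
        "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
        "staticfiles": {
            # Hashed filenames get far-future, immutable Cache-Control headers.
            "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
        },
    }

    # Fallback max-age for files WhiteNoise cannot fingerprint.
    WHITENOISE_MAX_AGE = 3600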

@@ -6,5 +6,5 @@ max_requests = 1200
 max_requests_jitter = 50
 forwarded_allow_ips = "*"
-# Run an additional thread so the GIL isn't sitting completely idle
-threads = 2
+# Run additional threads so the GIL isn't sitting completely idle
+threads = 4

@@ -1,7 +1,10 @@
+proxy_cache_path /tmp/nginx_cache levels=1:2 keys_zone=nginxcache:10m max_size=150m;
 server {
     listen 8000;
     access_log /dev/stdout;
+    error_log /dev/stderr;
     gzip_static on;
@@ -12,30 +15,36 @@ server {
     more_set_headers "Server: Wouldn't you like to know";
     server_tokens off;
+    proxy_buffers 32 4k;
+    proxy_connect_timeout 240;
+    proxy_headers_hash_bucket_size 128;
+    proxy_headers_hash_max_size 1024;
+    proxy_http_version 1.1;
+    proxy_read_timeout 240;
+    proxy_send_timeout 240;
+    proxy_set_header Host $host;
+    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    proxy_set_header X-Forwarded-Host $host;
+    proxy_set_header Proxy "";
+    proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504;
+    proxy_cache_lock on;
+    proxy_cache_valid 404 1m;
     location / {
-        proxy_buffers 32 4k;
-        proxy_connect_timeout 240;
-        proxy_headers_hash_bucket_size 128;
-        proxy_headers_hash_max_size 1024;
-        proxy_http_version 1.1;
-        proxy_read_timeout 240;
-        proxy_send_timeout 240;
-        proxy_set_header Host $host;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Host $host;
-        proxy_set_header Proxy "";
         proxy_pass http://localhost:8080;
     }
     location /static {
-        add_header Cache-Control "public, immutable, max-age=31536000";
-        alias /app/collected-static;
+        proxy_cache nginxcache;
+        add_header X-Cache-Status $upstream_cache_status;
+        proxy_pass http://localhost:8080;
     }
     location /media {
-        add_header Cache-Control "public, immutable, max-age=3600";
-        alias /app/media;
+        proxy_cache nginxcache;
+        add_header X-Cache-Status $upstream_cache_status;
+        proxy_pass http://localhost:8080;
     }
 }
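
A quick way to sanity-check the new cache after deploying, assuming nginx is listening on localhost:8000 as configured above (the static path used here is purely illustrative):

    # Hypothetical check script, not part of this commit: request the same
    # static file twice and print the X-Cache-Status header added above.
    # The first response should report a MISS, the second a HIT.
    import urllib.request

    URL = "http://localhost:8000/static/css/base.css"  # illustrative path

    for attempt in (1, 2):
        with urllib.request.urlopen(URL) as response:
            print(attempt, response.headers.get("X-Cache-Status"))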

@@ -58,6 +58,11 @@ urlpatterns = [
     ),
     path("favicon.ico", FaviconView.as_view()),
     path("", include(favicon_urls)),
+    re_path(
+        r"^%s(?P<path>.*)$" % re.escape(settings.MEDIA_URL.lstrip("/")),
+        cache_control(max_age=60 * 60)(serve),
+        {"document_root": settings.MEDIA_ROOT},
+    ),
 ]
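
The imports this new route relies on sit above the hunk and are not shown in the diff; for reference, a self-contained sketch of the equivalent pattern using Django's standard serve view (the surrounding file contents are assumed):

    # Sketch of the media route added above, with the imports it needs.
    import re

    from django.conf import settings
    from django.urls import re_path
    from django.views.decorators.cache import cache_control
    from django.views.static import serve

    urlpatterns = [
        # nginx now proxies /media requests here and caches the responses;
        # cache_control adds a one-hour max-age so they stay cacheable.
        re_path(
            r"^%s(?P<path>.*)$" % re.escape(settings.MEDIA_URL.lstrip("/")),
            cache_control(max_age=60 * 60)(serve),
            {"document_root": settings.MEDIA_ROOT},
        ),
    ]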
@@ -72,15 +77,6 @@
     # Add django-debug-toolbar
     urlpatterns.append(path("__debug__/", include("debug_toolbar.urls")))
-    urlpatterns.append(
-        # Media is served by nginx in production
-        re_path(
-            r"^%s(?P<path>.*)$" % re.escape(settings.MEDIA_URL.lstrip("/")),
-            cache_control(max_age=60 * 60)(serve),
-            {"document_root": settings.MEDIA_ROOT},
-        )
-    )
 if settings.DEBUG or settings.TEST:
     urlpatterns.extend(