Last post update: 2024-03-10 08:35:28
Do you use the Conan package manager from multiple computers on your local network and want to speed up package builds by caching external source code downloads? If so, this post should be useful to you: it describes the steps needed to selectively rewrite the URLs passed to the download and get helpers by using a Conan hook and monkey patching.
Create a ~/.conan/hooks/rewrite_urls.py file with the following code:
import conan.tools.files
from conan.tools.files import get, download
from urllib.parse import urlsplit, urlunsplit
from fnmatch import fnmatch
def rewrite(url):
    # Conan recipes may pass a list of mirror URLs; rewrite each entry.
    if type(url) is list:
        result = []
        for u in url:
            result.append(rewrite(u))
        return result
    parts = list(urlsplit(url))
    # Leave local URLs (anything under example.com) untouched.
    if fnmatch(parts[1], "*example.com*"):
        return url
    # Encode the original scheme and host into the path and point the URL at the cache host.
    parts[2] = "/" + parts[0] + "/" + parts[1] + parts[2]
    parts[0] = "https"
    parts[1] = "cache.example.com"
    return urlunsplit(parts)

def custom_get(conanfile, url, md5=None, sha1=None, sha256=None, destination=".", filename="", keep_permissions=False, pattern=None, verify=True, retry=None, retry_wait=None, auth=None, headers=None, strip_root=False):
    get(conanfile, rewrite(url), md5, sha1, sha256, destination, filename, keep_permissions, pattern, verify, retry, retry_wait, auth, headers, strip_root)

def custom_download(conanfile, url, filename, verify=True, retry=None, retry_wait=None, auth=None, headers=None, md5=None, sha1=None, sha256=None):
    download(conanfile, rewrite(url), filename, verify, retry, retry_wait, auth, headers, md5, sha1, sha256)

# Monkey patch conan.tools.files so recipes pick up the wrappers.
if conan.tools.files.get is not custom_get:
    conan.tools.files.get = custom_get
if conan.tools.files.download is not custom_download:
    conan.tools.files.download = custom_download
This code uses monkey patching to override the download and get functions with simple wrapper functions that rewrite external URLs into the https://cache.example.com/scheme/host/path form, while leaving local URLs unchanged.
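As a quick illustration (the tarball URL is just an example), calling rewrite interactively shows both the transformation and the pass-through for local hosts:
>>> rewrite("https://zlib.net/zlib-1.3.1.tar.gz")
'https://cache.example.com/https/zlib.net/zlib-1.3.1.tar.gz'
>>> rewrite("https://files.example.com/archive.tar.gz")  # matches *example.com*, left unchanged
'https://files.example.com/archive.tar.gz'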
To enable this hook, open the ~/.conan/conan.conf file and add the hook name to the [hooks] section:
[hooks]
rewrite_urls
The following configuration can be used to run Nginx as the caching proxy server. /etc/nginx/sites-available/cache:
proxy_cache_path /var/cache/nginx/proxy/ levels=2 keys_zone=proxy_cache:5m max_size=25g inactive=10y use_temp_path=off;
server {
    listen 443 ssl;
    server_name cache.example.com;
    ...

    # Cache external downloads requested as /scheme/host/path.
    location ~ /(http|https)/([^/]+)/(.*) {
        include cache_common;
        set $key "$request_method $1://$2/$3";
        proxy_cache_key $key;
        proxy_pass $1://$2/$3;
        error_page 301 302 307 = @handle_redirects;
    }

    # Allow @handle_redirects to be re-entered for chained redirects.
    recursive_error_pages on;

    # Follow upstream redirects while caching under the original request key.
    location @handle_redirects {
        set $saved_redirect_location "$upstream_http_location";
        include cache_common;
        proxy_cache_key $key;
        proxy_pass $saved_redirect_location;
        error_page 301 302 307 = @handle_redirects;
    }

    # Anything else is not served.
    location / {
        index off;
        return 404;
    }
}
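The location regex is the inverse of the hook's rewrite function: the first path segment is the original scheme, the second is the original host, and the remainder is the original path, which is what proxy_pass uses to reach the upstream. A minimal Python sketch of that mapping (the function and variable names here are only for illustration):
import re

# Mirrors the nginx location regex: /scheme/host/rest-of-path
CACHE_PATH = re.compile(r"^/(http|https)/([^/]+)/(.*)$")

def upstream_url(cache_path):
    scheme, host, rest = CACHE_PATH.match(cache_path).groups()
    return f"{scheme}://{host}/{rest}"

print(upstream_url("/https/zlib.net/zlib-1.3.1.tar.gz"))
# -> https://zlib.net/zlib-1.3.1.tar.gz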
/etc/nginx/cache_common:
resolver 127.0.0.1 ipv6=off;
resolver_timeout 10s;
proxy_http_version 1.1;
proxy_ssl_protocols TLSv1.2 TLSv1.3;
proxy_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt;
proxy_ssl_verify on;
proxy_ssl_server_name on;
proxy_ssl_session_reuse off;
proxy_buffering on;
proxy_buffer_size 8k;
proxy_buffers 64 8k;
proxy_set_header Cookie "";
proxy_hide_header Set-Cookie;
proxy_cache_lock on;
proxy_ignore_headers Expires;
proxy_ignore_headers X-Accel-Expires;
proxy_ignore_headers Cache-Control;
proxy_ignore_headers Set-Cookie;
proxy_hide_header X-Accel-Expires;
proxy_hide_header Expires;
proxy_hide_header Cache-Control;
proxy_hide_header Pragma;
expires max;
proxy_cache proxy_cache;
proxy_cache_valid 200 10y;
proxy_intercept_errors on;
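With Nginx running, the proxy can be smoke-tested without involving Conan by downloading the same file directly and through the cache and comparing checksums. A minimal sketch, assuming cache.example.com resolves on your network and using an arbitrary external tarball as the test file:
# Quick smoke test of the caching proxy (the hostnames and test URL are placeholders).
import hashlib
import urllib.request

def sha256_of(url):
    with urllib.request.urlopen(url) as response:
        return hashlib.sha256(response.read()).hexdigest()

direct = sha256_of("https://zlib.net/zlib-1.3.1.tar.gz")
cached = sha256_of("https://cache.example.com/https/zlib.net/zlib-1.3.1.tar.gz")
print("OK" if direct == cached else "MISMATCH")
Running it a second time should be served from the Nginx cache and must produce the same checksum.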