Rewrite Conan download and get URLs

Last post update: 2024-03-10 08:35:28

Do you use the Conan package manager from multiple computers on your local network and want to speed up package builds by caching external source code downloads? If so, then this post should be useful to you, as it describes the steps needed to selectively change download and get URLs using a Conan hook and monkey patching.

Create a Python file in ~/.conan/hooks/ (for example ~/.conan/hooks/rewrite_urls.py) with the following code:

from fnmatch import fnmatch
from urllib.parse import urlsplit, urlunsplit

from conan.tools.files import download, get

def rewrite(url):
    if type(url) is list:
        result = []
        for u in url:
        return result
    parts = list(urlsplit(url))
    if fnmatch(parts[1], "**"):
        return url
    parts[2] = "/" + parts[0] + "/" + parts[1] + parts[2]
    parts[0] = "https"
    parts[1] = ""
    return urlunsplit(parts)

def custom_get(conanfile, url, md5=None, sha1=None, sha256=None, destination=".", filename="", keep_permissions=False, pattern=None, verify=True, retry=None, retry_wait=None, auth=None, headers=None, strip_root=False):
    get(conanfile, rewrite(url), md5, sha1, sha256, destination, filename, keep_permissions, pattern, verify, retry, retry_wait, auth, headers, strip_root)

def custom_download(conanfile, url, filename, verify=True, retry=None, retry_wait=None, auth=None, headers=None, md5=None, sha1=None, sha256=None):
    download(conanfile, rewrite(url), filename, verify, retry, retry_wait, auth, headers, md5, sha1, sha256)

if is not custom_get: = custom_get
if is not custom_download: = custom_download

This code uses monkey patching to override the download and get functions with simple wrapper functions that change external URLs to the https://&lt;proxy-host&gt;/&lt;scheme&gt;/&lt;host&gt;/&lt;path&gt; form, but does not change local URLs.

To enable this hook, open the ~/.conan/conan.conf file and add the hook name to the [hooks] section:

[hooks]
rewrite_urls
The following configuration can be used to run Nginx as a caching proxy server. /etc/nginx/sites-available/cache:

proxy_cache_path /var/cache/nginx/proxy/ levels=2 keys_zone=proxy_cache:5m max_size=25g inactive=10y use_temp_path=off;
server {
    listen 443 ssl;
    location ~ /(http|https)/([^/]+)/(.*) {
        include cache_common;
        set $key "$request_method $1://$2/$3";
        proxy_cache_key $key;
        proxy_pass $1://$2/$3;
        error_page 301 302 307 = @handle_redirects;
    recursive_error_pages on;
    location @handle_redirects {
        set $saved_redirect_location "$upstream_http_location";
        include cache_common;
        proxy_cache_key $key;
        proxy_pass $saved_redirect_location;
        error_page 301 302 307 = @handle_redirects;
    location / {
        index off;
        return 404;


resolver ipv6=off;
resolver_timeout 10s;
proxy_http_version 1.1;
proxy_ssl_protocols TLSv1.2 TLSv1.3;
proxy_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt;
proxy_ssl_verify on;
proxy_ssl_server_name on;
proxy_ssl_session_reuse off;
proxy_buffering on;
proxy_buffer_size 8k;
proxy_buffers 64 8k;
proxy_set_header Cookie "";
proxy_hide_header Set-Cookie;
proxy_cache_lock on;
proxy_ignore_headers Expires;
proxy_ignore_headers X-Accel-Expires;
proxy_ignore_headers Cache-Control;
proxy_ignore_headers Set-Cookie;
proxy_hide_header X-Accel-Expires;
proxy_hide_header Expires;
proxy_hide_header Cache-Control;
proxy_hide_header Pragma;
expires max;
proxy_cache proxy_cache;
proxy_cache_valid 200 10y;
proxy_intercept_errors on;