From 64ffeea2c2dca4377b7ec4e9e3cf5dfe1a9b6c0a Mon Sep 17 00:00:00 2001
From: Guillaume Ayoub <guillaume@courtbouillon.org>
Date: Wed, 31 Dec 2025 19:09:20 +0100
Subject: [PATCH 1/2] =?UTF-8?q?Don=E2=80=99t=20allow=20redirects=20with=20?=
=?UTF-8?q?deprecated=20default=5Furl=5Ffetcher=20function?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This is a security fix.

When default_url_fetcher is called from a custom URL fetcher, redirects
are handled by Python itself and don’t go through the custom URL
fetcher, allowing attackers to make WeasyPrint reach URLs forbidden by
the custom URL fetcher.

See CVE-2025-68616.
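
For illustration, consider a custom fetcher along the lines of the
sketch below (the allow-list check and host names are hypothetical, not
part of WeasyPrint). Before this fix, a response from the allowed host
could redirect to a forbidden URL, and urlopen() inside
default_url_fetcher followed that redirect without going back through
the custom fetcher.

    from urllib.error import URLError
    from weasyprint import HTML, default_url_fetcher

    def restricted_fetcher(url, *args, **kwargs):
        # Hypothetical allow-list: only fetch resources from this host.
        if not url.startswith('https://assets.example.org/'):
            raise URLError('blocked URL: %s' % url)
        # Before this fix, a redirect sent by the allowed host was
        # followed inside default_url_fetcher and never came back
        # through this check.
        return default_url_fetcher(url, *args, **kwargs)

    HTML('https://assets.example.org/page.html',
         url_fetcher=restricted_fetcher).write_pdf('/tmp/out.pdf')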
---
weasyprint/urls.py | 28 +++++++++++++++++++++++-----
1 file changed, 23 insertions(+), 5 deletions(-)
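
Note: the hunks below build the urllib opener by hand rather than with
build_opener(), because build_opener() installs HTTPRedirectHandler by
default. A rough standalone sketch of the same approach (the helper
name is illustrative, not part of this patch):

    from urllib import request

    def open_without_redirects(url, timeout=10, ssl_context=None):
        # Same handlers as the default opener, minus HTTPRedirectHandler.
        opener = request.OpenerDirector()
        for handler in (
                request.ProxyHandler(), request.UnknownHandler(),
                request.HTTPHandler(), request.HTTPDefaultErrorHandler(),
                request.FTPHandler(), request.FileHandler(),
                request.HTTPErrorProcessor(), request.DataHandler(),
                request.HTTPSHandler(context=ssl_context)):
            opener.add_handler(handler)
        # With no HTTPRedirectHandler installed, a 3xx response is not
        # followed: HTTPErrorProcessor passes it to
        # HTTPDefaultErrorHandler, which raises urllib.error.HTTPError,
        # so a redirect surfaces as an error instead of being silently
        # fetched.
        return opener.open(request.Request(url), timeout=timeout)
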
Index: weasyprint-65.1/weasyprint/urls.py
===================================================================
--- weasyprint-65.1.orig/weasyprint/urls.py
+++ weasyprint-65.1/weasyprint/urls.py
@@ -10,7 +10,8 @@ import zlib
from gzip import GzipFile
from pathlib import Path
from urllib.parse import quote, unquote, urljoin, urlsplit
-from urllib.request import Request, pathname2url, urlopen
+from urllib.request import Request, pathname2url
+from urllib import request
from . import __version__
from .logger import LOGGER
@@ -177,7 +178,8 @@ def ensure_url(string):
return string if url_is_absolute(string) else path2url(string)
-def default_url_fetcher(url, timeout=10, ssl_context=None):
+def default_url_fetcher(url, timeout=10, ssl_context=None,
+ allow_redirects=False):
"""Fetch an external resource such as an image or stylesheet.
Another callable with the same signature can be given as the
@@ -190,6 +192,8 @@ def default_url_fetcher(url, timeout=10,
The number of seconds before HTTP requests are dropped.
:param ssl.SSLContext ssl_context:
An SSL context used for HTTP requests.
+ :param bool allow_redirects:
+ Whether HTTP redirects should be followed.
:raises: An exception indicating failure, e.g. :obj:`ValueError` on
syntactically invalid URL.
:returns: A :obj:`dict` with the following keys:
@@ -214,15 +218,29 @@ def default_url_fetcher(url, timeout=10,
has to be closed manually.
"""
+
if UNICODE_SCHEME_RE.match(url):
# See https://bugs.python.org/issue34702
if url.startswith('file://'):
url = url.split('?')[0]
url = iri_to_uri(url)
- response = urlopen(
- Request(url, headers=HTTP_HEADERS), timeout=timeout,
- context=ssl_context)
+
+ # Default handlers minus HTTPRedirectHandler: redirects not followed
+ handlers = [
+ request.ProxyHandler(), request.UnknownHandler(), request.HTTPHandler(),
+ request.HTTPDefaultErrorHandler(), request.FTPHandler(),
+ request.FileHandler(), request.HTTPErrorProcessor(), request.DataHandler(),
+ request.HTTPSHandler(context=ssl_context)]
+ if allow_redirects:
+ handlers.append(request.HTTPRedirectHandler())
+
+ opener = request.OpenerDirector()
+ for handler in handlers:
+ opener.add_handler(handler)
+
+ response = opener.open(
+ Request(url, headers=HTTP_HEADERS), timeout=timeout)
response_info = response.info()
result = {
'redirected_url': response.geturl(),