new file mode 100644
@@ -0,0 +1,89 @@
+From 046a164f89a0f08d3239ecebd750360f8914df33 Mon Sep 17 00:00:00 2001
+From: Adi Roiban <adiroiban@gmail.com>
+Date: Mon Jul 29 14:28:03 2024 +0100
+Subject: [PATCH] Merge commit from fork
+
+Added HTML output encoding the "URL" parameter of the "redirectTo" function
+
+CVE: CVE-2024-41671
+
+Upstream-Status: Backport [https://github.com/twisted/twisted/commit/046a164f89a0f08d3239ecebd750360f8914df33]
+
+Signed-off-by: Soumya Sambu <soumya.sambu@windriver.com>
+---
+ src/twisted/web/_template_util.py | 2 +-
+ src/twisted/web/test/test_util.py | 39 ++++++++++++++++++++++++++++++-
+ 2 files changed, 39 insertions(+), 2 deletions(-)
+
+diff --git a/src/twisted/web/_template_util.py b/src/twisted/web/_template_util.py
+index 230c33f..7266079 100644
+--- a/src/twisted/web/_template_util.py
++++ b/src/twisted/web/_template_util.py
+@@ -92,7 +92,7 @@ def redirectTo(URL: bytes, request: IRequest) -> bytes:
+ </body>
+ </html>
+ """ % {
+- b"url": URL
++ b"url": escape(URL.decode("utf-8")).encode("utf-8")
+ }
+ return content
+
+diff --git a/src/twisted/web/test/test_util.py b/src/twisted/web/test/test_util.py
+index 1e76300..9847dcb 100644
+--- a/src/twisted/web/test/test_util.py
++++ b/src/twisted/web/test/test_util.py
+@@ -5,7 +5,6 @@
+ Tests for L{twisted.web.util}.
+ """
+
+-
+ import gc
+
+ from twisted.internet import defer
+@@ -64,6 +63,44 @@ class RedirectToTests(TestCase):
+ targetURL = "http://target.example.com/4321"
+ self.assertRaises(TypeError, redirectTo, targetURL, request)
+
++ def test_legitimateRedirect(self):
++ """
++ Legitimate URLs are fully interpolated in the `redirectTo` response body without transformation
++ """
++ request = DummyRequest([b""])
++ html = redirectTo(b"https://twisted.org/", request)
++ expected = b"""
++<html>
++ <head>
++ <meta http-equiv=\"refresh\" content=\"0;URL=https://twisted.org/\">
++ </head>
++ <body bgcolor=\"#FFFFFF\" text=\"#000000\">
++ <a href=\"https://twisted.org/\">click here</a>
++ </body>
++</html>
++"""
++ self.assertEqual(html, expected)
++
++ def test_maliciousRedirect(self):
++ """
++ Malicious URLs are HTML-escaped before interpolating them in the `redirectTo` response body
++ """
++ request = DummyRequest([b""])
++ html = redirectTo(
++ b'https://twisted.org/"><script>alert(document.location)</script>', request
++ )
++ expected = b"""
++<html>
++ <head>
++ <meta http-equiv=\"refresh\" content=\"0;URL=https://twisted.org/&quot;&gt;&lt;script&gt;alert(document.location)&lt;/script&gt;\">
++ </head>
++ <body bgcolor=\"#FFFFFF\" text=\"#000000\">
++ <a href=\"https://twisted.org/"><script>alert(document.location)</script>\">click here</a>
++ </body>
++</html>
++"""
++ self.assertEqual(html, expected)
++
+
+ class ParentRedirectTests(SynchronousTestCase):
+ """
+--
+2.40.0
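
For reference only (not part of the backported patch above): a minimal standalone sketch of the output encoding that CVE-2024-41671-0001.patch introduces in redirectTo, using Python's html.escape exactly as the patched _template_util.py does. The redirect_body helper name is invented for this sketch; it only mirrors the template interpolation.

from html import escape

def redirect_body(url: bytes) -> bytes:
    # Mirror the patched redirectTo(): decode, HTML-escape, then re-encode
    # the URL before interpolating it into the meta-refresh/anchor template.
    return b"""
<html>
    <head>
        <meta http-equiv="refresh" content="0;URL=%(url)s">
    </head>
    <body bgcolor="#FFFFFF" text="#000000">
        <a href="%(url)s">click here</a>
    </body>
</html>
""" % {b"url": escape(url.decode("utf-8")).encode("utf-8")}

payload = b'https://twisted.org/"><script>alert(document.location)</script>'
body = redirect_body(payload)
assert b"<script>" not in body    # the payload can no longer break out of the attribute
assert b"&lt;script&gt;" in body  # it is interpolated as inert, escaped text
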
new file mode 100644
@@ -0,0 +1,251 @@
+From 4a930de12fb67e88fefcb8822104152f42b27abc Mon Sep 17 00:00:00 2001
+From: Adi Roiban <adiroiban@gmail.com>
+Date: Mon Jul 29 14:27:23 2024 +0100
+Subject: [PATCH] Merge commit from fork
+
+Address GHSA-c8m8-j448-xjx7
+
+CVE: CVE-2024-41671
+
+Upstream-Status: Backport [https://github.com/twisted/twisted/commit/4a930de12fb67e88fefcb8822104152f42b27abc]
+
+Signed-off-by: Soumya Sambu <soumya.sambu@windriver.com>
+---
+ src/twisted/web/http.py | 21 +++--
+ src/twisted/web/test/test_http.py | 122 ++++++++++++++++++++++++++----
+ 2 files changed, 122 insertions(+), 21 deletions(-)
+
+diff --git a/src/twisted/web/http.py b/src/twisted/web/http.py
+index 1c59838..3b784f5 100644
+--- a/src/twisted/web/http.py
++++ b/src/twisted/web/http.py
+@@ -2000,16 +2000,21 @@ class _ChunkedTransferDecoder:
+ @returns: C{False}, as there is either insufficient data to continue,
+ or no data remains.
+ """
+- if (
+- self._receivedTrailerHeadersSize + len(self._buffer)
+- > self._maxTrailerHeadersSize
+- ):
+- raise _MalformedChunkedDataError("Trailer headers data is too long.")
+-
+ eolIndex = self._buffer.find(b"\r\n", self._start)
+
+ if eolIndex == -1:
+ # Still no end of network line marker found.
++ #
++ # Check if we've run up against the trailer size limit: if the next
++ # read contains the terminating CRLF then we'll have this many bytes
++ # of trailers (including the CRLFs).
++ minTrailerSize = (
++ self._receivedTrailerHeadersSize
++ + len(self._buffer)
++ + (1 if self._buffer.endswith(b"\r") else 2)
++ )
++ if minTrailerSize > self._maxTrailerHeadersSize:
++ raise _MalformedChunkedDataError("Trailer headers data is too long.")
+ # Continue processing more data.
+ return False
+
+@@ -2019,6 +2024,8 @@ class _ChunkedTransferDecoder:
+ del self._buffer[0 : eolIndex + 2]
+ self._start = 0
+ self._receivedTrailerHeadersSize += eolIndex + 2
++ if self._receivedTrailerHeadersSize > self._maxTrailerHeadersSize:
++ raise _MalformedChunkedDataError("Trailer headers data is too long.")
+ return True
+
+ # eolIndex in this part of code is equal to 0
+@@ -2342,8 +2349,8 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):
+ self.__header = line
+
+ def _finishRequestBody(self, data):
+- self.allContentReceived()
+ self._dataBuffer.append(data)
++ self.allContentReceived()
+
+ def _maybeChooseTransferDecoder(self, header, data):
+ """
+diff --git a/src/twisted/web/test/test_http.py b/src/twisted/web/test/test_http.py
+index 33d0a49..1130d31 100644
+--- a/src/twisted/web/test/test_http.py
++++ b/src/twisted/web/test/test_http.py
+@@ -135,7 +135,7 @@ class DummyHTTPHandler(http.Request):
+ data = self.content.read()
+ length = self.getHeader(b"content-length")
+ if length is None:
+- length = networkString(str(length))
++ length = str(length).encode()
+ request = b"'''\n" + length + b"\n" + data + b"'''\n"
+ self.setResponseCode(200)
+ self.setHeader(b"Request", self.uri)
+@@ -563,17 +563,23 @@ class HTTP0_9Tests(HTTP1_0Tests):
+
+ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
+ """
+- Tests that multiple pipelined requests with bodies are correctly buffered.
++ Pipelined requests get buffered and executed in the order received,
++ not processed in parallel.
+ """
+
+ requests = (
+ b"POST / HTTP/1.1\r\n"
+ b"Content-Length: 10\r\n"
+ b"\r\n"
+- b"0123456789POST / HTTP/1.1\r\n"
+- b"Content-Length: 10\r\n"
+- b"\r\n"
+ b"0123456789"
++ # Chunk encoded request.
++ b"POST / HTTP/1.1\r\n"
++ b"Transfer-Encoding: chunked\r\n"
++ b"\r\n"
++ b"a\r\n"
++ b"0123456789\r\n"
++ b"0\r\n"
++ b"\r\n"
+ )
+
+ expectedResponses = [
+@@ -590,14 +596,16 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
+ b"Request: /",
+ b"Command: POST",
+ b"Version: HTTP/1.1",
+- b"Content-Length: 21",
+- b"'''\n10\n0123456789'''\n",
++ b"Content-Length: 23",
++ b"'''\nNone\n0123456789'''\n",
+ ),
+ ]
+
+- def test_noPipelining(self):
++ def test_stepwiseTinyTube(self):
+ """
+- Test that pipelined requests get buffered, not processed in parallel.
++ Imitate a slow connection that delivers one byte at a time.
++ The request handler (L{DelayedHTTPHandler}) is puppeted to
++ step through the handling of each request.
+ """
+ b = StringTransport()
+ a = http.HTTPChannel()
+@@ -606,10 +614,9 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
+ # one byte at a time, to stress it.
+ for byte in iterbytes(self.requests):
+ a.dataReceived(byte)
+- value = b.value()
+
+ # So far only one request should have been dispatched.
+- self.assertEqual(value, b"")
++ self.assertEqual(b.value(), b"")
+ self.assertEqual(1, len(a.requests))
+
+ # Now, process each request one at a time.
+@@ -618,8 +625,91 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
+ request = a.requests[0].original
+ request.delayedProcess()
+
+- value = b.value()
+- self.assertResponseEquals(value, self.expectedResponses)
++ self.assertResponseEquals(b.value(), self.expectedResponses)
++
++ def test_stepwiseDumpTruck(self):
++ """
++ Imitate a fast connection where several pipelined
++ requests arrive in a single read. The request handler
++ (L{DelayedHTTPHandler}) is puppeted to step through the
++ handling of each request.
++ """
++ b = StringTransport()
++ a = http.HTTPChannel()
++ a.requestFactory = DelayedHTTPHandlerProxy
++ a.makeConnection(b)
++
++ a.dataReceived(self.requests)
++
++ # So far only one request should have been dispatched.
++ self.assertEqual(b.value(), b"")
++ self.assertEqual(1, len(a.requests))
++
++ # Now, process each request one at a time.
++ while a.requests:
++ self.assertEqual(1, len(a.requests))
++ request = a.requests[0].original
++ request.delayedProcess()
++
++ self.assertResponseEquals(b.value(), self.expectedResponses)
++
++ def test_immediateTinyTube(self):
++ """
++ Imitate a slow connection that delivers one byte at a time.
++ The request handler (L{DummyHTTPHandler}) immediately responds,
++ but no more than one request is dispatched at a time.
++ """
++ b = StringTransport()
++ a = http.HTTPChannel()
++ a.requestFactory = DummyHTTPHandlerProxy # "sync"
++ a.makeConnection(b)
++
++ # one byte at a time, to stress it.
++ for byte in iterbytes(self.requests):
++ a.dataReceived(byte)
++ # There is never more than one request dispatched at a time:
++ self.assertLessEqual(len(a.requests), 1)
++
++ self.assertResponseEquals(b.value(), self.expectedResponses)
++
++ def test_immediateDumpTruck(self):
++ """
++ Imitate a fast connection where several pipelined
++ requests arrive in a single read. The request handler
++ (L{DummyHTTPHandler}) immediately responds.
++ This doesn't check the at-most-one pending request
++ invariant but exercises otherwise uncovered code paths.
++ See GHSA-c8m8-j448-xjx7.
++ """
++ b = StringTransport()
++ a = http.HTTPChannel()
++ a.requestFactory = DummyHTTPHandlerProxy
++ a.makeConnection(b)
++
++ # All bytes at once to ensure there's stuff to buffer.
++ a.dataReceived(self.requests)
++
++ self.assertResponseEquals(b.value(), self.expectedResponses)
++
++ def test_immediateABiggerTruck(self):
++ """
++ Imitate a fast connection where so many pipelined
++ requests arrive in a single read that backpressure is indicated.
++ The request handler (L{DummyHTTPHandler}) immediately responds.
++ This doesn't check the at-most-one pending request
++ invariant but exercises otherwise uncovered code paths.
++ See GHSA-c8m8-j448-xjx7.
++ @see: L{http.HTTPChannel._optimisticEagerReadSize}
++ """
++ b = StringTransport()
++ a = http.HTTPChannel()
++ a.requestFactory = DummyHTTPHandlerProxy
++ a.makeConnection(b)
++
++ overLimitCount = a._optimisticEagerReadSize // len(self.requests) * 10
++ a.dataReceived(self.requests * overLimitCount)
++
++ self.assertResponseEquals(b.value(), self.expectedResponses * overLimitCount)
+
+ def test_pipeliningReadLimit(self):
+ """
+@@ -1522,7 +1612,11 @@ class ChunkedTransferEncodingTests(unittest.TestCase):
+ lambda b: None, # pragma: nocov
+ )
+ p._maxTrailerHeadersSize = 10
+- p.dataReceived(b"3\r\nabc\r\n0\r\n0123456789")
++ # 9 bytes are received so far, in 2 packets.
++ # For now, all is ok.
++ p.dataReceived(b"3\r\nabc\r\n0\r\n01234567")
++ p.dataReceived(b"\r")
++ # Once the 10th byte is received, the processing fails.
+ self.assertRaises(
+ http._MalformedChunkedDataError,
+ p.dataReceived,
+--
+2.40.0
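
Purely illustrative (not applied by either patch): a standalone sketch of the trailer-size projection that CVE-2024-41671-0002.patch adds to _ChunkedTransferDecoder, with a worked trace of the adjusted ChunkedTransferEncodingTests case (limit of 10 bytes). The min_trailer_size name is invented for this sketch and models only the branch where no CRLF has been found in the buffered trailer data yet.

def min_trailer_size(received_so_far: int, buffer: bytes) -> int:
    # Projected trailer size if the very next read completes the terminating
    # CRLF: bytes already counted, plus what is buffered, plus 1 byte when the
    # buffer already ends in "\r" (only "\n" is missing), otherwise 2.
    return received_so_far + len(buffer) + (1 if buffer.endswith(b"\r") else 2)

MAX_TRAILER = 10  # mirrors p._maxTrailerHeadersSize in the adjusted test

# After b"3\r\nabc\r\n0\r\n01234567" the decoder has buffered 8 trailer bytes:
# 0 + 8 + 2 == 10, not over the limit, so it keeps reading.
assert min_trailer_size(0, b"01234567") == MAX_TRAILER
# The lone b"\r" makes 9 buffered bytes ending in CR: 0 + 9 + 1 == 10, still ok.
assert min_trailer_size(0, b"01234567\r") == MAX_TRAILER
# One more byte that does not complete the CRLF projects past the limit, which
# is where the real decoder raises _MalformedChunkedDataError.
assert min_trailer_size(0, b"01234567\r0") > MAX_TRAILER
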
@@ -6,6 +6,11 @@ HOMEPAGE = "https://twisted.org"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://LICENSE;md5=c1c5d2c2493b848f83864bdedd67bbf5"
+SRC_URI += " \
+ file://CVE-2024-41671-0001.patch \
+ file://CVE-2024-41671-0002.patch \
+"
+
SRC_URI[sha256sum] = "6b38b6ece7296b5e122c9eb17da2eeab3d98a198f50ca9efd00fb03e5b4fd4ae"
inherit pypi python_hatchling
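
As an optional sanity check (not part of the recipe change): once the patched Twisted package is installed into an image or SDK, the fix can be exercised from a Python shell by replaying the malicious-redirect case added in CVE-2024-41671-0001.patch.

from twisted.web.test.requesthelper import DummyRequest
from twisted.web.util import redirectTo

# Same payload as test_maliciousRedirect: on an unpatched Twisted the raw
# <script> tag is interpolated verbatim into the redirect body.
body = redirectTo(
    b'https://twisted.org/"><script>alert(document.location)</script>',
    DummyRequest([b""]),
)
assert b"<script>" not in body, "redirectTo still interpolates the URL unescaped"
print("OK: the redirect body HTML-escapes the URL")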