fix(http1): add internal limit for chunked extensions #3495

Merged
merged 1 commit on Dec 18, 2023
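For context: chunk extensions are the optional ";name=value" data allowed after the size on each chunk-size line of a chunked body. Because a peer can keep streaming such bytes indefinitely, this PR adds a cumulative cap of 16 KiB across the whole body. Below is a minimal standalone sketch of that idea; it is not hyper's internal API, and the parsing is deliberately simplified (it scans raw bytes rather than running the full chunked state machine).

// Standalone sketch of a cumulative chunk-extension limit (not hyper's code).
const CHUNKED_EXTENSIONS_LIMIT: u64 = 1024 * 16;

// Simplified scan: count bytes between a ';' and the next CR/LF, and fail
// once the running total across the whole body reaches the limit.
fn check_extensions(body: &[u8]) -> Result<(), &'static str> {
    let mut extensions_cnt: u64 = 0;
    let mut in_extension = false;
    for &b in body {
        match b {
            b';' => in_extension = true,
            b'\r' | b'\n' => in_extension = false,
            _ if in_extension => {
                extensions_cnt += 1;
                if extensions_cnt >= CHUNKED_EXTENSIONS_LIMIT {
                    return Err("chunk extensions over limit");
                }
            }
            _ => {}
        }
    }
    Ok(())
}

fn main() {
    // One chunk ("A") with a small extension, then the terminating chunk: accepted.
    assert!(check_extensions(b"1;foo=bar\r\nA\r\n0\r\n\r\n").is_ok());

    // A chunk-size line padded with 20 KiB of extension bytes: rejected.
    let mut hostile = b"1;".to_vec();
    hostile.extend(std::iter::repeat(b'x').take(20 * 1024));
    hostile.extend_from_slice(b"\r\nA\r\n0\r\n\r\n");
    assert!(check_extensions(&hostile).is_err());
}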
91 changes: 77 additions & 14 deletions src/proto/h1/decode.rs
@@ -11,6 +11,11 @@ use super::DecodedLength;

use self::Kind::{Chunked, Eof, Length};

/// Maximum amount of bytes allowed in chunked extensions.
///
/// This limit is currently applied for the entire body, not per chunk.
const CHUNKED_EXTENSIONS_LIMIT: u64 = 1024 * 16;

/// Decoders to handle different Transfer-Encodings.
///
/// If a message body does not include a Transfer-Encoding, it *should*
@@ -25,7 +30,11 @@ enum Kind {
/// A Reader used when a Content-Length header is passed with a positive integer.
Length(u64),
/// A Reader used when Transfer-Encoding is `chunked`.
Chunked(ChunkedState, u64),
Chunked {
state: ChunkedState,
chunk_len: u64,
extensions_cnt: u64,
},
/// A Reader used for responses that don't indicate a length or chunked.
///
/// The bool tracks when EOF is seen on the transport.
@@ -73,7 +82,11 @@ impl Decoder {

pub(crate) fn chunked() -> Decoder {
Decoder {
kind: Kind::Chunked(ChunkedState::new(), 0),
kind: Kind::Chunked {
state: ChunkedState::new(),
chunk_len: 0,
extensions_cnt: 0,
},
}
}

@@ -96,7 +109,12 @@ impl Decoder {
pub(crate) fn is_eof(&self) -> bool {
matches!(
self.kind,
Length(0) | Chunked(ChunkedState::End, _) | Eof(true)
Length(0)
| Chunked {
state: ChunkedState::End,
..
}
| Eof(true)
)
}

@@ -127,11 +145,15 @@ impl Decoder {
Poll::Ready(Ok(buf))
}
}
Chunked(ref mut state, ref mut size) => {
Chunked {
ref mut state,
ref mut chunk_len,
ref mut extensions_cnt,
} => {
loop {
let mut buf = None;
// advances the chunked state
*state = ready!(state.step(cx, body, size, &mut buf))?;
*state = ready!(state.step(cx, body, chunk_len, extensions_cnt, &mut buf))?;
if *state == ChunkedState::End {
trace!("end of chunked");
return Poll::Ready(Ok(Bytes::new()));
@@ -202,14 +224,15 @@ impl ChunkedState {
cx: &mut Context<'_>,
body: &mut R,
size: &mut u64,
extensions_cnt: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
use self::ChunkedState::*;
match *self {
Start => ChunkedState::read_start(cx, body, size),
Size => ChunkedState::read_size(cx, body, size),
SizeLws => ChunkedState::read_size_lws(cx, body),
Extension => ChunkedState::read_extension(cx, body),
Extension => ChunkedState::read_extension(cx, body, extensions_cnt),
SizeLf => ChunkedState::read_size_lf(cx, body, *size),
Body => ChunkedState::read_body(cx, body, size, buf),
BodyCr => ChunkedState::read_body_cr(cx, body),
@@ -306,6 +329,7 @@ impl ChunkedState {
fn read_extension<R: MemRead>(
cx: &mut Context<'_>,
rdr: &mut R,
extensions_cnt: &mut u64,
) -> Poll<Result<ChunkedState, io::Error>> {
trace!("read_extension");
// We don't care about extensions really at all. Just ignore them.
@@ -320,7 +344,17 @@
io::ErrorKind::InvalidData,
"invalid chunk extension contains newline",
))),
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
_ => {
*extensions_cnt += 1;
if *extensions_cnt >= CHUNKED_EXTENSIONS_LIMIT {
Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidData,
"chunk extensions over limit",
)))
} else {
Poll::Ready(Ok(ChunkedState::Extension))
}
} // no supported extensions
}
}
fn read_size_lf<R: MemRead>(
@@ -491,7 +525,6 @@ mod tests {
}
}

#[cfg(feature = "nightly")]
impl MemRead for Bytes {
fn read_mem(&mut self, _: &mut Context<'_>, len: usize) -> Poll<io::Result<Bytes>> {
let n = std::cmp::min(len, self.len());
@@ -519,10 +552,12 @@ mod tests {
let mut state = ChunkedState::new();
let rdr = &mut s.as_bytes();
let mut size = 0;
let mut ext_cnt = 0;
loop {
let result =
futures_util::future::poll_fn(|cx| state.step(cx, rdr, &mut size, &mut None))
.await;
let result = futures_util::future::poll_fn(|cx| {
state.step(cx, rdr, &mut size, &mut ext_cnt, &mut None)
})
.await;
let desc = format!("read_size failed for {:?}", s);
state = result.expect(desc.as_str());
if state == ChunkedState::Body || state == ChunkedState::EndCr {
@@ -536,10 +571,12 @@
let mut state = ChunkedState::new();
let rdr = &mut s.as_bytes();
let mut size = 0;
let mut ext_cnt = 0;
loop {
let result =
futures_util::future::poll_fn(|cx| state.step(cx, rdr, &mut size, &mut None))
.await;
let result = futures_util::future::poll_fn(|cx| {
state.step(cx, rdr, &mut size, &mut ext_cnt, &mut None)
})
.await;
state = match result {
Ok(s) => s,
Err(e) => {
@@ -632,6 +669,32 @@ mod tests {
assert_eq!("1234567890abcdef", &result);
}

#[tokio::test]
async fn test_read_chunked_extensions_over_limit() {
// construct a chunked body where each individual chunked extension
// is totally fine, but combined is over the limit.
let per_chunk = super::CHUNKED_EXTENSIONS_LIMIT * 2 / 3;
let mut scratch = vec![];
for _ in 0..2 {
scratch.extend(b"1;");
scratch.extend(b"x".repeat(per_chunk as usize));
scratch.extend(b"\r\nA\r\n");
}
scratch.extend(b"0\r\n\r\n");
let mut mock_buf = Bytes::from(scratch);

let mut decoder = Decoder::chunked();
let buf1 = decoder.decode_fut(&mut mock_buf).await.expect("decode1");
assert_eq!(&buf1[..], b"A");

let err = decoder
.decode_fut(&mut mock_buf)
.await
.expect_err("decode2");
assert_eq!(err.kind(), io::ErrorKind::InvalidData);
assert_eq!(err.to_string(), "chunk extensions over limit");
}

#[cfg(not(miri))]
#[tokio::test]
async fn test_read_chunked_trailer_with_missing_lf() {
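A quick check of the arithmetic in the new test_read_chunked_extensions_over_limit test above (a standalone sketch, not code from the diff):

fn main() {
    let limit: u64 = 1024 * 16; // CHUNKED_EXTENSIONS_LIMIT = 16_384
    let per_chunk = limit * 2 / 3; // 10_922 extension bytes per chunk

    // The first chunk's extension alone stays under the limit, so the first
    // decode_fut call succeeds and yields the chunk data "A".
    assert!(per_chunk < limit);

    // During the second chunk the running total reaches the limit, so the
    // second decode_fut call fails with "chunk extensions over limit".
    assert!(per_chunk * 2 >= limit);
}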