author     John Mark Bell <jmb@netsurf-browser.org>    2009-01-06 12:39:56 +0000
committer  John Mark Bell <jmb@netsurf-browser.org>    2009-01-06 12:39:56 +0000
commit     0dd8f84c027ed56deab73e88a35049e24ef0b4e4 (patch)
tree       e47f4d38fbee56ea86cea78cb9bfac0b08e3420c /src
parent     e76aa66b639c6c85939660f5eb5dc03c5dee69de (diff)
Fix potential read beyond available input data when processing \r in some states.
Given the input \rabc, we would advance past the \r and then peek at current_offset + len (with len == 1), i.e. read 'b' instead of 'a'. If the data in the input stream's internal buffer happened to end immediately after the \r, we would read past the end of the buffer, thanks to a bug in lpu_inputstream_peek which was fixed in r5965. In any case, we would still be looking at the wrong character when checking for CRLF pairs. All regression tests now pass again.

svn path=/trunk/hubbub/; revision=5967
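To make the offset arithmetic concrete, here is a minimal, self-contained sketch of the off-by-len read. It does not use the real libparserutils API: peek_at() is a hypothetical stand-in for parserutils_inputstream_peek(), operating on a plain byte buffer, and the variable names (pending, len) mirror the tokeniser fields only loosely.

    /*
     * Sketch of the '\r' handling bug: after advancing past the '\r',
     * peeking at pending + len lands one byte too far.
     */
    #include <stdio.h>
    #include <string.h>

    /* Hypothetical stand-in for parserutils_inputstream_peek():
     * returns a pointer to the byte at `offset`, or NULL once past the
     * end of the buffered data (mirroring the bounds check fixed in r5965). */
    static const char *peek_at(const char *buf, size_t buflen, size_t offset)
    {
            return (offset < buflen) ? buf + offset : NULL;
    }

    int main(void)
    {
            const char input[] = "\rabc";
            size_t buflen = strlen(input);
            size_t pending = 0;     /* offset of the character being examined */
            size_t len = 1;         /* byte length of the '\r' just peeked */

            /* The tokeniser has seen the '\r' and advances past it before
             * deciding whether it is part of a CRLF pair. */
            pending += len;

            /* Buggy offset: pending + len == 2, which is 'b'. */
            const char *wrong = peek_at(input, buflen, pending + len);
            /* Fixed offset: pending == 1, which is 'a'. */
            const char *right = peek_at(input, buflen, pending);

            printf("buggy peek sees '%c', fixed peek sees '%c'\n",
                            wrong ? *wrong : '?', right ? *right : '?');

            /* Had the buffer ended right after the '\r' (buflen == 1), the
             * buggy offset would already be past the available data. */
            return 0;
    }

Running this prints "buggy peek sees 'b', fixed peek sees 'a'", which is exactly the wrong-character symptom described in the commit message; the diff below replaces pending + len with pending at each affected call site.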
Diffstat (limited to 'src')
-rw-r--r--  src/tokeniser/tokeniser.c | 10
1 file changed, 5 insertions, 5 deletions
diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index 7bb53aa..6ffa8b7 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -1708,7 +1708,7 @@ hubbub_error hubbub_tokeniser_handle_bogus_comment(hubbub_tokeniser *tokeniser)
} else if (c == '\r') {
error = parserutils_inputstream_peek(
tokeniser->input,
- tokeniser->context.pending + len, //XXX
+ tokeniser->context.pending,
&cptr,
&len);
@@ -2224,7 +2224,7 @@ hubbub_error hubbub_tokeniser_handle_doctype_public_dq(
} else if (c == '\r') {
error = parserutils_inputstream_peek(
tokeniser->input,
- tokeniser->context.pending + len, ///XXX
+ tokeniser->context.pending,
&cptr,
&len);
@@ -2274,7 +2274,7 @@ hubbub_error hubbub_tokeniser_handle_doctype_public_sq(
} else if (c == '\r') {
error = parserutils_inputstream_peek(
tokeniser->input,
- tokeniser->context.pending + len, //XXX
+ tokeniser->context.pending,
&cptr,
&len);
@@ -2468,7 +2468,7 @@ hubbub_error hubbub_tokeniser_handle_doctype_system_dq(
} else if (c == '\r') {
error = parserutils_inputstream_peek(
tokeniser->input,
- tokeniser->context.pending + len, //XXX
+ tokeniser->context.pending,
&cptr,
&len);
@@ -2518,7 +2518,7 @@ hubbub_error hubbub_tokeniser_handle_doctype_system_sq(
} else if (c == '\r') {
error = parserutils_inputstream_peek(
tokeniser->input,
- tokeniser->context.pending + len, //XXX
+ tokeniser->context.pending,
&cptr,
&len);