blob: f6cd56dc37862c44a0f2ba1431e2bc2aa0e3d4d0 [file] [log] [blame]
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_CORE_CSS_PARSER_CSS_PARSER_TOKEN_STREAM_H_
#define THIRD_PARTY_BLINK_RENDERER_CORE_CSS_PARSER_CSS_PARSER_TOKEN_STREAM_H_
#include "base/auto_reset.h"
#include "third_party/blink/renderer/core/css/parser/css_parser_token_range.h"
#include "third_party/blink/renderer/core/css/parser/css_tokenizer.h"
#include "third_party/blink/renderer/platform/wtf/allocator/allocator.h"
namespace blink {
namespace detail {

// Terminating overload: with no token types left to test, nothing matches.
// The parameter is intentionally unnamed (unused).
template <typename...>
bool IsTokenTypeOneOf(CSSParserTokenType) {
  return false;
}

// Checks `t` against each of the given token types in turn. Compares against
// the head of the pack first and recurses into the tail only on a mismatch.
template <CSSParserTokenType Head, CSSParserTokenType... Tail>
bool IsTokenTypeOneOf(CSSParserTokenType t) {
  if (t == Head)
    return true;
  return IsTokenTypeOneOf<Tail...>(t);
}

}  // namespace detail
// A streaming interface to CSSTokenizer that tokenizes on demand.
// Abstractly, the stream ends at either EOF or the beginning/end of a block.
// To consume a block, a BlockGuard must be created first to ensure that
// we finish consuming a block even if there was an error.
//
// Methods prefixed with "Unchecked" can only be called after calls to Peek(),
// EnsureLookAhead(), or AtEnd() with no subsequent modifications to the stream
// such as a consume.
class CORE_EXPORT CSSParserTokenStream {
  DISALLOW_NEW();

 public:
  // Instantiate this to start reading from a block. When the guard is out of
  // scope, the rest of the block is consumed.
  class BlockGuard {
    STACK_ALLOCATED();

   public:
    // Consumes the block-start token. The stream must be positioned at a
    // token whose block type is kBlockStart (DCHECKed below).
    explicit BlockGuard(CSSParserTokenStream& stream) : stream_(stream) {
      const CSSParserToken next = stream.ConsumeInternal();
      DCHECK_EQ(next.GetBlockType(), CSSParserToken::kBlockStart);
      // Depth is captured *after* entering the block, so it must be >= 1.
      initial_stack_depth_ = stream_.BlockStackDepth();
      DCHECK_GT(initial_stack_depth_, 0u);
    }

    // Skips whatever remains of the block, so the stream always ends up just
    // past the block-end token even if parsing bailed out mid-block.
    ~BlockGuard() {
      stream_.EnsureLookAhead();
      stream_.UncheckedSkipToEndOfBlock();
      DCHECK(AtEndOfBlock());
    }

    bool AtEndOfBlock() const {
      // Depth drops by one once the matching block-end token is consumed.
      // AtEnd() covers blocks left unterminated at EOF.
      return (stream_.BlockStackDepth() == initial_stack_depth_ - 1) ||
             stream_.AtEnd();
    }

   private:
    CSSParserTokenStream& stream_;
    // Tokenizer block-stack depth observed immediately after entering the
    // block; used to detect when the block has been fully consumed.
    wtf_size_t initial_stack_depth_;
  };

  // Instantiate this to set a short-term boundary for range extraction.
  // While alive, ConsumeUntilPeekedBoundary() stops at `boundary_type`;
  // the previous boundary is restored on destruction (via base::AutoReset).
  class RangeBoundary {
   public:
    RangeBoundary(CSSParserTokenStream& stream,
                  CSSParserTokenType boundary_type)
        : auto_reset_(&stream.boundary_type_, boundary_type) {}
    ~RangeBoundary() = default;

   private:
    base::AutoReset<CSSParserTokenType> auto_reset_;
  };

  // We found that this value works well empirically by printing out the
  // maximum buffer size for a few top alexa websites. It should be slightly
  // above the expected number of tokens in the prelude of an at rule and
  // the number of tokens in a declaration.
  // TODO(crbug.com/661854): Can we streamify at rule parsing so that this is
  // only needed for declarations which are easier to think about?
  static constexpr size_t InitialBufferSize() { return 128; }

  // The stream does not own the tokenizer; it must outlive this object.
  explicit CSSParserTokenStream(CSSTokenizer& tokenizer)
      : tokenizer_(tokenizer), next_(kEOFToken) {
    buffer_.ReserveInitialCapacity(InitialBufferSize());
  }

  CSSParserTokenStream(CSSParserTokenStream&&) = default;
  CSSParserTokenStream(const CSSParserTokenStream&) = delete;
  CSSParserTokenStream& operator=(const CSSParserTokenStream&) = delete;

  // Tokenizes one look-ahead token if we don't already have one. Idempotent.
  inline void EnsureLookAhead() {
    if (!HasLookAhead()) {
      has_look_ahead_ = true;
      next_ = tokenizer_.TokenizeSingle();
    }
  }

  // Forcibly read a lookahead token. Unlike EnsureLookAhead(), the caller
  // asserts that no look-ahead token exists yet.
  inline void LookAhead() {
    DCHECK(!HasLookAhead());
    next_ = tokenizer_.TokenizeSingle();
    has_look_ahead_ = true;
  }

  inline bool HasLookAhead() const { return has_look_ahead_; }

  // Returns the next token without consuming it.
  inline const CSSParserToken& Peek() {
    EnsureLookAhead();
    return next_;
  }

  inline const CSSParserToken& UncheckedPeek() const {
    DCHECK(HasLookAhead());
    return next_;
  }

  inline const CSSParserToken& Consume() {
    EnsureLookAhead();
    return UncheckedConsume();
  }

  // Consumes the look-ahead token. Callers must not consume across a block
  // boundary with this method -- use a BlockGuard instead (DCHECKed).
  const CSSParserToken& UncheckedConsume() {
    DCHECK(HasLookAhead());
    DCHECK_NE(next_.GetBlockType(), CSSParserToken::kBlockStart);
    DCHECK_NE(next_.GetBlockType(), CSSParserToken::kBlockEnd);
    has_look_ahead_ = false;
    // Record where the consumed token ends in the original string.
    offset_ = tokenizer_.Offset();
    return next_;
  }

  // True at EOF or at the end of the enclosing block (see class comment).
  inline bool AtEnd() {
    EnsureLookAhead();
    return UncheckedAtEnd();
  }

  inline bool UncheckedAtEnd() const {
    DCHECK(HasLookAhead());
    return next_.IsEOF() || next_.GetBlockType() == CSSParserToken::kBlockEnd;
  }

  // Get the index of the character in the original string to be consumed next.
  wtf_size_t Offset() const { return offset_; }

  // Get the index of the starting character of the look-ahead token.
  wtf_size_t LookAheadOffset() const {
    DCHECK(HasLookAhead());
    return tokenizer_.PreviousOffset();
  }

  // Returns a view on a range of characters in the original string.
  StringView StringRangeAt(wtf_size_t start, wtf_size_t length) const;

  // Returns the block stack depth for the underlying tokenizer.
  wtf_size_t BlockStackDepth() const;

  void ConsumeWhitespace();
  CSSParserToken ConsumeIncludingWhitespace();
  // Consumes a whole component value (e.g. a token plus its block, if any).
  void UncheckedConsumeComponentValue();

  // Either consumes a comment token and returns true, or peeks at the next
  // token and returns false.
  bool ConsumeCommentOrNothing();

  // Consumes tokens (descending into blocks as whole units) until the peeked
  // token has one of the given types, then returns a range over everything
  // consumed. The range is backed by buffer_, so it is invalidated by any
  // subsequent call to ConsumeUntil*().
  // Invalidates any ranges created by previous calls to ConsumeUntil*()
  template <CSSParserTokenType... Types>
  CSSParserTokenRange ConsumeUntilPeekedTypeIs() {
    EnsureLookAhead();
    // Shrink(0) keeps the buffer's capacity, avoiding reallocation.
    buffer_.Shrink(0);
    while (!UncheckedAtEnd() &&
           !detail::IsTokenTypeOneOf<Types...>(UncheckedPeek().GetType())) {
      ConsumeTokenOrBlockAndAppendToBuffer();
    }
    return CSSParserTokenRange(buffer_);
  }

  // Like ConsumeUntilPeekedTypeIs(), but stops at the current boundary type
  // (set via RangeBoundary; defaults to EOF).
  // Invalidates any ranges created by previous calls to ConsumeUntil*()
  CSSParserTokenRange ConsumeUntilPeekedBoundary() {
    EnsureLookAhead();
    buffer_.Shrink(0);
    while (!UncheckedAtEnd() && UncheckedPeek().GetType() != boundary_type_)
      ConsumeTokenOrBlockAndAppendToBuffer();
    return CSSParserTokenRange(buffer_);
  }

 private:
  // Appends the next token to buffer_; if it opens a block, keeps appending
  // until the matching block-end token (or EOF) has been appended as well.
  inline void ConsumeTokenOrBlockAndAppendToBuffer() {
    // Have to use internal consume/peek in here because they can read past
    // start/end of blocks
    unsigned nesting_level = 0;
    do {
      const CSSParserToken& token = UncheckedConsumeInternal();
      buffer_.push_back(token);
      if (token.GetBlockType() == CSSParserToken::kBlockStart)
        nesting_level++;
      else if (token.GetBlockType() == CSSParserToken::kBlockEnd)
        nesting_level--;
    } while (!PeekInternal().IsEOF() && nesting_level);
  }

  // Internal peek/consume variants: unlike the public ones, these do NOT
  // treat block boundaries as the end of the stream.
  const CSSParserToken& PeekInternal() {
    EnsureLookAhead();
    return UncheckedPeekInternal();
  }

  const CSSParserToken& UncheckedPeekInternal() const {
    DCHECK(HasLookAhead());
    return next_;
  }

  const CSSParserToken& ConsumeInternal() {
    EnsureLookAhead();
    return UncheckedConsumeInternal();
  }

  // Same as UncheckedConsume(), minus the block-boundary DCHECKs.
  const CSSParserToken& UncheckedConsumeInternal() {
    DCHECK(HasLookAhead());
    has_look_ahead_ = false;
    offset_ = tokenizer_.Offset();
    return next_;
  }

  void UncheckedSkipToEndOfBlock();

  // Scratch buffer backing ranges returned by ConsumeUntil*().
  Vector<CSSParserToken, 32> buffer_;
  CSSTokenizer& tokenizer_;
  // The look-ahead token; only meaningful while has_look_ahead_ is true.
  CSSParserToken next_;
  wtf_size_t offset_ = 0;
  bool has_look_ahead_ = false;
  // Token type at which ConsumeUntilPeekedBoundary() stops (see
  // RangeBoundary); kEOFToken means "no extra boundary".
  CSSParserTokenType boundary_type_ = kEOFToken;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_CORE_CSS_PARSER_CSS_PARSER_TOKEN_STREAM_H_