lexy/token.hpp
#ifndef LEXY_TOKEN_HPP_INCLUDED
#define LEXY_TOKEN_HPP_INCLUDED
namespace lexy
{
// A compile-time map from token rules to their token kinds.
template <typename TokenKind, typename... Tokens>
struct _tk_map
{
    TokenKind _data[sizeof...(Tokens)];

    template <std::size_t... Idx>
    LEXY_CONSTEVAL _tk_map(lexy::_detail::index_sequence<Idx...>, const TokenKind* data,
                           TokenKind new_kind)
    // Copy the already registered kinds and append the new one.
    : _data{data[Idx]..., new_kind}
    {}

    // Registers Token with the given Kind, yielding an extended map.
    template <TokenKind Kind, typename Token>
    LEXY_CONSTEVAL auto map(Token) const
    {
        static_assert(lexy::is_token_rule<Token>, "cannot map non-token to token kind");
        /* ... */
    }

    // Returns the kind registered for Token.
    template <typename Token>
    LEXY_CONSTEVAL auto lookup(Token) const
    {
        // Find the index of Token among the registered token rules, if any.
        constexpr auto idx = [] {
            bool is_same[]
                = {std::is_same_v<typename Token::token_type, typename Tokens::token_type>...};

            for (std::size_t idx = 0; idx != sizeof...(Tokens); ++idx)
                if (is_same[idx])
                    return idx;

            return sizeof...(Tokens);
        }();

        if constexpr (idx == sizeof...(Tokens))
            // Not registered: fall back to the unknown token kind.
            return unknown_token_kind;
        else
            return _data[idx];
    }
};
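The map is built by chaining map() and queried with lookup(). Below is a minimal sketch of those mechanics, using plain integers as kinds and the lexy::token_kind_map object introduced just after this struct; the chosen token rules (LEXY_LIT("+"), dsl::digits<>) are arbitrary, and the static_asserts encode the lookup behaviour as read from the code above rather than a documented guarantee.

    #include <lexy/dsl.hpp>
    #include <lexy/token.hpp>

    // Start from the empty map and register two token rules with integer kinds.
    constexpr auto my_map = lexy::token_kind_map
                                .map<1>(LEXY_LIT("+"))
                                .map<2>(lexy::dsl::digits<>);

    // lookup() finds the kind registered for the token rule's type.
    static_assert(my_map.lookup(LEXY_LIT("+")) == 1);
    static_assert(my_map.lookup(lexy::dsl::digits<>) == 2);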
// The empty map that serves as the starting point for building a mapping.
struct _tk_map_empty
{
    // Nothing registered: every token rule looks up as the unknown kind.
    template <typename Token>
    static LEXY_CONSTEVAL auto lookup(Token) { return unknown_token_kind; }

    // Registers the first token rule with its kind, yielding a non-empty _tk_map.
    template <auto TokenKind, typename Token>
    LEXY_CONSTEVAL auto map(Token) const
    {
        static_assert(lexy::is_token_rule<Token>, "cannot map non-token to token kind");
        /* ... */
    }
};
constexpr auto token_kind_map = _tk_map_empty{};

/// A mapping of token rule to token kind; specialize for your own kinds.
template <typename TokenKind>
constexpr auto token_kind_map_for = token_kind_map;

// Whether a token rule carries its own (non-predefined, enum) kind.
template <typename TokenRule>
constexpr auto _has_special_token_kind = [] {
    using kind = LEXY_DECAY_DECLTYPE(lexy::token_kind_of<TokenRule>);
    return !std::is_same_v<kind, lexy::predefined_token_kind> && std::is_enum_v<kind>;
}();
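token_kind_map_for is the intended customization point: specializing it for your own enum is how token rules get user-defined kinds. A hedged sketch follows, where the calc_kind enum and the mapped rules are made up for illustration; the pattern of chaining .map<Kind>(rule) matches the interface declared above.

    #include <lexy/dsl.hpp>
    #include <lexy/token.hpp>

    enum class calc_kind { plus = 1, number = 2 };

    // Register which token rules correspond to which calc_kind values.
    template <>
    constexpr auto lexy::token_kind_map_for<calc_kind>
        = lexy::token_kind_map.map<calc_kind::plus>(LEXY_LIT("+"))
                              .map<calc_kind::number>(lexy::dsl::digits<>);

    // A token_kind constructed from a mapped rule should now report the mapped kind,
    // assuming the map lookup in the token_kind(TokenRule) constructor below succeeds.
    static_assert(lexy::token_kind<calc_kind>(LEXY_LIT("+"))
                  == lexy::token_kind<calc_kind>(calc_kind::plus));

As with any explicit specialization, it has to be visible before the first use of lexy::token_kind<calc_kind> with a token rule.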
/// What sort of token it is.
template <typename TokenKind = void>
class token_kind
{
    static_assert(std::is_void_v<TokenKind> || std::is_enum_v<TokenKind>,
                  "invalid type for TokenKind");

    using _underlying_type = lexy::_detail::type_or<TokenKind, int>;

    // Which argument types may be used to specify a kind of this token_kind.
    template <typename T>
    static constexpr bool _is_compatible_kind_type()
    {
        using type = std::remove_cv_t<T>;
        if constexpr (std::is_same_v<type, lexy::predefined_token_kind>)
            return true; // predefined kinds are always accepted
        else if constexpr (std::is_void_v<TokenKind>)
            return std::is_integral_v<T>; // without an enum, integers serve as kinds
        else
            return std::is_same_v<type, TokenKind>; // otherwise, only the chosen enum
    }

public:
    /// Creates an unknown token kind.
    constexpr token_kind() noexcept : token_kind(unknown_token_kind) {}

    /// Creates a predefined token kind.
    constexpr token_kind(predefined_token_kind value) noexcept
    : _value(static_cast<std::uint_least16_t>(value))
    {}

    /// Creates the token kind with the specified value.
    constexpr token_kind(_underlying_type value) noexcept
    : _value(static_cast<std::uint_least16_t>(value))
    {}

    /// Creates the token kind of a token rule.
    template <typename TokenRule, typename = std::enable_if_t<lexy::is_token_rule<TokenRule>>>
    constexpr token_kind(TokenRule) noexcept
    // First consult the token kind map specialized for TokenKind.
    : token_kind(token_kind_map_for<TokenKind>.lookup(TokenRule{}))
    {
        // A token rule can also carry a kind of its own.
        constexpr auto token_rule_kind = lexy::token_kind_of<TokenRule>;
        /* ... */
    }

    /// Whether the kind is not unknown.
    constexpr explicit operator bool() const noexcept { /* ... */ }

    constexpr const char* name() const noexcept { /* ... */ }

    friend constexpr bool operator==(token_kind lhs, token_kind rhs) noexcept
    {
        return lhs._value == rhs._value;
    }
    friend constexpr bool operator!=(token_kind lhs, token_kind rhs) noexcept
    {
        return lhs._value != rhs._value;
    }

private:
    std::uint_least16_t _value;
};
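A minimal usage sketch of token_kind with a user-supplied enum; my_kind and its enumerators are hypothetical names, and the checks assume the documented behaviour that a default-constructed kind is the unknown kind and that the explicit bool conversion distinguishes known from unknown kinds.

    #include <lexy/token.hpp>

    enum class my_kind { number = 1, plus = 2 };

    constexpr lexy::token_kind<my_kind> unknown;                  // default: the unknown kind
    constexpr lexy::token_kind<my_kind> number(my_kind::number);  // kind with a specified value

    static_assert(!unknown);  // the unknown kind converts to false
    static_assert(number);    // a known kind converts to true
    static_assert(number == lexy::token_kind<my_kind>(my_kind::number));
    static_assert(number != unknown);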
// Deduction guides: an integral value deduces token_kind<void>, an enum value deduces
// its own enum type, and a token rule that carries its own kind deduces that kind's type.
template <typename TokenKind, typename = std::enable_if_t<std::is_integral_v<TokenKind>>>
token_kind(TokenKind) -> token_kind<void>;
template <typename TokenKind, typename = std::enable_if_t<std::is_enum_v<TokenKind>>>
token_kind(TokenKind) -> token_kind<TokenKind>;
template <typename TokenRule, typename = std::enable_if_t<_has_special_token_kind<TokenRule>>>
token_kind(TokenRule) -> token_kind<LEXY_DECAY_DECLTYPE(lexy::token_kind_of<TokenRule>)>;
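A sketch of what these deduction guides deduce, assuming the guide bodies reconstructed above: an integral value yields token_kind<void>, an enum value yields token_kind of that enum. my_kind is again a hypothetical enum.

    #include <lexy/token.hpp>

    #include <type_traits>

    enum class my_kind { number = 1, plus = 2 };

    constexpr lexy::token_kind from_int(42);              // integral value -> token_kind<void>
    constexpr lexy::token_kind from_enum(my_kind::plus);  // enum value     -> token_kind<my_kind>

    static_assert(std::is_same_v<decltype(from_int), const lexy::token_kind<void>>);
    static_assert(std::is_same_v<decltype(from_enum), const lexy::token_kind<my_kind>>);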
/// A parsed token, i.e. its kind and its lexeme.
template <typename Reader, typename TokenKind = void>
class token
{
public:
    using iterator = typename Reader::iterator;

    /* constructors from a kind and a lexeme, or a kind and an iterator range ... */

    constexpr token_kind<TokenKind> kind() const noexcept { /* ... */ }

    /// The name of the token's kind.
    constexpr const char* name() const noexcept { /* ... */ }

    constexpr iterator position() const noexcept { /* ... */ }
    constexpr auto lexeme() const noexcept { /* ... */ }

private:
    token_kind<TokenKind> _kind;
    lexy::lexeme<Reader> _lexeme;
};

template <typename TokenKind, typename Reader>
token(token_kind<TokenKind>, lexy::lexeme<Reader>) -> token<Reader, TokenKind>;
template <typename TokenKind, typename Reader,
          typename = std::enable_if_t<std::is_integral_v<TokenKind>>>
token(TokenKind, lexy::lexeme<Reader>) -> token<Reader, void>;
template <typename TokenKind, typename Reader,
          typename = std::enable_if_t<std::is_enum_v<TokenKind>>>
token(TokenKind, lexy::lexeme<Reader>) -> token<Reader, TokenKind>;
template <typename TokenRule, typename Reader,
          typename = std::enable_if_t<_has_special_token_kind<TokenRule>>>
token(TokenRule, lexy::lexeme<Reader>) -> token<Reader, LEXY_DECAY_DECLTYPE(lexy::token_kind_of<TokenRule>)>;

/// A token for the given input type.
template <typename Input, typename TokenKind = void>
using token_for = token<lexy::input_reader<Input>, TokenKind>;
} // namespace lexy
#endif // LEXY_TOKEN_HPP_INCLUDED
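For completeness, a hedged sketch that constructs a token by hand and reads it back through kind(), lexeme() and position(). In practice tokens are produced by lexy itself (for example when building a parse tree), so this only illustrates the interface; the input string, the integer kind 1, and the assumption that the reader's iterator for a string input is a plain const char* are all illustrative.

    #include <lexy/input/string_input.hpp>
    #include <lexy/token.hpp>

    int main()
    {
        const char str[] = "123+456";
        auto       input = lexy::zstring_input(str);

        // A token with integer kind 1 covering the first three characters.
        lexy::token_for<decltype(input)> tok(lexy::token_kind<>(1), str, str + 3);

        bool ok = tok.kind() == lexy::token_kind<>(1) // the kind we stored
                  && tok.lexeme().size() == 3         // the lexeme covers "123"
                  && tok.position() == str;           // position() is the start of the token
        return ok ? 0 : 1;
    }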
Member summary

lexy::token_kind_map and the token kind map machinery
    constexpr auto token_kind_map
    constexpr auto token_kind_map_for              // A mapping of token rule to token kind; specialize for your own kinds.
    constexpr auto token_kind_map_for<void>
    constexpr auto _has_special_token_kind
    TokenKind _data[sizeof...(Tokens)]
    LEXY_CONSTEVAL _tk_map(lexy::_detail::index_sequence<Idx...>, const TokenKind* data, TokenKind new_kind)
    LEXY_CONSTEVAL auto map(Token) const
    LEXY_CONSTEVAL auto lookup(Token) const
    static LEXY_CONSTEVAL auto lookup(Token)

lexy::token_kind                                   // What sort of token it is.
    constexpr token_kind() noexcept                               // Creates an unknown token kind.
    constexpr token_kind(predefined_token_kind value) noexcept    // Creates a predefined token kind.
    constexpr token_kind(_underlying_type value) noexcept         // Creates the token kind with the specified value.
    constexpr token_kind(TokenRule) noexcept                      // Creates the token kind of a token rule.
    constexpr token_kind(std::uint_least16_t kind) noexcept
    constexpr bool is_predefined() const noexcept
    constexpr _underlying_type get() const noexcept
    constexpr const char* name() const noexcept
    static constexpr token_kind<TokenKind> from_raw(std::uint_least16_t kind) noexcept
    static constexpr std::uint_least16_t to_raw(token_kind<TokenKind> kind) noexcept
    friend constexpr bool operator==(token_kind lhs, token_kind rhs) noexcept
    friend constexpr bool operator!=(token_kind lhs, token_kind rhs) noexcept
    friend constexpr const char* token_kind_name(token_kind kind) noexcept
    static constexpr bool _is_compatible_kind_type()
    lexy::_detail::type_or<void, int> _underlying_type
    std::uint_least16_t _value
    token_kind(TokenKind) -> token_kind<void>      // deduction guide

lexy::token                                        // A parsed token, i.e. its kind and its lexeme.
    constexpr token(token_kind<TokenKind> kind, lexy::lexeme<Reader> lex) noexcept
    constexpr token(token_kind<TokenKind> kind, iterator begin, iterator end) noexcept
    constexpr token_kind<TokenKind> kind() const noexcept
    constexpr const char* name() const noexcept
    constexpr auto lexeme() const noexcept
    constexpr iterator position() const noexcept
    typename Reader::iterator iterator
    token_kind<TokenKind> _kind
    lexy::lexeme<Reader> _lexeme
    token(token_kind<TokenKind>, lexy::lexeme<Reader>) -> token<Reader, TokenKind>    // deduction guide

Referenced from other headers
    #define LEXY_PRECONDITION(Expr)
    #define LEXY_DECAY_DECLTYPE(...)
    make_index_sequence<sizeof...(T)> index_sequence_for
    std::conditional_t<std::is_void_v<T>, Fallback, T> type_or
    constexpr auto begin(const C& c) -> decltype(c.begin())
    constexpr auto end(const C& c) -> decltype(c.end())
    typename Reader::encoding encoding
    typename encoding::char_type char_type
    string_view::value_type char_type
    constexpr bool ignore_if_empty() const noexcept
    _smallest_predefined_token_kind (enumerator)