list.hpp
// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors
// SPDX-License-Identifier: BSL-1.0

#ifndef LEXY_DSL_LIST_HPP_INCLUDED
#define LEXY_DSL_LIST_HPP_INCLUDED

#include <lexy/dsl/base.hpp>
#include <lexy/dsl/choice.hpp>
#include <lexy/dsl/option.hpp>
#include <lexy/dsl/separator.hpp>

namespace lexyd
{
template <typename Item, typename Sep>
struct _lst : _copy_base<Item>
{
    template <typename Context, typename Reader, typename Sink>
    LEXY_PARSER_FUNC static bool _loop(Context& context, Reader& reader, Sink& sink)
    {
        while (true)
        {
            // Parse a separator if necessary.
            [[maybe_unused]] auto sep_begin = reader.position();
            if constexpr (!std::is_void_v<Sep>)
            {
                lexy::branch_parser_for<typename Sep::rule, Reader> sep{};
                if (!sep.try_parse(context.control_block, reader))
                {
                    // We didn't have a separator, so the list is definitely finished.
                    sep.cancel(context);
                    break;
                }

                if (!sep.template finish<lexy::sink_parser>(context, reader, sink))
                    return false;
            }
            [[maybe_unused]] auto sep_end = reader.position();

            // Parse the next item.
            if constexpr (lexy::is_branch_rule<Item>)
            {
                // It's a branch, so try parsing it to detect loop exit.
                lexy::branch_parser_for<Item, Reader> item{};
                if (!item.try_parse(context.control_block, reader))
                {
                    // We don't have a next item, so exit the loop.
                    // If necessary, report a trailing separator.
                    item.cancel(context);
                    if constexpr (!std::is_void_v<Sep>)
                        Sep::report_trailing_error(context, reader, sep_begin, sep_end);
                    break;
                }

                // We have an item, so finish parsing it.
                if (!item.template finish<lexy::sink_parser>(context, reader, sink))
                    return false;
            }
            else
            {
                // Not a branch, so we need exactly one item.
                if (!lexy::parser_for<Item, lexy::sink_parser>::parse(context, reader, sink))
                    return false;
            }
        }

        return true;
    }
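
    // Illustrative walk-through: with ',' as the separator, input "1, 2, 3 x" (whose first item
    // "1" has already been consumed by the caller) runs this loop as separator ',' -> item "2"
    // -> separator ',' -> item "3" -> no further separator before "x" -> break, returning true.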

    template <typename NextParser>
    struct p
    {
        template <typename Context, typename Reader, typename... Args>
        LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... args)
        {
            // Construct the sink.
            auto sink = context.value_callback().sink();

            // Parse the first item.
            if (!lexy::parser_for<Item, lexy::sink_parser>::parse(context, reader, sink))
                return false;

            // Parse the remaining items.
            if (!_loop(context, reader, sink))
                return false;

            // We're done with the list; finish the sink and continue.
            return lexy::sink_finish_parser<NextParser>::parse(context, reader, sink,
                                                               LEXY_FWD(args)...);
        }
    };

    template <typename Reader>
    struct bp
    {
        lexy::branch_parser_for<Item, Reader> item;

        template <typename ControlBlock>
        constexpr bool try_parse(const ControlBlock* cb, const Reader& reader)
        {
            // We parse a list if we can parse its first item.
            return item.try_parse(cb, reader);
        }

        template <typename Context>
        constexpr void cancel(Context& context)
        {
            return item.cancel(context);
        }

        template <typename NextParser, typename Context, typename... Args>
        LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args)
        {
            // At this point we have a list, so construct a sink.
            auto sink = context.value_callback().sink();

            // Finish the first item, passing all values to the sink.
            if (!item.template finish<lexy::sink_parser>(context, reader, sink))
                return false;

            // Parse the remaining items.
            if (!_loop(context, reader, sink))
                return false;

            // We're done with the list; finish the sink and continue.
            return lexy::sink_finish_parser<NextParser>::parse(context, reader, sink,
                                                               LEXY_FWD(args)...);
        }
    };
};

/// Parses a list of items without a separator.
template <typename Item>
constexpr auto list(Item)
{
    static_assert(lexy::is_branch_rule<Item>,
                  "list() without a separator requires a branch condition");
    return _lst<Item, void>{};
}

/// Parses a list of items with the specified separator.
template <typename Item, typename Sep, typename Tag>
constexpr auto list(Item, _sep<Sep, Tag>)
{
    return _lst<Item, _sep<Sep, Tag>>{};
}

/// Parses a list of items with the specified separator that can be trailing.
template <typename Item, typename Sep>
constexpr auto list(Item, _tsep<Sep>)
{
    static_assert(lexy::is_branch_rule<Item>,
                  "list() with a trailing separator requires a branch condition");
    return _lst<Item, _tsep<Sep>>{};
}

template <typename Item, typename Sep>
constexpr auto list(Item, _isep<Sep>)
{
    static_assert(lexy::_detail::error<Item, Sep>,
                  "list() does not support `dsl::ignore_trailing_sep()`");
    return _lst<Item, void>{};
}
} // namespace lexyd
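
// Usage sketch (illustrative; dsl::integer, dsl::comma, and lexy::as_list come from other lexy
// headers): a production that parses one or more comma-separated integers into a std::vector
// could look roughly like this:
//
//     struct int_list
//     {
//         static constexpr auto rule  = lexy::dsl::list(lexy::dsl::integer<int>,
//                                                       lexy::dsl::sep(lexy::dsl::comma));
//         static constexpr auto value = lexy::as_list<std::vector<int>>;
//     };
//
// With dsl::sep a trailing comma is reported as an error; dsl::trailing_sep would allow it.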

namespace lexyd
{
template <typename Term, typename Item, typename Sep, typename Recover>
struct _lstt : rule_base
{
    // We're using an enum together with a switch to compensate for the lack of goto in constexpr.
    // On well-formed input, the simple state machine goes:
    //   terminator -> separator -> separator_trailing_check -> item -> terminator -> ... -> done
    //
    // The interesting case is error recovery.
    // There we skip over characters until we find the terminator, separator, or item.
    // We then set the enum to jump to the appropriate state of the state machine.
    enum class _state
    {
        terminator,
        separator,
        separator_trailing_check,
        item,
        recovery,
    };
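
    // Illustrative trace: with ',' as separator and ')' as terminator, the well-formed input
    // "a, b, c)" (whose initial item "a" is parsed by p::parse below before _loop starts)
    // visits terminator (no ')') -> separator ',' -> separator_trailing_check -> item "b"
    // -> terminator -> separator ',' -> separator_trailing_check -> item "c" -> terminator ')'
    // -> done.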

    template <typename TermParser, typename Context, typename Reader, typename Sink>
    LEXY_PARSER_FUNC static bool _loop(_state initial_state, TermParser& term, Context& context,
                                       Reader& reader, Sink& sink)
    {
        auto state = initial_state;

        [[maybe_unused]] auto sep_pos = reader.position();
        while (true)
        {
            switch (state)
            {
            case _state::terminator:
                if (term.try_parse(context.control_block, reader))
                    // We had the terminator, so the list is done.
                    return true;
                term.cancel(context);

                // Parse the following list separator next.
                state = _state::separator;
                break;

            case _state::separator:
                if constexpr (!std::is_void_v<Sep>)
                {
                    sep_pos = reader.position();
                    if (lexy::parser_for<typename Sep::rule, lexy::sink_parser>::parse(context,
                                                                                       reader,
                                                                                       sink))
                    {
                        // Check for a trailing separator next.
                        state = _state::separator_trailing_check;
                        break;
                    }
                    else if (sep_pos == reader.position())
                    {
                        // We don't have a separator at all.
                        // Assume it's missing and parse an item instead.

                        if constexpr (lexy::is_branch_rule<Item>)
                        {
                            lexy::branch_parser_for<Item, Reader> item{};
                            if (item.try_parse(context.control_block, reader)
                                && item.template finish<lexy::sink_parser>(context, reader, sink))
                            {
                                // Continue after an item has been parsed.
                                state = _state::terminator;
                                break;
                            }
                            else
                            {
                                // Not an item, recover.
                                item.cancel(context);
                                state = _state::recovery;
                                break;
                            }
                        }
                        else
                        {
                            // We cannot try to parse an item.
                            // To avoid generating wrong errors, immediately recover.
                            state = _state::recovery;
                            break;
                        }
                    }
                    else
                    {
                        // We did have something that looked like a separator initially, but it
                        // wasn't one on closer inspection. Enter generic recovery, as we've
                        // already consumed input. (Unless the item and separator share a common
                        // prefix, we know it wasn't the start of an item, so we can't just
                        // pretend that there is one.)
                        state = _state::recovery;
                        break;
                    }
                }
                else
                {
                    // The list doesn't have a separator; immediately parse the item next.
                    state = _state::item;
                    break;
                }

            case _state::separator_trailing_check:
                if constexpr (!std::is_void_v<Sep>)
                {
                    // We need to check whether we have a trailing separator by checking
                    // for a terminating one.
                    if (term.try_parse(context.control_block, reader))
                    {
                        // We had the terminator, so the list is done.
                        // Report a trailing separator error if necessary.
                        Sep::report_trailing_error(context, reader, sep_pos, reader.position());
                        return true;
                    }
                    else
                    {
                        // We didn't have the terminator, so parse the next item.
                        state = _state::item;
                        break;
                    }
                }
                break;

            case _state::item:
                if (lexy::parser_for<Item, lexy::sink_parser>::parse(context, reader, sink))
                {
                    // Loop back.
                    state = _state::terminator;
                    break;
                }
                else
                {
                    // Recover from the missing item.
                    state = _state::recovery;
                    break;
                }

            case _state::recovery: {
                auto recovery_begin = reader.position();
                context.on(_ev::recovery_start{}, recovery_begin);
                while (true)
                {
                    // Recovery succeeds when we reach the next separator.
                    if constexpr (!std::is_void_v<Sep>)
                    {
                        sep_pos = reader.position();

                        lexy::branch_parser_for<typename Sep::rule, Reader> sep{};
                        if (sep.try_parse(context.control_block, reader))
                        {
                            auto recovery_end = reader.position();
                            context.on(_ev::token{}, lexy::error_token_kind, recovery_begin,
                                       recovery_end);
                            context.on(_ev::recovery_finish{}, recovery_end);

                            if (sep.template finish<lexy::sink_parser>(context, reader, sink))
                            {
                                // Continue the list with the trailing separator check.
                                state = _state::separator_trailing_check;
                                break;
                            }
                            else
                            {
                                // Need to recover from this as well.
                                state = _state::recovery;
                                break;
                            }
                        }
                        else
                        {
                            sep.cancel(context);
                        }
                    }
                    // When we don't have a separator, but the item is a branch, we also succeed
                    // when we reach the next item.
                    //
                    // Note that we're doing this check only if we don't have a separator.
                    // If we do have one, the heuristic "end of the invalid item" is better than
                    // "beginning of the next one".
                    else if constexpr (lexy::is_branch_rule<Item>)
                    {
                        lexy::branch_parser_for<Item, Reader> item{};
                        if (item.try_parse(context.control_block, reader))
                        {
                            auto recovery_end = reader.position();
                            context.on(_ev::token{}, lexy::error_token_kind, recovery_begin,
                                       recovery_end);
                            context.on(_ev::recovery_finish{}, recovery_end);

                            if (item.template finish<lexy::sink_parser>(context, reader, sink))
                            {
                                // Continue the list with the next terminator check.
                                state = _state::terminator;
                                break;
                            }
                            else
                            {
                                // Need to recover from this as well.
                                state = _state::recovery;
                                break;
                            }
                        }
                        else
                        {
                            item.cancel(context);
                        }
                    }

                    // At this point, we couldn't detect the next item.
                    // Recovery succeeds when we reach the terminator.
                    if (term.try_parse(context.control_block, reader))
                    {
                        // We're now done with the entire list.
                        auto recovery_end = reader.position();
                        context.on(_ev::token{}, lexy::error_token_kind, recovery_begin,
                                   recovery_end);
                        context.on(_ev::recovery_finish{}, recovery_end);
                        return true;
                    }
                    else
                    {
                        term.cancel(context);
                    }

                    // At this point, we couldn't detect the next item or a terminator.
                    // Recovery fails when we reach the limit.
                    using limit_rule = decltype(Recover{}.get_limit());
                    if (lexy::token_parser_for<limit_rule, Reader> limit(reader);
                        limit.try_parse(reader) || reader.peek() == Reader::encoding::eof())
                    {
                        // Recovery has failed, propagate the error.
                        auto recovery_end = reader.position();
                        context.on(_ev::token{}, lexy::error_token_kind, recovery_begin,
                                   recovery_end);
                        context.on(_ev::recovery_cancel{}, recovery_end);
                        return false;
                    }

                    // Consume one code unit and try again.
                    reader.bump();
                }
                break;
            }
            }
        }

        return false; // unreachable
    }
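
    // Recovery example: for input "1, x, 3;" with ',' as separator and ';' as terminator, the
    // malformed item "x" puts the loop into _state::recovery, which bumps the reader until the
    // next ',' and then resumes with the trailing-separator check, so "3" is still parsed.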

    template <typename NextParser>
    struct p
    {
        template <typename Context, typename Reader, typename... Args>
        LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... args)
        {
            lexy::branch_parser_for<Term, Reader> term{};
            auto sink = context.value_callback().sink();

            // Parse the initial item.
            using item_parser = lexy::parser_for<Item, lexy::sink_parser>;
            auto result       = item_parser::parse(context, reader, sink);

            // Parse the remaining items.
            if (!_loop(result ? _state::terminator : _state::recovery, term, context, reader, sink))
                return false;

            // At this point, we just need to finish parsing the terminator.
            if constexpr (std::is_same_v<typename decltype(sink)::return_type, void>)
            {
                LEXY_MOV(sink).finish();
                return term.template finish<NextParser>(context, reader, LEXY_FWD(args)...);
            }
            else
            {
                return term.template finish<NextParser>(context, reader, LEXY_FWD(args)...,
                                                        LEXY_MOV(sink).finish());
            }
        }
    };
};
} // namespace lexyd

#endif // LEXY_DSL_LIST_HPP_INCLUDED
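The terminated list _lstt above is not produced by the list() overloads; it is what lexy's
terminator DSL expands to. A rough sketch of how it is typically reached (assuming the
dsl::terminator, dsl::semicolon, dsl::integer, dsl::comma, and lexy::as_list helpers from the
other lexy headers):

    struct statement_args
    {
        // integers separated by commas and terminated by a semicolon; if an item is malformed,
        // _lstt's recovery state skips ahead to the next ',' or ';' instead of failing outright
        static constexpr auto rule
            = lexy::dsl::terminator(lexy::dsl::semicolon)
                  .list(lexy::dsl::integer<int>, lexy::dsl::sep(lexy::dsl::comma));
        static constexpr auto value = lexy::as_list<std::vector<int>>;
    };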