// libstdc++ — <generator> header (documentation-page capture; the file's
// definition follows below).
1 // <generator> -*- C++ -*-
2 
3 // Copyright (C) 2023-2024 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file include/generator
26  * This is a Standard C++ Library header.
27  */
28 
29 #ifndef _GLIBCXX_GENERATOR
30 #define _GLIBCXX_GENERATOR
31 
32 #include <ranges>
33 #pragma GCC system_header
34 
35 #include <bits/c++config.h>
36 
37 #define __glibcxx_want_generator
38 #include <bits/version.h>
39 
40 #ifdef __cpp_lib_generator // C++ >= 23 && __glibcxx_coroutine
41 #include <new>
42 #include <bits/move.h>
43 #include <bits/ranges_util.h>
44 #include <bits/elements_of.h>
45 #include <bits/uses_allocator.h>
46 #include <bits/exception_ptr.h>
47 #include <cstddef>
48 #include <cstdint>
49 #include <cstring>
50 #include <coroutine>
51 
52 #include <type_traits>
53 #include <variant>
54 #include <concepts>
55 
56 #if _GLIBCXX_HOSTED
57 # include <bits/memory_resource.h>
58 #endif // HOSTED
59 
60 namespace std _GLIBCXX_VISIBILITY(default)
61 {
62 _GLIBCXX_BEGIN_NAMESPACE_VERSION
63 
64  /**
65  * @defgroup generator_coros Range generator coroutines
66  * @addtogroup ranges
67  * @since C++23
68  * @{
69  */
70 
71  /** @brief A range specified using a yielding coroutine.
72  *
73  * `std::generator` is a utility class for defining ranges using coroutines
74  * that yield elements as a range. Generator coroutines are synchronous.
75  *
76  * @headerfile generator
77  * @since C++23
78  */
  // Forward declaration; the class is defined after the __gen implementation
  // details below.  _Val and _Alloc default to void ("not specified").
  template<typename _Ref, typename _Val = void, typename _Alloc = void>
    class generator;
81 
82  /// @cond undocumented
83  namespace __gen
84  {
    /// _Reference type for a generator whose reference (first argument) and
    /// value (second argument) types are _Ref and _Val.
    /// When no value type is given, the reference type is _Ref&&.
    template<typename _Ref, typename _Val>
      using _Reference_t = __conditional_t<is_void_v<_Val>,
                                           _Ref&&, _Ref>;

    /// Type yielded by a generator whose _Reference type is _Reference.
    /// Non-reference _Reference types are yielded by const lvalue reference.
    template<typename _Reference>
      using _Yield_t = __conditional_t<is_reference_v<_Reference>,
                                       _Reference,
                                       const _Reference&>;

    /// _Yield_t composed with _Reference_t.
    template<typename _Ref, typename _Val>
      using _Yield2_t = _Yield_t<_Reference_t<_Ref, _Val>>;

    // Variable template detecting specializations of std::generator.
    template<typename> constexpr bool __is_generator = false;
    template<typename _Val, typename _Ref, typename _Alloc>
      constexpr bool __is_generator<std::generator<_Val, _Ref, _Alloc>> = true;
104 
    /// Allocator- and value-type-erased generator promise type.
    /// \tparam _Yielded The corresponding generator's yielded type.
    template<typename _Yielded>
      class _Promise_erased
      {
        // Yielded values are passed around by pointer, so the yielded type
        // must be a reference type.
        static_assert(is_reference_v<_Yielded>);
        using _Yielded_deref = remove_reference_t<_Yielded>;
        using _Yielded_decvref = remove_cvref_t<_Yielded>;
        using _ValuePtr = add_pointer_t<_Yielded>;
        using _Coro_handle = std::coroutine_handle<_Promise_erased>;

        template<typename, typename, typename>
          friend class std::generator;

        template<typename _Gen>
          struct _Recursive_awaiter;
        template<typename>
          friend struct _Recursive_awaiter;
        struct _Copy_awaiter;
        struct _Subyield_state;
        struct _Final_awaiter;
      public:
        // Generators are lazy: nothing runs until begin() resumes the frame.
        suspend_always
        initial_suspend() const noexcept
        { return {}; }

        // co_yield of a glvalue matching the yielded type: publish its
        // address in the bottom frame's value slot and suspend.
        suspend_always
        yield_value(_Yielded __val) noexcept
        {
          _M_bottom_value() = ::std::addressof(__val);
          return {};
        }

        // co_yield of a const lvalue when the generator yields rvalue
        // references: materialize a copy inside the awaiter and yield that.
        auto
        yield_value(const _Yielded_deref& __val)
          noexcept (is_nothrow_constructible_v<_Yielded_decvref,
                                               const _Yielded_deref&>)
          requires (is_rvalue_reference_v<_Yielded>
                    && constructible_from<_Yielded_decvref,
                                          const _Yielded_deref&>)
        { return _Copy_awaiter(__val, _M_bottom_value()); }

        // co_yield ranges::elements_of(nested-generator): yield every element
        // of the nested generator via symmetric transfer.
        template<typename _R2, typename _V2, typename _A2, typename _U2>
          requires std::same_as<_Yield2_t<_R2, _V2>, _Yielded>
          auto
          yield_value(ranges::elements_of<generator<_R2, _V2, _A2>&&, _U2> __r)
            noexcept
          { return _Recursive_awaiter { std::move(__r.range) }; }

        // co_yield ranges::elements_of(range): wrap the range in an ad-hoc
        // generator coroutine, then recursively yield from that generator.
        template<ranges::input_range _R, typename _Alloc>
          requires convertible_to<ranges::range_reference_t<_R>, _Yielded>
          auto
          yield_value(ranges::elements_of<_R, _Alloc> __r)
          {
            auto __n = [] (allocator_arg_t, _Alloc,
                           ranges::iterator_t<_R> __i,
                           ranges::sentinel_t<_R> __s)
              -> generator<_Yielded, ranges::range_value_t<_R>, _Alloc> {
              for (; __i != __s; ++__i)
                co_yield static_cast<_Yielded>(*__i);
            };
            return yield_value(ranges::elements_of(__n(allocator_arg,
                                                       __r.allocator,
                                                       ranges::begin(__r.range),
                                                       ranges::end(__r.range))));
          }


        // At completion, pop this coroutine off the generator stack and
        // resume the parent (or a no-op handle if this was the only frame).
        _Final_awaiter
        final_suspend() noexcept
        { return {}; }

        void
        unhandled_exception()
        {
          // To get to this point, this coroutine must have been active.  In
          // that case, it must be the top of the stack.  The current coroutine
          // is the sole entry of the stack iff it is both the top and the
          // bottom.  As it is the top implicitly in this context it will be
          // the sole entry iff it is the bottom.
          if (_M_nest._M_is_bottom())
            throw;  // no parent generator to deliver the exception to
          else
            this->_M_except = std::current_exception();
        }

        // Generators are synchronous: co_await is disallowed.
        void await_transform() = delete;
        void return_void() const noexcept {}

      private:
        // Pointer slot (in the bottom frame) where yielded values are
        // published for the iterator to read.
        _ValuePtr&
        _M_bottom_value() noexcept
        { return _M_nest._M_bottom_value(*this); }

        _ValuePtr&
        _M_value() noexcept
        { return _M_nest._M_value(*this); }

        _Subyield_state _M_nest;      // nested-yield stack bookkeeping
        std::exception_ptr _M_except; // exception escaping a nested generator
      };
206 
    // Per-promise bookkeeping for the stack of generators formed by
    // 'co_yield ranges::elements_of(...)'.  Each promise is either the
    // bottom of its stack (the generator begin() was called on) or an
    // interior/top frame that records the stack's bottom and its parent.
    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Subyield_state
      {
        // State for a frame entered via elements_of: where the stack's
        // bottom is, and who to resume when this frame finishes.
        struct _Frame
        {
          _Coro_handle _M_bottom;
          _Coro_handle _M_parent;
        };

        // State for the bottom frame: the currently-running (top) coroutine
        // and the slot holding a pointer to the current yielded value.
        struct _Bottom_frame
        {
          _Coro_handle _M_top;
          _ValuePtr _M_value = nullptr;
        };

        std::variant<
          _Bottom_frame,
          _Frame
        > _M_stack;

        bool
        _M_is_bottom() const noexcept
        { return !std::holds_alternative<_Frame>(this->_M_stack); }

        // Reference to the top-of-stack handle, which is stored in the
        // bottom frame; interior frames chase _M_bottom to reach it.
        _Coro_handle&
        _M_top() noexcept
        {
          if (auto __f = std::get_if<_Frame>(&this->_M_stack))
            return __f->_M_bottom.promise()._M_nest._M_top();

          auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
          __glibcxx_assert(__bf);
          return __bf->_M_top;
        }

        // Push __subyield on top of the stack that currently ends at
        // __current (whose state this is).
        void
        _M_push(_Coro_handle __current, _Coro_handle __subyield) noexcept
        {
          __glibcxx_assert(&__current.promise()._M_nest == this);
          __glibcxx_assert(this->_M_top() == __current);

          __subyield.promise()._M_nest._M_jump_in(__current, __subyield);
        }

        // Remove this coroutine from its stack; returns the handle that
        // should be resumed next (the parent, or a no-op handle).
        std::coroutine_handle<>
        _M_pop() noexcept
        {
          if (auto __f = std::get_if<_Frame>(&this->_M_stack))
            {
              // We aren't a bottom coroutine.  Restore the parent to the top
              // and resume.
              auto __p = this->_M_top() = __f->_M_parent;
              return __p;
            }
          else
            // Otherwise, there's nothing to resume.
            return std::noop_coroutine();
        }

        // Attach __new (whose state this is) on top of __rest's stack.
        void
        _M_jump_in(_Coro_handle __rest, _Coro_handle __new) noexcept
        {
          __glibcxx_assert(&__new.promise()._M_nest == this);
          __glibcxx_assert(this->_M_is_bottom());
          // We're bottom.  We're also top if top is unset (note that this is
          // not true if something was added to the coro stack and then popped,
          // but in that case we can't possibly be yielded from, as it would
          // require rerunning begin()).
          __glibcxx_assert(!this->_M_top());

          auto& __rn = __rest.promise()._M_nest;
          __rn._M_top() = __new;

          // Presume we're the second frame...
          auto __bott = __rest;
          if (auto __f = std::get_if<_Frame>(&__rn._M_stack))
            // But, if we aren't, get the actual bottom.  We're only the second
            // frame if our parent is the bottom frame, i.e. it doesn't have a
            // _Frame member.
            __bott = __f->_M_bottom;

          this->_M_stack = _Frame {
            ._M_bottom = __bott,
            ._M_parent = __rest
          };
        }

        // Slot for the current yielded value; always lives in the bottom
        // frame, so interior frames forward to it.
        _ValuePtr&
        _M_bottom_value(_Promise_erased& __current) noexcept
        {
          __glibcxx_assert(&__current._M_nest == this);
          if (auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack))
            return __bf->_M_value;
          auto __f = std::get_if<_Frame>(&this->_M_stack);
          __glibcxx_assert(__f);
          auto& __p = __f->_M_bottom.promise();
          return __p._M_nest._M_value(__p);
        }

        // Direct access to the value slot; valid only on the bottom frame.
        _ValuePtr&
        _M_value(_Promise_erased& __current) noexcept
        {
          __glibcxx_assert(&__current._M_nest == this);
          auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
          __glibcxx_assert(__bf);
          return __bf->_M_value;
        }
      };
315 
316  template<typename _Yielded>
317  struct _Promise_erased<_Yielded>::_Final_awaiter
318  {
319  bool await_ready() noexcept
320  { return false; }
321 
322  template<typename _Promise>
323  auto await_suspend(std::coroutine_handle<_Promise> __c) noexcept
324  {
325 #ifdef __glibcxx_is_pointer_interconvertible
326  static_assert(is_pointer_interconvertible_base_of_v<
327  _Promise_erased, _Promise>);
328 #endif
329 
330  auto& __n = __c.promise()._M_nest;
331  return __n._M_pop();
332  }
333 
334  void await_resume() noexcept {}
335  };
336 
    // Awaiter used when yielding a const lvalue from a generator of rvalue
    // references: owns a copy of the value for the duration of the suspend.
    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Copy_awaiter
      {
        _Yielded_decvref _M_value;   // locally-owned copy of the yielded value
        _ValuePtr& _M_bottom_value;  // bottom frame's published-value slot

        constexpr bool await_ready() noexcept
        { return false; }

        template<typename _Promise>
          void await_suspend(std::coroutine_handle<_Promise>) noexcept
          {
#ifdef __glibcxx_is_pointer_interconvertible
            static_assert(is_pointer_interconvertible_base_of_v<
                            _Promise_erased, _Promise>);
#endif
            // Publish the copy's address as the current yielded value.
            _M_bottom_value = ::std::addressof(_M_value);
          }

        constexpr void
        await_resume() const noexcept
        {}
      };
360 
    // Awaiter for 'co_yield ranges::elements_of(generator)': pushes the
    // nested generator onto the current generator stack and transfers
    // control into it.
    template<typename _Yielded>
      template<typename _Gen>
        struct _Promise_erased<_Yielded>::_Recursive_awaiter
        {
          _Gen _M_gen;  // owns the nested generator for the await's duration
          static_assert(__is_generator<_Gen>);
          static_assert(std::same_as<typename _Gen::yielded, _Yielded>);

          // Takes ownership and marks the nested generator as started so its
          // own begin() can no longer be called.
          _Recursive_awaiter(_Gen __gen) noexcept
          : _M_gen(std::move(__gen))
          { this->_M_gen._M_mark_as_started(); }

          constexpr bool
          await_ready() const noexcept
          { return false; }


          // Push the nested coroutine on the stack and symmetric-transfer
          // into it; it will run until it yields or finishes.
          template<typename _Promise>
            std::coroutine_handle<>
            await_suspend(std::coroutine_handle<_Promise> __p) noexcept
            {
#ifdef __glibcxx_is_pointer_interconvertible
              static_assert(is_pointer_interconvertible_base_of_v<
                              _Promise_erased, _Promise>);
#endif
              auto __c = _Coro_handle::from_address(__p.address());
              auto __t = _Coro_handle::from_address(this->_M_gen._M_coro.address());
              __p.promise()._M_nest._M_push(__c, __t);
              return __t;
            }

          // Rethrow any exception captured while the nested generator ran.
          void await_resume()
          {
            if (auto __e = _M_gen._M_coro.promise()._M_except)
              std::rethrow_exception(__e);
          }
        };
399 
400  struct _Alloc_block
401  {
402  alignas(__STDCPP_DEFAULT_NEW_ALIGNMENT__)
403  char _M_data[__STDCPP_DEFAULT_NEW_ALIGNMENT__];
404 
405  static auto
406  _M_cnt(std::size_t __sz) noexcept
407  {
408  auto __blksz = sizeof(_Alloc_block);
409  return (__sz + __blksz - 1) / __blksz;
410  }
411  };
412 
413  template<typename _All>
414  concept _Stateless_alloc = (allocator_traits<_All>::is_always_equal::value
415  && default_initializable<_All>);
416 
    // Mixin providing coroutine-frame operator new/delete for a generator
    // with a statically-known allocator type.  Stateful allocators are
    // stored after the frame (at an aligned offset) so operator delete can
    // recover them; stateless ones are default-constructed on demand.
    template<typename _Alloc>
      class _Promise_alloc
      {
        using _ATr = allocator_traits<_Alloc>;
        using _Rebound = typename _ATr::template rebind_alloc<_Alloc_block>;
        using _Rebound_ATr = typename _ATr
                             ::template rebind_traits<_Alloc_block>;
        static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                      "Must use allocators for true pointers with generators");

        // Address of the stored allocator: just past the frame (__fn +
        // __fsz), rounded up to the allocator's alignment.
        static auto
        _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
        {
          auto __an = __fn + __fsz;
          auto __ba = alignof(_Rebound);
          return reinterpret_cast<_Rebound*>(((__an + __ba - 1) / __ba) * __ba);
        }

        static auto
        _M_alloc_size(std::size_t __csz) noexcept
        {
          auto __ba = alignof(_Rebound);
          // Our desired layout is placing the coroutine frame, then pad out to
          // align, then place the allocator.  The total size of that is the
          // size of the coroutine frame, plus up to __ba bytes, plus the size
          // of the allocator.
          return __csz + __ba + sizeof(_Rebound);
        }

        // Allocate a frame of __csz bytes with __b, storing __b after the
        // frame when it is stateful.
        static void*
        _M_allocate(_Rebound __b, std::size_t __csz)
        {
          if constexpr (_Stateless_alloc<_Rebound>)
            // Only need room for the coroutine.
            return __b.allocate(_Alloc_block::_M_cnt(__csz));
          else
            {
              auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
              auto __f = __b.allocate(__nsz);
              auto __fn = reinterpret_cast<std::uintptr_t>(__f);
              auto __an = _M_alloc_address(__fn, __csz);
              ::new (__an) _Rebound(std::move(__b));
              return __f;
            }
        }

      public:
        // Used when the coroutine takes no allocator argument.
        void*
        operator new(std::size_t __sz)
          requires default_initializable<_Rebound> // _Alloc is non-void
        { return _M_allocate({}, __sz); }

        // Used by free-function coroutines taking (allocator_arg, alloc, ...).
        template<typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
            requires convertible_to<const _Na&, _Alloc>
          {
            return _M_allocate(static_cast<_Rebound>(static_cast<_Alloc>(__na)),
                               __sz);
          }

        // Used by member-function coroutines; the object parameter precedes
        // the allocator_arg tag.
        template<typename _This, typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       const _This&,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
            requires convertible_to<const _Na&, _Alloc>
          {
            return _M_allocate(static_cast<_Rebound>(static_cast<_Alloc>(__na)),
                               __sz);
          }

        // Recover (and destroy) the stored allocator if any, then free the
        // whole region through it.
        void
        operator delete(void* __ptr, std::size_t __csz) noexcept
        {
          if constexpr (_Stateless_alloc<_Rebound>)
            {
              _Rebound __b;
              return __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr),
                                    _Alloc_block::_M_cnt(__csz));
            }
          else
            {
              auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
              auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
              auto __an = _M_alloc_address(__fn, __csz);
              _Rebound __b(std::move(*__an));
              __an->~_Rebound();
              __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nsz);
            }
        }
      };
512 
    // Type-erased-allocator specialization: the allocator type is only known
    // at the coroutine call site, so a deallocation function pointer (and,
    // for stateful allocators, the allocator itself) is stored after the
    // coroutine frame for operator delete to find.
    template<>
      class _Promise_alloc<void>
      {
        using _Dealloc_fn = void (*)(void*, std::size_t);

        // Address of the stored deallocator pointer: just past the frame,
        // rounded up to the pointer's alignment.
        static auto
        _M_dealloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
        {
          auto __an = __fn + __fsz;
          auto __ba = alignof(_Dealloc_fn);
          auto __aligned = ((__an + __ba - 1) / __ba) * __ba;
          return reinterpret_cast<_Dealloc_fn*>(__aligned);
        }

        // Address of the stored allocator: after the deallocator pointer,
        // rounded up to the allocator's alignment.  Only meaningful for
        // stateful allocators.
        template<typename _Rebound>
          static auto
          _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
            requires (!_Stateless_alloc<_Rebound>)
          {
            auto __ba = alignof(_Rebound);
            auto __da = _M_dealloc_address(__fn, __fsz);
            auto __aan = reinterpret_cast<std::uintptr_t>(__da);
            __aan += sizeof(_Dealloc_fn);
            auto __aligned = ((__aan + __ba - 1) / __ba) * __ba;
            return reinterpret_cast<_Rebound*>(__aligned);
          }

        template<typename _Rebound>
          static auto
          _M_alloc_size(std::size_t __csz) noexcept
          {
            // This time, we want the coroutine frame, then the deallocator
            // pointer, then the allocator itself, if any.
            std::size_t __aa = 0;
            std::size_t __as = 0;
            if constexpr (!std::same_as<_Rebound, void>)
              {
                __aa = alignof(_Rebound);
                __as = sizeof(_Rebound);
              }
            auto __ba = __aa + alignof(_Dealloc_fn);
            return __csz + __ba + __as + sizeof(_Dealloc_fn);
          }

        // Instantiated per allocator type at allocation time; its address is
        // stored next to the frame so delete can dispatch to it.
        template<typename _Rebound>
          static void
          _M_deallocator(void* __ptr, std::size_t __csz) noexcept
          {
            auto __asz = _M_alloc_size<_Rebound>(__csz);
            auto __nblk = _Alloc_block::_M_cnt(__asz);

            if constexpr (_Stateless_alloc<_Rebound>)
              {
                _Rebound __b;
                __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
              }
            else
              {
                auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
                auto __an = _M_alloc_address<_Rebound>(__fn, __csz);
                _Rebound __b(std::move(*__an));
                __an->~_Rebound();
                __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
              }
          }

        // Allocate a frame with __na, recording the matching deallocator
        // (and the allocator itself, when stateful) after the frame.
        template<typename _Na>
          static void*
          _M_allocate(const _Na& __na, std::size_t __csz)
          {
            using _Rebound = typename std::allocator_traits<_Na>
                             ::template rebind_alloc<_Alloc_block>;
            using _Rebound_ATr = typename std::allocator_traits<_Na>
                                 ::template rebind_traits<_Alloc_block>;

            static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                          "Must use allocators for true pointers with generators");

            _Dealloc_fn __d = &_M_deallocator<_Rebound>;
            auto __b = static_cast<_Rebound>(__na);
            auto __asz = _M_alloc_size<_Rebound>(__csz);
            auto __nblk = _Alloc_block::_M_cnt(__asz);
            void* __p = __b.allocate(__nblk);
            auto __pn = reinterpret_cast<std::uintptr_t>(__p);
            *_M_dealloc_address(__pn, __csz) = __d;
            if constexpr (!_Stateless_alloc<_Rebound>)
              {
                auto __an = _M_alloc_address<_Rebound>(__pn, __csz);
                ::new (__an) _Rebound(std::move(__b));
              }
            return __p;
          }
      public:
        // No allocator supplied: use global operator new, still recording a
        // deallocator so delete stays uniform.
        void*
        operator new(std::size_t __sz)
        {
          auto __nsz = _M_alloc_size<void>(__sz);
          _Dealloc_fn __d = [] (void* __ptr, std::size_t __sz)
          {
            ::operator delete(__ptr, _M_alloc_size<void>(__sz));
          };
          auto __p = ::operator new(__nsz);
          auto __pn = reinterpret_cast<uintptr_t>(__p);
          *_M_dealloc_address(__pn, __sz) = __d;
          return __p;
        }

        // Free-function coroutine taking (allocator_arg, alloc, ...).
        template<typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
          { return _M_allocate(__na, __sz); }

        // Member-function coroutine; object parameter precedes the tag.
        template<typename _This, typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       const _This&,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
          { return _M_allocate(__na, __sz); }

        // Dispatch through the deallocator recorded at allocation time.
        void
        operator delete(void* __ptr, std::size_t __sz) noexcept
        {
          _Dealloc_fn __d;
          auto __pn = reinterpret_cast<uintptr_t>(__ptr);
          __d = *_M_dealloc_address(__pn, __sz);
          __d(__ptr, __sz);
        }
      };
644 
    // An object type with no top-level const/volatile qualification, as
    // required for a generator's value type.
    template<typename _Tp>
      concept _Cv_unqualified_object = is_object_v<_Tp>
                                       && same_as<_Tp, remove_cv_t<_Tp>>;
648  } // namespace __gen
649  /// @endcond
650 
651  template<typename _Ref, typename _Val, typename _Alloc>
652  class generator
653  : public ranges::view_interface<generator<_Ref, _Val, _Alloc>>
654  {
655  using _Value = __conditional_t<is_void_v<_Val>,
656  remove_cvref_t<_Ref>,
657  _Val>;
658  static_assert(__gen::_Cv_unqualified_object<_Value>,
659  "Generator value must be a cv-unqualified object type");
660  using _Reference = __gen::_Reference_t<_Ref, _Val>;
661  static_assert(is_reference_v<_Reference>
662  || (__gen::_Cv_unqualified_object<_Reference>
663  && copy_constructible<_Reference>),
664  "Generator reference type must be either a cv-unqualified "
665  "object type that is trivially constructible or a "
666  "reference type");
667 
668  using _RRef = __conditional_t<
669  is_reference_v<_Reference>,
670  remove_reference_t<_Reference>&&,
671  _Reference>;
672 
673  /* Required to model indirectly_readable, and input_iterator. */
674  static_assert(common_reference_with<_Reference&&, _Value&&>);
675  static_assert(common_reference_with<_Reference&&, _RRef&&>);
676  static_assert(common_reference_with<_RRef&&, const _Value&>);
677 
678  using _Yielded = __gen::_Yield_t<_Reference>;
679  using _Erased_promise = __gen::_Promise_erased<_Yielded>;
680 
681  struct _Iterator;
682 
683  friend _Erased_promise;
684  friend struct _Erased_promise::_Subyield_state;
685  public:
686  using yielded = _Yielded;
687 
688  struct promise_type : _Erased_promise, __gen::_Promise_alloc<_Alloc>
689  {
690  generator get_return_object() noexcept
691  { return { coroutine_handle<promise_type>::from_promise(*this) }; }
692  };
693 
694 #ifdef __glibcxx_is_pointer_interconvertible
695  static_assert(is_pointer_interconvertible_base_of_v<_Erased_promise,
696  promise_type>);
697 #endif
698 
699  generator(const generator&) = delete;
700 
701  generator(generator&& __other) noexcept
702  : _M_coro(std::__exchange(__other._M_coro, nullptr)),
703  _M_began(std::__exchange(__other._M_began, false))
704  {}
705 
706  ~generator()
707  {
708  if (auto& __c = this->_M_coro)
709  __c.destroy();
710  }
711 
712  generator&
713  operator=(generator __other) noexcept
714  {
715  swap(__other._M_coro, this->_M_coro);
716  swap(__other._M_began, this->_M_began);
717  }
718 
719  _Iterator
720  begin()
721  {
722  this->_M_mark_as_started();
723  auto __h = _Coro_handle::from_promise(_M_coro.promise());
724  __h.promise()._M_nest._M_top() = __h;
725  return { __h };
726  }
727 
728  default_sentinel_t
729  end() const noexcept
730  { return default_sentinel; }
731 
732  private:
733  using _Coro_handle = std::coroutine_handle<_Erased_promise>;
734 
735  generator(coroutine_handle<promise_type> __coro) noexcept
736  : _M_coro { move(__coro) }
737  {}
738 
739  void
740  _M_mark_as_started() noexcept
741  {
742  __glibcxx_assert(!this->_M_began);
743  this->_M_began = true;
744  }
745 
746  coroutine_handle<promise_type> _M_coro;
747  bool _M_began = false;
748  };
749 
  // Move-only input iterator over a generator's yielded values.
  template<class _Ref, class _Val, class _Alloc>
    struct generator<_Ref, _Val, _Alloc>::_Iterator
    {
      using value_type = _Value;
      using difference_type = ptrdiff_t;

      // At the end when the root coroutine has run to completion.
      friend bool
      operator==(const _Iterator& __i, default_sentinel_t) noexcept
      { return __i._M_coro.done(); }

      friend class generator;

      _Iterator(_Iterator&& __o) noexcept
      : _M_coro(std::__exchange(__o._M_coro, {}))
      {}

      _Iterator&
      operator=(_Iterator&& __o) noexcept
      {
        this->_M_coro = std::__exchange(__o._M_coro, {});
        return *this;
      }

      // Resume the coroutine to produce the next value.
      _Iterator&
      operator++()
      {
        _M_next();
        return *this;
      }

      void
      operator++(int)
      { this->operator++(); }

      // Read the current element through the pointer the promise published
      // in yield_value.
      _Reference
      operator*()
        const noexcept(is_nothrow_move_constructible_v<_Reference>)
      {
        auto& __p = this->_M_coro.promise();
        return static_cast<_Reference>(*__p._M_value());
      }

    private:
      friend class generator;

      // Constructed by generator::begin(); runs to the first suspension.
      _Iterator(_Coro_handle __g)
      : _M_coro { __g }
      { this->_M_next(); }

      // Resume whichever coroutine is currently on top of the stack.
      void _M_next()
      {
        auto& __t = this->_M_coro.promise()._M_nest._M_top();
        __t.resume();
      }

      _Coro_handle _M_coro;
    };
807 
808  /// @}
809 
#if _GLIBCXX_HOSTED
  namespace pmr {
    // Convenience alias: a generator whose coroutine frames are allocated
    // through a std::pmr::polymorphic_allocator.
    template<typename _Ref, typename _Val = void>
      using generator = std::generator<_Ref, _Val, polymorphic_allocator<std::byte>>;
  }
#endif // HOSTED
816 
817 _GLIBCXX_END_NAMESPACE_VERSION
818 } // namespace std
819 #endif // __cpp_lib_generator
820 
821 #endif // _GLIBCXX_GENERATOR