libstdc++ documentation
<stop_token> — C++20 stop-token support header
Go to the documentation of this file.
1// <stop_token> -*- C++ -*-
2
3// Copyright (C) 2019-2020 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file include/stop_token
26 * This is a Standard C++ Library header.
27 */
28
29#ifndef _GLIBCXX_STOP_TOKEN
30#define _GLIBCXX_STOP_TOKEN
31
32#if __cplusplus > 201703L
33
34#include <atomic>
35
36#ifdef _GLIBCXX_HAS_GTHREADS
37# define __cpp_lib_jthread 201907L
38# include <bits/gthr.h>
39# if __has_include(<semaphore>)
40# include <semaphore>
41# endif
42#endif
43
44namespace std _GLIBCXX_VISIBILITY(default)
45{
46_GLIBCXX_BEGIN_NAMESPACE_VERSION
47
  /// Tag type indicating a stop_source should have no shared-stop-state.
  struct nostopstate_t { explicit nostopstate_t() = default; };

  /// Tag object passed to stop_source's constructor to select the
  /// no-shared-stop-state behavior.
  inline constexpr nostopstate_t nostopstate{};
51
52 class stop_source;
53
  /// Allow testing whether a stop request has been made on a `stop_source`.
  class stop_token
  {
  public:
    // A default-constructed stop_token has no associated stop-state.
    stop_token() noexcept = default;

    stop_token(const stop_token&) noexcept = default;
    stop_token(stop_token&&) noexcept = default;

    ~stop_token() = default;

    stop_token&
    operator=(const stop_token&) noexcept = default;

    stop_token&
    operator=(stop_token&&) noexcept = default;

    // True if this token has an associated stop-state and either a stop has
    // already been requested or a stop_source still exists that could
    // request one.
    [[nodiscard]]
    bool
    stop_possible() const noexcept
    {
      return static_cast<bool>(_M_state) && _M_state->_M_stop_possible();
    }

    // True if this token has an associated stop-state on which a stop
    // request has been made.
    [[nodiscard]]
    bool
    stop_requested() const noexcept
    {
      return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
    }

    // Exchange the associated stop-states of two tokens.
    void
    swap(stop_token& __rhs) noexcept
    { _M_state.swap(__rhs._M_state); }

    // Tokens are equal when they share the same stop-state (or both have
    // none).
    [[nodiscard]]
    friend bool
    operator==(const stop_token& __a, const stop_token& __b)
    { return __a._M_state == __b._M_state; }

    friend void
    swap(stop_token& __lhs, stop_token& __rhs) noexcept
    { __lhs.swap(__rhs); }

  private:
    friend class stop_source;
    template<typename _Callback>
      friend class stop_callback;

    // Spin-wait hint used while waiting for the stop-state's lock bit.
    static void
    _S_yield() noexcept
    {
#if defined __i386__ || defined __x86_64__
      __builtin_ia32_pause();
#elif defined _GLIBCXX_USE_SCHED_YIELD
      __gthread_yield();
#endif
    }

#ifndef __cpp_lib_semaphore
    // TODO: replace this with a real implementation of std::binary_semaphore
    // Spin-based fallback used when <semaphore> is not available; only ever
    // used here with counts of 0 and 1.
    struct binary_semaphore
    {
      explicit binary_semaphore(int __d) : _M_counter(__d > 0) { }

      void release() { _M_counter.fetch_add(1, memory_order::release); }

      // Spin until the count can be atomically changed from 1 to 0.
      void acquire()
      {
        int __old = 1;
        while (!_M_counter.compare_exchange_weak(__old, 0,
                                                 memory_order::acquire,
                                                 memory_order::relaxed))
          {
            __old = 1;
            _S_yield();
          }
      }

      atomic<int> _M_counter;
    };
#endif

    // Intrusive doubly-linked list node representing one registered
    // stop_callback.
    struct _Stop_cb
    {
      using __cb_type = void(_Stop_cb*) noexcept;
      // Type-erased function that invokes the user's callback.
      __cb_type* _M_callback;
      _Stop_cb* _M_prev = nullptr;
      _Stop_cb* _M_next = nullptr;
      // While the callback runs, points at a local flag in
      // _Stop_state_t::_M_request_stop which is set to true if the callback
      // destroys its own stop_callback (and therefore *this).
      bool* _M_destroyed = nullptr;
      // Released after the callback finishes so that ~stop_callback on
      // another thread can wait before destroying *this.
      binary_semaphore _M_done{0};

      [[__gnu__::__nonnull__]]
      explicit
      _Stop_cb(__cb_type* __cb)
      : _M_callback(__cb)
      { }

      void _M_run() noexcept { _M_callback(this); }
    };

    // Heap-allocated state shared by associated stop_source, stop_token and
    // stop_callback objects: an ownership refcount, a packed flags/counter
    // word, and the list of registered callbacks.
    struct _Stop_state_t
    {
      using value_type = uint32_t;
      // _M_value packs three fields:
      //   bit 0   - set once a stop request has been made.
      //   bit 1   - spin-lock bit protecting _M_head.
      //   bits 2+ - count of associated stop_source objects, in units of
      //             _S_ssrc_counter_inc.
      static constexpr value_type _S_stop_requested_bit = 1;
      static constexpr value_type _S_locked_bit = 2;
      static constexpr value_type _S_ssrc_counter_inc = 4;

      // Number of objects sharing ownership of this state; the last owner
      // to call _M_release_ownership deletes it.
      std::atomic<value_type> _M_owners{1};
      std::atomic<value_type> _M_value{_S_ssrc_counter_inc};
      // Head of the callback list; protected by _S_locked_bit.
      _Stop_cb* _M_head = nullptr;
#if _GLIBCXX_HAS_GTHREADS
      // The thread that made the stop request and runs the callbacks.
      __gthread_t _M_requester;
#endif

      _Stop_state_t() = default;

      bool
      _M_stop_possible() noexcept
      {
        // true if a stop request has already been made or there are still
        // stop_source objects that would allow one to be made.
        return _M_value.load(memory_order::acquire) & ~_S_locked_bit;
      }

      bool
      _M_stop_requested() noexcept
      {
        return _M_value.load(memory_order::acquire) & _S_stop_requested_bit;
      }

      // Increment the shared-ownership count.
      void
      _M_add_owner() noexcept
      {
        _M_owners.fetch_add(1, memory_order::relaxed);
      }

      // Drop one owner; the last owner deletes the state.
      void
      _M_release_ownership() noexcept
      {
        if (_M_owners.fetch_sub(1, memory_order::release) == 1)
          delete this;
      }

      // Record that another stop_source is associated with this state.
      void
      _M_add_ssrc() noexcept
      {
        _M_value.fetch_add(_S_ssrc_counter_inc, memory_order::relaxed);
      }

      // Record that a stop_source was dissociated from this state.
      void
      _M_sub_ssrc() noexcept
      {
        _M_value.fetch_sub(_S_ssrc_counter_inc, memory_order::release);
      }

      // Obtain lock.
      void
      _M_lock() noexcept
      {
        // Can use relaxed loads to get the current value.
        // The successful call to _M_try_lock is an acquire operation.
        auto __old = _M_value.load(memory_order::relaxed);
        while (!_M_try_lock(__old, memory_order::relaxed))
          { }
      }

      // Precondition: calling thread holds the lock.
      void
      _M_unlock() noexcept
      {
        _M_value.fetch_sub(_S_locked_bit, memory_order::release);
      }

      // Make a stop request and run every registered callback on the calling
      // thread. Returns false if a stop request had already been made.
      bool
      _M_request_stop() noexcept
      {
        // obtain lock and set stop_requested bit
        auto __old = _M_value.load(memory_order::acquire);
        do
          {
            if (__old & _S_stop_requested_bit) // stop request already made
              return false;
          }
        while (!_M_try_lock_and_stop(__old));

#if _GLIBCXX_HAS_GTHREADS
        _M_requester = __gthread_self();
#endif

        // Pop and run callbacks one at a time, dropping the lock while each
        // one executes.
        while (_M_head)
          {
            bool __last_cb;
            _Stop_cb* __cb = _M_head;
            _M_head = _M_head->_M_next;
            if (_M_head)
              {
                _M_head->_M_prev = nullptr;
                __last_cb = false;
              }
            else
              __last_cb = true;

            // Allow other callbacks to be unregistered while __cb runs.
            _M_unlock();

            bool __destroyed = false;
            __cb->_M_destroyed = &__destroyed;

            // run callback
            __cb->_M_run();

            // If the callback destroyed its own stop_callback, *__cb is gone
            // and must not be touched again.
            if (!__destroyed)
              {
                __cb->_M_destroyed = nullptr;
#if _GLIBCXX_HAS_GTHREADS
                // synchronize with destructor of stop_callback that owns *__cb
                __cb->_M_done.release();
#endif
              }

            // Avoid relocking if we already know there are no more callbacks.
            if (__last_cb)
              return true;

            _M_lock();
          }

        _M_unlock();
        return true;
      }

      // Add __cb to the callback list. Returns false without registering if
      // a stop request was already made (after running __cb synchronously)
      // or if no stop_source is associated, so no request can ever be made.
      [[__gnu__::__nonnull__]]
      bool
      _M_register_callback(_Stop_cb* __cb) noexcept
      {
        auto __old = _M_value.load(memory_order::acquire);
        do
          {
            if (__old & _S_stop_requested_bit) // stop request already made
              {
                __cb->_M_run(); // run synchronously
                return false;
              }

            if (__old < _S_ssrc_counter_inc) // no stop_source owns *this
              // No need to register callback if no stop request can be made.
              // Returning false also means the stop_callback does not share
              // ownership of this state, but that's not observable.
              return false;
          }
        while (!_M_try_lock(__old));

        // Insert at the head of the list, under the lock.
        __cb->_M_next = _M_head;
        if (_M_head)
          {
            _M_head->_M_prev = __cb;
          }
        _M_head = __cb;
        _M_unlock();
        return true;
      }

      // Called by ~stop_callback just before destroying *__cb.
      [[__gnu__::__nonnull__]]
      void
      _M_remove_callback(_Stop_cb* __cb)
      {
        _M_lock();

        // Still in the list: unlink it and we are done.
        if (__cb == _M_head)
          {
            _M_head = _M_head->_M_next;
            if (_M_head)
              _M_head->_M_prev = nullptr;
            _M_unlock();
            return;
          }
        else if (__cb->_M_prev)
          {
            __cb->_M_prev->_M_next = __cb->_M_next;
            if (__cb->_M_next)
              __cb->_M_next->_M_prev = __cb->_M_prev;
            _M_unlock();
            return;
          }

        _M_unlock();

        // Callback is not in the list, so must have been removed by a call to
        // _M_request_stop.

#if _GLIBCXX_HAS_GTHREADS
        // Despite appearances there is no data race on _M_requester. The only
        // write to it happens before the callback is removed from the list,
        // and removing it from the list happens before this read.
        if (!__gthread_equal(_M_requester, __gthread_self()))
          {
            // Synchronize with completion of callback.
            __cb->_M_done.acquire();
            // Safe for ~stop_callback to destroy *__cb now.
            return;
          }
#endif
        // The callback is currently executing on this thread: tell
        // _M_request_stop that *__cb is about to be destroyed so it must not
        // touch it again.
        if (__cb->_M_destroyed)
          *__cb->_M_destroyed = true;
      }

      // Try to obtain the lock.
      // Returns true if the lock is acquired (with memory order acquire).
      // Otherwise, sets __curval = _M_value.load(__failure) and returns false.
      // Might fail spuriously, so must be called in a loop.
      bool
      _M_try_lock(value_type& __curval,
                  memory_order __failure = memory_order::acquire) noexcept
      {
        return _M_do_try_lock(__curval, 0, memory_order::acquire, __failure);
      }

      // Try to obtain the lock to make a stop request.
      // Returns true if the lock is acquired and the _S_stop_requested_bit is
      // set (with memory order acq_rel so that other threads see the request).
      // Otherwise, sets __curval = _M_value.load(memory_order::acquire) and
      // returns false.
      // Might fail spuriously, so must be called in a loop.
      bool
      _M_try_lock_and_stop(value_type& __curval) noexcept
      {
        return _M_do_try_lock(__curval, _S_stop_requested_bit,
                              memory_order::acq_rel, memory_order::acquire);
      }

      // CAS loop body shared by the two functions above: back off if the
      // lock bit is held, otherwise try to set the lock bit plus __newbits.
      bool
      _M_do_try_lock(value_type& __curval, value_type __newbits,
                     memory_order __success, memory_order __failure) noexcept
      {
        if (__curval & _S_locked_bit)
          {
            _S_yield();
            __curval = _M_value.load(__failure);
            return false;
          }
        __newbits |= _S_locked_bit;
        return _M_value.compare_exchange_weak(__curval, __curval | __newbits,
                                              __success, __failure);
      }
    };

    // Counted handle to a heap-allocated _Stop_state_t (the refcount is the
    // intrusive _M_owners member). A default-constructed handle owns nothing.
    struct _Stop_state_ref
    {
      _Stop_state_ref() = default;

      // Allocate a fresh shared stop-state (used by stop_source's default
      // constructor; the parameter only selects this overload).
      explicit
      _Stop_state_ref(const stop_source&)
      : _M_ptr(new _Stop_state_t())
      { }

      _Stop_state_ref(const _Stop_state_ref& __other) noexcept
      : _M_ptr(__other._M_ptr)
      {
        if (_M_ptr)
          _M_ptr->_M_add_owner();
      }

      _Stop_state_ref(_Stop_state_ref&& __other) noexcept
      : _M_ptr(__other._M_ptr)
      {
        __other._M_ptr = nullptr;
      }

      _Stop_state_ref&
      operator=(const _Stop_state_ref& __other) noexcept
      {
        // Self-assignment and same-state assignment are no-ops; otherwise
        // add a reference to the new state before dropping the old one.
        if (auto __ptr = __other._M_ptr; __ptr != _M_ptr)
          {
            if (__ptr)
              __ptr->_M_add_owner();
            if (_M_ptr)
              _M_ptr->_M_release_ownership();
            _M_ptr = __ptr;
          }
        return *this;
      }

      _Stop_state_ref&
      operator=(_Stop_state_ref&& __other) noexcept
      {
        _Stop_state_ref(std::move(__other)).swap(*this);
        return *this;
      }

      ~_Stop_state_ref()
      {
        if (_M_ptr)
          _M_ptr->_M_release_ownership();
      }

      void
      swap(_Stop_state_ref& __other) noexcept
      { std::swap(_M_ptr, __other._M_ptr); }

      explicit operator bool() const noexcept { return _M_ptr != nullptr; }

      _Stop_state_t* operator->() const noexcept { return _M_ptr; }

#if __cpp_impl_three_way_comparison >= 201907L
      friend bool
      operator==(const _Stop_state_ref&, const _Stop_state_ref&) = default;
#else
      friend bool
      operator==(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)
      noexcept
      { return __lhs._M_ptr == __rhs._M_ptr; }

      friend bool
      operator!=(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)
      noexcept
      { return __lhs._M_ptr != __rhs._M_ptr; }
#endif

    private:
      _Stop_state_t* _M_ptr = nullptr;
    };

    // The (possibly empty) stop-state this token observes.
    _Stop_state_ref _M_state;

    // Used by stop_source::get_token() to create a token sharing its state.
    explicit
    stop_token(const _Stop_state_ref& __state) noexcept
    : _M_state{__state}
    { }
  };
485
486 /// A type that allows a stop request to be made.
487 class stop_source
488 {
489 public:
490 stop_source() : _M_state(*this)
491 { }
492
493 explicit stop_source(std::nostopstate_t) noexcept
494 { }
495
496 stop_source(const stop_source& __other) noexcept
497 : _M_state(__other._M_state)
498 {
499 if (_M_state)
500 _M_state->_M_add_ssrc();
501 }
502
503 stop_source(stop_source&&) noexcept = default;
504
505 stop_source&
506 operator=(const stop_source& __other) noexcept
507 {
508 if (_M_state != __other._M_state)
509 {
510 stop_source __sink(std::move(*this));
511 _M_state = __other._M_state;
512 if (_M_state)
513 _M_state->_M_add_ssrc();
514 }
515 return *this;
516 }
517
518 stop_source&
519 operator=(stop_source&&) noexcept = default;
520
521 ~stop_source()
522 {
523 if (_M_state)
524 _M_state->_M_sub_ssrc();
525 }
526
527 [[nodiscard]]
528 bool
529 stop_possible() const noexcept
530 {
531 return static_cast<bool>(_M_state);
532 }
533
534 [[nodiscard]]
535 bool
536 stop_requested() const noexcept
537 {
538 return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
539 }
540
541 bool
542 request_stop() const noexcept
543 {
544 if (stop_possible())
545 return _M_state->_M_request_stop();
546 return false;
547 }
548
549 [[nodiscard]]
550 stop_token
551 get_token() const noexcept
552 {
553 return stop_token{_M_state};
554 }
555
556 void
557 swap(stop_source& __other) noexcept
558 {
559 _M_state.swap(__other._M_state);
560 }
561
562 [[nodiscard]]
563 friend bool
564 operator==(const stop_source& __a, const stop_source& __b) noexcept
565 {
566 return __a._M_state == __b._M_state;
567 }
568
569 friend void
570 swap(stop_source& __lhs, stop_source& __rhs) noexcept
571 {
572 __lhs.swap(__rhs);
573 }
574
575 private:
576 stop_token::_Stop_state_ref _M_state;
577 };
578
  /// A wrapper for callbacks to be run when a stop request is made.
  template<typename _Callback>
    class [[nodiscard]] stop_callback
    {
      static_assert(is_nothrow_destructible_v<_Callback>);
      static_assert(is_invocable_v<_Callback>);

    public:
      using callback_type = _Callback;

      // Construct from an lvalue stop_token. If a stop was already
      // requested, _M_register_callback invokes the callback immediately on
      // this thread and no stop-state is retained.
      template<typename _Cb,
               enable_if_t<is_constructible_v<_Callback, _Cb>, int> = 0>
        explicit
        stop_callback(const stop_token& __token, _Cb&& __cb)
        noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
        : _M_cb(std::forward<_Cb>(__cb))
        {
          // __state is a copy of the token's handle, so the token keeps its
          // own reference; on successful registration the copy moves into
          // _M_state.
          if (auto __state = __token._M_state)
            {
              if (__state->_M_register_callback(&_M_cb))
                _M_state.swap(__state);
            }
        }

      // Construct from an rvalue stop_token. On successful registration the
      // state handle is taken from the token rather than copied.
      template<typename _Cb,
               enable_if_t<is_constructible_v<_Callback, _Cb>, int> = 0>
        explicit
        stop_callback(stop_token&& __token, _Cb&& __cb)
        noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
        : _M_cb(std::forward<_Cb>(__cb))
        {
          // __state is a reference to the token's handle, so the swap below
          // empties the moved-from token.
          if (auto& __state = __token._M_state)
            {
              if (__state->_M_register_callback(&_M_cb))
                _M_state.swap(__state);
            }
        }

      // Deregister the callback; _M_remove_callback also handles the case
      // where the callback is concurrently executing.
      ~stop_callback()
      {
        if (_M_state)
          {
            _M_state->_M_remove_callback(&_M_cb);
          }
      }

      stop_callback(const stop_callback&) = delete;
      stop_callback& operator=(const stop_callback&) = delete;
      stop_callback(stop_callback&&) = delete;
      stop_callback& operator=(stop_callback&&) = delete;

    private:
      // List-node wrapper storing the user's callback together with the
      // type-erased invoker that _Stop_state_t calls.
      struct _Cb_impl : stop_token::_Stop_cb
      {
        template<typename _Cb>
          explicit
          _Cb_impl(_Cb&& __cb)
          : _Stop_cb(&_S_execute),
            _M_cb(std::forward<_Cb>(__cb))
          { }

        _Callback _M_cb;

        // Trampoline installed in _Stop_cb::_M_callback; invokes the stored
        // callback, forwarded as _Callback.
        [[__gnu__::__nonnull__]]
        static void
        _S_execute(_Stop_cb* __that) noexcept
        {
          _Callback& __cb = static_cast<_Cb_impl*>(__that)->_M_cb;
          std::forward<_Callback>(__cb)();
        }
      };

      _Cb_impl _M_cb;
      // Non-empty only while the callback is registered with a stop-state.
      stop_token::_Stop_state_ref _M_state;
    };
654
  /// Deduce the stored callback type from the constructor argument.
  template<typename _Callback>
    stop_callback(stop_token, _Callback) -> stop_callback<_Callback>;
657
658_GLIBCXX_END_NAMESPACE_VERSION
659} // namespace
660#endif // __cplusplus > 201703L
661#endif // _GLIBCXX_STOP_TOKEN
Referenced entities:
constexpr std::remove_reference<_Tp>::type&& move(_Tp&& __t) noexcept
  Convert a value to an rvalue. Definition: move.h:101
constexpr _Tp&& forward(typename std::remove_reference<_Tp>::type& __t) noexcept
  Forward an lvalue. Definition: move.h:76
memory_order
  Enumeration for memory_order. Definition: atomic_base.h:75
std
  The top-level namespace for ISO C++ entities.
atomic
  Generic atomic type, primary class template. Definition: atomic:181