Lines Matching full:epoch
32 #include <sys/epoch.h>
70 struct epoch *er_parent;
80 struct epoch {
96 SYSCTL_NODE(_kern, OID_AUTO, epoch, CTLFLAG_RW | CTLFLAG_MPSAFE, 0,
97 "epoch information");
99 "epoch stats");
105 &block_count, "# of times a thread was in an epoch when epoch_wait was called");
113 &turnstile_count, "# of times a thread was blocked on a lock in an epoch during an epoch_wait");
132 static struct epoch epoch_array[MAX_EPOCHS];
150 epoch_currecord(epoch_t epoch) in epoch_currecord()
153 return (zpcpu_get(epoch->e_pcpu_record)); in epoch_currecord()
188 &epoch_trace_stack_print, 0, "Print stack traces on epoch reports");
222 epoch_trace_enter(struct thread *td, epoch_t epoch, epoch_tracker_t et, in epoch_trace_enter()
228 if (iet->et_epoch != epoch) in epoch_trace_enter()
230 epoch_trace_report("Recursively entering epoch %s " in epoch_trace_enter()
232 epoch->e_name, file, line, in epoch_trace_enter()
235 et->et_epoch = epoch; in epoch_trace_enter()
243 epoch_trace_exit(struct thread *td, epoch_t epoch, epoch_tracker_t et, in epoch_trace_exit()
248 epoch_trace_report("Exiting epoch %s in a not nested order " in epoch_trace_exit()
250 epoch->e_name, in epoch_trace_exit()
260 printf("Td %p exiting epoch %s at %s:%d\n", td, epoch->e_name, in epoch_trace_exit()
271 printf("Epoch %s entered at %s:%d\n", iet->et_epoch->e_name, in epoch_trace_list()
276 epoch_where_report(epoch_t epoch) in epoch_where_report()
281 MPASS(epoch != NULL); in epoch_where_report()
282 MPASS((epoch->e_flags & EPOCH_PREEMPT) != 0); in epoch_where_report()
285 er = epoch_currecord(epoch); in epoch_where_report()
292 printf("Td %p entered epoch %s at %s:%d\n", curthread, in epoch_where_report()
293 epoch->e_name, tdwait->et_file, tdwait->et_line); in epoch_where_report()
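
epoch_where_report() above walks the current CPU's record and prints, for each thread still inside the preemptible epoch, the file and line recorded when it entered. A minimal debugging sketch, assuming a preemptible epoch foo_epoch allocated elsewhere; the helper name is hypothetical:

    #include <sys/param.h>
    #include <sys/systm.h>
    #include <sys/epoch.h>

    extern epoch_t foo_epoch;           /* assumed: allocated with EPOCH_PREEMPT */

    static void
    foo_assert_not_entered(void)
    {
            /*
             * If the current thread is unexpectedly inside foo_epoch,
             * report where the sections on this CPU were entered, then panic.
             */
            if (__predict_false(in_epoch(foo_epoch))) {
                    epoch_where_report(foo_epoch);
                    panic("foo: called from inside a foo_epoch section");
            }
    }
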
318 "epoch call task"); in epoch_init()
323 sx_init(&epoch_sx, "epoch-sx"); in epoch_init()
328 SYSINIT(epoch, SI_SUB_EPOCH, SI_ORDER_FIRST, epoch_init, NULL);
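
epoch_init() registers at SI_SUB_EPOCH/SI_ORDER_FIRST, so the facility is available very early in boot; subsystems that want their own epoch typically allocate it from a later SYSINIT. A minimal sketch, assuming a hypothetical subsystem named foo (EPOCH_PREEMPT is the flag used throughout this file for preemptible epochs):

    #include <sys/param.h>
    #include <sys/systm.h>
    #include <sys/kernel.h>
    #include <sys/epoch.h>

    epoch_t foo_epoch;                  /* hypothetical subsystem-wide epoch */

    static void
    foo_epoch_init(void *arg __unused)
    {
            /* Preemptible: readers may be preempted inside their sections. */
            foo_epoch = epoch_alloc("foo", EPOCH_PREEMPT);
    }
    /* Any stage after SI_SUB_EPOCH works; SI_SUB_DRIVERS is just an example. */
    SYSINIT(fooepoch, SI_SUB_DRIVERS, SI_ORDER_ANY, foo_epoch_init, NULL);
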
340 epoch_ctor(epoch_t epoch) in epoch_ctor()
345 epoch->e_pcpu_record = uma_zalloc_pcpu(pcpu_zone_record, M_WAITOK); in epoch_ctor()
347 er = zpcpu_get_cpu(epoch->e_pcpu_record, cpu); in epoch_ctor()
349 ck_epoch_register(&epoch->e_epoch, &er->er_record, NULL); in epoch_ctor()
352 er->er_parent = epoch; in epoch_ctor()
368 epoch_t epoch; in epoch_alloc()
379 * Find a free index in the epoch array. If no free index is in epoch_alloc()
388 epoch = NULL; in epoch_alloc()
395 epoch = epoch_array + i; in epoch_alloc()
396 ck_epoch_init(&epoch->e_epoch); in epoch_alloc()
397 epoch_ctor(epoch); in epoch_alloc()
398 epoch->e_flags = flags; in epoch_alloc()
399 epoch->e_name = name; in epoch_alloc()
400 sx_init(&epoch->e_drain_sx, "epoch-drain-sx"); in epoch_alloc()
401 mtx_init(&epoch->e_drain_mtx, "epoch-drain-mtx", NULL, MTX_DEF); in epoch_alloc()
405 * epoch_call_task() function will start scanning this epoch in epoch_alloc()
408 atomic_store_rel_int(&epoch->e_in_use, 1); in epoch_alloc()
411 return (epoch); in epoch_alloc()
415 epoch_free(epoch_t epoch) in epoch_free()
423 MPASS(epoch->e_in_use != 0); in epoch_free()
425 epoch_drain_callbacks(epoch); in epoch_free()
427 atomic_store_rel_int(&epoch->e_in_use, 0); in epoch_free()
437 er = zpcpu_get_cpu(epoch->e_pcpu_record, cpu); in epoch_free()
448 uma_zfree_pcpu(pcpu_zone_record, epoch->e_pcpu_record); in epoch_free()
449 mtx_destroy(&epoch->e_drain_mtx); in epoch_free()
450 sx_destroy(&epoch->e_drain_sx); in epoch_free()
451 memset(epoch, 0, sizeof(*epoch)); in epoch_free()
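
Since epoch_free() calls epoch_drain_callbacks() before clearing the per-CPU records, teardown reduces to a single call once no thread can enter the epoch or queue callbacks anymore. A matching sketch for the hypothetical foo epoch:

    static void
    foo_epoch_fini(void *arg __unused)
    {
            /*
             * No new readers or epoch_call() users may exist at this point;
             * epoch_free() drains pending callbacks and releases the records.
             */
            epoch_free(foo_epoch);
            foo_epoch = NULL;
    }
    SYSUNINIT(fooepoch, SI_SUB_DRIVERS, SI_ORDER_ANY, foo_epoch_fini, NULL);
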
456 #define INIT_CHECK(epoch) \
458 if (__predict_false((epoch) == NULL)) \
463 _epoch_enter_preempt(epoch_t epoch, epoch_tracker_t et EPOCH_FILE_LINE) in _epoch_enter_preempt()
468 MPASS(cold || epoch != NULL); in _epoch_enter_preempt()
472 INIT_CHECK(epoch); in _epoch_enter_preempt()
473 MPASS(epoch->e_flags & EPOCH_PREEMPT); in _epoch_enter_preempt()
476 epoch_trace_enter(td, epoch, et, file, line); in _epoch_enter_preempt()
483 er = epoch_currecord(epoch); in _epoch_enter_preempt()
492 epoch_enter(epoch_t epoch) in epoch_enter()
496 MPASS(cold || epoch != NULL); in epoch_enter()
497 INIT_CHECK(epoch); in epoch_enter()
499 er = epoch_currecord(epoch); in epoch_enter()
513 _epoch_exit_preempt(epoch_t epoch, epoch_tracker_t et EPOCH_FILE_LINE) in _epoch_exit_preempt()
518 INIT_CHECK(epoch); in _epoch_exit_preempt()
523 er = epoch_currecord(epoch); in _epoch_exit_preempt()
524 MPASS(epoch->e_flags & EPOCH_PREEMPT); in _epoch_exit_preempt()
539 epoch_trace_exit(td, epoch, et, file, line); in _epoch_exit_preempt()
544 epoch_exit(epoch_t epoch) in epoch_exit()
548 INIT_CHECK(epoch); in epoch_exit()
549 er = epoch_currecord(epoch); in epoch_exit()
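
_epoch_enter_preempt() and _epoch_exit_preempt() back the public epoch_enter_preempt()/epoch_exit_preempt() macros and take a caller-supplied struct epoch_tracker, while epoch_enter()/epoch_exit() serve non-preemptible epochs, whose sections run inside a critical section and therefore must stay short. A minimal read-side sketch, assuming the hypothetical foo_epoch protects a ck_queue list of foo objects (all foo names are illustrative):

    #include <sys/param.h>
    #include <sys/systm.h>
    #include <sys/ck.h>
    #include <sys/epoch.h>

    struct foo {
            CK_SLIST_ENTRY(foo)     f_link;
            int                     f_value;
    };
    CK_SLIST_HEAD(foo_head, foo);
    extern struct foo_head  foo_list;   /* writers serialized elsewhere */
    extern epoch_t          foo_epoch;  /* allocated with EPOCH_PREEMPT */

    static int
    foo_exists(int value)
    {
            struct epoch_tracker et;
            struct foo *f;
            int found = 0;

            /* Read-side section: may be preempted, but must not sleep. */
            epoch_enter_preempt(foo_epoch, &et);
            CK_SLIST_FOREACH(f, &foo_list, f_link) {
                    if (f->f_value == value) {
                            found = 1;
                            break;
                    }
            }
            epoch_exit_preempt(foo_epoch, &et);
            return (found);
    }
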
561 * thread is currently in an epoch section.
582 * on the CPU in an epoch section. in epoch_block_handler_preempt()
628 * in fact belong to a different epoch. in epoch_block_handler_preempt()
633 * Try to find a thread in an epoch section on this CPU in epoch_block_handler_preempt()
702 epoch_wait_preempt(epoch_t epoch) in epoch_wait_preempt()
711 MPASS(cold || epoch != NULL); in epoch_wait_preempt()
712 INIT_CHECK(epoch); in epoch_wait_preempt()
716 MPASS(epoch->e_flags & EPOCH_PREEMPT); in epoch_wait_preempt()
717 if ((epoch->e_flags & EPOCH_LOCKED) == 0) in epoch_wait_preempt()
720 KASSERT(!in_epoch(epoch), ("epoch_wait_preempt() called in the middle " in epoch_wait_preempt()
721 "of an epoch section of the same epoch")); in epoch_wait_preempt()
734 ck_epoch_synchronize_wait(&epoch->e_epoch, epoch_block_handler_preempt, in epoch_wait_preempt()
765 epoch_wait(epoch_t epoch) in epoch_wait()
768 MPASS(cold || epoch != NULL); in epoch_wait()
769 INIT_CHECK(epoch); in epoch_wait()
770 MPASS(epoch->e_flags == 0); in epoch_wait()
772 ck_epoch_synchronize_wait(&epoch->e_epoch, epoch_block_handler, NULL); in epoch_wait()
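
epoch_wait_preempt() and epoch_wait() block the caller until every section that was active at the time of the call has exited, so a writer can unlink an object and then free it synchronously. A sketch continuing the foo example; the writer lock and malloc type are further assumptions:

    #include <sys/kernel.h>
    #include <sys/lock.h>
    #include <sys/mutex.h>
    #include <sys/malloc.h>

    static MALLOC_DEFINE(M_FOO, "foo", "hypothetical foo objects");
    extern struct mtx foo_lock;         /* serializes writers */

    static void
    foo_remove_sync(struct foo *f)
    {
            mtx_lock(&foo_lock);
            CK_SLIST_REMOVE(&foo_list, f, foo, f_link);
            mtx_unlock(&foo_lock);

            /*
             * May sleep; must not be called from inside an epoch section
             * (epoch_wait_preempt() asserts !in_epoch(epoch) above).
             */
            epoch_wait_preempt(foo_epoch);
            free(f, M_FOO);             /* no reader can still see 'f' */
    }
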
777 epoch_call(epoch_t epoch, epoch_callback_t callback, epoch_context_t ctx) in epoch_call()
785 /* too early in boot to have epoch set up */ in epoch_call()
786 if (__predict_false(epoch == NULL)) in epoch_call()
795 er = epoch_currecord(epoch); in epoch_call()
809 epoch_t epoch; in epoch_call_task()
817 epoch = epoch_array + i; in epoch_call_task()
819 atomic_load_acq_int(&epoch->e_in_use) == 0)) in epoch_call_task()
821 er = epoch_currecord(epoch); in epoch_call_task()
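
epoch_call() queues a callback on the current CPU's record and returns immediately; epoch_call_task() later invokes the callbacks whose grace period has expired. The usual pattern embeds a struct epoch_context in the object and recovers the object with __containerof() in the callback, as in this sketch (struct foo from the read-side example, extended with an epoch_context member):

    struct foo {
            CK_SLIST_ENTRY(foo)     f_link;
            int                     f_value;
            struct epoch_context    f_epoch_ctx;    /* deferred reclamation */
    };

    static void
    foo_free_cb(epoch_context_t ctx)
    {
            struct foo *f = __containerof(ctx, struct foo, f_epoch_ctx);

            free(f, M_FOO);
    }

    static void
    foo_remove_async(struct foo *f)
    {
            mtx_lock(&foo_lock);
            CK_SLIST_REMOVE(&foo_list, f, foo, f_link);
            mtx_unlock(&foo_lock);
            /* Never blocks: 'f' is freed once a grace period has elapsed. */
            epoch_call(foo_epoch, foo_free_cb, &f->f_epoch_ctx);
    }
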
846 in_epoch_verbose_preempt(epoch_t epoch, int dump_onfail) in in_epoch_verbose_preempt()
852 MPASS(epoch != NULL); in in_epoch_verbose_preempt()
853 MPASS((epoch->e_flags & EPOCH_PREEMPT) != 0); in in_epoch_verbose_preempt()
858 er = epoch_currecord(epoch); in in_epoch_verbose_preempt()
879 epoch_assert_nocpu(epoch_t epoch, struct thread *td) in epoch_assert_nocpu()
889 er = zpcpu_get_cpu(epoch->e_pcpu_record, cpu); in epoch_assert_nocpu()
891 ("%s critical section in epoch '%s', from cpu %d", in epoch_assert_nocpu()
892 (crit ? "exited" : "re-entered"), epoch->e_name, cpu)); in epoch_assert_nocpu()
900 in_epoch_verbose(epoch_t epoch, int dump_onfail) in in_epoch_verbose()
905 if (__predict_false((epoch) == NULL)) in in_epoch_verbose()
907 if ((epoch->e_flags & EPOCH_PREEMPT) != 0) in in_epoch_verbose()
908 return (in_epoch_verbose_preempt(epoch, dump_onfail)); in in_epoch_verbose()
912 * condition to be correctly inside a non-preemptible epoch, in in_epoch_verbose()
913 * so it's definitely not in this epoch. in in_epoch_verbose()
917 epoch_assert_nocpu(epoch, td); in in_epoch_verbose()
922 * The current cpu is in a critical section, so the epoch record will be in in_epoch_verbose()
924 * active is sufficient for knowing whether we're in this epoch or not, in in_epoch_verbose()
927 er = epoch_currecord(epoch); in in_epoch_verbose()
929 epoch_assert_nocpu(epoch, td); in in_epoch_verbose()
938 in_epoch(epoch_t epoch) in in_epoch()
940 return (in_epoch_verbose(epoch, 0)); in in_epoch()
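
in_epoch() is a thin wrapper around in_epoch_verbose() with reporting disabled, and is mostly useful for assertions: a function that relies on its caller holding the epoch open can check that in debug kernels. A short sketch:

    static struct foo *
    foo_first(void)
    {
            /* Caller must be inside a foo_epoch read section. */
            KASSERT(in_epoch(foo_epoch), ("%s: not in foo_epoch", __func__));
            return (CK_SLIST_FIRST(&foo_list));
    }
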
946 struct epoch *epoch = in epoch_drain_cb()
949 if (atomic_fetchadd_int(&epoch->e_drain_count, -1) == 1) { in epoch_drain_cb()
950 mtx_lock(&epoch->e_drain_mtx); in epoch_drain_cb()
951 wakeup(epoch); in epoch_drain_cb()
952 mtx_unlock(&epoch->e_drain_mtx); in epoch_drain_cb()
957 epoch_drain_callbacks(epoch_t epoch) in epoch_drain_callbacks()
969 /* too early in boot to have epoch set up */ in epoch_drain_callbacks()
970 if (__predict_false(epoch == NULL)) in epoch_drain_callbacks()
978 sx_xlock(&epoch->e_drain_sx); in epoch_drain_callbacks()
979 mtx_lock(&epoch->e_drain_mtx); in epoch_drain_callbacks()
990 epoch->e_drain_count++; in epoch_drain_callbacks()
992 er = zpcpu_get_cpu(epoch->e_pcpu_record, cpu); in epoch_drain_callbacks()
994 epoch_call(epoch, &epoch_drain_cb, &er->er_drain_ctx); in epoch_drain_callbacks()
1011 while (epoch->e_drain_count != 0) in epoch_drain_callbacks()
1012 msleep(epoch, &epoch->e_drain_mtx, PZERO, "EDRAIN", 0); in epoch_drain_callbacks()
1014 mtx_unlock(&epoch->e_drain_mtx); in epoch_drain_callbacks()
1015 sx_xunlock(&epoch->e_drain_sx); in epoch_drain_callbacks()
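
epoch_drain_callbacks() queues a sentinel callback on every CPU and sleeps until all of them have run, which guarantees that every callback queued before the call has finished. epoch_free() relies on it above; it also matters on its own when a module queued callbacks against a shared, longer-lived epoch and is about to unload. A minimal sketch with hypothetical names:

    static void
    foo_detach(void)
    {
            /*
             * New epoch_call() submissions must already be impossible here.
             * Wait for the callbacks still queued so none of them runs after
             * our data structures (or module text) have been freed.
             */
            epoch_drain_callbacks(foo_epoch);

            /* Safe to release foo_list backing storage / unload now. */
    }
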