Lines Matching full:part

// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// This file is a part of ThreadSanitizer (TSan), a race detector.

In TracePartAlloc():

TracePart* part = nullptr;
// ... (recycling is attempted only when the trace is at its part budget)
part = ctx->trace_part_recycle.PopFront();
DPrintf("#%d: TracePartAlloc: part=%p\n", thr->tid, part);
if (part && part->trace) {
  // Unlink the recycled part from the trace it last belonged to.
  Trace* trace1 = part->trace;
  Lock trace_lock(&trace1->mtx);
  part->trace = nullptr;
  TracePart* part1 = trace1->parts.PopFront();
  CHECK_EQ(part, part1);
}
if (!part)
  ctx->trace_part_total_allocated++;
// Nothing to recycle: map a fresh part and construct it in place.
if (!part)
  part = new (MmapOrDie(sizeof(*part), "TracePart")) TracePart();
return part;

static void TracePartFree(TracePart* part) SANITIZER_REQUIRES(ctx->slot_mtx) {
  // Freed parts are not unmapped; they are only queued for reuse.
  DCHECK(part->trace);
  part->trace = nullptr;
  ctx->trace_part_recycle.PushFront(part);
}
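
Taken together, TracePartAlloc and TracePartFree implement a simple recycling
allocator: freeing never unmaps on the hot path, it only queues the part, and
allocation drains the queue before mapping fresh memory. A minimal standalone
sketch of that pattern; the names Part and RecyclePool are illustrative, not
TSan's types, and std::deque stands in for the intrusive recycle queue:

#include <sys/mman.h>

#include <cassert>
#include <deque>
#include <new>

struct Part {
  char events[4096];  // payload, standing in for TracePart::events
};

class RecyclePool {
  std::deque<Part*> recycle_;  // stand-in for the intrusive recycle queue

 public:
  Part* Alloc() {
    // Prefer a previously freed part over mapping new memory.
    if (!recycle_.empty()) {
      Part* part = recycle_.front();
      recycle_.pop_front();
      return part;
    }
    // Nothing to recycle: map fresh pages and construct the part in place,
    // analogous to new (MmapOrDie(...)) TracePart().
    void* mem = mmap(nullptr, sizeof(Part), PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    assert(mem != MAP_FAILED);
    return new (mem) Part();
  }

  // Freeing only queues the part for reuse; the memory stays mapped.
  void Free(Part* part) { recycle_.push_front(part); }
};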

In TraceResetForTesting():

// Drain the recycle queue and unmap every part (test-only path).
while (auto* part = ctx->trace_part_recycle.PopFront()) {
  if (auto trace = part->trace)
    CHECK_EQ(trace->parts.PopFront(), part);
  UnmapOrDie(part, sizeof(*part));
}

In DoResetImpl():

while (!parts->Empty()) {
  auto part = parts->Front();
  local = local || part == trace->local_head;
  if (local)
    CHECK(!ctx->trace_part_recycle.Queued(part));
  else
    ctx->trace_part_recycle.Remove(part);
  if (attached && parts->Size() == 1) {
    // The thread is running and this is the last/current part.
    // Set the trace position to the end of the current part
    // to force the thread to call TraceSwitchPart and re-attach
    // to a new slot and allocate a new trace part.
    // The thread can only modify the position within this part,
    // because switching parts is protected by slot/trace mutexes.
    atomic_store_relaxed(&tctx->thr->trace_pos,
        reinterpret_cast<uptr>(&part->events[TracePart::kSize]));
    break;
  }
  parts->Remove(part);
  TracePartFree(part);
}
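
The store above is what evicts a running thread: moving trace_pos to
one-past-the-end makes the thread's next fast-path bounds check fail, so it
falls into the slow path and switches parts. A hedged sketch of that mechanism
using std::atomic; TSan's own atomics, event layout, and the acknowledged
best-effort race on the position are simplified away:

#include <atomic>
#include <cstddef>
#include <cstdint>

struct Event { uint64_t raw; };
constexpr size_t kSize = 1024;  // events per part, illustrative
Event events[kSize];
std::atomic<Event*> trace_pos{&events[0]};

// Fast path on the owning thread: claim one slot or report exhaustion.
bool TryTrace(Event ev) {
  Event* pos = trace_pos.load(std::memory_order_relaxed);
  if (pos >= &events[kSize])
    return false;  // exhausted or poisoned: the caller must switch parts
  *pos = ev;
  trace_pos.store(pos + 1, std::memory_order_relaxed);
  return true;
}

// Reset path on another thread: poison the position so the owner is
// guaranteed to take the slow path on its next traced event.
void ForcePartSwitch() {
  trace_pos.store(&events[kSize], std::memory_order_relaxed);
}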

In SlotDetachImpl():

TracePart* part = nullptr;
// ... (under the trace mutex)
part = parts->PopFront();
// ...
if (part) {
  Lock lock(&ctx->slot_mtx);  // TracePartFree requires slot_mtx
  TracePartFree(part);
}

In TraceSkipGap():

auto *part = trace->parts.Back();
DPrintf("#%d: TraceSwitchPart enter trace=%p parts=%p-%p pos=%p\n", thr->tid,
        trace, trace->parts.Front(), part, pos);
if (!part)
  return;
// We can get here when we still have space in the current trace part.
// The fast-path check in TraceAcquire has false positives in the
// middle of the part. Check if we are indeed at the end of the current
// part or not, and fill the gap with NopEvent's if we are.
Event* end = &part->events[TracePart::kSize];
DCHECK_GE(pos, &part->events[0]);
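
The comment boils down to: the cheap fast-path check can fire before the part
is actually full, so the slow path re-checks against the real end and, only
when truly at the end, pads the remaining slots with no-op events so the part
stays linearly decodable. A simplified sketch; Event, kSize, and kNopEvent are
stand-ins, not TSan's definitions:

#include <cstddef>
#include <cstdint>

struct Event { uint64_t raw; };
constexpr Event kNopEvent{0};   // stand-in for TSan's NopEvent
constexpr size_t kSize = 1024;  // events per part, illustrative

// Returns true if the part is really exhausted and must be switched;
// on a fast-path false positive there is still room and nothing to do.
bool AtEndOrFalsePositive(Event* events, Event*& pos) {
  Event* end = &events[kSize];
  if (pos + 1 < end)
    return false;  // false positive: space remains in this part
  for (; pos < end; pos++)
    *pos = kNopEvent;  // pad the tail so replay can skip the gap
  return true;
}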

In TraceSwitchPart():

// After a multithreaded fork we just need to survive till exec:
// rewind to the start of the current part instead of switching.
TracePart* part = thr->tctx->trace.parts.Back();
if (part)
  atomic_store_relaxed(&thr->trace_pos,
                       reinterpret_cast<uptr>(&part->events[0]));

In TraceSwitchPartImpl():

TracePart* part = TracePartAlloc(thr);
part->trace = trace;
// ... (under the trace mutex)
if (trace->parts.Empty())
  trace->local_head = part;
// ...
trace->parts.PushBack(part);
atomic_store_relaxed(&thr->trace_pos,
                     reinterpret_cast<uptr>(&part->events[0]));
// Make this part self-sufficient by restoring the current stack
// and mutex set at the beginning of the trace.
// Pathologically large stacks may not fit into the part, so only a
// fixed number of top frames is logged; check that kMaxFrames
// won't consume the whole part:
static_assert(kMaxFrames < TracePart::kSize / 2, "kMaxFrames is too big");
// Finally, skip the alignment gap if the restored events have
// filled the trace part exactly up to the TracePart::kAlignment gap.
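
Restoring state into every fresh part is what makes a part decodable on its
own: the current call stack (capped at kMaxFrames) and the held mutex set are
replayed as events at the part's start, so a reader never needs earlier parts.
A minimal sketch of the stack-capping step under those assumptions;
RestoreStack and its containers are illustrative, not TSan's API:

#include <cstddef>
#include <cstdint>
#include <vector>

constexpr size_t kMaxFrames = 1000;  // cap mirroring the check above

// Re-emit at most kMaxFrames innermost frames into a fresh part so the
// part can be decoded without consulting any earlier part.
void RestoreStack(const std::vector<uintptr_t>& shadow_stack,
                  std::vector<uintptr_t>& part_events) {
  size_t n = shadow_stack.size();
  size_t first = n > kMaxFrames ? n - kMaxFrames : 0;  // keep top frames
  for (size_t i = first; i < n; i++)
    part_events.push_back(shadow_stack[i]);  // models TryTraceFunc(thr, pc)
}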