#ifndef JEMALLOC_INTERNAL_PROF_INLINES_B_H
#define JEMALLOC_INTERNAL_PROF_INLINES_B_H

#include "jemalloc/internal/sz.h"

JEMALLOC_ALWAYS_INLINE bool
prof_active_get_unlocked(void) {
	/*
	 * Even if opt_prof is true, sampling can be temporarily disabled by
	 * setting prof_active to false.  No locking is used when reading
	 * prof_active in the fast path, so there are no guarantees regarding
	 * how long it will take for all threads to notice state changes.
	 */
	return prof_active;
}

JEMALLOC_ALWAYS_INLINE bool
prof_gdump_get_unlocked(void) {
	/*
	 * No locking is used when reading prof_gdump_val in the fast path, so
	 * there are no guarantees regarding how long it will take for all
	 * threads to notice state changes.
	 */
	return prof_gdump_val;
}

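/*
 * Fetch the calling thread's profiling data.  When create is true, lazily
 * initialize it on first use, or reinitialize it if the existing tdata has
 * expired; the result may still be NULL if tsd is not in a nominal state.
 */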
JEMALLOC_ALWAYS_INLINE prof_tdata_t *
prof_tdata_get(tsd_t *tsd, bool create) {
	prof_tdata_t *tdata;

	cassert(config_prof);

	tdata = tsd_prof_tdata_get(tsd);
	if (create) {
		if (unlikely(tdata == NULL)) {
			if (tsd_nominal(tsd)) {
				tdata = prof_tdata_init(tsd);
				tsd_prof_tdata_set(tsd, tdata);
			}
		} else if (unlikely(tdata->expired)) {
			tdata = prof_tdata_reinit(tsd, tdata);
			tsd_prof_tdata_set(tsd, tdata);
		}
		assert(tdata == NULL || tdata->attached);
	}

	return tdata;
}

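/* Read the prof_tctx_t that the arena layer recorded for ptr. */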
JEMALLOC_ALWAYS_INLINE prof_tctx_t *
prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
	cassert(config_prof);
	assert(ptr != NULL);

	return arena_prof_tctx_get(tsdn, ptr, alloc_ctx);
}

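/* Associate tctx with ptr in the arena layer's allocation metadata. */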
JEMALLOC_ALWAYS_INLINE void
prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
    alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
	cassert(config_prof);
	assert(ptr != NULL);

	arena_prof_tctx_set(tsdn, ptr, usize, alloc_ctx, tctx);
}

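/* Reset ptr's tctx; slightly cheaper than prof_tctx_set() when ptr did not move. */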
JEMALLOC_ALWAYS_INLINE void
prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
	cassert(config_prof);
	assert(ptr != NULL);

	arena_prof_tctx_reset(tsdn, ptr, tctx);
}

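/*
 * Decide whether an allocation of usize bytes should be sampled, updating the
 * per-thread bytes_until_sample budget when update is true.  Return true if
 * the allocation should not be sampled (no usable tdata, threshold not yet
 * crossed, reentrant call, or sampling inactive for this thread); return
 * false if a sample should be taken.  If tdata_out is non-NULL, it is set to
 * the thread's tdata (or NULL).
 */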
JEMALLOC_ALWAYS_INLINE bool
prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
    prof_tdata_t **tdata_out) {
	prof_tdata_t *tdata;

	cassert(config_prof);

	tdata = prof_tdata_get(tsd, true);
	if (unlikely((uintptr_t)tdata <= (uintptr_t)PROF_TDATA_STATE_MAX)) {
		tdata = NULL;
	}

	if (tdata_out != NULL) {
		*tdata_out = tdata;
	}

	if (unlikely(tdata == NULL)) {
		return true;
	}

	if (likely(tdata->bytes_until_sample >= usize)) {
		if (update) {
			tdata->bytes_until_sample -= usize;
		}
		return true;
	} else {
		if (tsd_reentrancy_level_get(tsd) > 0) {
			return true;
		}
		/* Compute new sample threshold. */
		if (update) {
			prof_sample_threshold_update(tdata);
		}
		return !tdata->active;
	}
}

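/*
 * Prepare to sample an allocation of usize bytes.  Return the sentinel
 * (prof_tctx_t *)1U if the allocation should not be sampled; otherwise
 * capture a backtrace and return the tctx obtained via prof_lookup().
 */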
JEMALLOC_ALWAYS_INLINE prof_tctx_t *
prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
	prof_tctx_t *ret;
	prof_tdata_t *tdata;
	prof_bt_t bt;

	assert(usize == sz_s2u(usize));

	if (!prof_active || likely(prof_sample_accum_update(tsd, usize, update,
	    &tdata))) {
		ret = (prof_tctx_t *)(uintptr_t)1U;
	} else {
		bt_init(&bt, tdata->vec);
		prof_backtrace(&bt);
		ret = prof_lookup(tsd, &bt);
	}

	return ret;
}

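/*
 * Commit the sampling decision for a fresh allocation: record ptr as a
 * sampled object when tctx is a real context, or tag it with the 1U sentinel
 * to mark it as unsampled.
 */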
JEMALLOC_ALWAYS_INLINE void
prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx,
    prof_tctx_t *tctx) {
	cassert(config_prof);
	assert(ptr != NULL);
	assert(usize == isalloc(tsdn, ptr));

	if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
		prof_malloc_sample_object(tsdn, ptr, usize, tctx);
	} else {
		prof_tctx_set(tsdn, ptr, usize, alloc_ctx,
		    (prof_tctx_t *)(uintptr_t)1U);
	}
}

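/*
 * Update profiling state across a reallocation: roll back a sampling decision
 * that was based on a larger usize if necessary, record the new object as
 * sampled or unsampled, and release the accounting for the old object if it
 * was sampled.
 */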
JEMALLOC_ALWAYS_INLINE void
prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
    bool prof_active, bool updated, const void *old_ptr, size_t old_usize,
    prof_tctx_t *old_tctx) {
	bool sampled, old_sampled, moved;

	cassert(config_prof);
	assert(ptr != NULL || (uintptr_t)tctx <= (uintptr_t)1U);

	if (prof_active && !updated && ptr != NULL) {
		assert(usize == isalloc(tsd_tsdn(tsd), ptr));
		if (prof_sample_accum_update(tsd, usize, true, NULL)) {
			/*
			 * Don't sample.  The usize passed to prof_alloc_prep()
			 * was larger than what actually got allocated, so a
			 * backtrace was captured for this allocation, even
			 * though its actual usize was insufficient to cross the
			 * sample threshold.
			 */
			prof_alloc_rollback(tsd, tctx, true);
			tctx = (prof_tctx_t *)(uintptr_t)1U;
		}
	}

	sampled = ((uintptr_t)tctx > (uintptr_t)1U);
	old_sampled = ((uintptr_t)old_tctx > (uintptr_t)1U);
	moved = (ptr != old_ptr);

	if (unlikely(sampled)) {
		prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx);
	} else if (moved) {
		prof_tctx_set(tsd_tsdn(tsd), ptr, usize, NULL,
		    (prof_tctx_t *)(uintptr_t)1U);
	} else if (unlikely(old_sampled)) {
		/*
		 * prof_tctx_set() would work for the !moved case as well, but
		 * prof_tctx_reset() is slightly cheaper, and the proper thing
		 * to do here in the presence of explicit knowledge re: moved
		 * state.
		 */
		prof_tctx_reset(tsd_tsdn(tsd), ptr, tctx);
	} else {
		assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr, NULL) ==
		    (uintptr_t)1U);
	}

	/*
	 * The prof_free_sampled_object() call must come after the
	 * prof_malloc_sample_object() call, because tctx and old_tctx may be
	 * the same, in which case reversing the call order could cause the tctx
	 * to be prematurely destroyed as a side effect of momentarily zeroed
	 * counters.
	 */
	if (unlikely(old_sampled)) {
		prof_free_sampled_object(tsd, old_usize, old_tctx);
	}
}

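/*
 * On deallocation, release the sampled-object accounting for ptr if it was
 * sampled (i.e. its tctx is a real context rather than the 1U sentinel).
 */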
JEMALLOC_ALWAYS_INLINE void
prof_free(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) {
	prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx);

	cassert(config_prof);
	assert(usize == isalloc(tsd_tsdn(tsd), ptr));

	if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
		prof_free_sampled_object(tsd, usize, tctx);
	}
}

#endif /* JEMALLOC_INTERNAL_PROF_INLINES_B_H */