//===-- tsan_interface_java.cpp -------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//

#include "tsan_interface_java.h"
#include "tsan_rtl.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_placement_new.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_procmaps.h"

using namespace __tsan;

const jptr kHeapAlignment = 8;

namespace __tsan {

struct JavaContext {
  const uptr heap_begin;
  const uptr heap_size;

  JavaContext(jptr heap_begin, jptr heap_size)
      : heap_begin(heap_begin)
      , heap_size(heap_size) {
  }
};

static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
static JavaContext *jctx;

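// Returns the meta block describing the Java heap object that contains addr
// and stores the object's start address in *start. Scans meta shadow cells
// backwards from addr; returns nullptr if addr is outside the registered Java
// heap or is not covered by any live object.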
MBlock *JavaHeapBlock(uptr addr, uptr *start) {
  if (!jctx || addr < jctx->heap_begin ||
      addr >= jctx->heap_begin + jctx->heap_size)
    return nullptr;
  for (uptr p = RoundDown(addr, kMetaShadowCell); p >= jctx->heap_begin;
       p -= kMetaShadowCell) {
    MBlock *b = ctx->metamap.GetBlock(p);
    if (!b)
      continue;
    if (p + b->siz <= addr)
      return nullptr;
    *start = p;
    return b;
  }
  return nullptr;
}

}  // namespace __tsan

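// Common prologue for the __tsan_java_* entry points: fetches the calling
// thread's ThreadState. The func argument is currently unused.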
#define JAVA_FUNC_ENTER(func)      \
  ThreadState *thr = cur_thread(); \
  (void)thr;

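// Registers the Java heap [heap_begin, heap_begin + heap_size) with the
// runtime. The DCHECKs below require it to be called exactly once, before any
// other __tsan_java_* entry point.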
void __tsan_java_init(jptr heap_begin, jptr heap_size) {
  JAVA_FUNC_ENTER(__tsan_java_init);
  Initialize(thr);
  DPrintf("#%d: java_init(0x%zx, 0x%zx)\n", thr->tid, heap_begin, heap_size);
  DCHECK_EQ(jctx, 0);
  DCHECK_GT(heap_begin, 0);
  DCHECK_GT(heap_size, 0);
  DCHECK_EQ(heap_begin % kHeapAlignment, 0);
  DCHECK_EQ(heap_size % kHeapAlignment, 0);
  DCHECK_LT(heap_begin, heap_begin + heap_size);
  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
}

int  __tsan_java_fini() {
  JAVA_FUNC_ENTER(__tsan_java_fini);
  DPrintf("#%d: java_fini()\n", thr->tid);
  DCHECK_NE(jctx, 0);
  // FIXME(dvyukov): this does not call atexit() callbacks.
  int status = Finalize(thr);
  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
  return status;
}

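// Registers a newly allocated Java object [ptr, ptr + size) so that accesses
// to it are tracked.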
void __tsan_java_alloc(jptr ptr, jptr size) {
  JAVA_FUNC_ENTER(__tsan_java_alloc);
  DPrintf("#%d: java_alloc(0x%zx, 0x%zx)\n", thr->tid, ptr, size);
  DCHECK_NE(jctx, 0);
  DCHECK_NE(size, 0);
  DCHECK_EQ(ptr % kHeapAlignment, 0);
  DCHECK_EQ(size % kHeapAlignment, 0);
  DCHECK_GE(ptr, jctx->heap_begin);
  DCHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  OnUserAlloc(thr, 0, ptr, size, false);
}

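// Removes metadata for a Java object [ptr, ptr + size) that has been freed or
// reclaimed by the garbage collector.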
void __tsan_java_free(jptr ptr, jptr size) {
  JAVA_FUNC_ENTER(__tsan_java_free);
  DPrintf("#%d: java_free(0x%zx, 0x%zx)\n", thr->tid, ptr, size);
  DCHECK_NE(jctx, 0);
  DCHECK_NE(size, 0);
  DCHECK_EQ(ptr % kHeapAlignment, 0);
  DCHECK_EQ(size % kHeapAlignment, 0);
  DCHECK_GE(ptr, jctx->heap_begin);
  DCHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  ctx->metamap.FreeRange(thr->proc(), ptr, size, false);
}

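// Handles object relocation by the garbage collector: metadata for
// [src, src + size) is moved to dst. Expected to run while the world is
// stopped (see the comment in the body).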
void __tsan_java_move(jptr src, jptr dst, jptr size) {
  JAVA_FUNC_ENTER(__tsan_java_move);
  DPrintf("#%d: java_move(0x%zx, 0x%zx, 0x%zx)\n", thr->tid, src, dst, size);
  DCHECK_NE(jctx, 0);
  DCHECK_NE(size, 0);
  DCHECK_EQ(src % kHeapAlignment, 0);
  DCHECK_EQ(dst % kHeapAlignment, 0);
  DCHECK_EQ(size % kHeapAlignment, 0);
  DCHECK_GE(src, jctx->heap_begin);
  DCHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
  DCHECK_GE(dst, jctx->heap_begin);
  DCHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
  DCHECK_NE(dst, src);
  DCHECK_NE(size, 0);

  // Assuming it's not running concurrently with threads that do
  // memory accesses and mutex operations (stop-the-world phase).
  ctx->metamap.MoveMemory(src, dst, size);

  // Clear the destination shadow range.
  // We used to move shadow from src to dst, but the trace format does not
  // support that anymore as it contains addresses of accesses.
  RawShadow *d = MemToShadow(dst);
  RawShadow *dend = MemToShadow(dst + size);
  ShadowSet(d, dend, Shadow::kEmpty);
}

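// Scans [*from_ptr, to) for the first registered heap object. If one is found,
// stores its start address in *from_ptr and returns its size; otherwise
// returns 0.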
jptr __tsan_java_find(jptr *from_ptr, jptr to) {
  JAVA_FUNC_ENTER(__tsan_java_find);
  DPrintf("#%d: java_find(&0x%zx, 0x%zx)\n", thr->tid, *from_ptr, to);
  DCHECK_EQ((*from_ptr) % kHeapAlignment, 0);
  DCHECK_EQ(to % kHeapAlignment, 0);
  DCHECK_GE(*from_ptr, jctx->heap_begin);
  DCHECK_LE(to, jctx->heap_begin + jctx->heap_size);
  for (uptr from = *from_ptr; from < to; from += kHeapAlignment) {
    MBlock *b = ctx->metamap.GetBlock(from);
    if (b) {
      *from_ptr = from;
      return b->siz;
    }
  }
  return 0;
}

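// Acquires synchronization from all threads (AcquireGlobal). Intended to be
// called on the finalizer thread before running finalizers, so that finalizer
// code does not race with earlier accesses to the objects being finalized.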
void __tsan_java_finalize() {
  JAVA_FUNC_ENTER(__tsan_java_finalize);
  DPrintf("#%d: java_finalize()\n", thr->tid);
  AcquireGlobal(thr);
}

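// Java monitors are modeled as TSan mutexes identified by object address.
// MutexFlagLinkerInit marks them as implicitly initialized (there are no
// explicit create/destroy calls), MutexFlagWriteReentrant allows the recursive
// locking that Java monitors permit, and MutexFlagDoPreLockOnPostLock performs
// the pre-lock bookkeeping here because there is no separate pre-lock hook.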
void __tsan_java_mutex_lock(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_mutex_lock);
  DPrintf("#%d: java_mutex_lock(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexPostLock(thr, 0, addr,
                MutexFlagLinkerInit | MutexFlagWriteReentrant |
                    MutexFlagDoPreLockOnPostLock);
}

void __tsan_java_mutex_unlock(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_mutex_unlock);
  DPrintf("#%d: java_mutex_unlock(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexUnlock(thr, 0, addr);
}

void __tsan_java_mutex_read_lock(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_mutex_read_lock);
  DPrintf("#%d: java_mutex_read_lock(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexPostReadLock(thr, 0, addr,
                    MutexFlagLinkerInit | MutexFlagWriteReentrant |
                        MutexFlagDoPreLockOnPostLock);
}

void __tsan_java_mutex_read_unlock(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_mutex_read_unlock);
  DPrintf("#%d: java_mutex_read_unlock(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadUnlock(thr, 0, addr);
}

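// The *_rec variants take and return an explicit recursion count. They let the
// JVM fully release a recursively held monitor and later re-acquire it with
// the same count (for example, around a wait operation).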
void __tsan_java_mutex_lock_rec(jptr addr, int rec) {
  JAVA_FUNC_ENTER(__tsan_java_mutex_lock_rec);
  DPrintf("#%d: java_mutex_lock_rec(0x%zx, %d)\n", thr->tid, addr, rec);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
  DCHECK_GT(rec, 0);

  MutexPostLock(thr, 0, addr,
                MutexFlagLinkerInit | MutexFlagWriteReentrant |
                    MutexFlagDoPreLockOnPostLock | MutexFlagRecursiveLock,
                rec);
}

int __tsan_java_mutex_unlock_rec(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_mutex_unlock_rec);
  DPrintf("#%d: java_mutex_unlock_rec(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  return MutexUnlock(thr, 0, addr, MutexFlagRecursiveUnlock);
}

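// Standalone happens-before annotations on a heap address: __tsan_java_release
// and __tsan_java_release_store publish the current thread's memory state at
// addr, and __tsan_java_acquire synchronizes with prior releases on the same
// address.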
void __tsan_java_acquire(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_acquire);
  DPrintf("#%d: java_acquire(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  Acquire(thr, 0, addr);
}

void __tsan_java_release(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_release);
  DPrintf("#%d: java_release(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  Release(thr, 0, addr);
}

void __tsan_java_release_store(jptr addr) {
  JAVA_FUNC_ENTER(__tsan_java_release_store);
  DPrintf("#%d: java_release_store(0x%zx)\n", thr->tid, addr);
  DCHECK_NE(jctx, 0);
  DCHECK_GE(addr, jctx->heap_begin);
  DCHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  ReleaseStore(thr, 0, addr);
}
259