Lines matching +full:use +full:-dma +full:-tx in sys/dev/ioat/ioat_test.c,
the self-test harness for the FreeBSD ioat(4) DMA engine driver.  Matches
are grouped by function below; elided source is marked /* ... */.

/*
 * Wraparound-safe tick comparison (same idiom as Linux's time_after()):
 * true when "a" is later than "b" even if the tick counter has rolled over.
 */
#define	time_after(a,b)		((long)(b) - (long)(a) < 0)
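
Why the subtraction idiom instead of a plain "a > b": it stays correct
across counter wraparound.  A standalone userland demonstration with
hypothetical values, not part of the driver (it assumes the usual
two's-complement unsigned-to-signed conversion):

#include <limits.h>
#include <stdio.h>

#define	time_after(a,b)	((long)(b) - (long)(a) < 0)

int
main(void)
{
        unsigned long before = ULONG_MAX - 5;   /* just before wraparound */
        unsigned long after = before + 10;      /* wrapped; numerically 4 */

        printf("%d\n", after > before);            /* 0: fooled by wrap */
        printf("%d\n", time_after(after, before)); /* 1: still correct */
        return (0);
}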

/* Free a transaction's data buffers, then the transaction itself. */
static void
ioat_test_transaction_destroy(struct test_transaction *tx)
{
        int i;

        for (i = 0; i < IOAT_MAX_BUFS; i++) {
                if (tx->buf[i] != NULL) {
                        free(tx->buf[i], M_IOAT_TEST);
                        tx->buf[i] = NULL;
                }
        }

        free(tx, M_IOAT_TEST);
}

static struct test_transaction *
ioat_test_transaction_create(struct ioat_test *test, unsigned num_buffers)
{
        struct test_transaction *tx;
        int i;

        tx = malloc(sizeof(*tx), M_IOAT_TEST, M_NOWAIT | M_ZERO);
        if (tx == NULL)
                return (NULL);

        tx->length = test->buffer_size;

        for (i = 0; i < num_buffers; i++) {
                /*
                 * The 8k test hands each 4k page to the engine separately,
                 * so plain malloc suffices; every other test needs one
                 * physically contiguous, page-aligned buffer.
                 */
                if (test->testkind == IOAT_TEST_DMA_8K)
                        tx->buf[i] = malloc(test->buffer_size, M_IOAT_TEST,
                            M_NOWAIT);
                else
                        tx->buf[i] = contigmalloc(test->buffer_size,
                            M_IOAT_TEST, M_NOWAIT, 0, BUS_SPACE_MAXADDR,
                            PAGE_SIZE, 0);

                if (tx->buf[i] == NULL) {
                        ioat_test_transaction_destroy(tx);
                        return (NULL);
                }
        }
        return (tx);
}
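
The positional arguments to contigmalloc(9) are easy to misread, so here
is the same call spelled out; "buf", "pa", and "len" are hypothetical names
for illustration, and ownership of "buf" is ignored in this sketch:

void *buf;
vm_paddr_t pa;
size_t len = 2 * PAGE_SIZE;

buf = contigmalloc(len, M_IOAT_TEST, M_NOWAIT,
    0,                          /* lowest acceptable physical address */
    BUS_SPACE_MAXADDR,          /* highest acceptable physical address */
    PAGE_SIZE,                  /* alignment of the physical start */
    0);                         /* boundary: no crossing restriction */
if (buf != NULL)
        pa = vtophys((vm_offset_t)buf); /* valid for all "len" bytes */

Because the buffer is physically contiguous and wired, that single
vtophys() translation covers every byte, which is what lets the submit
path below program the engine with one physical address per buffer.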

static bool
ioat_compare_ok(struct test_transaction *tx)
{
        struct ioat_test *test;
        char *dst, *src;
        uint32_t i, j;

        test = tx->test;

        for (i = 0; i < tx->depth; i++) {
                dst = tx->buf[2 * i + 1];
                src = tx->buf[2 * i];

                if (test->testkind == IOAT_TEST_FILL) {
                        /*
                         * Block fill replicates the 8-byte pattern at the
                         * start of src across dst, so compare every 8-byte
                         * window of dst (and any short tail) against it.
                         */
                        for (j = 0; j < tx->length; j += sizeof(uint64_t)) {
                                if (memcmp(src, &dst[j],
                                    MIN(sizeof(uint64_t), tx->length - j))
                                    != 0)
                                        return (false);
                        }
                } else if (test->testkind == IOAT_TEST_DMA) {
                        if (memcmp(src, dst, tx->length) != 0)
                                return (false);
                } else if (test->testkind == IOAT_TEST_RAW_DMA) {
                        /* No automatic check; dump for manual inspection. */
                        if (test->raw_write)
                                dst = test->raw_vtarget;
                        dump_hex(dst, tx->length / 32);
                }
        }
        return (true);
}

static void
ioat_dma_test_callback(void *arg, int error)
{
        struct test_transaction *tx;
        struct ioat_test *test;

        if (error != 0)
                ioat_test_log(0, "%s: Got error: %d\n", __func__, error);

        tx = arg;
        test = tx->test;

        if (test->verify && !ioat_compare_ok(tx)) {
                ioat_test_log(0, "miscompare found\n");
                atomic_add_32(&test->status[IOAT_TEST_MISCOMPARE], tx->depth);
        } else if (!test->too_late)
                atomic_add_32(&test->status[IOAT_TEST_OK], tx->depth);

        /* Recycle the transaction and wake any waiting submitter. */
        mtx_lock(&ioat_test_lk);
        TAILQ_REMOVE(&test->pend_q, tx, entry);
        TAILQ_INSERT_TAIL(&test->free_q, tx, entry);
        wakeup(&test->free_q);
        mtx_unlock(&ioat_test_lk);
}

static int
ioat_test_prealloc_memory(struct ioat_test *test, int index)
{
        uint32_t i, j, k;
        struct test_transaction *tx;

        for (i = 0; i < test->transactions; i++) {
                tx = ioat_test_transaction_create(test, test->chain_depth * 2);
                if (tx == NULL) {
                        ioat_test_log(0, "tx == NULL - memory exhausted\n");
                        test->status[IOAT_TEST_NO_MEMORY]++;
                        return (ENOMEM);
                }

                TAILQ_INSERT_HEAD(&test->free_q, tx, entry);

                tx->test = test;
                tx->depth = test->chain_depth;

                /*
                 * Fill every source buffer with ~val and every destination
                 * with val, so a copy that never happens is guaranteed to
                 * miscompare during verification.
                 */
                for (j = 0; j < (tx->length / sizeof(uint32_t)); j++) {
                        uint32_t val = j + (index << 28);

                        for (k = 0; k < test->chain_depth; k++) {
                                ((uint32_t *)tx->buf[2 * k])[j] = ~val;
                                ((uint32_t *)tx->buf[2 * k + 1])[j] = val;
                        }
                }
        }
        return (0);
}

static void
ioat_test_release_memory(struct ioat_test *test)
{
        struct test_transaction *tx, *s;

        TAILQ_FOREACH_SAFE(tx, &test->free_q, entry, s)
                ioat_test_transaction_destroy(tx);
        TAILQ_INIT(&test->free_q);

        TAILQ_FOREACH_SAFE(tx, &test->pend_q, entry, s)
                ioat_test_transaction_destroy(tx);
        TAILQ_INIT(&test->pend_q);
}

/*
 * Take one transaction off the free queue and submit its chain of
 * operations, or memcpy(9) it directly for the software baseline.
 */
static void
ioat_test_submit_1_tx(struct ioat_test *test, bus_dmaengine_t dma)
{
        struct test_transaction *tx;
        struct bus_dmadesc *desc;
        bus_dmaengine_callback_t cb;
        bus_addr_t src, dest;
        uint64_t fillpattern;
        uint32_t i, flags;

        desc = NULL;

        mtx_lock(&ioat_test_lk);
        while (TAILQ_EMPTY(&test->free_q))
                msleep(&test->free_q, &ioat_test_lk, 0, "test_submit", 0);

        tx = TAILQ_FIRST(&test->free_q);
        TAILQ_REMOVE(&test->free_q, tx, entry);
        TAILQ_INSERT_HEAD(&test->pend_q, tx, entry);
        mtx_unlock(&ioat_test_lk);

        if (test->testkind != IOAT_TEST_MEMCPY)
                ioat_acquire(dma);
        for (i = 0; i < tx->depth; i++) {
                if (test->testkind == IOAT_TEST_MEMCPY) {
                        memcpy(tx->buf[2 * i + 1], tx->buf[2 * i], tx->length);
                        if (i == tx->depth - 1)
                                ioat_dma_test_callback(tx, 0);
                        continue;
                }

                src = vtophys((vm_offset_t)tx->buf[2 * i]);
                dest = vtophys((vm_offset_t)tx->buf[2 * i + 1]);

                if (test->testkind == IOAT_TEST_RAW_DMA) {
                        if (test->raw_write)
                                dest = test->raw_target;
                        else
                                src = test->raw_target;
                }

                /* Interrupt and callback only on the chain's last op. */
                if (i == tx->depth - 1) {
                        cb = ioat_dma_test_callback;
                        flags = DMA_INT_EN;
                } else {
                        cb = NULL;
                        flags = 0;
                }

                if (test->testkind == IOAT_TEST_DMA ||
                    test->testkind == IOAT_TEST_RAW_DMA)
                        desc = ioat_copy(dma, dest, src, tx->length, cb, tx,
                            flags);
                else if (test->testkind == IOAT_TEST_FILL) {
                        fillpattern = *(uint64_t *)tx->buf[2 * i];
                        desc = ioat_blockfill(dma, dest, fillpattern,
                            tx->length, cb, tx, flags);
                } else if (test->testkind == IOAT_TEST_DMA_8K) {
                        bus_addr_t src2, dst2;

                        src2 = vtophys((vm_offset_t)tx->buf[2 * i] +
                            PAGE_SIZE);
                        dst2 = vtophys((vm_offset_t)tx->buf[2 * i + 1] +
                            PAGE_SIZE);

                        desc = ioat_copy_8k_aligned(dma, dest, dst2, src,
                            src2, cb, tx, flags);
                } else if (test->testkind == IOAT_TEST_DMA_8K_PB) {
                        bus_addr_t src2, dst2;

                        /* "PB" variant swaps the two pages' directions. */
                        src2 = vtophys((vm_offset_t)tx->buf[2 * i + 1] +
                            PAGE_SIZE);
                        dst2 = vtophys((vm_offset_t)tx->buf[2 * i] +
                            PAGE_SIZE);

                        desc = ioat_copy_8k_aligned(dma, dest, dst2, src,
                            src2, cb, tx, flags);
                } else if (test->testkind == IOAT_TEST_DMA_CRC) {
                        bus_addr_t crc;

                        tx->crc[i] = 0;
                        crc = vtophys((vm_offset_t)&tx->crc[i]);
                        desc = ioat_crc(dma, src, tx->length,
                            NULL, crc, cb, tx, flags | DMA_CRC_STORE);
                } else if (test->testkind == IOAT_TEST_DMA_CRC_COPY) {
                        bus_addr_t crc;

                        tx->crc[i] = 0;
                        crc = vtophys((vm_offset_t)&tx->crc[i]);
                        desc = ioat_copy_crc(dma, dest, src, tx->length,
                            NULL, crc, cb, tx, flags | DMA_CRC_STORE);
                }
                if (desc == NULL)
                        break;
        }
        if (test->testkind == IOAT_TEST_MEMCPY)
                return;
        ioat_release(dma);

        /*
         * We couldn't issue an IO -- either the device is being detached or
         * the HW reset.  Just quit.
         */
        if (desc == NULL && tx->depth > 0) {
                atomic_add_32(&test->status[IOAT_TEST_NO_DMA_ENGINE],
                    tx->depth);
                mtx_lock(&ioat_test_lk);
                TAILQ_REMOVE(&test->pend_q, tx, entry);
                TAILQ_INSERT_HEAD(&test->free_q, tx, entry);
                mtx_unlock(&ioat_test_lk);
        }
}
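
Stripped of the test plumbing, the per-chain pattern above reduces to
acquire, queue with a callback on the final descriptor, release.  A
minimal single-copy sketch against the same ioat(4) KPI; the names
"copy_done" and "one_copy", the channel number, and the include path are
assumptions, and dst/src must be physical addresses of contiguous wired
memory:

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>
#include <dev/ioat/ioat.h>      /* KPI declarations; path may vary */

static void
copy_done(void *arg, int error)
{
        /* Runs from completion context once the copy (and intr) finish. */
        if (error != 0)
                printf("ioat copy failed: %d\n", error);
        wakeup(arg);
}

static int
one_copy(bus_addr_t dst, bus_addr_t src, bus_size_t len)
{
        bus_dmaengine_t dma;
        struct bus_dmadesc *desc;

        dma = ioat_get_dmaengine(0, M_NOWAIT);  /* channel 0 */
        if (dma == NULL)
                return (ENXIO);

        ioat_acquire(dma);                      /* serialize queueing */
        desc = ioat_copy(dma, dst, src, len, copy_done, &dma, DMA_INT_EN);
        ioat_release(dma);                      /* kick the hardware */

        if (desc == NULL) {
                ioat_put_dmaengine(dma);
                return (EAGAIN);                /* ring full or HW gone */
        }
        /* The timeout papers over a missed wakeup; fine for a sketch. */
        tsleep(&dma, 0, "1copy", hz);
        ioat_put_dmaengine(dma);
        return (0);
}

The test code amortizes the acquire/release pair over chain_depth
operations and requests an interrupt only on the last one, which is why
ioat_dma_test_callback() accounts for tx->depth completions at a time.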

static void
ioat_dma_test(void *arg)
{
        struct ioat_softc *ioat;
        struct ioat_test *test;
        bus_dmaengine_t dmaengine;
        uint32_t loops;
        int index, rc, start, end, error;

        test = arg;
        memset(__DEVOLATILE(void *, test->status), 0, sizeof(test->status));

        if ((test->testkind == IOAT_TEST_DMA_8K ||
            test->testkind == IOAT_TEST_DMA_8K_PB) &&
            test->buffer_size != 2 * PAGE_SIZE) {
                ioat_test_log(0,
                    "Asked for 8k test and buffer size isn't 8k\n");
                test->status[IOAT_TEST_INVALID_INPUT]++;
                return;
        }

        if (test->buffer_size > 1024 * 1024) {
                ioat_test_log(0, "Buffer size too large >1MB\n");
                test->status[IOAT_TEST_NO_MEMORY]++;
                return;
        }

        if (test->chain_depth * 2 > IOAT_MAX_BUFS) {
                ioat_test_log(0, "Depth too large (> %u)\n",
                    (unsigned)IOAT_MAX_BUFS / 2);
                test->status[IOAT_TEST_NO_MEMORY]++;
                return;
        }

        if (btoc((uint64_t)test->buffer_size * test->chain_depth *
            test->transactions) > (physmem / 4)) {
                ioat_test_log(0, "Sanity check failed -- test would "
                    "use more than 1/4 of phys mem.\n");
                test->status[IOAT_TEST_NO_MEMORY]++;
                return;
        }

        if ((uint64_t)test->transactions * test->chain_depth > (1 << 16)) {
                ioat_test_log(0, "Sanity check failed -- test would "
                    "use more than available IOAT ring space.\n");
                test->status[IOAT_TEST_NO_MEMORY]++;
                return;
        }

        if (test->testkind >= IOAT_NUM_TESTKINDS) {
                ioat_test_log(0, "Invalid kind %u\n",
                    (unsigned)test->testkind);
                test->status[IOAT_TEST_INVALID_INPUT]++;
                return;
        }

        dmaengine = ioat_get_dmaengine(test->channel_index, M_NOWAIT);
        if (dmaengine == NULL) {
                ioat_test_log(0, "Couldn't acquire dmaengine\n");
                test->status[IOAT_TEST_NO_DMA_ENGINE]++;
                return;
        }
        ioat = to_ioat_softc(dmaengine);

        if (test->testkind == IOAT_TEST_FILL &&
            (ioat->capabilities & IOAT_DMACAP_BFILL) == 0) {
                ioat_test_log(0,
                    "Hardware doesn't support block fill, aborting test\n");
                test->status[IOAT_TEST_INVALID_INPUT]++;
                goto out;
        }

        if (test->coalesce_period > ioat->intrdelay_max) {
                ioat_test_log(0,
                    "Hardware doesn't support intrdelay of %u us.\n",
                    (unsigned)test->coalesce_period);
                test->status[IOAT_TEST_INVALID_INPUT]++;
                goto out;
        }
        error = ioat_set_interrupt_coalesce(dmaengine, test->coalesce_period);
        if (error == ENODEV && test->coalesce_period == 0)
                error = 0;
        if (error != 0) {
                ioat_test_log(0, "ioat_set_interrupt_coalesce: %d\n", error);
                test->status[IOAT_TEST_INVALID_INPUT]++;
                goto out;
        }

        if (test->zero_stats)
                memset(&ioat->stats, 0, sizeof(ioat->stats));

        if (test->testkind == IOAT_TEST_RAW_DMA) {
                if (test->raw_is_virtual) {
                        test->raw_vtarget = (void *)test->raw_target;
                        test->raw_target = vtophys(test->raw_vtarget);
                } else {
                        test->raw_vtarget = pmap_mapdev(test->raw_target,
                            test->buffer_size);
                }
        }

        index = g_thread_index++;
        TAILQ_INIT(&test->free_q);
        TAILQ_INIT(&test->pend_q);

        if (test->duration == 0)
                ioat_test_log(1, "Thread %d: num_loops remaining: 0x%08x\n",
                    index, test->transactions);

        rc = ioat_test_prealloc_memory(test, index);
        if (rc != 0) {
                ioat_test_log(0, "prealloc_memory: %d\n", rc);
                goto out;
        }
        wmb();

        test->too_late = false;
        start = ticks;
        end = start + (((sbintime_t)test->duration * hz) / 1000);

        /* Run until the transaction count or the duration is exhausted. */
        for (loops = 0;; loops++) {
                if (test->duration == 0 && loops >= test->transactions)
                        break;
                else if (test->duration != 0 && time_after(ticks, end)) {
                        test->too_late = true;
                        break;
                }

                ioat_test_submit_1_tx(test, dmaengine);
        }

        ioat_test_log(1, "Test Elapsed: %d ticks (overrun %d), %d sec.\n",
            ticks - start, ticks - end, (ticks - start) / hz);

        /* Drain: wait for every pending transaction to complete. */
        mtx_lock(&ioat_test_lk);
        while (!TAILQ_EMPTY(&test->pend_q))
                msleep(&test->free_q, &ioat_test_lk, 0, "ioattestcompl", hz);
        mtx_unlock(&ioat_test_lk);

        ioat_test_log(1, "Test Elapsed2: %d ticks (overrun %d), %d sec.\n",
            ticks - start, ticks - end, (ticks - start) / hz);

        ioat_test_release_memory(test);
out:
        if (test->testkind == IOAT_TEST_RAW_DMA && !test->raw_is_virtual)
                pmap_unmapdev(test->raw_vtarget, test->buffer_size);
        ioat_put_dmaengine(dmaengine);
}

static int
enable_ioat_test(SYSCTL_HANDLER_ARGS)
{
        int error, enabled;

        /* ... */
        error = sysctl_handle_int(oidp, &enabled, 0, req);
        if (error != 0 || req->newptr == NULL)
                return (error);
        /* ... create or destroy /dev/ioat_test to match the new value ... */
        return (0);
}

/* Toggled with "sysctl hw.ioat.enable_ioat_test=1". */
SYSCTL_PROC(_hw_ioat, OID_AUTO, enable_ioat_test, CTLTYPE_INT | CTLFLAG_RW,
    0, 0, enable_ioat_test, "I",
    "Non-zero: Enable the /dev/ioat_test device");