/linux/drivers/dma/ti/

  k3-psil.c
      34  struct psil_endpoint_config *psil_get_ep_config(u32 thread_id)   [in psil_get_ep_config(), argument]
      54  if (thread_id & K3_PSIL_DST_THREAD_ID_OFFSET && soc_ep_map->dst) {   [in psil_get_ep_config()]
      57  if (soc_ep_map->dst[i].thread_id == thread_id)   [in psil_get_ep_config()]
      62  thread_id &= ~K3_PSIL_DST_THREAD_ID_OFFSET;   [in psil_get_ep_config()]
      65  if (soc_ep_map->src[i].thread_id == thread_id)   [in psil_get_ep_config()]
      79  u32 thread_id;   [in psil_set_new_ep_config(), local]
      93  thread_id = dma_spec.args[0];   [in psil_set_new_ep_config()]
      95  dst_ep_config = psil_get_ep_config(thread_id);   [in psil_set_new_ep_config()]
      98  thread_id);   [in psil_set_new_ep_config()]
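The psil_get_ep_config() hits above show the lookup rule: a destination thread carries the K3_PSIL_DST_THREAD_ID_OFFSET flag and is searched in the SoC's dst table first; otherwise the flag is cleared and the ID is looked up in the src table. A standalone sketch of that pattern follows; the struct layout, table names, and flag value are simplified assumptions, not the kernel's exact definitions.

/* Standalone sketch of the psil_get_ep_config() lookup pattern; simplified types. */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define DST_THREAD_ID_OFFSET 0x8000u  /* stand-in for K3_PSIL_DST_THREAD_ID_OFFSET */

struct ep_sketch {
    uint32_t thread_id;
    /* endpoint configuration would follow here */
};

struct ep_map_sketch {
    struct ep_sketch *src;
    size_t src_count;
    struct ep_sketch *dst;
    size_t dst_count;
};

static struct ep_sketch *get_ep_config(struct ep_map_sketch *map, uint32_t thread_id)
{
    size_t i;

    /* A destination thread carries the offset bit and is searched in dst first. */
    if ((thread_id & DST_THREAD_ID_OFFSET) && map->dst) {
        for (i = 0; i < map->dst_count; i++)
            if (map->dst[i].thread_id == thread_id)
                return &map->dst[i];
    }

    /* Otherwise clear the flag and look the ID up in the source table. */
    thread_id &= ~DST_THREAD_ID_OFFSET;
    for (i = 0; i < map->src_count; i++)
        if (map->src[i].thread_id == thread_id)
            return &map->src[i];

    return NULL;
}

int main(void)
{
    struct ep_sketch src[] = { { .thread_id = 0x4400 } };
    struct ep_sketch dst[] = { { .thread_id = 0xc400 } };
    struct ep_map_sketch map = { src, 1, dst, 1 };

    printf("dst lookup: %s\n", get_ep_config(&map, 0xc400) ? "found" : "missing");
    printf("src lookup: %s\n", get_ep_config(&map, 0x4400) ? "found" : "missing");
    return 0;
}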
  k3-psil-am62a.c
      12  .thread_id = x, \
      22  .thread_id = x, \
      33  .thread_id = x, \
      48  .thread_id = x, \
      64  .thread_id = x, \
      74  .thread_id = x, \
  k3-psil-j721s2.c
      12  .thread_id = x, \
      20  .thread_id = x, \
      29  .thread_id = x, \
      39  .thread_id = x, \
      50  .thread_id = x, \
      62  .thread_id = x, \
  k3-psil-am62.c
      12  .thread_id = x, \
      23  .thread_id = x, \
      38  .thread_id = x, \
      54  .thread_id = x, \
      64  .thread_id = x, \
  k3-psil-j7200.c
      13  .thread_id = x, \
      21  .thread_id = x, \
      30  .thread_id = x, \
      40  .thread_id = x, \
      51  .thread_id = x, \
  k3-psil-am62p.c
      12  .thread_id = x, \
      22  .thread_id = x, \
      33  .thread_id = x, \
      48  .thread_id = x, \
      64  .thread_id = x, \
      74  .thread_id = x, \
  k3-psil-j784s4.c
      12  .thread_id = x, \
      20  .thread_id = x, \
      29  .thread_id = x, \
      39  .thread_id = x, \
      50  .thread_id = x, \
      62  .thread_id = x, \
  k3-psil-j721e.c
      13  .thread_id = x, \
      21  .thread_id = x, \
      30  .thread_id = x, \
      40  .thread_id = x, \
      51  .thread_id = x, \
      63  .thread_id = x, \
  k3-psil-am64.c
      13  .thread_id = x, \
      23  .thread_id = x, \
      34  .thread_id = x, \
      49  .thread_id = x, \
  k3-psil-am654.c
      13  .thread_id = x, \
      21  .thread_id = x, \
      30  .thread_id = x, \
      41  .thread_id = x, \
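All of the per-SoC k3-psil-*.c hits above come from endpoint-table initializer macros that stamp a PSIL thread ID into each table entry. A hypothetical sketch of that macro shape; the struct name, macro name, and thread IDs below are made up for illustration, and the real macros also fill in per-endpoint configuration.

/* Hypothetical sketch of the ".thread_id = x," initializer-macro pattern. */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

struct psil_ep_sketch {
    uint32_t thread_id;
    /* .ep_config lives here in the real per-SoC tables */
};

#define PSIL_EP_SKETCH(x) { .thread_id = (x) }

static struct psil_ep_sketch example_src_ep_map[] = {
    PSIL_EP_SKETCH(0x4400),
    PSIL_EP_SKETCH(0x4401),
    PSIL_EP_SKETCH(0x4402),
};

int main(void)
{
    size_t n = sizeof(example_src_ep_map) / sizeof(example_src_ep_map[0]);

    for (size_t i = 0; i < n; i++)
        printf("endpoint %zu -> thread 0x%04x\n", i, example_src_ep_map[i].thread_id);
    return 0;
}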
/linux/arch/powerpc/platforms/ps3/

  interrupt.c
      81  u64 thread_id;   [member]
      101  pd->thread_id, d->irq);   [in ps3_chip_mask()]
      105  lv1_did_update_interrupt_mask(pd->ppe_id, pd->thread_id);   [in ps3_chip_mask()]
      122  pd->thread_id, d->irq);   [in ps3_chip_unmask()]
      126  lv1_did_update_interrupt_mask(pd->ppe_id, pd->thread_id);   [in ps3_chip_unmask()]
      144  lv1_end_of_interrupt_ext(pd->ppe_id, pd->thread_id, d->irq);   [in ps3_chip_eoi()]
      224  __LINE__, pd->ppe_id, pd->thread_id, virq);   [in ps3_virq_destroy()]
      260  result = lv1_connect_irq_plug_ext(pd->ppe_id, pd->thread_id, *virq,   [in ps3_irq_plug_setup()]
      294  __LINE__, pd->ppe_id, pd->thread_id, virq);   [in ps3_irq_plug_destroy()]
      298  result = lv1_disconnect_irq_plug_ext(pd->ppe_id, pd->thread_id, virq);   [in ps3_irq_plug_destroy()]
      [all …]
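The interrupt.c hits show the shape of the PS3 interrupt-chip callbacks: each virq's chip data carries the (ppe_id, thread_id) pair naming the hypervisor-visible hardware thread, and mask/unmask/eoi pass that pair to the lv1 hypervisor calls. A standalone sketch of the mask path, with the hypervisor call replaced by a printing stand-in and an illustrative bitmap layout:

/* Standalone sketch; struct layout and bit numbering are illustrative, and the
 * lv1_did_update_interrupt_mask() hypercall is replaced by a stub. */
#include <stdint.h>
#include <stdio.h>

struct ps3_private_sketch {
    uint64_t ppe_id;    /* logical PPE the interrupt is bound to */
    uint64_t thread_id; /* hardware thread within that PPE */
    uint64_t mask;      /* local copy of the interrupt mask bitmap */
};

/* Stand-in for lv1_did_update_interrupt_mask(ppe_id, thread_id). */
static void hv_update_interrupt_mask(uint64_t ppe_id, uint64_t thread_id)
{
    printf("hv: reload interrupt mask for ppe %llu, thread %llu\n",
           (unsigned long long)ppe_id, (unsigned long long)thread_id);
}

static void chip_mask_sketch(struct ps3_private_sketch *pd, unsigned int virq)
{
    /* Clear this interrupt's bit locally, then ask the hypervisor to reload. */
    pd->mask &= ~(1ULL << (63 - virq));
    hv_update_interrupt_mask(pd->ppe_id, pd->thread_id);
}

int main(void)
{
    struct ps3_private_sketch pd = { .ppe_id = 1, .thread_id = 0, .mask = ~0ULL };

    chip_mask_sketch(&pd, 40);
    printf("mask is now 0x%016llx\n", (unsigned long long)pd.mask);
    return 0;
}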
/linux/drivers/acpi/acpica/

  exmutex.c
      128  acpi_thread_id thread_id)   [in acpi_ex_acquire_mutex_object(), argument]
      140  if (obj_desc->mutex.thread_id == thread_id) {   [in acpi_ex_acquire_mutex_object()]
      168  obj_desc->mutex.thread_id = thread_id;   [in acpi_ex_acquire_mutex_object()]
      235  walk_state->thread->thread_id);   [in acpi_ex_acquire_mutex()]
      321  obj_desc->mutex.thread_id = 0;   [in acpi_ex_release_mutex_object()]
      376  if ((owner_thread->thread_id != walk_state->thread->thread_id) &&   [in acpi_ex_release_mutex()]
      380  (u32)walk_state->thread->thread_id,   [in acpi_ex_release_mutex()]
      382  (u32)owner_thread->thread_id));   [in acpi_ex_release_mutex()]
      502  obj_desc->mutex.thread_id = 0;   [in acpi_ex_release_all_mutexes()]
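The exmutex.c hits outline ACPICA's re-entrancy rule: if the acquiring thread's thread_id matches the owner recorded in the mutex object, the acquisition is treated as a nested acquire rather than a blocking one, and releasing clears the recorded owner back to 0. A minimal standalone sketch of that ownership check; the field names are simplified stand-ins for the obj_desc->mutex ones above.

/* Minimal sketch of the owner check; acpi_thread_id is replaced by a plain integer. */
#include <stdbool.h>
#include <stdio.h>

struct mutex_obj_sketch {
    unsigned long thread_id;        /* owning thread, 0 when unowned */
    unsigned int acquisition_depth; /* nesting count for the owner */
};

/* Returns true when the caller may proceed without waiting. */
static bool acquire_if_owner(struct mutex_obj_sketch *m, unsigned long tid)
{
    if (m->thread_id == tid) {
        /* Same thread re-acquiring: just bump the nesting depth. */
        m->acquisition_depth++;
        return true;
    }
    return false; /* owned by someone else: the real code waits on the OS mutex */
}

int main(void)
{
    struct mutex_obj_sketch m = { .thread_id = 7, .acquisition_depth = 1 };

    printf("same thread may nest: %d\n", acquire_if_owner(&m, 7));
    printf("other thread must wait: %d\n", !acquire_if_owner(&m, 9));
    return 0;
}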
  utmutex.c
      142  acpi_gbl_mutex_info[mutex_id].thread_id =   [in acpi_ut_create_mutex()]
      170  acpi_gbl_mutex_info[mutex_id].thread_id = ACPI_MUTEX_NOT_ACQUIRED;   [in acpi_ut_delete_mutex()]
      212  if (acpi_gbl_mutex_info[i].thread_id == this_thread_id) {   [in acpi_ut_acquire_mutex()]
      250  acpi_gbl_mutex_info[mutex_id].thread_id = this_thread_id;   [in acpi_ut_acquire_mutex()]
      288  if (acpi_gbl_mutex_info[mutex_id].thread_id == ACPI_MUTEX_NOT_ACQUIRED) {   [in acpi_ut_release_mutex()]
      307  if (acpi_gbl_mutex_info[i].thread_id ==   [in acpi_ut_release_mutex()]
      326  acpi_gbl_mutex_info[mutex_id].thread_id = ACPI_MUTEX_NOT_ACQUIRED;   [in acpi_ut_release_mutex()]
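utmutex.c records which thread_id owns each global ACPICA mutex and enforces an ascending acquisition order: before taking mutex N, the caller is rejected if it already owns mutex N or any higher-ordered one. A hedged standalone sketch of that ordering check, with a zero thread_id standing in for ACPI_MUTEX_NOT_ACQUIRED and a made-up table size:

/* Standalone sketch of the acquisition-order check; globals are simplified. */
#include <stdio.h>

#define NUM_MUTEXES 4

static struct { unsigned long thread_id; } mutex_info[NUM_MUTEXES]; /* 0 = unowned */

/* Returns 0 if `tid` may acquire mutex `id` without violating the order. */
static int check_acquire_order(unsigned int id, unsigned long tid)
{
    unsigned int i;

    /* The caller must not already hold this mutex or any higher-ordered one. */
    for (i = id; i < NUM_MUTEXES; i++) {
        if (mutex_info[i].thread_id == tid) {
            fprintf(stderr, "cannot take mutex %u: thread %lu already holds %u\n",
                    id, tid, i);
            return -1;
        }
    }
    return 0;
}

int main(void)
{
    unsigned long tid = 42;

    mutex_info[2].thread_id = tid; /* thread already owns mutex 2 */
    printf("take mutex 1: %s\n", check_acquire_order(1, tid) ? "rejected" : "ok");
    printf("take mutex 3: %s\n", check_acquire_order(3, tid) ? "rejected" : "ok");
    return 0;
}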
  utdebug.c
      140  acpi_thread_id thread_id;   [in acpi_debug_print(), local]
      155  thread_id = acpi_os_get_thread_id();   [in acpi_debug_print()]
      156  if (thread_id != acpi_gbl_previous_thread_id) {   [in acpi_debug_print()]
      160  (u32)acpi_gbl_previous_thread_id, (u32)thread_id);   [in acpi_debug_print()]
      163  acpi_gbl_previous_thread_id = thread_id;   [in acpi_debug_print()]
      181  acpi_os_printf("[%u] ", (u32)thread_id);   [in acpi_debug_print()]
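The utdebug.c hits show how the debug output tracks execution context: each print compares the current thread_id with the last one seen and emits a context-switch banner when they differ, then prefixes the message with the thread ID. A standalone sketch of that bookkeeping, with the thread ID type and output format simplified:

/* Sketch of the context-switch detection in acpi_debug_print(). */
#include <stdio.h>

static unsigned long previous_thread_id; /* mirrors acpi_gbl_previous_thread_id */

static void debug_print_sketch(unsigned long thread_id, const char *msg)
{
    if (thread_id != previous_thread_id) {
        printf("\n**** Context switch from %lu to %lu ****\n\n",
               previous_thread_id, thread_id);
        previous_thread_id = thread_id;
    }
    printf("[%lu] %s\n", thread_id, msg);
}

int main(void)
{
    debug_print_sketch(100, "first message");
    debug_print_sketch(100, "same thread, no banner");
    debug_print_sketch(200, "new thread triggers the banner");
    return 0;
}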
/linux/drivers/md/dm-vdo/

  vdo.c
      338  thread_id_t thread_id, char *buffer, size_t buffer_length)   [in get_thread_name(), argument]
      340  if (thread_id == thread_config->journal_thread) {   [in get_thread_name()]
      341  if (thread_config->packer_thread == thread_id) {   [in get_thread_name()]
      353  } else if (thread_id == thread_config->admin_thread) {   [in get_thread_name()]
      357  } else if (thread_id == thread_config->packer_thread) {   [in get_thread_name()]
      360  } else if (thread_id == thread_config->dedupe_thread) {   [in get_thread_name()]
      363  } else if (thread_id == thread_config->bio_ack_thread) {   [in get_thread_name()]
      366  } else if (thread_id == thread_config->cpu_thread) {   [in get_thread_name()]
      373  thread_id, "logQ", buffer, buffer_length))   [in get_thread_name()]
      378  thread_id, "physQ", buffer, buffer_length))   [in get_thread_name()]
      [all …]
  completion.c
      115  thread_id_t thread_id = completion->callback_thread_id;   [in vdo_enqueue_completion(), local]
      117  if (VDO_ASSERT(thread_id < vdo->thread_config.thread_count,   [in vdo_enqueue_completion()]
      119  thread_id, completion->type,   [in vdo_enqueue_completion()]
      126  vdo_enqueue_work_queue(vdo->threads[thread_id].queue, completion);   [in vdo_enqueue_completion()]
  flush.c
      42  thread_id_t thread_id;   [member]
      62  VDO_ASSERT_LOG_ONLY((vdo_get_callback_thread_id() == flusher->thread_id),   [in assert_on_flusher_thread()]
      143  vdo->flusher->thread_id = vdo->thread_config.packer_thread;   [in vdo_make_flusher()]
      178  return flusher->thread_id;   [in vdo_get_flusher_thread_id()]
      218  flusher->thread_id);   [in flush_packer_callback()]
      236  flusher->thread_id);   [in increment_generation()]
      242  flusher->logical_zone_to_notify->thread_id);   [in increment_generation()]
      258  flusher->logical_zone_to_notify->thread_id);   [in notify_flush()]
  logical-zone.c
      43  return zones->zones[zone_number].thread_id;   [in get_thread_id_for_zone()]
      69  zone->thread_id = vdo->thread_config.logical_threads[zone_number];   [in initialize_zone()]
      74  allocation_zone_number = zone->thread_id % vdo->thread_config.physical_zone_count;   [in initialize_zone()]
      77  return vdo_make_default_thread(vdo, zone->thread_id);   [in initialize_zone()]
      145  VDO_ASSERT_LOG_ONLY((vdo_get_callback_thread_id() == zone->thread_id),   [in assert_on_zone_thread()]
      280  zone->thread_id);   [in notify_flusher()]
  vdo.h
      58  thread_id_t thread_id;   [member]
      292  int __must_check vdo_make_thread(struct vdo *vdo, thread_id_t thread_id,
      297  thread_id_t thread_id)   [in vdo_make_default_thread(), argument]
      299  return vdo_make_thread(vdo, thread_id, NULL, 1, NULL);   [in vdo_make_default_thread()]
      333  thread_id_t thread_id);
/linux/tools/testing/selftests/bpf/prog_tests/

  map_lock.c
      54  pthread_t thread_id[6];   [in test_map_lock(), local]
      74  if (CHECK_FAIL(pthread_create(&thread_id[i], NULL,   [in test_map_lock()]
      78  if (CHECK_FAIL(pthread_create(&thread_id[i], NULL,   [in test_map_lock()]
      83  if (CHECK_FAIL(pthread_join(thread_id[i], &ret) ||   [in test_map_lock()]
      87  if (CHECK_FAIL(pthread_join(thread_id[i], &ret) ||   [in test_map_lock()]
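The map_lock.c hits are the usual create/join skeleton: six pthreads are started into a thread_id[] array and each is later joined and its return value checked. A stripped-down standalone version of that skeleton follows; the BPF map work the real test performs is omitted, and the return-value convention here is only illustrative.

/* Standalone sketch; build with -pthread. The worker just echoes its argument. */
#include <pthread.h>
#include <stdio.h>

static void *worker(void *arg)
{
    return arg; /* the real selftest hammers a spin-locked BPF map here */
}

int main(void)
{
    pthread_t thread_id[6];
    void *ret;
    long i;

    for (i = 0; i < 6; i++)
        if (pthread_create(&thread_id[i], NULL, worker, (void *)i))
            return 1;

    for (i = 0; i < 6; i++)
        if (pthread_join(thread_id[i], &ret) || (long)ret != i)
            return 1;

    puts("all six workers joined with the expected return values");
    return 0;
}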
/linux/arch/powerpc/kvm/

  book3s_hv_ras.c
      205  int thread_id, subcore_id;   [in kvmppc_subcore_enter_guest(), local]
      207  thread_id = cpu_thread_in_core(local_paca->paca_index);   [in kvmppc_subcore_enter_guest()]
      208  subcore_id = thread_id / kvmppc_cur_subcore_size();   [in kvmppc_subcore_enter_guest()]
      216  int thread_id, subcore_id;   [in kvmppc_subcore_exit_guest(), local]
      218  thread_id = cpu_thread_in_core(local_paca->paca_index);   [in kvmppc_subcore_exit_guest()]
      219  subcore_id = thread_id / kvmppc_cur_subcore_size();   [in kvmppc_subcore_exit_guest()]
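Both functions derive the subcore from the hardware thread number by integer division with the current subcore size. A tiny standalone illustration of that mapping, assuming four threads per subcore (the kernel gets the real value from kvmppc_cur_subcore_size()):

/* Sketch of the thread-to-subcore mapping; the subcore size is an assumed example. */
#include <stdio.h>

int main(void)
{
    int threads_per_subcore = 4;
    int thread_id;

    for (thread_id = 0; thread_id < 8; thread_id++)
        printf("thread %d -> subcore %d\n",
               thread_id, thread_id / threads_per_subcore);
    return 0;
}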
/linux/drivers/misc/amd-sbi/

  rmi-core.c
      91  u8 thread_id, u32 func,   [in prepare_cpuid_input_message(), argument]
      97  input->thread = thread_id << 1;   [in prepare_cpuid_input_message()]
      103  u16 thread_id, u32 func,   [in prepare_cpuid_input_message_ext(), argument]
      109  input->thread_lo = (thread_id & 0xFF) << 1;   [in prepare_cpuid_input_message_ext()]
      110  input->thread_hi = thread_id >> 8;   [in prepare_cpuid_input_message_ext()]
      116  u8 thread_id, u32 data_in)   [in prepare_mca_msr_input_message(), argument]
      121  input->thread = thread_id << 1;   [in prepare_mca_msr_input_message()]
      126  u16 thread_id, u32 data_in)   [in prepare_mca_msr_input_message_ext(), argument]
      131  input->thread_lo = (thread_id & 0xFF) << 1;   [in prepare_mca_msr_input_message_ext()]
      132  input->thread_hi = thread_id >> 8;   [in prepare_mca_msr_input_message_ext()]
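The rmi-core.c hits show how the message encodes the target thread: the thread_id is shifted left by one bit, and the extended form splits a 16-bit ID into a low byte (also shifted) and a high byte. A standalone sketch of that packing with illustrative values; the message structures themselves are not reproduced here.

/* Sketch of the thread-ID packing; IDs are made up for illustration. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Basic form: an 8-bit thread ID, shifted left by one bit. */
    uint8_t thread_id8 = 0x2b;
    uint8_t thread = (uint8_t)(thread_id8 << 1);

    /* Extended form: a 16-bit thread ID split into low/high bytes,
     * with the same one-bit shift applied to the low byte. */
    uint16_t thread_id16 = 0x1a5;
    uint8_t thread_lo = (uint8_t)((thread_id16 & 0xFF) << 1);
    uint8_t thread_hi = (uint8_t)(thread_id16 >> 8);

    printf("thread=0x%02x thread_lo=0x%02x thread_hi=0x%02x\n",
           thread, thread_lo, thread_hi);
    return 0;
}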
/linux/arch/arm/kernel/

  topology.c
      204  cpuid_topo->thread_id = MPIDR_AFFINITY_LEVEL(mpidr, 0);   [in store_cpu_topology()]
      209  cpuid_topo->thread_id = -1;   [in store_cpu_topology()]
      219  cpuid_topo->thread_id = -1;   [in store_cpu_topology()]
      227  cpuid, cpu_topology[cpuid].thread_id,   [in store_cpu_topology()]
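store_cpu_topology() takes the thread ID from affinity level 0 of the MPIDR when the CPU reports multithreading, and falls back to -1 otherwise. A standalone sketch of extracting the affinity levels, assuming the usual 8-bit fields; AFF_LEVEL() here only mimics the kernel's MPIDR_AFFINITY_LEVEL() helper, and the MPIDR value is made up.

/* Sketch of pulling thread/core/cluster numbers out of an MPIDR-like value. */
#include <stdint.h>
#include <stdio.h>

#define AFF_LEVEL(mpidr, level) (((mpidr) >> (8 * (level))) & 0xff)

int main(void)
{
    uint32_t mpidr = 0x80000102; /* illustrative: aff2 = 0, aff1 = 1, aff0 = 2 */

    printf("thread_id = %u, core = %u, cluster = %u\n",
           (unsigned)AFF_LEVEL(mpidr, 0),
           (unsigned)AFF_LEVEL(mpidr, 1),
           (unsigned)AFF_LEVEL(mpidr, 2));
    return 0;
}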
/linux/rust/syn/

  thread.rs
      11  thread_id: ThreadId,   [field]
      26  thread_id: thread::current().id(),   [in new()]
      31  if thread::current().id() == self.thread_id {   [in get()]
/linux/tools/testing/selftests/sync/

  sync_stress_parallelism.c
      42  int thread_id = (long)d;   [in test_stress_two_threads_shared_timeline_thread(), local]
      49  i * 2 + thread_id);   [in test_stress_two_threads_shared_timeline_thread()]
      61  ASSERT(test_data_two_threads.counter == i * 2 + thread_id,   [in test_stress_two_threads_shared_timeline_thread()]