Lines matching references to: node
117 iort_entry_get_id_mapping_index(struct iort_node *node) in iort_entry_get_id_mapping_index() argument
120 switch(node->type) { in iort_entry_get_id_mapping_index()
123 if (node->revision < 1) in iort_entry_get_id_mapping_index()
130 if (node->data.smmu_v3.EventGsiv != 0 && in iort_entry_get_id_mapping_index()
131 node->data.smmu_v3.PriGsiv != 0 && in iort_entry_get_id_mapping_index()
132 node->data.smmu_v3.GerrGsiv != 0 && in iort_entry_get_id_mapping_index()
133 node->data.smmu_v3.SyncGsiv != 0) in iort_entry_get_id_mapping_index()
136 if (node->data.smmu_v3.IdMappingIndex >= node->nentries) in iort_entry_get_id_mapping_index()
139 return (node->data.smmu_v3.IdMappingIndex); in iort_entry_get_id_mapping_index()
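
The lines above apply the IORT rule for SMMUv3 nodes: the node only consumes one of its own ID mappings (the one describing the DeviceID for its control-interrupt MSIs) when the node revision is at least 1 and not all of the Event/PRI/GERR/Sync interrupts already have wired GSIVs, and an index pointing past the mapping array is ignored. A minimal sketch of that decision, using a reduced stand-in for ACPI_IORT_SMMU_V3 and a -1 "no index" convention assumed from the surrounding code:

/* Reduced stand-in for the SMMUv3 fields used above (see ACPI_IORT_SMMU_V3). */
struct smmu_v3_like {
        unsigned int EventGsiv, PriGsiv, GerrGsiv, SyncGsiv;
        unsigned int IdMappingIndex;
};

static int
smmu_v3_id_mapping_index(const struct smmu_v3_like *s, int revision,
    unsigned int nentries)
{
        /* The DeviceID mapping index field only exists from node revision 1 on. */
        if (revision < 1)
                return (-1);
        /*
         * If every control interrupt already has a wired GSIV, the SMMU raises
         * no MSIs of its own and therefore consumes none of its ID mappings.
         */
        if (s->EventGsiv != 0 && s->PriGsiv != 0 &&
            s->GerrGsiv != 0 && s->SyncGsiv != 0)
                return (-1);
        /* An index pointing past the mapping array is ignored. */
        if (s->IdMappingIndex >= nentries)
                return (-1);
        return ((int)s->IdMappingIndex);
}
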
154 iort_entry_lookup(struct iort_node *node, u_int id, u_int *outid) in iort_entry_lookup() argument
159 id_map = iort_entry_get_id_mapping_index(node); in iort_entry_lookup()
160 entry = node->entries.mappings; in iort_entry_lookup()
161 for (i = 0; i < node->nentries; i++, entry++) { in iort_entry_lookup()
167 if (i == node->nentries) in iort_entry_lookup()
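
The match and translate steps of iort_entry_lookup() fall outside the matched lines, but the loop above walks the flattened mapping array and follows the generic IORT rule: an input ID inside a mapping's input range is rebased onto its output range, unless the entry is a single mapping, in which case every input collapses to the one output ID (the mapping reserved for the SMMUv3 DeviceID, found above, is skipped). A sketch of the per-entry test with assumed field names (base/end/outbase/flags stand in for the driver's private iort_map_entry):

#define MAP_SINGLE      0x1     /* stand-in for ACPI_IORT_ID_SINGLE_MAPPING */

struct map_entry_like {
        unsigned int base;      /* first input ID covered by the mapping */
        unsigned int end;       /* last input ID covered (inclusive) */
        unsigned int outbase;   /* output ID corresponding to 'base' */
        unsigned int flags;
};

/* Translate 'id' through one mapping entry; returns 0 on a miss. */
static int
map_entry_translate(const struct map_entry_like *e, unsigned int id,
    unsigned int *outid)
{
        if (id < e->base || id > e->end)
                return (0);
        if (e->flags & MAP_SINGLE)
                *outid = e->outbase;                    /* whole range collapses */
        else
                *outid = e->outbase + (id - e->base);   /* rebase onto output range */
        return (1);
}
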
180 iort_smmu_trymap(struct iort_node *node, u_int outtype, u_int *outid) in iort_smmu_trymap() argument
183 if (!node) in iort_smmu_trymap()
188 (node->type == ACPI_IORT_NODE_SMMU_V3 || in iort_smmu_trymap()
189 node->type == ACPI_IORT_NODE_SMMU)) { in iort_smmu_trymap()
190 node = iort_entry_lookup(node, *outid, outid); in iort_smmu_trymap()
191 if (node == NULL) in iort_smmu_trymap()
195 KASSERT(node->type == outtype, ("mapping fail")); in iort_smmu_trymap()
196 return (node); in iort_smmu_trymap()
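
iort_smmu_trymap() supplies the one extra hop a translation may need: a root-complex or named-component mapping can terminate at an SMMU node, and when the caller asked for a different output type (typically an ITS group) the already-translated ID is pushed through the SMMU's own mapping table before the final type check. A sketch of that chaining; the type-mismatch condition and the helper are assumptions inferred from the surrounding lines:

enum node_type { NT_ITS_GROUP, NT_SMMU, NT_SMMU_V3, NT_PCI_RC, NT_NAMED };

struct node_like {
        enum node_type type;
};

/* Assumed helper: one hop through a node's own mapping table. */
struct node_like *entry_lookup(struct node_like *, unsigned int, unsigned int *);

static struct node_like *
smmu_trymap(struct node_like *node, enum node_type outtype, unsigned int *outid)
{
        if (node == NULL)
                return (NULL);
        /* Take one extra hop when we stopped at an SMMU but wanted, say, an ITS group. */
        if (node->type != outtype &&
            (node->type == NT_SMMU || node->type == NT_SMMU_V3)) {
                node = entry_lookup(node, *outid, outid);
                if (node == NULL)
                        return (NULL);
        }
        return (node->type == outtype ? node : NULL);
}
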
205 struct iort_node *node, *out_node; in iort_pci_rc_map() local
209 TAILQ_FOREACH(node, &pci_nodes, next) { in iort_pci_rc_map()
210 if (node->data.pci_rc.PciSegmentNumber != seg) in iort_pci_rc_map()
212 out_node = iort_entry_lookup(node, rid, &nxtid); in iort_pci_rc_map()
230 struct iort_node *node, *out_node; in iort_named_comp_map() local
234 TAILQ_FOREACH(node, &named_nodes, next) { in iort_named_comp_map()
235 if (strstr(node->data.named_comp.DeviceName, devname) == NULL) in iort_named_comp_map()
237 out_node = iort_entry_lookup(node, rid, &nxtid); in iort_named_comp_map()
266 iort_copy_data(struct iort_node *node, ACPI_IORT_NODE *node_entry) in iort_copy_data() argument
274 node->nentries = node_entry->MappingCount; in iort_copy_data()
275 node->usecount = 0; in iort_copy_data()
276 mapping = malloc(sizeof(*mapping) * node->nentries, M_DEVBUF, in iort_copy_data()
278 node->entries.mappings = mapping; in iort_copy_data()
279 for (i = 0; i < node->nentries; i++, mapping++, map_entry++) { in iort_copy_data()
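
iort_copy_data() flattens the raw ACPI_IORT_ID_MAPPING array that follows each node into driver-private records, so later lookups need no table offsets. A reduced sketch of the per-entry copy; the destination struct is a stand-in (the real iort_map_entry layout is not part of the matched lines), and the IORT spec defines the Number of IDs field as the range length minus one, so InputBase + IdCount is the inclusive end:

#include <stdint.h>
#include <stdlib.h>

/* Raw table entry, matching the layout of ACPI_IORT_ID_MAPPING. */
struct raw_id_mapping {
        uint32_t InputBase;
        uint32_t IdCount;               /* per the spec: number of IDs minus one */
        uint32_t OutputBase;
        uint32_t OutputReference;       /* byte offset of the output node */
        uint32_t Flags;
};

/* Hypothetical flattened form kept by the driver. */
struct flat_mapping {
        uint32_t base, end, outbase, flags;
        uint32_t out_node_offset;       /* resolved to a node pointer later */
};

static struct flat_mapping *
copy_mappings(const struct raw_id_mapping *src, uint32_t count)
{
        struct flat_mapping *dst;

        dst = calloc(count, sizeof(*dst));
        if (dst == NULL)
                return (NULL);
        for (uint32_t i = 0; i < count; i++, src++) {
                dst[i].base = src->InputBase;
                dst[i].end = src->InputBase + src->IdCount;     /* inclusive end */
                dst[i].outbase = src->OutputBase;
                dst[i].flags = src->Flags;
                dst[i].out_node_offset = src->OutputReference;
        }
        return (dst);
}
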
297 iort_copy_its(struct iort_node *node, ACPI_IORT_NODE *node_entry) in iort_copy_its() argument
305 node->nentries = itsg_entry->ItsCount; in iort_copy_its()
306 node->usecount = 0; in iort_copy_its()
307 its = malloc(sizeof(*its) * node->nentries, M_DEVBUF, M_WAITOK | M_ZERO); in iort_copy_its()
308 node->entries.its = its; in iort_copy_its()
310 for (i = 0; i < node->nentries; i++, its++, id++) { in iort_copy_its()
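
iort_copy_its() is the same copy step for ITS-group nodes, whose payload (ACPI_IORT_ITS_GROUP) is just a count followed by GIC ITS identifiers; the xref consumed later by acpi_iort_map_*_msi() is attached to these entries once the ITS instances register themselves. A minimal sketch with a hypothetical destination record:

#include <stdint.h>
#include <stdlib.h>

struct raw_its_group {                  /* matches ACPI_IORT_ITS_GROUP */
        uint32_t ItsCount;
        uint32_t Identifiers[];         /* ItsCount GIC ITS identifiers */
};

struct its_entry_like {                 /* hypothetical driver-side record */
        uint32_t its_id;
        uintptr_t xref;                 /* filled in when the ITS registers */
};

static struct its_entry_like *
copy_its_group(const struct raw_its_group *grp)
{
        struct its_entry_like *its;

        its = calloc(grp->ItsCount, sizeof(*its));
        if (its == NULL)
                return (NULL);
        for (uint32_t i = 0; i < grp->ItsCount; i++)
                its[i].its_id = grp->Identifiers[i];
        return (its);
}
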
327 struct iort_node *node; in iort_add_nodes() local
329 node = malloc(sizeof(*node), M_DEVBUF, M_WAITOK | M_ZERO); in iort_add_nodes()
330 node->type = node_entry->Type; in iort_add_nodes()
331 node->node_offset = node_offset; in iort_add_nodes()
332 node->revision = node_entry->Revision; in iort_add_nodes()
338 memcpy(&node->data.pci_rc, pci_rc, sizeof(*pci_rc)); in iort_add_nodes()
339 iort_copy_data(node, node_entry); in iort_add_nodes()
340 TAILQ_INSERT_TAIL(&pci_nodes, node, next); in iort_add_nodes()
344 memcpy(&node->data.smmu, smmu, sizeof(*smmu)); in iort_add_nodes()
345 iort_copy_data(node, node_entry); in iort_add_nodes()
346 TAILQ_INSERT_TAIL(&smmu_nodes, node, next); in iort_add_nodes()
350 memcpy(&node->data.smmu_v3, smmu_v3, sizeof(*smmu_v3)); in iort_add_nodes()
351 iort_copy_data(node, node_entry); in iort_add_nodes()
352 TAILQ_INSERT_TAIL(&smmu_nodes, node, next); in iort_add_nodes()
355 iort_copy_its(node, node_entry); in iort_add_nodes()
356 TAILQ_INSERT_TAIL(&its_groups, node, next); in iort_add_nodes()
360 memcpy(&node->data.named_comp, named_comp, sizeof(*named_comp)); in iort_add_nodes()
363 strncpy(node->data.named_comp.DeviceName, in iort_add_nodes()
365 sizeof(node->data.named_comp.DeviceName)); in iort_add_nodes()
366 node->data.named_comp.DeviceName[31] = 0; in iort_add_nodes()
368 iort_copy_data(node, node_entry); in iort_add_nodes()
369 TAILQ_INSERT_TAIL(&named_nodes, node, next); in iort_add_nodes()
374 free(node, M_DEVBUF); in iort_add_nodes()
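
iort_add_nodes() is one allocate-copy-classify step per table entry: the type-specific payload is copied into the node, the ID mappings (or ITS identifiers) are copied with the helpers above, and the node is appended to a per-type TAILQ (pci_nodes, smmu_nodes, its_groups, named_nodes) that the lookup paths walk later; unrecognized types are freed in the default case. A self-contained illustration of that classify-into-lists pattern with sys/queue.h (the types and node contents are invented):

#include <sys/queue.h>
#include <stdio.h>
#include <stdlib.h>

enum ntype { NT_PCI_RC, NT_SMMU_V3 };

struct xnode {
        enum ntype type;
        int id;
        TAILQ_ENTRY(xnode) next;
};

static TAILQ_HEAD(, xnode) pci_nodes = TAILQ_HEAD_INITIALIZER(pci_nodes);
static TAILQ_HEAD(, xnode) smmu_nodes = TAILQ_HEAD_INITIALIZER(smmu_nodes);

int
main(void)
{
        enum ntype types[] = { NT_PCI_RC, NT_SMMU_V3, NT_PCI_RC };
        struct xnode *n;

        for (int i = 0; i < 3; i++) {
                n = calloc(1, sizeof(*n));
                if (n == NULL)
                        return (1);
                n->type = types[i];
                n->id = i;
                /* Same dispatch idea as iort_add_nodes(): one list per node type. */
                if (n->type == NT_PCI_RC)
                        TAILQ_INSERT_TAIL(&pci_nodes, n, next);
                else
                        TAILQ_INSERT_TAIL(&smmu_nodes, n, next);
        }
        TAILQ_FOREACH(n, &pci_nodes, next)
                printf("pci node %d\n", n->id);
        return (0);
}
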
386 struct iort_node *node, *np; in iort_resolve_node() local
388 node = NULL; in iort_resolve_node()
392 node = np; in iort_resolve_node()
397 if (node == NULL) { in iort_resolve_node()
400 node = np; in iort_resolve_node()
405 if (node != NULL) { in iort_resolve_node()
406 node->usecount++; in iort_resolve_node()
407 entry->out_node = node; in iort_resolve_node()
420 struct iort_node *node; in iort_post_process_mappings() local
423 TAILQ_FOREACH(node, &pci_nodes, next) in iort_post_process_mappings()
424 for (i = 0; i < node->nentries; i++) in iort_post_process_mappings()
425 iort_resolve_node(&node->entries.mappings[i], TRUE); in iort_post_process_mappings()
426 TAILQ_FOREACH(node, &smmu_nodes, next) in iort_post_process_mappings()
427 for (i = 0; i < node->nentries; i++) in iort_post_process_mappings()
428 iort_resolve_node(&node->entries.mappings[i], FALSE); in iort_post_process_mappings()
429 TAILQ_FOREACH(node, &named_nodes, next) in iort_post_process_mappings()
430 for (i = 0; i < node->nentries; i++) in iort_post_process_mappings()
431 iort_resolve_node(&node->entries.mappings[i], TRUE); in iort_post_process_mappings()
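
Mappings are copied with only the OutputReference byte offset of their target; iort_resolve_node() converts that offset back into a node pointer once the whole table has been parsed, searching the SMMU list first (only when the caller permits SMMU targets, which is why iort_post_process_mappings() passes TRUE for root complexes and named components but FALSE for SMMUs) and then the ITS-group list, bumping the chosen node's usecount. A sketch of the offset-to-pointer resolution with stand-in struct and list names:

#include <sys/queue.h>

struct tnode {                          /* reduced stand-in for struct iort_node */
        unsigned int node_offset;       /* byte offset of the node within the IORT */
        unsigned int usecount;
        TAILQ_ENTRY(tnode) next;
};
TAILQ_HEAD(tnode_list, tnode);

struct tmapping {
        unsigned int out_node_offset;   /* OutputReference copied from the table */
        struct tnode *out_node;         /* resolved here */
};

static void
resolve_mapping(struct tmapping *m, struct tnode_list *smmus,
    struct tnode_list *its_groups, int check_smmu)
{
        struct tnode *np, *found = NULL;

        /* SMMUs are legal targets only for root-complex and named-component maps. */
        if (check_smmu) {
                TAILQ_FOREACH(np, smmus, next) {
                        if (np->node_offset == m->out_node_offset) {
                                found = np;
                                break;
                        }
                }
        }
        if (found == NULL) {
                TAILQ_FOREACH(np, its_groups, next) {
                        if (np->node_offset == m->out_node_offset) {
                                found = np;
                                break;
                        }
                }
        }
        if (found != NULL) {
                found->usecount++;
                m->out_node = found;
        }
}
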
618 struct iort_node *node; in acpi_iort_map_pci_msi() local
620 node = iort_pci_rc_map(seg, rid, ACPI_IORT_NODE_ITS_GROUP, devid); in acpi_iort_map_pci_msi()
621 if (node == NULL) in acpi_iort_map_pci_msi()
625 KASSERT(node->type == ACPI_IORT_NODE_ITS_GROUP, ("bad group")); in acpi_iort_map_pci_msi()
628 *xref = node->entries.its[0].xref; in acpi_iort_map_pci_msi()
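
Seen from its callers, acpi_iort_map_pci_msi() is what the PCI half of this walk exists for: given a PCI segment and requester ID it returns the xref of the ITS group serving the device (the first ITS entry's xref, per the line above) and the DeviceID to program. A hedged usage sketch; the prototype and the zero-on-success convention are assumptions inferred from these fragments, and the real declaration lives in acpi_iort.h:

typedef unsigned int u_int;

/* Assumed prototype, inferred from the matched lines; see acpi_iort.h. */
int acpi_iort_map_pci_msi(u_int seg, u_int rid, u_int *xref, u_int *devid);

static void
route_msi_example(void)
{
        u_int xref, devid;
        u_int seg = 0;                          /* PCI segment (domain) */
        u_int rid = (3 << 8) | (0 << 3) | 0;    /* requester ID: bus 3, dev 0, fn 0 */

        if (acpi_iort_map_pci_msi(seg, rid, &xref, &devid) == 0) {
                /*
                 * xref identifies the GIC ITS group serving this requester;
                 * devid is the DeviceID to program for its MSIs.
                 */
        }
        /* A non-zero return means the IORT has no MSI route for this RID. */
}
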
636 struct iort_node *node; in acpi_iort_map_pci_smmuv3() local
638 node = iort_pci_rc_map(seg, rid, ACPI_IORT_NODE_SMMU_V3, sid); in acpi_iort_map_pci_smmuv3()
639 if (node == NULL) in acpi_iort_map_pci_smmuv3()
643 KASSERT(node->type == ACPI_IORT_NODE_SMMU_V3, ("bad node")); in acpi_iort_map_pci_smmuv3()
645 smmu = (ACPI_IORT_SMMU_V3 *)&node->data.smmu_v3; in acpi_iort_map_pci_smmuv3()
659 struct iort_node *node; in acpi_iort_map_named_msi() local
661 node = iort_named_comp_map(devname, rid, ACPI_IORT_NODE_ITS_GROUP, in acpi_iort_map_named_msi()
663 if (node == NULL) in acpi_iort_map_named_msi()
667 KASSERT(node->type == ACPI_IORT_NODE_ITS_GROUP, ("bad group")); in acpi_iort_map_named_msi()
670 *xref = node->entries.its[0].xref; in acpi_iort_map_named_msi()
679 struct iort_node *node; in acpi_iort_map_named_smmuv3() local
681 node = iort_named_comp_map(devname, rid, ACPI_IORT_NODE_SMMU_V3, devid); in acpi_iort_map_named_smmuv3()
682 if (node == NULL) in acpi_iort_map_named_smmuv3()
686 KASSERT(node->type == ACPI_IORT_NODE_SMMU_V3, ("bad node")); in acpi_iort_map_named_smmuv3()
688 smmu = (ACPI_IORT_SMMU_V3 *)&node->data.smmu_v3; in acpi_iort_map_named_smmuv3()
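
The SMMUv3 variants have the same shape, but the translated ID is a StreamID and the xref is derived from the SMMUv3 node's own data (the cast to ACPI_IORT_SMMU_V3 above suggests a field such as its base address identifies the instance). A hedged usage sketch for a non-PCI DMA master; the prototype and the device name are assumptions made for illustration:

typedef unsigned int u_int;

/* Assumed prototype, inferred from the matched lines; see acpi_iort.h. */
int acpi_iort_map_named_smmuv3(const char *devname, u_int rid, u_int *xref,
    u_int *sid);

static void
named_smmu_example(void)
{
        u_int xref, sid;

        /* "\\_SB_.MMC0" is a made-up ACPI namepath for illustration. */
        if (acpi_iort_map_named_smmuv3("\\_SB_.MMC0", 0, &xref, &sid) == 0) {
                /*
                 * xref identifies the SMMUv3 instance in front of the device;
                 * sid is the StreamID it emits for requester ID 0.
                 */
        }
}
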