Searched refs:LDG_VALID (Results 1 – 4 of 4) sorted by relevance
 859  ASSERT(LDG_VALID(ldg));                                    in npi_fzc_ldg_num_set()
 860  if (!LDG_VALID(ldg)) {                                     in npi_fzc_ldg_num_set()
 964  ASSERT(LDG_VALID(ldg));                                    in npi_ldsv_get()
 965  if (!LDG_VALID(ldg)) {                                     in npi_ldsv_get()
1022  ASSERT(LDG_VALID(ldg));                                    in npi_ldsv_ld_get()
1023  if (!LDG_VALID(ldg)) {                                     in npi_ldsv_ld_get()
1225  ASSERT((LDG_VALID(ldg)) && (LD_INTTIMER_VALID(timer)));    in npi_intr_ldg_mgmt_set()
1226  if (!LDG_VALID(ldg)) {                                     in npi_intr_ldg_mgmt_set()
1277  ASSERT(LDG_VALID(ldg));                                    in npi_intr_ldg_mgmt_timer_get()
1278  if (!LDG_VALID(ldg)) {                                     in npi_intr_ldg_mgmt_timer_get()
[all …]
  46  if (!LDG_VALID(ldg)) {                                     in hpi_fzc_ldg_num_set()
  88  if (!LDG_VALID(ldg)) {                                     in hpi_ldsv_get()
 149  if (!LDG_VALID(ldg)) {                                     in hpi_intr_ldg_mgmt_set()
 210  if (!LDG_VALID(sid.ldg)) {                                 in hpi_fzc_sid_set()
 122  #define LDG_VALID(n) (n < HXGE_INT_MAX_LDG)                macro
 473  #define LDG_VALID(n) (n < NXGE_INT_MAX_LDG)                macro
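The hits above all follow the same pattern: each NPI/HPI routine first ASSERTs the logical device group (LDG) number against the per-driver maximum (NXGE_INT_MAX_LDG or HXGE_INT_MAX_LDG), then repeats the check at runtime and fails gracefully in non-debug builds. A minimal sketch of that pattern follows; the names MY_INT_MAX_LDG, my_status_t, and my_ldg_num_set() are hypothetical stand-ins, not the drivers' real identifiers.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical bound; the real drivers use NXGE_INT_MAX_LDG / HXGE_INT_MAX_LDG. */
    #define MY_INT_MAX_LDG  64

    /* Same shape as the LDG_VALID() hits above; the argument is
     * parenthesized here, which the original macros do not do. */
    #define LDG_VALID(n)    ((n) < MY_INT_MAX_LDG)

    typedef enum { MY_SUCCESS = 0, MY_FAILURE = 1 } my_status_t;

    /* Mirrors the assert-then-runtime-check pattern seen in
     * npi_fzc_ldg_num_set() / hpi_fzc_ldg_num_set(). */
    my_status_t
    my_ldg_num_set(uint8_t ldg)
    {
            assert(LDG_VALID(ldg));         /* catch bad callers in debug builds */
            if (!LDG_VALID(ldg)) {          /* still refuse at runtime */
                    (void) fprintf(stderr,
                        "my_ldg_num_set: invalid ldg %u\n", ldg);
                    return (MY_FAILURE);
            }
            /* ... program the logical device group register here ... */
            return (MY_SUCCESS);
    }

The duplicated check is deliberate: ASSERT compiles away in production kernels, so the runtime test is what keeps an out-of-range LDG index from reaching the hardware there, while the ASSERT makes the bug loud during development.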