Searched refs:LDG_VALID (Results 1 – 4 of 4) sorted by relevance
857   ASSERT(LDG_VALID(ldg)); in npi_fzc_ldg_num_set()
858   if (!LDG_VALID(ldg)) { in npi_fzc_ldg_num_set()
962   ASSERT(LDG_VALID(ldg)); in npi_ldsv_get()
963   if (!LDG_VALID(ldg)) { in npi_ldsv_get()
1020  ASSERT(LDG_VALID(ldg)); in npi_ldsv_ld_get()
1021  if (!LDG_VALID(ldg)) { in npi_ldsv_ld_get()
1224  ASSERT((LDG_VALID(ldg)) && (LD_INTTIMER_VALID(timer))); in npi_intr_ldg_mgmt_set()
1225  if (!LDG_VALID(ldg)) { in npi_intr_ldg_mgmt_set()
1276  ASSERT(LDG_VALID(ldg)); in npi_intr_ldg_mgmt_timer_get()
1277  if (!LDG_VALID(ldg)) { in npi_intr_ldg_mgmt_timer_get()
[all …]
44    if (!LDG_VALID(ldg)) { in hpi_fzc_ldg_num_set()
86    if (!LDG_VALID(ldg)) { in hpi_ldsv_get()
147   if (!LDG_VALID(ldg)) { in hpi_intr_ldg_mgmt_set()
208   if (!LDG_VALID(sid.ldg)) { in hpi_fzc_sid_set()
122   #define LDG_VALID(n) (n < HXGE_INT_MAX_LDG)   macro
473   #define LDG_VALID(n) (n < NXGE_INT_MAX_LDG)   macro
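The hits above all follow the same defensive pattern: a debug-build ASSERT on the logical device group (LDG) number, followed by a runtime check that rejects an out-of-range value even when assertions are compiled out. A minimal sketch of that pattern, assuming a hypothetical status type and limit value, since the actual return codes and NXGE_INT_MAX_LDG definition are not shown in these results:

```c
#include <assert.h>
#include <stdint.h>

#define NXGE_INT_MAX_LDG 64                     /* assumed value; not shown above */
#define LDG_VALID(n)     ((n) < NXGE_INT_MAX_LDG)

/* Hypothetical status codes standing in for the drivers' npi_status_t values. */
typedef enum { NPI_SUCCESS = 0, NPI_FAILURE = -1 } npi_status_t;

/*
 * Sketch of the validation pattern visible in npi_fzc_ldg_num_set() and the
 * other hits: assert in debug builds, then also guard at runtime.
 */
npi_status_t
npi_example_ldg_op(uint8_t ldg)
{
	assert(LDG_VALID(ldg));         /* debug-build check */
	if (!LDG_VALID(ldg)) {          /* runtime guard for release builds */
		return (NPI_FAILURE);
	}
	/* ... perform the register access for this logical device group ... */
	return (NPI_SUCCESS);
}
```

Note that the sketch parenthesizes `(n)` inside the macro; the originals at lines 122 and 473 leave the argument bare, which is safe only as long as callers never pass an expression containing an operator of lower precedence than `<`.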