Lines Matching full:chunk

79 typename traits_t<T>::signed_t chunk in __kmp_for_static_init() argument
141 " %%%s, %%%s) incr=%%%s chunk=%%%s signed?<%s>\n", in __kmp_for_static_init()
145 *pstride, incr, chunk)); in __kmp_for_static_init()
364 KMP_DEBUG_ASSERT(chunk != 0); in __kmp_for_static_init()
365 if (chunk < 1) in __kmp_for_static_init()
366 chunk = 1; in __kmp_for_static_init()
367 else if ((UT)chunk > trip_count) in __kmp_for_static_init()
368 chunk = trip_count; in __kmp_for_static_init()
369 nchunks = (trip_count) / (UT)chunk + (trip_count % (UT)chunk ? 1 : 0); in __kmp_for_static_init()
370 span = chunk * incr; in __kmp_for_static_init()
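The hits at lines 364-370 come from the static-chunked branch: the requested chunk is clamped to [1, trip_count], line 369 takes the ceiling of trip_count / chunk as the number of chunks, and span = chunk * incr is how far one chunk advances the induction variable. A minimal, self-contained model of that arithmetic (hypothetical names, simplified types, positive increment assumed; it is not the runtime source, and the per-thread bound and stride assignments do not match "chunk" so they are not listed above):

    #include <cstdint>

    // Illustration only; mirrors the clamp on lines 365-368, the ceiling
    // division on line 369 and the span computation on line 370.
    struct ChunkModel {
      std::int64_t chunk;     // clamped chunk size
      std::uint64_t nchunks;  // number of chunks in the whole loop
      std::int64_t span;      // induction-variable distance covered by one chunk
    };

    static ChunkModel model_static_chunked(std::uint64_t trip_count,
                                           std::int64_t chunk, std::int64_t incr) {
      if (chunk < 1)
        chunk = 1;                                  // lines 365-366
      else if ((std::uint64_t)chunk > trip_count)
        chunk = (std::int64_t)trip_count;           // lines 367-368: at most the whole loop
      std::uint64_t nchunks =
          trip_count / (std::uint64_t)chunk +
          (trip_count % (std::uint64_t)chunk ? 1 : 0);  // line 369: ceil(trip_count / chunk)
      return ChunkModel{chunk, nchunks, chunk * incr};  // span, line 370
    }
    // With trip_count = 10, chunk = 4, incr = 1 this yields chunk = 4, nchunks = 3,
    // span = 4; chunks are then dealt to the threads round-robin, each chunk
    // advancing the induction variable by span.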
391 // round up to make sure the chunk is enough to cover all iterations in __kmp_for_static_init()
394 // perform chunk adjustment in __kmp_for_static_init()
395 chunk = (span + chunk - 1) & ~(chunk - 1); in __kmp_for_static_init()
397 span = chunk * incr; in __kmp_for_static_init()
407 KMP_DEBUG_ASSERT(chunk != 0); in __kmp_for_static_init()
408 *plastiter = (tid == ((trip_count - 1) / (UT)chunk)); in __kmp_for_static_init()
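Lines 391-408 belong to the balanced-chunked branch. Per the comment on line 391, the rounding makes the chunk large enough to cover all iterations: line 395 rounds the per-thread span up to a multiple of the requested chunk with a bit trick, which suggests each thread ends up with at most one chunk, so line 408 can identify the last-iteration owner as (trip_count - 1) / chunk with no modulo over the thread count (compare lines 701 and 837). A small sketch of the rounding expression, assuming, as the bit trick requires, that chunk is a power of two:

    #include <cstdint>

    // Same expression as line 395; valid only when chunk is a power of two,
    // because ~(chunk - 1) is then a mask that clears the low-order bits.
    static std::uint64_t round_up_to_chunk(std::uint64_t span, std::uint64_t chunk) {
      return (span + chunk - 1) & ~(chunk - 1);
    }
    // round_up_to_chunk(13, 8) == 16; round_up_to_chunk(16, 8) == 16.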
422 kmp_uint64 cur_chunk = chunk; in __kmp_for_static_init()
424 // Calculate chunk in case it was not specified; it is specified for in __kmp_for_static_init()
484 typename traits_t<T>::signed_t chunk in __kmp_dist_for_static_init() argument
512 "iter=(%%%s, %%%s, %%%s) chunk=%%%s signed?<%s>\n", in __kmp_dist_for_static_init()
516 (buff, gtid, schedule, *plastiter, *plower, *pupper, incr, chunk)); in __kmp_dist_for_static_init()
579 // Get the team's chunk first (each team gets at most one chunk) in __kmp_dist_for_static_init()
622 // Get the parallel loop chunk now (for thread) in __kmp_dist_for_static_init()
623 // compute trip count for team's chunk in __kmp_dist_for_static_init()
693 if (chunk < 1) in __kmp_dist_for_static_init()
694 chunk = 1; in __kmp_dist_for_static_init()
695 span = chunk * incr; in __kmp_dist_for_static_init()
700 KMP_DEBUG_ASSERT(chunk != 0); in __kmp_dist_for_static_init()
701 if (*plastiter != 0 && !(tid == ((trip_count - 1) / (UT)chunk) % nth)) in __kmp_dist_for_static_init()
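In __kmp_dist_for_static_init the work is split twice: the team first takes its block of the distribute loop (line 579 notes that each team gets at most one chunk), and the threads of that team then split the block with the usual static-chunked arithmetic (lines 693-695, using the team's trip count from line 623). Line 701 then keeps the team-level "last iteration" flag only on the thread that owns the last chunk of the team's block. A condensed model of that final check (hypothetical names, simplified types):

    #include <cstdint>

    // team_last: did the team-level pass decide this team's block ends the
    // distribute loop?  team_trip_count: iterations in this team's block;
    // tid/nth and chunk belong to the nested parallel loop.
    static bool thread_runs_last_iteration(bool team_last, int tid, int nth,
                                           std::uint64_t team_trip_count,
                                           std::int64_t chunk) {
      if (chunk < 1)
        chunk = 1;                                          // clamp, lines 693-694
      std::uint64_t last_chunk = (team_trip_count - 1) / (std::uint64_t)chunk;
      return team_last && tid == (int)(last_chunk % (std::uint64_t)nth);  // line 701
    }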
756 typename traits_t<T>::signed_t chunk) { in __kmp_team_static_init() argument
757 // The routine returns the first chunk distributed to the team and in __kmp_team_static_init()
761 // The routine is called for dist_schedule(static,chunk) only. in __kmp_team_static_init()
781 "iter=(%%%s, %%%s, %%%s) chunk %%%s; signed?<%s>\n", in __kmp_team_static_init()
785 KD_TRACE(100, (buff, gtid, *p_last, *p_lb, *p_ub, *p_st, chunk)); in __kmp_team_static_init()
829 if (chunk < 1) in __kmp_team_static_init()
830 chunk = 1; in __kmp_team_static_init()
831 span = chunk * incr; in __kmp_team_static_init()
836 KMP_DEBUG_ASSERT(chunk != 0); in __kmp_team_static_init()
837 *p_last = (team_id == ((trip_count - 1) / (UT)chunk) % nteams); in __kmp_team_static_init()
857 "iter=(%%%s, %%%s, %%%s) chunk %%%s\n", in __kmp_team_static_init()
860 KD_TRACE(100, (buff, gtid, team_id, *p_last, *p_lb, *p_ub, *p_st, chunk)); in __kmp_team_static_init()
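The __kmp_team_static_init hits describe the distribute half of a composite construct: as the comments at lines 757-761 say, the routine returns only the team's first chunk of a dist_schedule(static,chunk) loop, and line 837 marks the team whose chunk sequence contains the final iteration. A compact model of that hand-out (illustrative names and types, positive increment assumed; the lb/ub/stride assignments do not match "chunk" and are therefore not listed above):

    #include <cstdint>

    struct TeamChunk {
      std::int64_t lb, ub;  // bounds of the team's first chunk
      std::int64_t st;      // stride to the team's next chunk
      bool last;            // does this team's chunk sequence end with the final iteration?
    };

    static TeamChunk model_team_static_init(int team_id, int nteams, std::int64_t lower,
                                            std::int64_t incr, std::uint64_t trip_count,
                                            std::int64_t chunk) {
      if (chunk < 1)
        chunk = 1;                                  // lines 829-830
      std::int64_t span = chunk * incr;             // line 831
      TeamChunk t;
      t.lb = lower + team_id * span;                // each team starts one span further in
      t.ub = t.lb + span - incr;
      t.st = span * nteams;                         // round-robin over the teams
      t.last = (team_id ==
                (int)(((trip_count - 1) / (std::uint64_t)chunk) %
                      (std::uint64_t)nteams));      // line 837
      return t;
    }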
878 @param chunk The chunk size
885 increment and chunk size.
892 kmp_int32 incr, kmp_int32 chunk) { in __kmpc_for_static_init_4() argument
894 pupper, pstride, incr, chunk in __kmpc_for_static_init_4()
909 kmp_int32 chunk) { in __kmpc_for_static_init_4u() argument
911 pupper, pstride, incr, chunk in __kmpc_for_static_init_4u()
925 kmp_int64 incr, kmp_int64 chunk) { in __kmpc_for_static_init_8() argument
927 pupper, pstride, incr, chunk in __kmpc_for_static_init_8()
942 kmp_int64 chunk) { in __kmpc_for_static_init_8u() argument
944 pupper, pstride, incr, chunk in __kmpc_for_static_init_8u()
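The __kmpc_for_static_init_{4,4u,8,8u} entry points differ only in whether the loop bounds are 32- or 64-bit and signed or unsigned; as the hits at lines 894-944 show, each one forwards its arguments, including chunk, to the shared template above. A sketch of that thin-wrapper shape (the names below are stand-ins, not the runtime's exported symbols, and the real entry points also take a source-location argument):

    #include <cstdint>

    using kmp_int32 = std::int32_t;

    // Shared implementation, instantiated per bound type; body elided here.
    template <typename T, typename ST>
    static void for_static_init_impl(kmp_int32 gtid, kmp_int32 schedtype,
                                     kmp_int32 *plastiter, T *plower, T *pupper,
                                     ST *pstride, ST incr, ST chunk) {
      // ... chunk clamping and bound/stride computation as modeled earlier ...
    }

    // 32-bit signed wrapper; the _4u, _8 and _8u variants only change the bound type.
    extern "C" void for_static_init_4(kmp_int32 gtid, kmp_int32 schedtype,
                                      kmp_int32 *plastiter, kmp_int32 *plower,
                                      kmp_int32 *pupper, kmp_int32 *pstride,
                                      kmp_int32 incr, kmp_int32 chunk) {
      for_static_init_impl<kmp_int32, kmp_int32>(gtid, schedtype, plastiter, plower,
                                                 pupper, pstride, incr, chunk);
    }

The __kmpc_dist_for_static_init_* hits (lines 987-1029) and __kmpc_team_static_init_* hits (lines 1064-1103) below follow the same forwarding shape into their respective templates.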
968 @param pupper Pointer to the upper bound of loop chunk
972 @param chunk The chunk size for the parallel loop
987 kmp_int32 incr, kmp_int32 chunk) { in __kmpc_dist_for_static_init_4() argument
990 chunk OMPT_CODEPTR_ARG); in __kmpc_dist_for_static_init_4()
1000 kmp_int32 incr, kmp_int32 chunk) { in __kmpc_dist_for_static_init_4u() argument
1003 chunk OMPT_CODEPTR_ARG); in __kmpc_dist_for_static_init_4u()
1013 kmp_int64 incr, kmp_int64 chunk) { in __kmpc_dist_for_static_init_8() argument
1016 chunk OMPT_CODEPTR_ARG); in __kmpc_dist_for_static_init_8()
1026 kmp_int64 incr, kmp_int64 chunk) { in __kmpc_dist_for_static_init_8u() argument
1029 chunk OMPT_CODEPTR_ARG); in __kmpc_dist_for_static_init_8u()
1039 // int *p_last, T *lb, T *ub, ST *st, ST incr, ST chunk )
1051 @param chunk The chunk size to block with
1056 increment and chunk for the distribute construct as part of composite distribute
1064 kmp_int32 chunk) { in __kmpc_team_static_init_4() argument
1067 chunk); in __kmpc_team_static_init_4()
1076 kmp_int32 chunk) { in __kmpc_team_static_init_4u() argument
1079 chunk); in __kmpc_team_static_init_4u()
1088 kmp_int64 chunk) { in __kmpc_team_static_init_8() argument
1091 chunk); in __kmpc_team_static_init_8()
1100 kmp_int64 chunk) { in __kmpc_team_static_init_8u() argument
1103 chunk); in __kmpc_team_static_init_8u()