Lines Matching +full:j +full:- +full:to +full:- +full:k (free-text search results; judging by the function names, all matched lines appear to come from tools/lib/perf/cpumap.c)
1 // SPDX-License-Identifier: GPL-2.0-only
20 RC_CHK_ACCESS(map)->nr = nr_cpus; in perf_cpu_map__set_nr()
33 cpus->nr = nr_cpus; in perf_cpu_map__alloc()
34 refcount_set(&cpus->refcnt, 1); in perf_cpu_map__alloc()
44 RC_CHK_ACCESS(cpus)->map[0].cpu = -1; in perf_cpu_map__new_any_cpu()
98 RC_CHK_ACCESS(cpus)->map[i].cpu = i; in cpu_map__new_sysconf()
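The cpu_map__new_sysconf() line above fills the map with consecutive CPU ids 0..N-1, with N presumably taken from sysconf() as the function name suggests. A minimal standalone sketch of that idea, assuming only POSIX sysconf(_SC_NPROCESSORS_ONLN); the cpu_list struct and cpu_list__new_sysconf() helper are hypothetical, not libperf API:

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

/* Hypothetical flat CPU map: just a count plus an array of CPU ids. */
struct cpu_list {
        int nr;
        int cpu[];
};

/* Build a map of CPUs 0..N-1, where N comes from sysconf(). */
static struct cpu_list *cpu_list__new_sysconf(void)
{
        long nr = sysconf(_SC_NPROCESSORS_ONLN);
        struct cpu_list *cpus;

        if (nr <= 0)
                return NULL;

        cpus = malloc(sizeof(*cpus) + nr * sizeof(int));
        if (!cpus)
                return NULL;

        cpus->nr = (int)nr;
        for (int i = 0; i < cpus->nr; i++)
                cpus->cpu[i] = i;       /* same "map[i].cpu = i" pattern as the line above */

        return cpus;
}

int main(void)
{
        struct cpu_list *cpus = cpu_list__new_sysconf();

        if (!cpus)
                return 1;
        printf("%d online CPUs\n", cpus->nr);
        free(cpus);
        return 0;
}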
132 return cpu_a->cpu - cpu_b->cpu; in cmp_cpu()
137 return RC_CHK_ACCESS(cpus)->map[idx]; in __perf_cpu_map__cpu()
144 int i, j; in cpu_map__trim_new() local
147 memcpy(RC_CHK_ACCESS(cpus)->map, tmp_cpus, payload_size); in cpu_map__trim_new()
148 qsort(RC_CHK_ACCESS(cpus)->map, nr_cpus, sizeof(struct perf_cpu), cmp_cpu); in cpu_map__trim_new()
150 j = 0; in cpu_map__trim_new()
154 __perf_cpu_map__cpu(cpus, i - 1).cpu) { in cpu_map__trim_new()
155 RC_CHK_ACCESS(cpus)->map[j++].cpu = in cpu_map__trim_new()
159 perf_cpu_map__set_nr(cpus, j); in cpu_map__trim_new()
160 assert(j <= nr_cpus); in cpu_map__trim_new()
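Taken together, the cpu_map__trim_new() lines show a sort-then-compact pass: the temporary CPU array is qsort()ed with cmp_cpu(), duplicates are squeezed out in place, and j ends up as the count of unique CPUs (hence the assert(j <= nr_cpus) and the perf_cpu_map__set_nr(cpus, j) call). A minimal sketch of the same pattern on a plain int array; the names here are illustrative, not the libperf implementation:

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

/* qsort comparator, same subtraction trick as cmp_cpu(). */
static int cmp_int(const void *a, const void *b)
{
        return *(const int *)a - *(const int *)b;
}

/* Sort and deduplicate in place; returns the number of unique values kept. */
static int trim_sorted_unique(int *vals, int nr)
{
        int j = 0;

        qsort(vals, nr, sizeof(int), cmp_int);
        for (int i = 0; i < nr; i++) {
                /* keep vals[i] only if it differs from its sorted predecessor */
                if (i == 0 || vals[i] != vals[i - 1])
                        vals[j++] = vals[i];
        }
        assert(j <= nr);
        return j;
}

int main(void)
{
        int vals[] = { 3, 1, 3, 0, 1 };
        int n = trim_sorted_unique(vals, 5);

        for (int i = 0; i < n; i++)
                printf("%d%c", vals[i], i + 1 == n ? '\n' : ' ');   /* prints "0 1 3" */
        return 0;
}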
178 * must handle the case of empty cpumap to cover in perf_cpu_map__new()
189 || (*p != '\0' && *p != ',' && *p != '-' && *p != '\n')) in perf_cpu_map__new()
192 if (*p == '-') { in perf_cpu_map__new()
216 max_entries += max(end_cpu - start_cpu + 1, 16UL); in perf_cpu_map__new()
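The perf_cpu_map__new() lines belong to the parser for CPU list strings such as "0-3,7": each token is either a single number or a start-end range, tokens are separated by commas (a trailing newline is tolerated), and the entry buffer is grown in chunks of at least 16 as ranges are discovered. A rough standalone sketch of such a parser; parse_cpu_list() and its fixed-capacity output array are hypothetical simplifications, not the libperf code:

#include <stdio.h>
#include <stdlib.h>

/*
 * Parse a CPU list like "0-3,7" into out[]; returns the number of CPUs,
 * or -1 on malformed input.  Capacity handling is deliberately simplistic.
 */
static int parse_cpu_list(const char *str, int *out, int max_out)
{
        const char *p = str;
        int nr = 0;

        while (*p && *p != '\n') {
                char *end;
                unsigned long start, finish;

                start = strtoul(p, &end, 10);
                if (end == p)
                        return -1;              /* not a number */
                finish = start;
                if (*end == '-') {              /* "start-end" range */
                        p = end + 1;
                        finish = strtoul(p, &end, 10);
                        if (end == p || finish < start)
                                return -1;
                }
                for (unsigned long cpu = start; cpu <= finish; cpu++) {
                        if (nr >= max_out)
                                return -1;      /* out of room */
                        out[nr++] = (int)cpu;
                }
                if (*end == ',')
                        end++;
                else if (*end != '\0' && *end != '\n')
                        return -1;              /* unexpected delimiter */
                p = end;
        }
        return nr;
}

int main(void)
{
        int cpus[64];
        int n = parse_cpu_list("0-3,7", cpus, 64);

        for (int i = 0; i < n; i++)
                printf("%d%c", cpus[i], i + 1 == n ? '\n' : ' ');   /* prints "0 1 2 3 7" */
        return 0;
}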
250 RC_CHK_ACCESS(cpus)->map[0].cpu = cpu; in perf_cpu_map__new_int()
257 return RC_CHK_ACCESS(cpus)->nr; in __perf_cpu_map__nr()
263 .cpu = -1 in perf_cpu_map__cpu()
279 return map ? __perf_cpu_map__cpu(map, 0).cpu == -1 : true; in perf_cpu_map__has_any_cpu_or_is_empty()
287 return __perf_cpu_map__nr(map) == 1 && __perf_cpu_map__cpu(map, 0).cpu == -1; in perf_cpu_map__is_any_cpu_or_is_empty()
300 return -1; in perf_cpu_map__idx()
317 return -1; in perf_cpu_map__idx()
322 return perf_cpu_map__idx(cpus, cpu) != -1; in perf_cpu_map__has()
348 return map && __perf_cpu_map__cpu(map, 0).cpu == -1; in perf_cpu_map__has_any_cpu()
354 .cpu = -1 in perf_cpu_map__min()
368 .cpu = -1 in perf_cpu_map__max()
373 ? __perf_cpu_map__cpu(map, __perf_cpu_map__nr(map) - 1) in perf_cpu_map__max()
385 for (int i = 0, j = 0; i < __perf_cpu_map__nr(a); i++) { in perf_cpu_map__is_subset() local
386 if (__perf_cpu_map__cpu(a, i).cpu > __perf_cpu_map__cpu(b, j).cpu) in perf_cpu_map__is_subset()
388 if (__perf_cpu_map__cpu(a, i).cpu == __perf_cpu_map__cpu(b, j).cpu) { in perf_cpu_map__is_subset()
389 j++; in perf_cpu_map__is_subset()
390 if (j == __perf_cpu_map__nr(b)) in perf_cpu_map__is_subset()
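The perf_cpu_map__is_subset() lines are the classic two-cursor walk over two sorted lists: scan the candidate superset a, advance the cursor into b on each match, and fail as soon as a value of a overtakes the current value of b, since that value can then never be found. A minimal sketch on sorted, duplicate-free int arrays (sorted_is_subset() is an illustrative name):

#include <stdbool.h>
#include <stdio.h>

/* Return true iff every element of sub[] appears in sup[]; both arrays sorted and duplicate-free. */
static bool sorted_is_subset(const int *sup, int sup_nr, const int *sub, int sub_nr)
{
        if (sub_nr == 0)
                return true;
        if (sub_nr > sup_nr)
                return false;

        for (int i = 0, j = 0; i < sup_nr; i++) {
                if (sup[i] > sub[j])
                        return false;           /* sub[j] cannot appear later in sorted sup[] */
                if (sup[i] == sub[j]) {
                        j++;
                        if (j == sub_nr)
                                return true;    /* every element of sub[] matched */
                }
        }
        return false;
}

int main(void)
{
        int a[] = { 0, 1, 2, 3, 7 };
        int b[] = { 1, 3 };

        printf("%s\n", sorted_is_subset(a, 5, b, 2) ? "subset" : "not a subset");
        return 0;
}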
401 * change (similar to "realloc").
412 int i, j, k; in perf_cpu_map__merge() local
426 return -ENOMEM; in perf_cpu_map__merge()
429 i = j = k = 0; in perf_cpu_map__merge()
430 while (i < __perf_cpu_map__nr(*orig) && j < __perf_cpu_map__nr(other)) { in perf_cpu_map__merge()
431 if (__perf_cpu_map__cpu(*orig, i).cpu <= __perf_cpu_map__cpu(other, j).cpu) { in perf_cpu_map__merge()
432 if (__perf_cpu_map__cpu(*orig, i).cpu == __perf_cpu_map__cpu(other, j).cpu) in perf_cpu_map__merge()
433 j++; in perf_cpu_map__merge()
434 tmp_cpus[k++] = __perf_cpu_map__cpu(*orig, i++); in perf_cpu_map__merge()
436 tmp_cpus[k++] = __perf_cpu_map__cpu(other, j++); in perf_cpu_map__merge()
440 tmp_cpus[k++] = __perf_cpu_map__cpu(*orig, i++); in perf_cpu_map__merge()
442 while (j < __perf_cpu_map__nr(other)) in perf_cpu_map__merge()
443 tmp_cpus[k++] = __perf_cpu_map__cpu(other, j++); in perf_cpu_map__merge()
444 assert(k <= tmp_len); in perf_cpu_map__merge()
446 merged = cpu_map__trim_new(k, tmp_cpus); in perf_cpu_map__merge()
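perf_cpu_map__merge() performs a textbook merge of two sorted CPU lists into a temporary buffer, emitting a CPU only once when both cursors point at the same value, then draining whichever input still has entries; k can never exceed the combined length, which is what the assert(k <= tmp_len) checks before the result is handed to cpu_map__trim_new(). A minimal sketch of that union step on sorted int arrays (merge_sorted_unique() is an illustrative name):

#include <assert.h>
#include <stdio.h>

/*
 * Merge two sorted, duplicate-free arrays into dst[] (capacity dst_len),
 * keeping each value once.  Returns the number of merged entries.
 */
static int merge_sorted_unique(const int *a, int a_nr, const int *b, int b_nr,
                               int *dst, int dst_len)
{
        int i = 0, j = 0, k = 0;

        while (i < a_nr && j < b_nr) {
                if (a[i] <= b[j]) {
                        if (a[i] == b[j])
                                j++;            /* same value in both inputs: emit it once */
                        dst[k++] = a[i++];
                } else {
                        dst[k++] = b[j++];
                }
        }
        while (i < a_nr)                        /* drain the leftovers of either input */
                dst[k++] = a[i++];
        while (j < b_nr)
                dst[k++] = b[j++];
        assert(k <= dst_len);
        return k;
}

int main(void)
{
        int a[] = { 0, 2, 4 }, b[] = { 1, 2, 5 }, out[6];
        int n = merge_sorted_unique(a, 3, b, 3, out, 6);

        for (int i = 0; i < n; i++)
                printf("%d%c", out[i], i + 1 == n ? '\n' : ' ');    /* prints "0 1 2 4 5" */
        return 0;
}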
458 int i, j, k; in perf_cpu_map__intersect() local
471 i = j = k = 0; in perf_cpu_map__intersect()
472 while (i < __perf_cpu_map__nr(orig) && j < __perf_cpu_map__nr(other)) { in perf_cpu_map__intersect()
473 if (__perf_cpu_map__cpu(orig, i).cpu < __perf_cpu_map__cpu(other, j).cpu) in perf_cpu_map__intersect()
475 else if (__perf_cpu_map__cpu(orig, i).cpu > __perf_cpu_map__cpu(other, j).cpu) in perf_cpu_map__intersect()
476 j++; in perf_cpu_map__intersect()
478 j++; in perf_cpu_map__intersect()
479 tmp_cpus[k++] = __perf_cpu_map__cpu(orig, i++); in perf_cpu_map__intersect()
482 if (k) in perf_cpu_map__intersect()
483 merged = cpu_map__trim_new(k, tmp_cpus); in perf_cpu_map__intersect()
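perf_cpu_map__intersect() is the same two-cursor walk, but only CPUs present in both inputs are copied out, and an empty result (k == 0) produces no map at all rather than an empty one. A minimal sketch of the intersection step (intersect_sorted() is an illustrative name):

#include <stdio.h>

/* Intersect two sorted, duplicate-free arrays; returns how many common values were written to dst[]. */
static int intersect_sorted(const int *a, int a_nr, const int *b, int b_nr, int *dst)
{
        int i = 0, j = 0, k = 0;

        while (i < a_nr && j < b_nr) {
                if (a[i] < b[j])
                        i++;                    /* a[i] is missing from b[] */
                else if (a[i] > b[j])
                        j++;                    /* b[j] is missing from a[] */
                else {
                        j++;
                        dst[k++] = a[i++];      /* present in both */
                }
        }
        return k;
}

int main(void)
{
        int a[] = { 0, 2, 4, 7 }, b[] = { 2, 3, 7 }, out[3];
        int n = intersect_sorted(a, 4, b, 3, out);

        for (int i = 0; i < n; i++)
                printf("%d%c", out[i], i + 1 == n ? '\n' : ' ');    /* prints "2 7" */
        return 0;
}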