Lines matching refs:core (uses of the embedded libperf member, struct perf_mmap core, of struct mmap in tools/perf/util/mmap.c; the leading number is the line in that file and the trailing name is the enclosing function)

28 return map->core.mask + 1 + page_size; in perf_mmap__mmap_len()
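
The length computed at line 28 is the data area (mask + 1 bytes, a power of two) plus one extra page for the control header (struct perf_event_mmap_page) that sits at the front of the mapping. A minimal standalone model of that arithmetic; ring_mmap_len and the eight-page size are illustrative, not from the file:

#include <stddef.h>
#include <unistd.h>

/* Model of the mapping length: one control page (the
 * perf_event_mmap_page header) followed by a power-of-two data
 * area; mask is data_size - 1, so the data area is mask + 1 bytes. */
size_t ring_mmap_len(size_t mask, size_t page_size)
{
	return mask + 1 + page_size;
}

int main(void)
{
	size_t page_size = (size_t)sysconf(_SC_PAGESIZE);
	size_t ring_pages = 8;	/* illustrative: eight data pages */
	size_t mask = ring_pages * page_size - 1;

	return ring_mmap_len(mask, page_size) == 9 * page_size ? 0 : 1;
}

Keeping the data area a power of two is what allows the "& mask" indexing used throughout the hits below.
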
35 unsigned char *data = map->core.base + page_size; in perf_mmap__read()
42 event = (union perf_event *)&data[*startp & map->core.mask]; in perf_mmap__read()
52 if ((*startp & map->core.mask) + size != ((*startp + size) & map->core.mask)) { in perf_mmap__read()
55 void *dst = map->core.event_copy; in perf_mmap__read()
58 cpy = min(map->core.mask + 1 - (offset & map->core.mask), len); in perf_mmap__read()
59 memcpy(dst, &data[offset & map->core.mask], cpy); in perf_mmap__read()
65 event = (union perf_event *)map->core.event_copy; in perf_mmap__read()
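
Lines 42 through 65 are the heart of perf_mmap__read(): a record is located at start & mask, and when its header size would cross the wrap point the record is reassembled piecewise into the fixed event_copy scratch buffer so the caller always sees one contiguous record. A standalone sketch of that logic, with plain buffers standing in for struct mmap (read_wrapped, data, and scratch are illustrative names):

#include <stdint.h>
#include <string.h>

#define min(a, b) ((a) < (b) ? (a) : (b))

/* Return a pointer to `len` contiguous bytes starting at logical
 * offset `offset` in a power-of-two ring of size mask + 1. If the
 * span wraps, reassemble it into `scratch` piece by piece, mirroring
 * the do/while copy loop in perf_mmap__read(). */
void *read_wrapped(unsigned char *data, uint64_t mask,
		   uint64_t offset, size_t len, void *scratch)
{
	unsigned char *dst = scratch;

	/* Fast path: the span does not cross the wrap point. */
	if ((offset & mask) + len == ((offset + len) & mask))
		return &data[offset & mask];

	do {
		size_t cpy = min((size_t)(mask + 1 - (offset & mask)), len);

		memcpy(dst, &data[offset & mask], cpy);
		offset += cpy;
		dst += cpy;
		len -= cpy;
	} while (len);

	return scratch;
}
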
93 if (!refcount_read(&map->core.refcnt)) in perf_mmap__read_event()
97 if (!map->core.overwrite) in perf_mmap__read_event()
98 map->core.end = perf_mmap__read_head(map); in perf_mmap__read_event()
100 event = perf_mmap__read(map, &map->core.start, map->core.end); in perf_mmap__read_event()
102 if (!map->core.overwrite) in perf_mmap__read_event()
103 map->core.prev = map->core.start; in perf_mmap__read_event()
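
perf_mmap__read_event() drives perf_mmap__read() with a [start, end) window: for a non-overwrite ring, line 98 refreshes end from the producer position on every call and line 103 records the consumed position in prev, while an overwrite ring keeps the window fixed by read_init. The head load itself needs acquire ordering so that record bytes are never read before the head value that published them. A sketch of that load, assuming the GCC/Clang __atomic builtins (read_head is an illustrative name; the in-tree perf_mmap__read_head() helper does the equivalent with its own barrier macros):

#include <linux/perf_event.h>
#include <stdint.h>

/* Load the producer position from the control page at the start of
 * the mapping. The kernel publishes data_head with release
 * semantics, so the reader pairs it with an acquire load. */
uint64_t read_head(void *base)
{
	struct perf_event_mmap_page *pc = base;

	return __atomic_load_n(&pc->data_head, __ATOMIC_ACQUIRE);
}
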
110 return perf_mmap__read_head(map) == map->core.prev && !map->auxtrace_mmap.base; in perf_mmap__empty()
115 refcount_inc(&map->core.refcnt); in perf_mmap__get()
120 BUG_ON(map->core.base && refcount_read(&map->core.refcnt) == 0); in perf_mmap__put()
122 if (refcount_dec_and_test(&map->core.refcnt)) in perf_mmap__put()
128 if (!map->core.overwrite) { in perf_mmap__consume()
129 u64 old = map->core.prev; in perf_mmap__consume()
134 if (refcount_read(&map->core.refcnt) == 1 && perf_mmap__empty(map)) in perf_mmap__consume()
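
perf_mmap__consume() is the other half of that protocol: for a non-overwrite ring it publishes prev as the new consumer position via perf_mmap__write_tail(), telling the kernel that the space may be reused, and at line 134 it drops the final reference once a ring whose fd went away (refcnt == 1) has been fully drained. The tail store must be a release so the kernel cannot recycle bytes the reader is still looking at. A matching sketch of the tail update, again assuming the __atomic builtins (write_tail is an illustrative name):

#include <linux/perf_event.h>
#include <stdint.h>

/* Publish the consumer position. Once data_tail advances, the
 * kernel may overwrite everything before it, so the store is a
 * release that orders all earlier reads of ring data before it. */
void write_tail(void *base, uint64_t tail)
{
	struct perf_event_mmap_page *pc = base;

	__atomic_store_n(&pc->data_tail, tail, __ATOMIC_RELEASE);
}
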
260 ret = perf_mmap__aio_bind(map, i, map->core.cpu, mp->affinity); in perf_mmap__aio_mmap()
321 if (map->core.base != NULL) { in perf_mmap__munmap()
322 munmap(map->core.base, perf_mmap__mmap_len(map)); in perf_mmap__munmap()
323 map->core.base = NULL; in perf_mmap__munmap()
324 map->core.fd = -1; in perf_mmap__munmap()
325 refcount_set(&map->core.refcnt, 0); in perf_mmap__munmap()
351 build_node_mask(cpu__get_node(map->core.cpu), &map->affinity_mask); in perf_mmap__setup_affinity_mask()
353 CPU_SET(map->core.cpu, &map->affinity_mask); in perf_mmap__setup_affinity_mask()
371 refcount_set(&map->core.refcnt, 2); in perf_mmap__mmap()
372 map->core.prev = 0; in perf_mmap__mmap()
373 map->core.mask = mp->mask; in perf_mmap__mmap()
374 map->core.base = mmap(NULL, perf_mmap__mmap_len(map), mp->prot, in perf_mmap__mmap()
376 if (map->core.base == MAP_FAILED) { in perf_mmap__mmap()
379 map->core.base = NULL; in perf_mmap__mmap()
382 map->core.fd = fd; in perf_mmap__mmap()
383 map->core.cpu = cpu; in perf_mmap__mmap()
387 map->core.flush = mp->flush; in perf_mmap__mmap()
403 &mp->auxtrace_mp, map->core.base, fd)) in perf_mmap__mmap()
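
perf_mmap__mmap() initializes the core state around the mmap() call (lines 371 to 387): the refcount starts at 2 because one reference belongs to the owning evlist and the last one is dropped by perf_mmap__consume(), so a ring whose fd already got POLLHUP can still be drained before the map is freed. A reduced sketch of the mapping step itself; map_ring is an illustrative wrapper, and the prot note reflects how perf maps overwrite rings read-only:

#include <stddef.h>
#include <sys/mman.h>

/* Map one ring: control page plus power-of-two data area, with the
 * length computed as in perf_mmap__mmap_len(). perf passes
 * PROT_READ|PROT_WRITE for a normal ring (the reader must update
 * data_tail) and PROT_READ alone for an overwrite ring. */
void *map_ring(int fd, size_t len, int prot)
{
	void *base = mmap(NULL, len, prot, MAP_SHARED, fd, 0);

	return base == MAP_FAILED ? NULL : base;
}
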
447 u64 old = md->core.prev; in __perf_mmap__read_init()
448 unsigned char *data = md->core.base + page_size; in __perf_mmap__read_init()
451 md->core.start = md->core.overwrite ? head : old; in __perf_mmap__read_init()
452 md->core.end = md->core.overwrite ? old : head; in __perf_mmap__read_init()
454 if ((md->core.end - md->core.start) < md->core.flush) in __perf_mmap__read_init()
457 size = md->core.end - md->core.start; in __perf_mmap__read_init()
458 if (size > (unsigned long)(md->core.mask) + 1) { in __perf_mmap__read_init()
459 if (!md->core.overwrite) { in __perf_mmap__read_init()
462 md->core.prev = head; in __perf_mmap__read_init()
471 if (overwrite_rb_find_range(data, md->core.mask, &md->core.start, &md->core.end)) in __perf_mmap__read_init()
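
__perf_mmap__read_init() turns prev and the current head into the window [start, end): a forward ring reads from prev to head, an overwrite (backward) ring the other way around. Fewer than flush pending bytes means "try again later"; a window larger than the buffer means the producer lapped the reader, which is fatal for a forward ring (data was lost, so line 462 resynchronizes prev to head) but recoverable for an overwrite ring via overwrite_rb_find_range(). A simplified standalone model of the window computation, where a single -EBUSY return stands in for both recovery paths:

#include <errno.h>
#include <stdbool.h>
#include <stdint.h>

/* Compute the readable window [start, end), following
 * __perf_mmap__read_init(). Returns -EAGAIN when fewer than `flush`
 * bytes are pending; -EBUSY stands in for the overrun handling
 * (forward rings drop the data, overwrite rings salvage what is
 * still intact). */
int read_init(bool overwrite, uint64_t head, uint64_t prev,
	      uint64_t mask, uint64_t flush,
	      uint64_t *start, uint64_t *end)
{
	*start = overwrite ? head : prev;
	*end   = overwrite ? prev : head;

	if (*end - *start < flush)
		return -EAGAIN;

	if (*end - *start > mask + 1)
		return -EBUSY;

	return 0;
}
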
483 if (!refcount_read(&map->core.refcnt)) in perf_mmap__read_init()
493 unsigned char *data = md->core.base + page_size; in perf_mmap__push()
502 size = md->core.end - md->core.start; in perf_mmap__push()
504 if ((md->core.start & md->core.mask) + size != (md->core.end & md->core.mask)) { in perf_mmap__push()
505 buf = &data[md->core.start & md->core.mask]; in perf_mmap__push()
506 size = md->core.mask + 1 - (md->core.start & md->core.mask); in perf_mmap__push()
507 md->core.start += size; in perf_mmap__push()
515 buf = &data[md->core.start & md->core.mask]; in perf_mmap__push()
516 size = md->core.end - md->core.start; in perf_mmap__push()
517 md->core.start += size; in perf_mmap__push()
524 md->core.prev = head; in perf_mmap__push()
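
perf_mmap__push() streams the same window out through a callback instead of returning records: the window is contiguous in logical offsets but the buffer wraps at mask + 1, so it emits at most two flat chunks, first from start up to the wrap point (lines 505 to 507), then the now-linear remainder (lines 515 to 517). A standalone sketch of that two-chunk walk (push_range and the callback shape are illustrative):

#include <stddef.h>
#include <stdint.h>

/* Hand the window [start, end) of a power-of-two ring to `push` as
 * at most two linear chunks, as perf_mmap__push() does. */
int push_range(unsigned char *data, uint64_t mask,
	       uint64_t start, uint64_t end, void *to,
	       int (*push)(void *to, void *buf, size_t size))
{
	uint64_t size = end - start;

	/* First chunk: from start up to the wrap point, only if the
	 * window actually crosses it. */
	if ((start & mask) + size != (end & mask)) {
		size_t chunk = mask + 1 - (start & mask);

		if (push(to, &data[start & mask], chunk) < 0)
			return -1;
		start += chunk;
	}

	/* Second (or only) chunk is linear from here. */
	if (push(to, &data[start & mask], end - start) < 0)
		return -1;

	return 0;
}
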
541 if (!refcount_read(&map->core.refcnt)) in perf_mmap__read_done()
544 map->core.prev = perf_mmap__read_head(map); in perf_mmap__read_done()
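
Taken together, these hits trace the reader protocol: perf_mmap__read_init() fixes the window, perf_mmap__read_event() plus perf_mmap__consume() iterate it record by record, and perf_mmap__read_done() stores the new prev for the next round. A hedged usage sketch of that loop, assuming the declarations from this file's own headers; drain and process_event are illustrative names, not perf API:

#include "util/mmap.h"	/* struct mmap and the perf_mmap__* API (tools/perf tree) */
#include "util/event.h"	/* union perf_event */

/* One drain pass over a single ring, following the
 * read_init/read_event/consume/read_done protocol above. */
static void drain(struct mmap *map,
		  void (*process_event)(union perf_event *ev))
{
	union perf_event *event;

	if (perf_mmap__read_init(map) < 0)
		return;	/* nothing pending, or the ring was overrun */

	while ((event = perf_mmap__read_event(map)) != NULL) {
		process_event(event);
		perf_mmap__consume(map);
	}

	perf_mmap__read_done(map);
}
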