Lines Matching full:fd
48 #define FD(_evsel, _cpu_map_idx, _thread) \ macro
49 ((int *)xyarray__entry(_evsel->fd, _cpu_map_idx, _thread))
56 evsel->fd = xyarray__new(ncpus, nthreads, sizeof(int)); in perf_evsel__alloc_fd()
58 if (evsel->fd) { in perf_evsel__alloc_fd()
63 int *fd = FD(evsel, idx, thread); in perf_evsel__alloc_fd() local
65 if (fd) in perf_evsel__alloc_fd()
66 *fd = -1; in perf_evsel__alloc_fd()
71 return evsel->fd != NULL ? 0 : -ENOMEM; in perf_evsel__alloc_fd()
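These matches appear to come from libperf's evsel fd handling (tools/lib/perf/evsel.c in the kernel tree): evsel->fd is an xyarray with one int slot per (CPU map index, thread) pair, the FD() macro returns a pointer to one slot, and perf_evsel__alloc_fd() initializes every slot to -1 so "not opened yet" is easy to test. A minimal, self-contained sketch of that layout follows; the fd_table__* names are hypothetical, not libperf API.

#include <stdlib.h>

/* Hypothetical flat stand-in for libperf's xyarray of fds. */
struct fd_table {
	int ncpus;
	int nthreads;
	int fds[];              /* ncpus * nthreads slots */
};

/* Analogue of the FD() macro: pointer to the slot for one cpu/thread. */
static int *fd_table__entry(struct fd_table *t, int cpu_idx, int thread)
{
	return &t->fds[cpu_idx * t->nthreads + thread];
}

/* Analogue of perf_evsel__alloc_fd(): allocate, mark every slot unused. */
static struct fd_table *fd_table__new(int ncpus, int nthreads)
{
	struct fd_table *t = malloc(sizeof(*t) +
				    (size_t)ncpus * nthreads * sizeof(int));

	if (!t)
		return NULL;    /* caller maps this to -ENOMEM */

	t->ncpus = ncpus;
	t->nthreads = nthreads;
	for (int cpu = 0; cpu < ncpus; cpu++)
		for (int thread = 0; thread < nthreads; thread++)
			*fd_table__entry(t, cpu, thread) = -1;  /* not opened yet */
	return t;
}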
92 int *fd; in get_group_fd() local
103 if (!leader->fd) in get_group_fd()
106 fd = FD(leader, cpu_map_idx, thread); in get_group_fd()
107 if (fd == NULL || *fd == -1) in get_group_fd()
110 *group_fd = *fd; in get_group_fd()
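get_group_fd() resolves the group_fd argument for the next perf_event_open() call: a group leader opens its own group (group_fd = -1), while a member must reuse the leader's already-open fd for the same (CPU map index, thread) slot; a leader whose slot is still -1 is an error. A hedged, self-contained sketch of that lookup (the function name is hypothetical):

#include <errno.h>

/* leader_fd_slot points at the leader's slot in its fd table,
 * or is NULL if the leader has no table at all. */
static int get_group_fd_sketch(int is_group_leader, int *leader_fd_slot,
			       int *group_fd)
{
	if (is_group_leader) {
		*group_fd = -1;                 /* a leader opens its own group */
		return 0;
	}

	if (leader_fd_slot == NULL || *leader_fd_slot == -1)
		return -EAGAIN;                 /* leader must be opened first */

	*group_fd = *leader_fd_slot;            /* passed to perf_event_open() as group_fd */
	return 0;
}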
145 if (evsel->fd == NULL && in perf_evsel__open()
151 int fd, group_fd, *evsel_fd; in perf_evsel__open() local
153 evsel_fd = FD(evsel, idx, thread); in perf_evsel__open()
163 fd = sys_perf_event_open(&evsel->attr, in perf_evsel__open()
167 if (fd < 0) { in perf_evsel__open()
172 *evsel_fd = fd; in perf_evsel__open()
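perf_evsel__open() walks every (CPU map index, thread) pair, fetches the slot with FD(), asks get_group_fd() for the leader's descriptor, calls sys_perf_event_open(), and stores the new fd back into the slot. A hedged sketch of that loop using the raw perf_event_open(2) syscall over a flat fd table; the helper name and table layout are assumptions, not libperf API.

#define _GNU_SOURCE
#include <linux/perf_event.h>
#include <sys/syscall.h>
#include <unistd.h>

/* One perf_event_open(2) call per (cpu, thread) slot; fds[] has
 * ncpus * nthreads entries, all -1 on entry. */
static int open_all(struct perf_event_attr *attr, int *fds,
		    const int *cpus, int ncpus,
		    const pid_t *tids, int nthreads)
{
	for (int c = 0; c < ncpus; c++) {
		for (int t = 0; t < nthreads; t++) {
			int *slot = &fds[c * nthreads + t];
			/* group_fd = -1: standalone event; a group member
			 * would pass the leader's fd from the lookup above. */
			int fd = (int)syscall(SYS_perf_event_open, attr,
					      tids[t], cpus[c], -1, 0);

			if (fd < 0)
				return -1;      /* caller closes what was opened */
			*slot = fd;
		}
	}
	return 0;
}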
186 for (thread = 0; thread < xyarray__max_y(evsel->fd); ++thread) { in perf_evsel__close_fd_cpu()
187 int *fd = FD(evsel, cpu_map_idx, thread); in perf_evsel__close_fd_cpu() local
189 if (fd && *fd >= 0) { in perf_evsel__close_fd_cpu()
190 close(*fd); in perf_evsel__close_fd_cpu()
191 *fd = -1; in perf_evsel__close_fd_cpu()
198 for (int idx = 0; idx < xyarray__max_x(evsel->fd); idx++) in perf_evsel__close_fd()
204 xyarray__delete(evsel->fd); in perf_evsel__free_fd()
205 evsel->fd = NULL; in perf_evsel__free_fd()
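Teardown mirrors allocation: perf_evsel__close_fd_cpu() closes every thread's descriptor for one CPU index and resets the slot to -1, perf_evsel__close_fd() repeats that for every CPU index, and perf_evsel__free_fd() then deletes the xyarray and sets evsel->fd back to NULL. A small sketch of the close pattern over a flat table (hypothetical helper name):

#include <unistd.h>

/* Close anything that was opened and mark the slot unused again. */
static void fd_table__close_all(int *fds, int ncpus, int nthreads)
{
	for (int cpu = 0; cpu < ncpus; cpu++) {
		for (int thread = 0; thread < nthreads; thread++) {
			int *fd = &fds[cpu * nthreads + thread];

			if (*fd >= 0) {
				close(*fd);
				*fd = -1;       /* clearly "not open" for later checks */
			}
		}
	}
}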
210 if (evsel->fd == NULL) in perf_evsel__close()
219 if (evsel->fd == NULL) in perf_evsel__close_cpu()
229 if (evsel->fd == NULL || evsel->mmap == NULL) in perf_evsel__munmap()
232 for (idx = 0; idx < xyarray__max_x(evsel->fd); idx++) { in perf_evsel__munmap()
233 for (thread = 0; thread < xyarray__max_y(evsel->fd); thread++) { in perf_evsel__munmap()
234 int *fd = FD(evsel, idx, thread); in perf_evsel__munmap() local
236 if (fd == NULL || *fd < 0) in perf_evsel__munmap()
255 if (evsel->fd == NULL || evsel->mmap) in perf_evsel__mmap()
258 if (perf_evsel__alloc_mmap(evsel, xyarray__max_x(evsel->fd), xyarray__max_y(evsel->fd)) < 0) in perf_evsel__mmap()
261 for (idx = 0; idx < xyarray__max_x(evsel->fd); idx++) { in perf_evsel__mmap()
262 for (thread = 0; thread < xyarray__max_y(evsel->fd); thread++) { in perf_evsel__mmap()
263 int *fd = FD(evsel, idx, thread); in perf_evsel__mmap() local
267 if (fd == NULL || *fd < 0) in perf_evsel__mmap()
273 ret = perf_mmap__mmap(map, &mp, *fd, cpu); in perf_evsel__mmap()
286 int *fd = FD(evsel, cpu_map_idx, thread); in perf_evsel__mmap_base() local
288 if (fd == NULL || *fd < 0 || MMAP(evsel, cpu_map_idx, thread) == NULL) in perf_evsel__mmap_base()
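The mmap paths are gated on the same slots: perf_evsel__mmap() and perf_evsel__munmap() skip any (CPU, thread) pair whose fd slot is NULL or negative, perf_mmap__mmap() maps the ring buffer on each open descriptor, and perf_evsel__mmap_base() refuses to return a base pointer unless both the fd and the mapping exist. A hedged sketch of mapping one perf fd directly with mmap(2); the helper name is an assumption, but the 1 + 2^n page size rule and MAP_SHARED are how perf ring buffers are mapped.

#include <linux/perf_event.h>
#include <sys/mman.h>
#include <unistd.h>

/* Map one metadata page (struct perf_event_mmap_page) plus
 * 2^pow2_pages data pages for a single perf event fd. */
static struct perf_event_mmap_page *mmap_perf_fd(int fd, int pow2_pages)
{
	size_t page_size = (size_t)sysconf(_SC_PAGESIZE);
	size_t len = (1 + (1UL << pow2_pages)) * page_size;
	void *base = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);

	if (base == MAP_FAILED)
		return NULL;

	return (struct perf_event_mmap_page *)base;  /* first page is the control page */
}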
327 int *fd = FD(evsel, cpu_map_idx, thread); in perf_evsel__read_group() local
332 if (fd == NULL || *fd < 0) in perf_evsel__read_group()
339 if (readn(*fd, data, size) <= 0) { in perf_evsel__read_group()
393 int *fd = FD(evsel, cpu_map_idx, thread); in perf_evsel__read() local
399 if (fd == NULL || *fd < 0) in perf_evsel__read()
410 if (readn(*fd, buf.values, size) <= 0) in perf_evsel__read()
420 int *fd = FD(evsel, cpu_map_idx, thread); in perf_evsel__ioctl() local
422 if (fd == NULL || *fd < 0) in perf_evsel__ioctl()
425 return ioctl(*fd, ioc, arg); in perf_evsel__ioctl()
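Both read paths reduce to a read(2) on the slot's fd into a buffer laid out according to attr.read_format: perf_evsel__read_group() pulls the whole group's values through the leader's fd in one readn(), perf_evsel__read() reads a single counter, and perf_evsel__ioctl() is a thin ioctl(2) wrapper on the same descriptor. A hedged sketch of reading one counter opened with PERF_FORMAT_TOTAL_TIME_ENABLED | PERF_FORMAT_TOTAL_TIME_RUNNING (the struct below matches that read_format; the names are hypothetical):

#include <stdint.h>
#include <unistd.h>

/* Layout returned by read(2) for a single event with
 * read_format = TOTAL_TIME_ENABLED | TOTAL_TIME_RUNNING. */
struct counter_value {
	uint64_t value;
	uint64_t time_enabled;
	uint64_t time_running;	/* enabled vs running is used for multiplex scaling */
};

static int read_one_counter(int fd, struct counter_value *out)
{
	ssize_t n = read(fd, out, sizeof(*out));

	return n == (ssize_t)sizeof(*out) ? 0 : -1;   /* short read => error */
}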
434 for (thread = 0; thread < xyarray__max_y(evsel->fd); thread++) { in perf_evsel__run_ioctl()
469 for (i = 0; i < xyarray__max_x(evsel->fd) && !err; i++) in perf_evsel__enable()
484 for (i = 0; i < xyarray__max_x(evsel->fd) && !err; i++) in perf_evsel__disable()
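perf_evsel__run_ioctl() issues one ioctl per thread for a given CPU index, and perf_evsel__enable()/perf_evsel__disable() repeat that across every CPU index, so enabling or disabling an event is just PERF_EVENT_IOC_ENABLE / PERF_EVENT_IOC_DISABLE on each open fd. A standalone, hedged end-to-end sketch with a single fd, using only the raw syscall and ioctls (not libperf):

#define _GNU_SOURCE
#include <linux/perf_event.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/syscall.h>
#include <unistd.h>

int main(void)
{
	struct perf_event_attr attr;
	long long count;
	int fd;

	memset(&attr, 0, sizeof(attr));
	attr.size = sizeof(attr);
	attr.type = PERF_TYPE_SOFTWARE;
	attr.config = PERF_COUNT_SW_TASK_CLOCK;
	attr.disabled = 1;                      /* start stopped, enable via ioctl */

	/* pid = 0: this thread, cpu = -1: any CPU, no group, no flags. */
	fd = (int)syscall(SYS_perf_event_open, &attr, 0, -1, -1, 0);
	if (fd < 0) {
		perror("perf_event_open");
		return 1;
	}

	ioctl(fd, PERF_EVENT_IOC_RESET, 0);
	ioctl(fd, PERF_EVENT_IOC_ENABLE, 0);    /* what perf_evsel__enable() does per fd */

	for (volatile long i = 0; i < 1000000; i++)
		;                               /* some work to be counted */

	ioctl(fd, PERF_EVENT_IOC_DISABLE, 0);   /* what perf_evsel__disable() does per fd */

	if (read(fd, &count, sizeof(count)) == sizeof(count))
		printf("task-clock: %lld ns\n", count);

	close(fd);
	return 0;
}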