Lines matching "seed" (each shown with its source line number and enclosing function):
4 * SPDX-License-Identifier: GPL-2.0-or-later
12 * Two mutator threads change the non-fixed protection bits randomly.
28 #define PAGE_IDX_MASK (PAGE_COUNT - 1)
34 #define PAGES_PER_REGION (1 << (PAGE_IDX_BITS - REGION_IDX_BITS))
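The two macros above split a power-of-two page index into a region part and a per-region part, so a random value can be masked into range and mapped to a region with plain bit arithmetic. Below is a minimal, standalone illustration of that math; the PAGE_IDX_BITS and REGION_IDX_BITS values are assumptions for the example, not taken from the test, and the division by PAGES_PER_REGION is one natural way to use the macro rather than something shown in the listing.

#include <stdio.h>

/* Assumed values for illustration only; the real test defines its own. */
#define PAGE_IDX_BITS    10
#define REGION_IDX_BITS  2
#define PAGE_COUNT       (1 << PAGE_IDX_BITS)
#define PAGE_IDX_MASK    (PAGE_COUNT - 1)
#define PAGES_PER_REGION (1 << (PAGE_IDX_BITS - REGION_IDX_BITS))

int main(void)
{
	unsigned int idx = 123456u & PAGE_IDX_MASK;   /* clamp any value into [0, PAGE_COUNT) */
	unsigned int region = idx / PAGES_PER_REGION; /* which region that page falls in */

	printf("page index %u -> region %u of %d\n", idx, region, 1 << REGION_IDX_BITS);
	return 0;
}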
50 for (i = 0; ctx->mutator_count; i++) { in thread_read()
54 p = &ctx->ptr[j * ctx->pagesize]; in thread_read()
64 sret = write(ctx->dev_null_fd, p, 1); in thread_read()
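thread_read() (lines 50-64) probes every page by handing a single byte from it to write() on /dev/null: if the page is currently readable the call succeeds, and if a mutator has just revoked PROT_READ the kernel returns EFAULT instead of delivering SIGSEGV. The following is a minimal single-threaded sketch of that probing trick; the fixed page count and the explicit PROT_NONE page are assumptions of the sketch, not the test.

#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/mman.h>
#include <unistd.h>

#define NPAGES 16

int main(void)
{
	long pagesize = sysconf(_SC_PAGESIZE);
	char *buf = mmap(NULL, NPAGES * pagesize, PROT_READ | PROT_WRITE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	int dev_null_fd = open("/dev/null", O_WRONLY);

	if (buf == MAP_FAILED || dev_null_fd < 0) {
		perror("setup");
		return 1;
	}

	/* Revoke read access on one page so the probe below sees EFAULT. */
	mprotect(buf + 3 * pagesize, pagesize, PROT_NONE);

	for (int j = 0; j < NPAGES; j++) {
		char *p = &buf[j * pagesize];
		/* write() has to read one byte from the page; EFAULT means "not readable now". */
		ssize_t sret = write(dev_null_fd, p, 1);

		if (sret < 0 && errno == EFAULT)
			printf("page %d is not readable\n", j);
		else if (sret < 0)
			perror("write");
	}

	close(dev_null_fd);
	munmap(buf, NPAGES * pagesize);
	return 0;
}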
85 for (i = 0; ctx->mutator_count; i++) { in thread_write()
89 memcpy(&ctx->ptr[j * ctx->pagesize], nop_func, sizeof(nop_func)); in thread_write()
92 ts = (struct timespec *)(&ctx->ptr[(j + 1) * ctx->pagesize] - in thread_write()
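thread_write() (lines 85-92) dirties each page by copying the bytes of nop_func to its start and stamping a struct timespec just below the start of the following page, i.e. at the tail of the current one. The sketch below reproduces that layout single-threaded; the one-byte nop_func stand-in (an x86-64 ret) and the sizeof(*ts) tail offset are assumptions, since line 92 is truncated in the listing.

#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <time.h>
#include <unistd.h>

#define NPAGES 16

/* Stand-in for the test's nop_func: a single x86-64 "ret" instruction. */
static const unsigned char nop_func[] = { 0xc3 };

int main(void)
{
	long pagesize = sysconf(_SC_PAGESIZE);
	char *buf = mmap(NULL, NPAGES * pagesize, PROT_READ | PROT_WRITE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

	if (buf == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	for (int j = 0; j < NPAGES; j++) {
		struct timespec *ts;

		/* Code bytes at the head of the page... */
		memcpy(&buf[j * pagesize], nop_func, sizeof(nop_func));

		/* ...and a timestamp tucked against the end of the same page. */
		ts = (struct timespec *)(&buf[(j + 1) * pagesize] - sizeof(*ts));
		clock_gettime(CLOCK_MONOTONIC, ts);
	}

	printf("wrote %d pages\n", NPAGES);
	munmap(buf, NPAGES * pagesize);
	return 0;
}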
109 for (i = 0; ctx->mutator_count; i++) { in thread_execute()
111 ((void(*)(void))&ctx->ptr[j * ctx->pagesize])(); in thread_execute()
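thread_execute() (line 111) casts the start of a page to a void (*)(void) pointer and calls it, which only returns cleanly while the page holds valid code and PROT_EXEC. In the real test the writer thread supplies the code and a mutator may grant execute permission; the sketch below sets both up explicitly and assumes an x86-64 host, where a single 0xc3 (ret) byte is a complete function. Other architectures would need different opcodes and possibly an explicit instruction-cache flush.

#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

/* x86-64 only: a function body consisting of a single "ret". */
static const unsigned char ret_insn[] = { 0xc3 };

int main(void)
{
	long pagesize = sysconf(_SC_PAGESIZE);
	unsigned char *page = mmap(NULL, pagesize, PROT_READ | PROT_WRITE,
				   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

	if (page == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	memcpy(page, ret_insn, sizeof(ret_insn));

	/* Flip the page to executable, then call it the way the test does. */
	if (mprotect(page, pagesize, PROT_READ | PROT_EXEC)) {
		perror("mprotect");
		return 1;
	}
	((void (*)(void))page)();

	puts("executed the page and returned");
	munmap(page, pagesize);
	return 0;
}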
121 unsigned int seed; in thread_mutate() local
124 seed = (unsigned int)time(NULL); in thread_mutate()
126 start_idx = rand_r(&seed) & PAGE_IDX_MASK; in thread_mutate()
127 end_idx = rand_r(&seed) & PAGE_IDX_MASK; in thread_mutate()
133 prot = rand_r(&seed) & (PROT_READ | PROT_WRITE | PROT_EXEC); in thread_mutate()
147 ret = mprotect(&ctx->ptr[start_idx * ctx->pagesize], in thread_mutate()
148 (end_idx - start_idx + 1) * ctx->pagesize, prot); in thread_mutate()
152 __atomic_fetch_sub(&ctx->mutator_count, 1, __ATOMIC_SEQ_CST); in thread_mutate()
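thread_mutate() (lines 121-152) seeds rand_r() from the wall clock, repeatedly picks a random page range and a random PROT_READ/PROT_WRITE/PROT_EXEC combination, applies it with a single mprotect() call, and finally decrements ctx->mutator_count atomically so the reader, writer and execute threads fall out of their for (i = 0; ctx->mutator_count; i++) loops. Below is a condensed, single-threaded sketch of the mutation step; the page count, iteration count and the start/end ordering are assumptions standing in for the elided lines 128-146.

#include <stdio.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <time.h>
#include <unistd.h>

#define PAGE_IDX_BITS 6                     /* assumed: 64 pages */
#define PAGE_COUNT    (1 << PAGE_IDX_BITS)
#define PAGE_IDX_MASK (PAGE_COUNT - 1)

int main(void)
{
	long pagesize = sysconf(_SC_PAGESIZE);
	char *buf = mmap(NULL, PAGE_COUNT * pagesize, PROT_NONE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	unsigned int seed = (unsigned int)time(NULL);

	if (buf == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	for (int i = 0; i < 100; i++) {
		unsigned int start_idx = rand_r(&seed) & PAGE_IDX_MASK;
		unsigned int end_idx = rand_r(&seed) & PAGE_IDX_MASK;
		int prot = rand_r(&seed) & (PROT_READ | PROT_WRITE | PROT_EXEC);

		/* Keep the range well-formed; the real test does its own ordering. */
		if (start_idx > end_idx) {
			unsigned int tmp = start_idx;

			start_idx = end_idx;
			end_idx = tmp;
		}

		if (mprotect(&buf[start_idx * pagesize],
			     (end_idx - start_idx + 1) * pagesize, prot))
			perror("mprotect");
	}

	puts("done mutating");
	munmap(buf, PAGE_COUNT * pagesize);
	return 0;
}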
173 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); in main()
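main() (line 173) backs the whole test with one private anonymous mapping, and the worker threads shut down through the mutator_count handshake: each mutator finishes by decrementing the counter atomically (line 152), which terminates the for (i = 0; ctx->mutator_count; i++) loops in the other threads. The sketch below shows that handshake in isolation with one worker and one mutator; the thread bodies and iteration count are invented for the example, and it reads the counter with __atomic_load_n where the listing reads ctx->mutator_count directly.

#include <pthread.h>
#include <stdio.h>
#include <sys/mman.h>
#include <unistd.h>

/* Shared context, mirroring the ctx->ptr / ctx->mutator_count fields in the listing. */
struct ctx {
	char *ptr;
	long pagesize;
	int mutator_count;
};

static void *worker(void *arg)
{
	struct ctx *ctx = arg;
	unsigned long i;

	/* Same idiom as lines 50/85/109: run until every mutator has finished. */
	for (i = 0; __atomic_load_n(&ctx->mutator_count, __ATOMIC_SEQ_CST); i++)
		ctx->ptr[0] = (char)i;	/* keep touching the mapping */

	return NULL;
}

static void *mutator(void *arg)
{
	struct ctx *ctx = arg;

	for (int i = 0; i < 100000; i++)
		mprotect(ctx->ptr, ctx->pagesize, PROT_READ | PROT_WRITE);

	/* Announce completion the way line 152 does. */
	__atomic_fetch_sub(&ctx->mutator_count, 1, __ATOMIC_SEQ_CST);
	return NULL;
}

int main(void)
{
	struct ctx ctx = { .pagesize = sysconf(_SC_PAGESIZE), .mutator_count = 1 };
	pthread_t w, m;

	ctx.ptr = mmap(NULL, ctx.pagesize, PROT_READ | PROT_WRITE,
		       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (ctx.ptr == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	pthread_create(&w, NULL, worker, &ctx);
	pthread_create(&m, NULL, mutator, &ctx);
	pthread_join(m, NULL);
	pthread_join(w, NULL);

	puts("all threads finished");
	return 0;
}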