Searched refs:vstruct_end (Results 1 – 17 of 17) sorted by relevance
34   entry < (struct jset_entry *) vstruct_end(&clean->field);  in bch2_sb_clean_validate_late()
36   if (vstruct_end(entry) > vstruct_end(&clean->field)) {  in bch2_sb_clean_validate_late()
39   (u64 *) vstruct_end(entry) - (u64 *) vstruct_end(&clean->field));  in bch2_sb_clean_validate_late()
65   end = vstruct_end(&clean->field);  in btree_root_find()
224  entry != vstruct_end(&clean->field);  in bch2_sb_clean_validate()
226  if ((void *) vstruct_next(entry) > vstruct_end(&clean->field)) {  in bch2_sb_clean_validate()
247  entry != vstruct_end(&clean->field);  in bch2_sb_clean_to_text()
249  if ((void *) vstruct_next(entry) > vstruct_end(  in bch2_sb_clean_to_text()
[all...]
296  (void *) _i < vstruct_end(&(_d)->field) && \
297  (void *) &_i->errors[0] <= vstruct_end(&(_d)->field) && \
298  (void *) downgrade_entry_next_c(_i) <= vstruct_end(&(_d)->field); \
307  (void *) i < vstruct_end(&e->field);  in bch2_sb_downgrade_validate()
314  if ((void *) &i->errors[0] > vstruct_end(&e->field))  in bch2_sb_downgrade_validate()
318  (void *) downgrade_entry_next_c(i) > vstruct_end(&e->field)) {  in bch2_sb_downgrade_validate()
68   (void *) (_i) < vstruct_end(&(_r)->field) && (_i)->data_type; \
73   (void *) (_i) < vstruct_end(&(_r)->field) && (_i)->data_type; \
9 ? ((vstruct_end(&bl->field) - (void *) &bl->start[0]) / in blacklist_nr_entries()
307  if ((void *) members_v1_get_mut(mi, sb->nr_devices) > vstruct_end(&mi->field)) {  in bch2_sb_members_v1_validate()
329  if (vstruct_end(&mi->field) <= (void *) &mi->_members[0]) {  in bch2_sb_members_v1_to_text()
334  unsigned nr = (vstruct_end(&mi->field) - (void *) &mi->_members[0]) / sizeof(mi->_members[0]);  in bch2_sb_members_v1_to_text()
353  if (vstruct_end(&mi->field) <= (void *) &mi->_members[0]) {  in bch2_sb_members_v2_to_text()
363  unsigned nr = (vstruct_end(&mi->field) - (void *) &mi->_members[0]) / le16_to_cpu(mi->member_bytes);  in bch2_sb_members_v2_to_text()
12 ? (vstruct_end(&groups->field) - in disk_groups_nr()
47 #define vstruct_end(_s) \ macro
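
The definition hit above is cut off at the line continuation. As a rough sketch of what the helper computes (the struct layout and names below are illustrative assumptions, not the tree's actual vstruct.h): a vstruct is a variable-length struct that carries its own length as a count of u64s, and vstruct_end() resolves to the first byte past that data, which is why every caller above uses it either as an iteration bound or to compute remaining/consumed bytes.

	/* Illustrative only: a vstruct-style field whose length is its u64s count. */
	struct vstruct_example {
		__le32	u64s;	/* size of d[], in 64-bit words */
		__u64	d[];
	};

	/* Hypothetical equivalent of vstruct_end(): one past the last u64 of _s. */
	#define vstruct_example_end(_s) \
		((void *) ((_s)->d + le32_to_cpu((_s)->u64s)))
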
101 ? ((vstruct_end(&r->field) - (void *) &r->start[0]) / in recovery_passes_nr_entries()
631  vstruct_end(&sb_r->field) -  in bch2_cpu_replicas_to_sb_replicas_v0()
642  BUG_ON((void *) dst > vstruct_end(&sb_r->field));  in bch2_cpu_replicas_to_sb_replicas_v0()
676  vstruct_end(&sb_r->field) -  in bch2_cpu_replicas_to_sb_replicas()
685  BUG_ON((void *) dst > vstruct_end(&sb_r->field));  in bch2_cpu_replicas_to_sb_replicas()
26 return (__le64 *) vstruct_end(&ctrs->field) - &ctrs->d[0]; in bch2_sb_counter_nr_entries()
126 vstruct_end(i) - (void *) i->_data); in bset_encrypt()
144  src = vstruct_end(f);  in __bch2_sb_field_resize()
148  dst = vstruct_end(f);  in __bch2_sb_field_resize()
153  memmove(dst, src, vstruct_end(sb->sb) - src);  in __bch2_sb_field_resize()
334   BUG_ON(vstruct_end(&out->keys) > (void *) out + bytes);  in btree_node_sort()
692   (u64 *) vstruct_end(i) - (u64 *) k);  in bch2_btree_node_drop_keys_outside_node()
1042  memmove_u64s_down(k, (u64 *) k + next_good_key, (u64 *) vstruct_end(i) - (u64 *) k);  in validate_bset_keys()
1311  (u64 *) vstruct_end(i) - (u64 *) k);  in bch2_btree_node_read_done()
2464  bytes_to_write = vstruct_end(i) - data;  in __bch2_btree_node_write()
78 vstruct_end(inmemory) - (void *) inmemory->start)) { in bch2_btree_verify_replica()
1165  vstruct_end(j) - (void *) j->encrypted_start);  in journal_read_bucket()
2070  vstruct_end(jset) - (void *) jset->encrypted_start);  in bch2_journal_write_checksum()
568 entry != vstruct_end(&clean->field); in journal_replay_early()
983 unsigned bytes = vstruct_end(&b->data->keys) - (void *) b->data; in bch2_btree_update_add_new_node()
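
Most of the hits above are the same bounds-checking idiom: walk the entries of a variable-length field, stop at vstruct_end(), and reject any entry whose vstruct_next() would step past it (see bch2_sb_clean_validate() and journal_replay_early() above). A minimal sketch of that pattern, with the surrounding variable names assumed rather than copied from the tree:

	struct jset_entry *entry;

	for (entry = clean->start;
	     entry != vstruct_end(&clean->field);
	     entry = vstruct_next(entry)) {
		if ((void *) vstruct_next(entry) > vstruct_end(&clean->field))
			return -EINVAL;		/* entry would overrun the field */

		/* ... process entry ... */
	}
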