Lines Matching full:pointers
1004 * Both @faila and @failb should be valid pointers if any of in get_rbio_veritical_errors()
1187 void **pointers = rbio->finish_pointers; in generate_pq_vertical() local
1196 pointers[stripe] = kmap_local_page(sector->page) + in generate_pq_vertical()
1203 pointers[stripe++] = kmap_local_page(sector->page) + sector->pgoff; in generate_pq_vertical()
1212 pointers[stripe++] = kmap_local_page(sector->page) + in generate_pq_vertical()
1216 pointers); in generate_pq_vertical()
1219 memcpy(pointers[rbio->nr_data], pointers[0], sectorsize); in generate_pq_vertical()
1220 run_xor(pointers + 1, rbio->nr_data - 1, sectorsize); in generate_pq_vertical()
1223 kunmap_local(pointers[stripe]); in generate_pq_vertical()
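
The generate_pq_vertical() hits above show the core pattern: every stripe's sector is mapped with kmap_local_page() into a per-stripe pointers[] array, RAID6 then fills P and Q via the gen_syndrome path, while RAID5 copies data stripe 0 into the P slot and XORs the remaining data stripes in with run_xor(). Below is a minimal userspace sketch of the RAID5 branch under those assumptions; plain buffers stand in for the mapped pages, and run_xor_sketch()/generate_p_sketch() are illustrative names, not kernel functions (the kernel's run_xor() is built on xor_blocks()).

	#include <stddef.h>
	#include <stdint.h>
	#include <string.h>

	/*
	 * Sketch of run_xor(): XOR pages[0 .. src_cnt - 1] into pages[src_cnt].
	 * The kernel helper has the same shape but goes through xor_blocks().
	 */
	static void run_xor_sketch(uint8_t **pages, int src_cnt, size_t len)
	{
		uint8_t *dest = pages[src_cnt];

		for (int i = 0; i < src_cnt; i++)
			for (size_t off = 0; off < len; off++)
				dest[off] ^= pages[i][off];
	}

	/*
	 * RAID5 parity for one vertical stripe, mirroring the memcpy() +
	 * run_xor() pair in generate_pq_vertical(): P starts as a copy of
	 * data stripe 0, then stripes 1 .. nr_data - 1 are XOR-ed in.
	 * pointers[0 .. nr_data - 1] are data sectors, pointers[nr_data] is P.
	 */
	static void generate_p_sketch(uint8_t **pointers, int nr_data,
				      size_t sectorsize)
	{
		memcpy(pointers[nr_data], pointers[0], sectorsize);
		run_xor_sketch(pointers + 1, nr_data - 1, sectorsize);
	}

The RAID6 path instead hands the whole mapped array to the gen_syndrome call, which writes both the P and Q slots in one pass.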
1714 * @*pointers are the pre-allocated pointers by the caller, so we don't
1715 * need to allocate/free the pointers again and again.
1718 void **pointers, void **unmap_array) in recover_vertical() argument
1750 * Setup our array of pointers with sectors from each stripe in recover_vertical()
1752 * NOTE: store a duplicate array of pointers to preserve the in recover_vertical()
1766 pointers[stripe_nr] = kmap_local_page(sector->page) + in recover_vertical()
1768 unmap_array[stripe_nr] = pointers[stripe_nr]; in recover_vertical()
1814 faila, pointers); in recover_vertical()
1817 faila, failb, pointers); in recover_vertical()
1826 memcpy(pointers[faila], pointers[rbio->nr_data], sectorsize); in recover_vertical()
1829 p = pointers[faila]; in recover_vertical()
1832 pointers[stripe_nr] = pointers[stripe_nr + 1]; in recover_vertical()
1833 pointers[rbio->nr_data - 1] = p; in recover_vertical()
1836 run_xor(pointers, rbio->nr_data - 1, sectorsize); in recover_vertical()
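
recover_vertical() builds the same kind of pointer array (plus unmap_array, an unreordered copy kept purely for the later kunmap_local() pass), then picks a recovery strategy: raid6_datap_recov()/raid6_2data_recov() from lib/raid6 when Q is involved, or, for a single lost data sector, the P-based rebuild shown at lines 1826-1836. The following sketch of that XOR path continues the illustrative helpers above; recover_one_data_sketch() is not a kernel name.

	/*
	 * Sketch of the single-data-failure path in recover_vertical():
	 * the lost sector equals P XOR (all surviving data sectors).
	 * pointers[0 .. nr_data - 1] are data, pointers[nr_data] is P,
	 * faila is the index of the failed data sector.
	 */
	static void recover_one_data_sketch(uint8_t **pointers, int nr_data,
					    int faila, size_t sectorsize)
	{
		uint8_t *p;

		/* Start the failed sector off as a copy of P. */
		memcpy(pointers[faila], pointers[nr_data], sectorsize);

		/* Rotate so the failed buffer becomes the XOR destination. */
		p = pointers[faila];
		for (int i = faila; i < nr_data - 1; i++)
			pointers[i] = pointers[i + 1];
		pointers[nr_data - 1] = p;

		/* Fold the surviving data sectors in. */
		run_xor_sketch(pointers, nr_data - 1, sectorsize);
	}

The rotation is the reason unmap_array exists at all: after recovery, pointers[] no longer lines up with the stripes it was mapped from, so unmapping has to go through the preserved copy.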
1875 void **pointers = NULL; in recover_sectors() local
1881 * @pointers array stores the pointer for each sector. in recover_sectors()
1883 * @unmap_array stores copy of pointers that does not get reordered in recover_sectors()
1886 pointers = kcalloc(rbio->real_stripes, sizeof(void *), GFP_NOFS); in recover_sectors()
1888 if (!pointers || !unmap_array) { in recover_sectors()
1902 ret = recover_vertical(rbio, sectornr, pointers, unmap_array); in recover_sectors()
1908 kfree(pointers); in recover_sectors()
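
recover_sectors() itself just provides the scratch space: two kcalloc()'d arrays sized by real_stripes, one handed to recover_vertical() for each sector number and one preserved for unmapping, both freed at the end. The same shape in userspace terms, continuing the sketch file above (calloc()/free() stand in for kcalloc(..., GFP_NOFS)/kfree(); the per-sector call is left as a comment because it needs the rbio state):

	#include <errno.h>
	#include <stdlib.h>

	static int recover_sectors_sketch(int real_stripes, int nr_sectors)
	{
		void **pointers = calloc(real_stripes, sizeof(void *));
		void **unmap_array = calloc(real_stripes, sizeof(void *));
		int ret = 0;

		if (!pointers || !unmap_array) {
			ret = -ENOMEM;
			goto out;
		}

		for (int sectornr = 0; sectornr < nr_sectors; sectornr++) {
			/* ret = recover_vertical(rbio, sectornr, pointers, unmap_array); */
			if (ret < 0)
				break;
		}
	out:
		free(pointers);
		free(unmap_array);
		return ret;
	}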
2403 void **pointers = rbio->finish_pointers; in finish_parity_scrub() local
2456 pointers[rbio->real_stripes - 1] = kmap_local_page(q_sector.page); in finish_parity_scrub()
2462 pointers[nr_data] = kmap_local_page(p_sector.page); in finish_parity_scrub()
2471 pointers[stripe] = kmap_local_page(sector->page) + in finish_parity_scrub()
2478 pointers); in finish_parity_scrub()
2481 memcpy(pointers[nr_data], pointers[0], sectorsize); in finish_parity_scrub()
2482 run_xor(pointers + 1, nr_data - 1, sectorsize); in finish_parity_scrub()
2488 if (memcmp(parity, pointers[rbio->scrubp], sectorsize) != 0) in finish_parity_scrub()
2489 memcpy(parity, pointers[rbio->scrubp], sectorsize); in finish_parity_scrub()
2496 kunmap_local(pointers[stripe]); in finish_parity_scrub()
2499 kunmap_local(pointers[nr_data]); in finish_parity_scrub()
2503 kunmap_local(pointers[rbio->real_stripes - 1]); in finish_parity_scrub()
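
finish_parity_scrub() reuses the mapping pattern, but computes P (and Q on RAID6) into spare pages, compares the result against the stripe selected by rbio->scrubp, and only copies the freshly computed parity over the on-disk version when the two differ, so a stripe whose parity is already correct is never rewritten. A sketch of the P-only comparison, reusing generate_p_sketch() from above (scrub_p_sketch() and its return convention are illustrative):

	/*
	 * Recompute P into the spare buffer at pointers[nr_data], then repair
	 * the mapped on-disk parity sector only if it does not match.
	 */
	static int scrub_p_sketch(uint8_t **pointers, int nr_data,
				  uint8_t *parity, size_t sectorsize)
	{
		generate_p_sketch(pointers, nr_data, sectorsize);

		if (memcmp(parity, pointers[nr_data], sectorsize) != 0) {
			memcpy(parity, pointers[nr_data], sectorsize);
			return 1;	/* parity was stale and has been rewritten */
		}
		return 0;		/* parity already matched */
	}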
2560 void **pointers = NULL; in recover_scrub_rbio() local
2566 * @pointers array stores the pointer for each sector. in recover_scrub_rbio()
2568 * @unmap_array stores copy of pointers that does not get reordered in recover_scrub_rbio()
2571 pointers = kcalloc(rbio->real_stripes, sizeof(void *), GFP_NOFS); in recover_scrub_rbio()
2573 if (!pointers || !unmap_array) { in recover_scrub_rbio()
2632 ret = recover_vertical(rbio, sector_nr, pointers, unmap_array); in recover_scrub_rbio()
2637 kfree(pointers); in recover_scrub_rbio()
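
recover_scrub_rbio() repeats the recover_sectors() scratch-array pattern when a scrub read needs a sector rebuilt. As a usage example only, the sketches above can be exercised end to end; the RAID5-only setup, buffer sizes, and stripe count below are picked arbitrarily for illustration.

	#include <assert.h>
	#include <stdio.h>
	#include <stdlib.h>
	#include <string.h>

	int main(void)
	{
		const size_t sectorsize = 4096;
		const int nr_data = 3;		/* three data stripes plus P */
		uint8_t *pointers[4], *saved;

		for (int i = 0; i <= nr_data; i++) {
			pointers[i] = malloc(sectorsize);
			memset(pointers[i], i + 1, sectorsize);
		}
		generate_p_sketch(pointers, nr_data, sectorsize);

		/* Lose data stripe 1, then rebuild it from the survivors and P. */
		saved = malloc(sectorsize);
		memcpy(saved, pointers[1], sectorsize);
		memset(pointers[1], 0, sectorsize);
		recover_one_data_sketch(pointers, nr_data, 1, sectorsize);

		/* The rotation leaves the rebuilt buffer at pointers[nr_data - 1]. */
		assert(memcmp(saved, pointers[nr_data - 1], sectorsize) == 0);
		puts("sector recovered");
		return 0;
	}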