Lines Matching +full:cs +full:- +full:out

1 // SPDX-License-Identifier: GPL-2.0-or-later
20 /* all 32-bit counters here */
37 /* all 64-bit items here */
43 /* non-counter state must go at the end for clearall */
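The three layout comments above describe the rules for the per-type counter structure: 32-bit counters first, 64-bit items next, and non-counter state (the lock) last so everything before it can be cleared in one pass. A minimal sketch of a structure that satisfies those rules, using field names taken from the accessors seen later in this listing (the exact layout and field order in the source may differ):

/* Sketch only: layout inferred from the comments and accessors in this listing. */
struct xchk_scrub_stats {
	/* all 32-bit counters here */
	uint32_t		invocations;
	uint32_t		clean;
	uint32_t		corrupt;
	uint32_t		preen;
	uint32_t		xfail;
	uint32_t		xcorrupt;
	uint32_t		incomplete;
	uint32_t		warning;
	uint32_t		retries;
	uint32_t		repair_invocations;
	uint32_t		repair_success;

	/* all 64-bit items here */
	uint64_t		checktime_us;
	uint64_t		repairtime_us;

	/* non-counter state must go at the end for clearall */
	spinlock_t		css_lock;
};

struct xchk_stats {
	struct dentry			*cs_debugfs;
	struct xchk_scrub_stats		cs_stats[XFS_SCRUB_TYPE_NR];
};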
92 struct xchk_stats *cs, in xchk_stats_format() argument
96 struct xchk_scrub_stats *css = &cs->cs_stats[0]; in xchk_stats_format()
108 (unsigned int)css->invocations, in xchk_stats_format()
109 (unsigned int)css->clean, in xchk_stats_format()
110 (unsigned int)css->corrupt, in xchk_stats_format()
111 (unsigned int)css->preen, in xchk_stats_format()
112 (unsigned int)css->xfail, in xchk_stats_format()
113 (unsigned int)css->xcorrupt, in xchk_stats_format()
114 (unsigned int)css->incomplete, in xchk_stats_format()
115 (unsigned int)css->warning, in xchk_stats_format()
116 (unsigned int)css->retries, in xchk_stats_format()
117 (unsigned long long)css->checktime_us, in xchk_stats_format()
118 (unsigned int)css->repair_invocations, in xchk_stats_format()
119 (unsigned int)css->repair_success, in xchk_stats_format()
120 (unsigned long long)css->repairtime_us); in xchk_stats_format()
124 remaining -= ret; in xchk_stats_format()
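xchk_stats_format() appears to walk every scrub type, printing one line of space-separated counters per type and advancing through the output buffer; the "remaining -= ret" shown above is the buffer-space bookkeeping after each print. A hedged sketch of that loop, assuming scnprintf() and printing the raw type index rather than whatever naming scheme the real code uses:

/* Sketch only: the real function may differ in format string and error handling. */
static ssize_t
xchk_stats_format(
	struct xchk_stats	*cs,
	char			*buf,
	size_t			remaining)
{
	struct xchk_scrub_stats	*css = &cs->cs_stats[0];
	unsigned int		i;
	ssize_t			copied = 0;
	int			ret = 0;

	for (i = 0; i < XFS_SCRUB_TYPE_NR; i++, css++) {
		if (!css->invocations)
			continue;

		/* one line per scrub type: counters first, then times */
		ret = scnprintf(buf, remaining,
				"%u %u %u %u %u %u %u %u %u %u %llu %u %u %llu\n",
				i,
				(unsigned int)css->invocations,
				(unsigned int)css->clean,
				(unsigned int)css->corrupt,
				(unsigned int)css->preen,
				(unsigned int)css->xfail,
				(unsigned int)css->xcorrupt,
				(unsigned int)css->incomplete,
				(unsigned int)css->warning,
				(unsigned int)css->retries,
				(unsigned long long)css->checktime_us,
				(unsigned int)css->repair_invocations,
				(unsigned int)css->repair_success,
				(unsigned long long)css->repairtime_us);
		if (ret <= 0)
			break;

		remaining -= ret;
		copied += ret;
		buf += ret;
	}

	return copied > 0 ? copied : ret;
}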
135 struct xchk_stats *cs) in xchk_stats_estimate_bufsize() argument
137 struct xchk_scrub_stats *css = &cs->cs_stats[0]; in xchk_stats_estimate_bufsize()
147 field_width += 21 * ((offsetof(struct xchk_scrub_stats, css_lock) - in xchk_stats_estimate_bufsize()
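The fragment from xchk_stats_estimate_bufsize() shows the worst-case line width being derived from the structure layout itself: the 64-bit region is the span ending at css_lock, and each 64-bit item needs at most 20 digits plus a separator, hence the factor of 21. A sketch of how such an estimate could be assembled, assuming the 32-bit region ends where a hypothetical first 64-bit field (checktime_us) begins:

/* Sketch: worst-case text size for one formatted stats snapshot. */
static size_t
xchk_stats_estimate_bufsize(
	struct xchk_stats	*cs)
{
	struct xchk_scrub_stats	*css = &cs->cs_stats[0];
	unsigned int		i;
	size_t			field_width;
	size_t			ret = 0;

	/* 32-bit counters: up to 10 digits plus one separator each */
	field_width = 11 * (offsetof(struct xchk_scrub_stats, checktime_us) /
			    sizeof(uint32_t));

	/* 64-bit items: up to 20 digits plus one separator each */
	field_width += 21 * ((offsetof(struct xchk_scrub_stats, css_lock) -
			      offsetof(struct xchk_scrub_stats, checktime_us)) /
			     sizeof(uint64_t));

	/* one line (fields plus newline) for each scrub type with activity */
	for (i = 0; i < XFS_SCRUB_TYPE_NR; i++, css++) {
		if (!css->invocations)
			continue;
		ret += field_width + 1;
	}

	return ret;
}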
168 struct xchk_stats *cs) in xchk_stats_clearall() argument
170 struct xchk_scrub_stats *css = &cs->cs_stats[0]; in xchk_stats_clearall()
174 spin_lock(&css->css_lock); in xchk_stats_clearall()
176 spin_unlock(&css->css_lock); in xchk_stats_clearall()
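Together with the "non-counter state must go at the end for clearall" comment, the lock/unlock pair here suggests that clearing a type's stats is a memset of everything that precedes css_lock, done under the lock. A sketch under that assumption:

/* Sketch: zero every counter while leaving the lock itself intact. */
static void
xchk_stats_clearall(
	struct xchk_stats	*cs)
{
	struct xchk_scrub_stats	*css = &cs->cs_stats[0];
	unsigned int		i;

	for (i = 0; i < XFS_SCRUB_TYPE_NR; i++, css++) {
		spin_lock(&css->css_lock);
		memset(css, 0, offsetof(struct xchk_scrub_stats, css_lock));
		spin_unlock(&css->css_lock);
	}
}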
189 struct xchk_stats *cs, in xchk_stats_merge_one() argument
195 if (sm->sm_type >= XFS_SCRUB_TYPE_NR) { in xchk_stats_merge_one()
196 ASSERT(sm->sm_type < XFS_SCRUB_TYPE_NR); in xchk_stats_merge_one()
200 css = &cs->cs_stats[sm->sm_type]; in xchk_stats_merge_one()
201 spin_lock(&css->css_lock); in xchk_stats_merge_one()
202 css->invocations++; in xchk_stats_merge_one()
203 if (!(sm->sm_flags & XFS_SCRUB_OFLAG_UNCLEAN)) in xchk_stats_merge_one()
204 css->clean++; in xchk_stats_merge_one()
205 if (sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_stats_merge_one()
206 css->corrupt++; in xchk_stats_merge_one()
207 if (sm->sm_flags & XFS_SCRUB_OFLAG_PREEN) in xchk_stats_merge_one()
208 css->preen++; in xchk_stats_merge_one()
209 if (sm->sm_flags & XFS_SCRUB_OFLAG_XFAIL) in xchk_stats_merge_one()
210 css->xfail++; in xchk_stats_merge_one()
211 if (sm->sm_flags & XFS_SCRUB_OFLAG_XCORRUPT) in xchk_stats_merge_one()
212 css->xcorrupt++; in xchk_stats_merge_one()
213 if (sm->sm_flags & XFS_SCRUB_OFLAG_INCOMPLETE) in xchk_stats_merge_one()
214 css->incomplete++; in xchk_stats_merge_one()
215 if (sm->sm_flags & XFS_SCRUB_OFLAG_WARNING) in xchk_stats_merge_one()
216 css->warning++; in xchk_stats_merge_one()
217 css->retries += run->retries; in xchk_stats_merge_one()
218 css->checktime_us += howmany_64(run->scrub_ns, NSEC_PER_USEC); in xchk_stats_merge_one()
220 if (run->repair_attempted) in xchk_stats_merge_one()
221 css->repair_invocations++; in xchk_stats_merge_one()
222 if (run->repair_succeeded) in xchk_stats_merge_one()
223 css->repair_success++; in xchk_stats_merge_one()
224 css->repairtime_us += howmany_64(run->repair_ns, NSEC_PER_USEC); in xchk_stats_merge_one()
225 spin_unlock(&css->css_lock); in xchk_stats_merge_one()
228 /* Merge these scrub-run stats into the global and mount stat data. */
236 xchk_stats_merge_one(mp->m_scrub_stats, sm, run); in xchk_stats_merge()
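xchk_stats_merge_one() validates sm->sm_type, then translates the scrub outcome flags and runtimes into the per-type counters under css_lock. The caller shown above folds the run into the per-mount stats; given the "global and mount" comment, the same helper is presumably also called on a filesystem-global stats object. A sketch, with the global object's name being an assumption:

/* Sketch: fold one scrub run into both the global and per-mount stats. */
void
xchk_stats_merge(
	struct xfs_mount		*mp,
	const struct xfs_scrub_metadata	*sm,
	const struct xchk_stats_run	*run)
{
	xchk_stats_merge_one(&global_stats, sm, run);	/* assumed name */
	xchk_stats_merge_one(mp->m_scrub_stats, sm, run);
}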
248 struct xchk_stats *cs = file->private_data; in xchk_scrub_stats_read() local
261 bufsize = xchk_stats_estimate_bufsize(cs); in xchk_scrub_stats_read()
265 return -ENOMEM; in xchk_scrub_stats_read()
267 avail = xchk_stats_format(cs, buf, bufsize); in xchk_scrub_stats_read()
270 goto out; in xchk_scrub_stats_read()
274 out: in xchk_scrub_stats_read()
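xchk_scrub_stats_read() is a debugfs read handler: it sizes a buffer from the current counters, formats a snapshot into it, and copies the text out to userspace. A sketch of the likely shape, assuming the usual kvmalloc()/simple_read_from_buffer() pattern and a single-shot read at file position zero:

/* Sketch: debugfs read of a one-shot, formatted stats snapshot. */
static ssize_t
xchk_scrub_stats_read(
	struct file		*file,
	char __user		*ubuf,
	size_t			count,
	loff_t			*ppos)
{
	struct xchk_stats	*cs = file->private_data;
	char			*buf;
	size_t			bufsize;
	ssize_t			avail, ret;

	/* format once; subsequent reads see EOF */
	if (*ppos > 0)
		return 0;

	bufsize = xchk_stats_estimate_bufsize(cs);

	buf = kvmalloc(bufsize, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	avail = xchk_stats_format(cs, buf, bufsize);
	if (avail < 0) {
		ret = avail;
		goto out;
	}

	ret = simple_read_from_buffer(ubuf, count, ppos, buf, avail);
out:
	kvfree(buf);
	return ret;
}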
291 struct xchk_stats *cs = file->private_data; in xchk_clear_scrub_stats_write() local
300 return -EINVAL; in xchk_clear_scrub_stats_write()
302 xchk_stats_clearall(cs); in xchk_clear_scrub_stats_write()
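The clear_stats handler rejects unexpected input (-EINVAL above) before calling xchk_stats_clearall(). A sketch assuming the common kstrtouint_from_user() parse-then-compare pattern; the exact trigger value is a guess:

/* Sketch: debugfs write handler that clears all counters on demand. */
static ssize_t
xchk_clear_scrub_stats_write(
	struct file		*file,
	const char __user	*ubuf,
	size_t			count,
	loff_t			*ppos)
{
	struct xchk_stats	*cs = file->private_data;
	unsigned int		val;
	int			ret;

	ret = kstrtouint_from_user(ubuf, count, 0, &val);
	if (ret)
		return ret;

	if (val != 1)	/* assumed trigger value */
		return -EINVAL;

	xchk_stats_clearall(cs);
	return count;
}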
314 struct xchk_stats *cs, in xchk_stats_init() argument
317 struct xchk_scrub_stats *css = &cs->cs_stats[0]; in xchk_stats_init()
321 spin_lock_init(&css->css_lock); in xchk_stats_init()
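Initialization only needs to prepare the non-counter state: the object comes from kvzalloc() (see the allocation path below), so the counters start at zero and the per-type spinlocks are all that is left to set up. A sketch assuming one spin_lock_init() per scrub type:

/* Sketch: initialize the per-type locks; counters arrive already zeroed. */
static int
xchk_stats_init(
	struct xchk_stats	*cs,
	struct xfs_mount	*mp)
{
	struct xchk_scrub_stats	*css = &cs->cs_stats[0];
	unsigned int		i;

	for (i = 0; i < XFS_SCRUB_TYPE_NR; i++, css++)
		spin_lock_init(&css->css_lock);

	return 0;
}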
329 struct xchk_stats *cs, in xchk_stats_register() argument
335 cs->cs_debugfs = xfs_debugfs_mkdir("scrub", parent); in xchk_stats_register()
336 if (!cs->cs_debugfs) in xchk_stats_register()
339 debugfs_create_file("stats", 0444, cs->cs_debugfs, cs, in xchk_stats_register()
341 debugfs_create_file("clear_stats", 0200, cs->cs_debugfs, cs, in xchk_stats_register()
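Registration creates a "scrub" debugfs directory under the caller-supplied parent and exposes two files in it: a read-only "stats" file (mode 0444) and a write-only "clear_stats" file (mode 0200), both with the xchk_stats pointer as private data. A sketch; the file_operations names are assumptions:

/* Sketch: hook the stats object up to debugfs. */
void
xchk_stats_register(
	struct xchk_stats	*cs,
	struct dentry		*parent)
{
	if (!parent)
		return;

	cs->cs_debugfs = xfs_debugfs_mkdir("scrub", parent);
	if (!cs->cs_debugfs)
		return;

	debugfs_create_file("stats", 0444, cs->cs_debugfs, cs,
			&scrub_stats_fops);		/* assumed name */
	debugfs_create_file("clear_stats", 0200, cs->cs_debugfs, cs,
			&clear_scrub_stats_fops);	/* assumed name */
}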
348 struct xchk_stats *cs) in xchk_stats_teardown() argument
356 struct xchk_stats *cs) in xchk_stats_unregister() argument
358 debugfs_remove(cs->cs_debugfs); in xchk_stats_unregister()
384 /* Allocate per-mount stats */
389 struct xchk_stats *cs; in xchk_mount_stats_alloc() local
392 cs = kvzalloc(sizeof(struct xchk_stats), GFP_KERNEL); in xchk_mount_stats_alloc()
393 if (!cs) in xchk_mount_stats_alloc()
394 return -ENOMEM; in xchk_mount_stats_alloc()
396 error = xchk_stats_init(cs, mp); in xchk_mount_stats_alloc()
400 mp->m_scrub_stats = cs; in xchk_mount_stats_alloc()
403 kvfree(cs); in xchk_mount_stats_alloc()
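The per-mount allocation path is visible almost in full: kvzalloc() the stats object, run xchk_stats_init() on it, publish it in mp->m_scrub_stats on success, and kvfree() it on the error path. A sketch that simply connects those fragments:

/* Sketch: allocate and install the per-mount scrub stats object. */
int
xchk_mount_stats_alloc(
	struct xfs_mount	*mp)
{
	struct xchk_stats	*cs;
	int			error;

	cs = kvzalloc(sizeof(struct xchk_stats), GFP_KERNEL);
	if (!cs)
		return -ENOMEM;

	error = xchk_stats_init(cs, mp);
	if (error)
		goto out_free;

	mp->m_scrub_stats = cs;
	return 0;

out_free:
	kvfree(cs);
	return error;
}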
407 /* Free per-mount stats */
412 xchk_stats_teardown(mp->m_scrub_stats); in xchk_mount_stats_free()
413 kvfree(mp->m_scrub_stats); in xchk_mount_stats_free()
414 mp->m_scrub_stats = NULL; in xchk_mount_stats_free()