Searched refs:work_queued (Results 1 – 10 of 10) sorted by relevance
fs/hfs/super.c
    65    sbi->work_queued = 0;        in flush_mdb()
    80    if (!sbi->work_queued) {     in hfs_mark_mdb_dirty()
    83    sbi->work_queued = 1;        in hfs_mark_mdb_dirty()
fs/hfs/hfs_fs.h
   164    int work_queued;        /* non-zero delayed work is queued */      member
   166    spinlock_t work_lock;   /* protects mdb_work and work_queued */
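The hfs hits above, and the affs, ufs, and hfsplus hits below, are all the same lazy-writeback idiom: work_queued records whether a delayed flush is already pending, and work_lock serializes the test-and-set against the work function clearing the flag. A minimal sketch of that shared shape, with hypothetical names (my_sb_info, my_flush_work, my_mark_sb_dirty) standing in for each filesystem's own:

    #include <linux/fs.h>
    #include <linux/jiffies.h>
    #include <linux/spinlock.h>
    #include <linux/workqueue.h>
    #include <linux/writeback.h>    /* dirty_writeback_interval */

    struct my_sb_info {
            struct super_block *sb;
            struct delayed_work sync_work;  /* superblock flush, INIT_DELAYED_WORK'd at mount */
            spinlock_t work_lock;           /* protects sync_work and work_queued */
            int work_queued;                /* non-zero if the delayed work is queued */
    };

    /* Delayed-work callback: clear the flag, then write the superblock back. */
    static void my_flush_work(struct work_struct *work)
    {
            struct my_sb_info *sbi =
                    container_of(work, struct my_sb_info, sync_work.work);

            /*
             * Clear work_queued before flushing, not after: a modification
             * that races with the writeback can then re-queue the work
             * instead of being silently dropped.
             */
            spin_lock(&sbi->work_lock);
            sbi->work_queued = 0;
            spin_unlock(&sbi->work_lock);

            /* ... write the dirty superblock out here ... */
    }

    /* Called whenever the in-core superblock is modified. */
    static void my_mark_sb_dirty(struct super_block *sb)
    {
            struct my_sb_info *sbi = sb->s_fs_info;
            unsigned long delay;

            if (sb_rdonly(sb))
                    return;

            spin_lock(&sbi->work_lock);
            if (!sbi->work_queued) {
                    /* dirty_writeback_interval is in centiseconds, hence * 10 */
                    delay = msecs_to_jiffies(dirty_writeback_interval * 10);
                    queue_delayed_work(system_long_wq, &sbi->sync_work, delay);
                    sbi->work_queued = 1;
            }
            spin_unlock(&sbi->work_lock);
    }

Repeated dirtying inside the delay window collapses into a single flush, which is the point of the flag.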
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c
   570    bool work_queued = false;    in amdgpu_dm_irq_schedule_work()      local
   577    work_queued = true;          in amdgpu_dm_irq_schedule_work()
   582    if (!work_queued) {          in amdgpu_dm_irq_schedule_work()
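In the amdgpu result, work_queued is a local rather than a struct member: amdgpu_dm_irq_schedule_work() leans on queue_work() returning false when a work item is already pending. A rough sketch of that shape, with a hypothetical handler type standing in for the driver's own (the real function walks a per-IRQ handler list and, in the !work_queued fallback, clones a handler entry so the interrupt is not lost):

    #include <linux/list.h>
    #include <linux/workqueue.h>

    /* Hypothetical stand-in for struct amdgpu_dm_irq_handler_data. */
    struct irq_handler_data {
            struct list_head list;
            struct work_struct work;
    };

    static void schedule_irq_work(struct list_head *handler_list)
    {
            struct irq_handler_data *hd;
            bool work_queued = false;

            if (list_empty(handler_list))
                    return;

            list_for_each_entry(hd, handler_list, list) {
                    /*
                     * queue_work() returns true only if it actually put
                     * the item on the queue; false means it was still
                     * pending from an earlier interrupt.
                     */
                    if (queue_work(system_highpri_wq, &hd->work)) {
                            work_queued = true;
                            break;
                    }
            }

            if (!work_queued) {
                    /*
                     * Every handler's work item was already queued: the
                     * driver allocates a copy of a handler entry and
                     * queues that instead, so this interrupt still gets
                     * serviced.
                     */
            }
    }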
fs/affs/super.c
    74    sbi->work_queued = 0;        in flush_superblock()
    89    if (!sbi->work_queued) {     in affs_mark_sb_dirty()
    92    sbi->work_queued = 1;        in affs_mark_sb_dirty()
fs/affs/affs.h
   103    int work_queued;        /* non-zero delayed work is queued */      member
   105    spinlock_t work_lock;   /* protects sb_work and work_queued */
fs/ufs/ufs.h
    30    int work_queued;        /* non-zero if the delayed work is queued */   member
    32    spinlock_t work_lock;   /* protects sync_work and work_queued */
fs/ufs/super.c
   662    sbi->work_queued = 0;        in delayed_sync_fs()
   674    if (!sbi->work_queued) {     in ufs_mark_sb_dirty()
   677    sbi->work_queued = 1;        in ufs_mark_sb_dirty()
fs/hfsplus/super.c
   254    sbi->work_queued = 0;        in delayed_sync_fs()
   271    if (!sbi->work_queued) {     in hfsplus_mark_mdb_dirty()
   274    sbi->work_queued = 1;        in hfsplus_mark_mdb_dirty()
fs/hfsplus/hfsplus_fs.h
   192    int work_queued;        /* non-zero delayed work is queued */      member
   194    spinlock_t work_lock;   /* protects sync_work and work_queued */
drivers/net/xen-netfront.c
  1541    unsigned int work_queued;                                       in xennet_handle_rx()   local
  1548    work_queued = XEN_RING_NR_UNCONSUMED_RESPONSES(&queue->rx);     in xennet_handle_rx()
  1549    if (work_queued > queue->rx_rsp_unconsumed) {                   in xennet_handle_rx()
  1550    queue->rx_rsp_unconsumed = work_queued;                         in xennet_handle_rx()
  1552    } else if (unlikely(work_queued < queue->rx_rsp_unconsumed)) {  in xennet_handle_rx()
  1563    if (likely(netif_carrier_ok(queue->info->netdev) && work_queued))   in xennet_handle_rx()
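In xennet_handle_rx() the name means something different again: work_queued counts unconsumed responses on the shared RX ring, and comparing it against the cached rx_rsp_unconsumed distinguishes new work from a backend whose producer index ran backwards (the real code then logs an alert and disables the queue). A condensed sketch, assuming the netfront types from this file and a hypothetical ring_unconsumed_responses() helper in place of the XEN_RING_NR_UNCONSUMED_RESPONSES() macro:

    #include <linux/netdevice.h>

    static bool handle_rx_event(struct netfront_queue *queue)
    {
            unsigned int work_queued;

            /* How many responses has the backend produced that we
             * have not consumed yet? */
            work_queued = ring_unconsumed_responses(&queue->rx);

            if (work_queued > queue->rx_rsp_unconsumed) {
                    /* Genuinely new responses: remember the new count. */
                    queue->rx_rsp_unconsumed = work_queued;
            } else if (unlikely(work_queued < queue->rx_rsp_unconsumed)) {
                    /*
                     * The unconsumed count only shrinks when we consume
                     * responses, so a drop here means the backend moved
                     * its producer index backwards: treat the ring as
                     * broken and stop using it.
                     */
                    return false;
            }

            /* Only poke NAPI when the link is up and there is work to do. */
            if (likely(netif_carrier_ok(queue->info->netdev) && work_queued))
                    napi_schedule(&queue->napi);

            return true;
    }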