
Searched refs:queue_mask (Results 1 – 19 of 19) sorted by relevance

/linux-6.1.9/drivers/soc/ti/
knav_qmss_acc.c
283 cmd->command, cmd->queue_mask, cmd->list_dma, in knav_acc_write()
289 writel_relaxed(cmd->queue_mask, &pdsp->acc_command->queue_mask); in knav_acc_write()
308 u32 queue_mask; in knav_acc_setup_cmd() local
313 queue_mask = BIT(range->num_queues) - 1; in knav_acc_setup_cmd()
317 queue_mask = 0; in knav_acc_setup_cmd()
322 cmd->queue_mask = queue_mask; in knav_acc_setup_cmd()
knav_qmss.h
89 u32 queue_mask; member
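
The knav entries above derive a contiguous mask from a queue count: BIT(range->num_queues) - 1 sets the low num_queues bits, one per queue in the range. A minimal userspace sketch of that arithmetic (the BIT macro is reimplemented locally and the values are demo inputs, not kernel state):

#include <stdint.h>
#include <stdio.h>

#define BIT(n) (1U << (n))

/* BIT(n) - 1 sets the low n bits: one bit per queue in the range.
 * Assumes num_queues < 32, as the kernel's u32 mask does.
 */
static uint32_t make_range_mask(unsigned int num_queues)
{
        return BIT(num_queues) - 1;
}

int main(void)
{
        /* 4 queues -> 0xf, matching queue_mask = BIT(range->num_queues) - 1 */
        printf("mask for 4 queues: 0x%x\n", (unsigned int)make_range_mask(4));
        return 0;
}
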
/linux-6.1.9/drivers/gpu/drm/amd/amdkfd/
kfd_packet_manager_vi.c
136 packet->queue_mask_lo = lower_32_bits(res->queue_mask); in pm_set_resources_vi()
137 packet->queue_mask_hi = upper_32_bits(res->queue_mask); in pm_set_resources_vi()
kfd_packet_manager_v9.c
176 packet->queue_mask_lo = lower_32_bits(res->queue_mask); in pm_set_resources_v9()
177 packet->queue_mask_hi = upper_32_bits(res->queue_mask); in pm_set_resources_v9()
kfd_device_queue_manager.c
1431 res.queue_mask = 0; in set_sched_resources()
1447 if (WARN_ON(i >= (sizeof(res.queue_mask)*8))) { in set_sched_resources()
1452 res.queue_mask |= 1ull in set_sched_resources()
1462 res.vmid_mask, res.queue_mask); in set_sched_resources()
kfd_priv.h
595 uint64_t queue_mask; member
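
Both packet managers split the 64-bit resource mask into two 32-bit packet fields. A standalone sketch of the lower_32_bits()/upper_32_bits() arithmetic (the macros are reimplemented locally; the mask value is a demo input):

#include <stdint.h>
#include <stdio.h>

/* Local equivalents of the kernel's lower_32_bits()/upper_32_bits() */
#define lower_32_bits(v) ((uint32_t)((uint64_t)(v) & 0xffffffffu))
#define upper_32_bits(v) ((uint32_t)((uint64_t)(v) >> 32))

int main(void)
{
        uint64_t queue_mask = 0x0000000100000003ull; /* demo: queues 0, 1 and 32 */

        printf("lo 0x%08x  hi 0x%08x\n",
               (unsigned int)lower_32_bits(queue_mask),
               (unsigned int)upper_32_bits(queue_mask));
        return 0;
}
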
/linux-6.1.9/drivers/gpu/drm/amd/amdgpu/
amdgpu_gfx.c
508 uint64_t queue_mask = 0; in amdgpu_gfx_enable_kcq() local
521 if (WARN_ON(i > (sizeof(queue_mask)*8))) { in amdgpu_gfx_enable_kcq()
526 queue_mask |= (1ull << amdgpu_queue_mask_bit_to_set_resource_bit(adev, i)); in amdgpu_gfx_enable_kcq()
542 queue_mask = ~0ULL; in amdgpu_gfx_enable_kcq()
544 kiq->pmf->kiq_set_resources(kiq_ring, queue_mask); in amdgpu_gfx_enable_kcq()
amdgpu_gfx.h
82 uint64_t queue_mask);
gfx_v8_0.c
4345 uint64_t queue_mask = 0; in gfx_v8_0_kiq_kcq_enable() local
4355 if (WARN_ON(i >= (sizeof(queue_mask)*8))) { in gfx_v8_0_kiq_kcq_enable()
4360 queue_mask |= (1ull << i); in gfx_v8_0_kiq_kcq_enable()
4371 amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask)); /* queue mask lo */ in gfx_v8_0_kiq_kcq_enable()
4372 amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask)); /* queue mask hi */ in gfx_v8_0_kiq_kcq_enable()
gfx_v11_0.c
130 static void gfx11_kiq_set_resources(struct amdgpu_ring *kiq_ring, uint64_t queue_mask) in gfx11_kiq_set_resources() argument
135 amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask)); /* queue mask lo */ in gfx11_kiq_set_resources()
136 amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask)); /* queue mask hi */ in gfx11_kiq_set_resources()
gfx_v9_0.c
765 uint64_t queue_mask) in gfx_v9_0_kiq_set_resources() argument
773 lower_32_bits(queue_mask)); /* queue mask lo */ in gfx_v9_0_kiq_set_resources()
775 upper_32_bits(queue_mask)); /* queue mask hi */ in gfx_v9_0_kiq_set_resources()
gfx_v10_0.c
3510 static void gfx10_kiq_set_resources(struct amdgpu_ring *kiq_ring, uint64_t queue_mask) in gfx10_kiq_set_resources() argument
3515 amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask)); /* queue mask lo */ in gfx10_kiq_set_resources()
3516 amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask)); /* queue mask hi */ in gfx10_kiq_set_resources()
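
Across these amdgpu files the pattern is the same: build a 64-bit queue_mask one ring at a time, guard against setting a bit past the mask width, then hand the mask to the KIQ set_resources packet as lo/hi halves. A userspace sketch of the build-and-guard step (the ring count and identity bit mapping are made up for the demo; the kernel maps each ring index through amdgpu_queue_mask_bit_to_set_resource_bit()):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t queue_mask = 0;
        unsigned int num_rings = 8; /* demo ring count */

        for (unsigned int i = 0; i < num_rings; i++) {
                /* refuse to shift past the mask width, as the WARN_ON does */
                if (i >= sizeof(queue_mask) * 8) {
                        fprintf(stderr, "ring %u exceeds mask width\n", i);
                        break;
                }
                queue_mask |= 1ull << i; /* identity bit mapping for the demo */
        }
        printf("queue_mask 0x%016llx\n", (unsigned long long)queue_mask);
        return 0;
}
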
/linux-6.1.9/drivers/net/ethernet/marvell/
mv643xx_eth.c
2258 u8 queue_mask; in mv643xx_eth_poll() local
2269 queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx; in mv643xx_eth_poll()
2271 queue_mask |= mp->work_rx_refill; in mv643xx_eth_poll()
2273 if (!queue_mask) { in mv643xx_eth_poll()
2279 queue = fls(queue_mask) - 1; in mv643xx_eth_poll()
2280 queue_mask = 1 << queue; in mv643xx_eth_poll()
2286 if (mp->work_tx_end & queue_mask) { in mv643xx_eth_poll()
2288 } else if (mp->work_tx & queue_mask) { in mv643xx_eth_poll()
2291 } else if (mp->work_rx & queue_mask) { in mv643xx_eth_poll()
2293 } else if (!mp->oom && (mp->work_rx_refill & queue_mask)) { in mv643xx_eth_poll()
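The mv643xx poll loop services the highest-numbered queue with pending work first: fls(queue_mask) - 1 picks that queue and 1 << queue isolates its bit for the work-type tests. A sketch of that selection loop, assuming a GCC/Clang __builtin_clz for the fls() equivalent:

#include <stdint.h>
#include <stdio.h>

/* fls() equivalent: 1-based index of the most-significant set bit,
 * 0 for an empty word (relies on the GCC/Clang __builtin_clz builtin).
 */
static int fls32(uint32_t x)
{
        return x ? 32 - __builtin_clz(x) : 0;
}

int main(void)
{
        uint8_t work = 0x15; /* demo: work pending on queues 0, 2 and 4 */

        while (work) {
                int queue = fls32(work) - 1;      /* highest pending queue */
                uint8_t queue_mask = 1u << queue; /* isolate that queue's bit */

                printf("servicing queue %d (mask 0x%02x)\n", queue, queue_mask);
                work &= ~queue_mask; /* demo: mark the queue's work done */
        }
        return 0;
}
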
/linux-6.1.9/net/ethtool/
ioctl.c
2477 DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE); in ethtool_get_per_queue_coalesce()
2484 bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask, in ethtool_get_per_queue_coalesce()
2487 for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) { in ethtool_get_per_queue_coalesce()
2510 DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE); in ethtool_set_per_queue_coalesce()
2518 bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask, MAX_NUM_QUEUE); in ethtool_set_per_queue_coalesce()
2519 n_queue = bitmap_weight(queue_mask, MAX_NUM_QUEUE); in ethtool_set_per_queue_coalesce()
2524 for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) { in ethtool_set_per_queue_coalesce()
2553 for_each_set_bit(i, queue_mask, bit) { in ethtool_set_per_queue_coalesce()
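Both ioctl paths convert the user-supplied u32 array into a kernel bitmap and then visit each set bit. A plain-C sketch of that iteration over a single 64-bit word (apply_coalesce() is a hypothetical stand-in for the per-queue driver call; the kernel actually uses DECLARE_BITMAP() and for_each_set_bit()):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the per-queue coalesce call */
static void apply_coalesce(unsigned int queue)
{
        printf("configuring queue %u\n", queue);
}

int main(void)
{
        /* demo mask: queues 1 and 3, as in the ice.rst example below */
        uint64_t queue_mask = 0xa;

        for (unsigned int bit = 0; bit < 64; bit++)
                if (queue_mask & (1ull << bit))
                        apply_coalesce(bit);
        return 0;
}
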
/linux-6.1.9/drivers/net/ethernet/cadence/
macb_main.c
3833 unsigned int *queue_mask, in macb_probe_queues() argument
3836 *queue_mask = 0x1; in macb_probe_queues()
3849 *queue_mask |= readl_relaxed(mem + GEM_DCFG6) & 0xff; in macb_probe_queues()
3850 *num_queues = hweight32(*queue_mask); in macb_probe_queues()
3969 if (!(bp->queue_mask & (1 << hw_q))) in macb_init()
4851 unsigned int queue_mask, num_queues; in macb_probe() local
4886 macb_probe_queues(mem, native_io, &queue_mask, &num_queues); in macb_probe()
4910 bp->queue_mask = queue_mask; in macb_probe()
macb.h
1259 unsigned int queue_mask; member
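
macb_probe_queues() seeds the mask with queue 0, ORs in the low byte of GEM_DCFG6, and counts queues with hweight32(). A sketch of the same arithmetic, assuming __builtin_popcount as the hweight32() equivalent and a demo register value:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint32_t dcfg6 = 0x07;                      /* demo stand-in for GEM_DCFG6 */
        uint32_t queue_mask = 0x1 | (dcfg6 & 0xff); /* queue 0 always exists */
        unsigned int num_queues = __builtin_popcount(queue_mask);

        printf("queue_mask 0x%x -> %u queues\n", queue_mask, num_queues);
        return 0;
}
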
/linux-6.1.9/net/sched/
sch_taprio.c
1187 u32 i, queue_mask = 0; in tc_map_to_queue_mask() local
1198 queue_mask |= GENMASK(offset + count - 1, offset); in tc_map_to_queue_mask()
1201 return queue_mask; in tc_map_to_queue_mask()
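tc_map_to_queue_mask() turns each traffic class's queue offset and count into a contiguous run of bits via GENMASK(offset + count - 1, offset). A userspace sketch with a local 32-bit GENMASK equivalent and demo values:

#include <stdint.h>
#include <stdio.h>

/* 32-bit equivalent of the kernel's GENMASK(h, l): bits l..h set */
#define GENMASK32(h, l) (((~0u) >> (31 - (h))) & ((~0u) << (l)))

int main(void)
{
        unsigned int offset = 2, count = 3; /* demo: TC owns queues 2..4 */
        uint32_t queue_mask = GENMASK32(offset + count - 1, offset);

        printf("queue_mask 0x%x\n", queue_mask); /* prints 0x1c */
        return 0;
}
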
/linux-6.1.9/include/uapi/linux/
ethtool.h
1468 __u32 queue_mask[__KERNEL_DIV_ROUND_UP(MAX_NUM_QUEUE, 32)]; member
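
The UAPI stores the mask as an array of u32 words, one bit per queue, with the word count rounded up. A sketch of that sizing arithmetic (DIV_ROUND_UP reimplemented locally; MAX_NUM_QUEUE is 4096 in this header, giving 128 words):

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))
#define MAX_NUM_QUEUE 4096 /* value from the same header */

int main(void)
{
        /* one u32 word per 32 queues, rounded up: 4096 / 32 = 128 words */
        printf("queue_mask words: %d\n", DIV_ROUND_UP(MAX_NUM_QUEUE, 32));
        return 0;
}
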
/linux-6.1.9/Documentation/networking/device_drivers/ethernet/intel/
ice.rst
977 # ethtool --per-queue <ethX> queue_mask 0xa --coalesce adaptive-rx off
982 # ethtool --per-queue <ethX> queue_mask 0xa --show-coalesce
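
In both commands the queue_mask value 0xa has bits 1 and 3 set, so the coalesce operations target queues 1 and 3.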