
Searched refs:hw_data (Results 1 – 25 of 67) sorted by relevance


/linux-6.1.9/drivers/crypto/qat/qat_c3xxx/
adf_c3xxx_hw_data.c
88 void adf_init_hw_data_c3xxx(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c3xxx() argument
90 hw_data->dev_class = &c3xxx_class; in adf_init_hw_data_c3xxx()
91 hw_data->instance_id = c3xxx_class.instances++; in adf_init_hw_data_c3xxx()
92 hw_data->num_banks = ADF_C3XXX_ETR_MAX_BANKS; in adf_init_hw_data_c3xxx()
93 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c3xxx()
94 hw_data->num_accel = ADF_C3XXX_MAX_ACCELERATORS; in adf_init_hw_data_c3xxx()
95 hw_data->num_logical_accel = 1; in adf_init_hw_data_c3xxx()
96 hw_data->num_engines = ADF_C3XXX_MAX_ACCELENGINES; in adf_init_hw_data_c3xxx()
97 hw_data->tx_rx_gap = ADF_GEN2_RX_RINGS_OFFSET; in adf_init_hw_data_c3xxx()
98 hw_data->tx_rings_mask = ADF_GEN2_TX_RINGS_MASK; in adf_init_hw_data_c3xxx()
[all …]
adf_drv.c
77 struct adf_hw_device_data *hw_data; in adf_probe() local
118 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
120 if (!hw_data) { in adf_probe()
125 accel_dev->hw_device = hw_data; in adf_probe()
129 &hw_data->fuses); in adf_probe()
131 &hw_data->straps); in adf_probe()
134 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
135 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
136 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
138 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
[all …]
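
The two qat_c3xxx files above show the shape shared by all of the QAT GEN2 hits that follow: a per-device adf_init_hw_data_*() helper fills a struct adf_hw_device_data with static device limits and callbacks, and adf_probe() allocates that structure, attaches it to the accelerator device, and derives the accelerator/engine masks through the callbacks. The sketch below is a minimal, self-contained rendering of that split; the my_* names, the trimmed-down struct, and the MY_* constants are illustrative stand-ins, not the kernel's definitions.

/* Illustrative sketch only: a trimmed-down hw_data table and the
 * init/probe split visible in the search hits. The real definitions
 * live in adf_accel_devices.h and the per-device *_hw_data.c files. */
#include <stdint.h>
#include <stdlib.h>

struct my_hw_device_data {
	uint32_t num_banks;
	uint32_t num_rings_per_bank;
	uint32_t num_accel;
	uint32_t num_engines;
	uint32_t accel_mask;
	uint32_t ae_mask;
	uint32_t (*get_accel_mask)(struct my_hw_device_data *self);
	uint32_t (*get_ae_mask)(struct my_hw_device_data *self);
};

#define MY_MAX_BANKS        16
#define MY_RINGS_PER_BANK   16
#define MY_MAX_ACCELERATORS 3
#define MY_MAX_ACCELENGINES 6

static uint32_t my_get_accel_mask(struct my_hw_device_data *self)
{
	/* Real drivers derive this from fuses/straps; assume "all present". */
	return (1u << self->num_accel) - 1;
}

static uint32_t my_get_ae_mask(struct my_hw_device_data *self)
{
	return (1u << self->num_engines) - 1;
}

/* Mirrors adf_init_hw_data_c3xxx(): fill in static device limits. */
static void my_init_hw_data(struct my_hw_device_data *hw_data)
{
	hw_data->num_banks = MY_MAX_BANKS;
	hw_data->num_rings_per_bank = MY_RINGS_PER_BANK;
	hw_data->num_accel = MY_MAX_ACCELERATORS;
	hw_data->num_engines = MY_MAX_ACCELENGINES;
	hw_data->get_accel_mask = my_get_accel_mask;
	hw_data->get_ae_mask = my_get_ae_mask;
}

/* Mirrors the adf_probe() hits: allocate, init, then derive the masks. */
static struct my_hw_device_data *my_probe(void)
{
	struct my_hw_device_data *hw_data = calloc(1, sizeof(*hw_data));

	if (!hw_data)
		return NULL;

	my_init_hw_data(hw_data);
	hw_data->accel_mask = hw_data->get_accel_mask(hw_data);
	hw_data->ae_mask = hw_data->get_ae_mask(hw_data);

	/* Same sanity check as adf_probe(): no accelerators means no device. */
	if (!hw_data->accel_mask || !hw_data->ae_mask) {
		free(hw_data);
		return NULL;
	}
	return hw_data;
}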
/linux-6.1.9/drivers/crypto/qat/qat_c62x/
adf_c62x_hw_data.c
90 void adf_init_hw_data_c62x(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c62x() argument
92 hw_data->dev_class = &c62x_class; in adf_init_hw_data_c62x()
93 hw_data->instance_id = c62x_class.instances++; in adf_init_hw_data_c62x()
94 hw_data->num_banks = ADF_C62X_ETR_MAX_BANKS; in adf_init_hw_data_c62x()
95 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c62x()
96 hw_data->num_accel = ADF_C62X_MAX_ACCELERATORS; in adf_init_hw_data_c62x()
97 hw_data->num_logical_accel = 1; in adf_init_hw_data_c62x()
98 hw_data->num_engines = ADF_C62X_MAX_ACCELENGINES; in adf_init_hw_data_c62x()
99 hw_data->tx_rx_gap = ADF_GEN2_RX_RINGS_OFFSET; in adf_init_hw_data_c62x()
100 hw_data->tx_rings_mask = ADF_GEN2_TX_RINGS_MASK; in adf_init_hw_data_c62x()
[all …]
adf_drv.c
77 struct adf_hw_device_data *hw_data; in adf_probe() local
118 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
120 if (!hw_data) { in adf_probe()
125 accel_dev->hw_device = hw_data; in adf_probe()
129 &hw_data->fuses); in adf_probe()
131 &hw_data->straps); in adf_probe()
134 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
135 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
136 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
138 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
[all …]
/linux-6.1.9/drivers/crypto/qat/qat_c3xxxvf/
adf_c3xxxvf_hw_data.c
60 void adf_init_hw_data_c3xxxiov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c3xxxiov() argument
62 hw_data->dev_class = &c3xxxiov_class; in adf_init_hw_data_c3xxxiov()
63 hw_data->num_banks = ADF_C3XXXIOV_ETR_MAX_BANKS; in adf_init_hw_data_c3xxxiov()
64 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c3xxxiov()
65 hw_data->num_accel = ADF_C3XXXIOV_MAX_ACCELERATORS; in adf_init_hw_data_c3xxxiov()
66 hw_data->num_logical_accel = 1; in adf_init_hw_data_c3xxxiov()
67 hw_data->num_engines = ADF_C3XXXIOV_MAX_ACCELENGINES; in adf_init_hw_data_c3xxxiov()
68 hw_data->tx_rx_gap = ADF_C3XXXIOV_RX_RINGS_OFFSET; in adf_init_hw_data_c3xxxiov()
69 hw_data->tx_rings_mask = ADF_C3XXXIOV_TX_RINGS_MASK; in adf_init_hw_data_c3xxxiov()
70 hw_data->ring_to_svc_map = ADF_GEN2_DEFAULT_RING_TO_SRV_MAP; in adf_init_hw_data_c3xxxiov()
[all …]
adf_drv.c
78 struct adf_hw_device_data *hw_data; in adf_probe() local
112 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
114 if (!hw_data) { in adf_probe()
118 accel_dev->hw_device = hw_data; in adf_probe()
122 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
123 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
124 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
128 hw_data->dev_class->name, pci_name(pdev)); in adf_probe()
/linux-6.1.9/drivers/crypto/qat/qat_dh895xccvf/
adf_dh895xccvf_hw_data.c
60 void adf_init_hw_data_dh895xcciov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_dh895xcciov() argument
62 hw_data->dev_class = &dh895xcciov_class; in adf_init_hw_data_dh895xcciov()
63 hw_data->num_banks = ADF_DH895XCCIOV_ETR_MAX_BANKS; in adf_init_hw_data_dh895xcciov()
64 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_dh895xcciov()
65 hw_data->num_accel = ADF_DH895XCCIOV_MAX_ACCELERATORS; in adf_init_hw_data_dh895xcciov()
66 hw_data->num_logical_accel = 1; in adf_init_hw_data_dh895xcciov()
67 hw_data->num_engines = ADF_DH895XCCIOV_MAX_ACCELENGINES; in adf_init_hw_data_dh895xcciov()
68 hw_data->tx_rx_gap = ADF_DH895XCCIOV_RX_RINGS_OFFSET; in adf_init_hw_data_dh895xcciov()
69 hw_data->tx_rings_mask = ADF_DH895XCCIOV_TX_RINGS_MASK; in adf_init_hw_data_dh895xcciov()
70 hw_data->ring_to_svc_map = ADF_GEN2_DEFAULT_RING_TO_SRV_MAP; in adf_init_hw_data_dh895xcciov()
[all …]
adf_drv.c
78 struct adf_hw_device_data *hw_data; in adf_probe() local
112 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
114 if (!hw_data) { in adf_probe()
118 accel_dev->hw_device = hw_data; in adf_probe()
122 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
123 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
124 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
128 hw_data->dev_class->name, pci_name(pdev)); in adf_probe()
/linux-6.1.9/drivers/crypto/qat/qat_c62xvf/
adf_c62xvf_hw_data.c
60 void adf_init_hw_data_c62xiov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c62xiov() argument
62 hw_data->dev_class = &c62xiov_class; in adf_init_hw_data_c62xiov()
63 hw_data->num_banks = ADF_C62XIOV_ETR_MAX_BANKS; in adf_init_hw_data_c62xiov()
64 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c62xiov()
65 hw_data->num_accel = ADF_C62XIOV_MAX_ACCELERATORS; in adf_init_hw_data_c62xiov()
66 hw_data->num_logical_accel = 1; in adf_init_hw_data_c62xiov()
67 hw_data->num_engines = ADF_C62XIOV_MAX_ACCELENGINES; in adf_init_hw_data_c62xiov()
68 hw_data->tx_rx_gap = ADF_C62XIOV_RX_RINGS_OFFSET; in adf_init_hw_data_c62xiov()
69 hw_data->tx_rings_mask = ADF_C62XIOV_TX_RINGS_MASK; in adf_init_hw_data_c62xiov()
70 hw_data->ring_to_svc_map = ADF_GEN2_DEFAULT_RING_TO_SRV_MAP; in adf_init_hw_data_c62xiov()
[all …]
adf_drv.c
78 struct adf_hw_device_data *hw_data; in adf_probe() local
112 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
114 if (!hw_data) { in adf_probe()
118 accel_dev->hw_device = hw_data; in adf_probe()
122 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
123 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
124 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
128 hw_data->dev_class->name, pci_name(pdev)); in adf_probe()
/linux-6.1.9/drivers/crypto/qat/qat_dh895xcc/
adf_dh895xcc_hw_data.c
199 void adf_init_hw_data_dh895xcc(struct adf_hw_device_data *hw_data) in adf_init_hw_data_dh895xcc() argument
201 hw_data->dev_class = &dh895xcc_class; in adf_init_hw_data_dh895xcc()
202 hw_data->instance_id = dh895xcc_class.instances++; in adf_init_hw_data_dh895xcc()
203 hw_data->num_banks = ADF_DH895XCC_ETR_MAX_BANKS; in adf_init_hw_data_dh895xcc()
204 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_dh895xcc()
205 hw_data->num_accel = ADF_DH895XCC_MAX_ACCELERATORS; in adf_init_hw_data_dh895xcc()
206 hw_data->num_logical_accel = 1; in adf_init_hw_data_dh895xcc()
207 hw_data->num_engines = ADF_DH895XCC_MAX_ACCELENGINES; in adf_init_hw_data_dh895xcc()
208 hw_data->tx_rx_gap = ADF_GEN2_RX_RINGS_OFFSET; in adf_init_hw_data_dh895xcc()
209 hw_data->tx_rings_mask = ADF_GEN2_TX_RINGS_MASK; in adf_init_hw_data_dh895xcc()
[all …]
adf_drv.c
77 struct adf_hw_device_data *hw_data; in adf_probe() local
118 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
120 if (!hw_data) { in adf_probe()
125 accel_dev->hw_device = hw_data; in adf_probe()
129 &hw_data->fuses); in adf_probe()
132 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
133 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
134 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
136 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
137 ((~hw_data->ae_mask) & 0x01)) { in adf_probe()
[all …]
/linux-6.1.9/drivers/crypto/qat/qat_4xxx/
adf_4xxx_hw_data.c
312 void adf_init_hw_data_4xxx(struct adf_hw_device_data *hw_data) in adf_init_hw_data_4xxx() argument
314 hw_data->dev_class = &adf_4xxx_class; in adf_init_hw_data_4xxx()
315 hw_data->instance_id = adf_4xxx_class.instances++; in adf_init_hw_data_4xxx()
316 hw_data->num_banks = ADF_4XXX_ETR_MAX_BANKS; in adf_init_hw_data_4xxx()
317 hw_data->num_banks_per_vf = ADF_4XXX_NUM_BANKS_PER_VF; in adf_init_hw_data_4xxx()
318 hw_data->num_rings_per_bank = ADF_4XXX_NUM_RINGS_PER_BANK; in adf_init_hw_data_4xxx()
319 hw_data->num_accel = ADF_4XXX_MAX_ACCELERATORS; in adf_init_hw_data_4xxx()
320 hw_data->num_engines = ADF_4XXX_MAX_ACCELENGINES; in adf_init_hw_data_4xxx()
321 hw_data->num_logical_accel = 1; in adf_init_hw_data_4xxx()
322 hw_data->tx_rx_gap = ADF_4XXX_RX_RINGS_OFFSET; in adf_init_hw_data_4xxx()
[all …]
adf_drv.c
169 struct adf_hw_device_data *hw_data; in adf_probe() local
205 hw_data = devm_kzalloc(&pdev->dev, sizeof(*hw_data), GFP_KERNEL); in adf_probe()
206 if (!hw_data) { in adf_probe()
211 accel_dev->hw_device = hw_data; in adf_probe()
215 pci_read_config_dword(pdev, ADF_4XXX_FUSECTL4_OFFSET, &hw_data->fuses); in adf_probe()
218 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
219 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
220 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
222 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
223 (~hw_data->ae_mask & 0x01)) { in adf_probe()
[all …]
/linux-6.1.9/drivers/video/backlight/
apple_bl.c
31 struct hw_data { struct
40 static const struct hw_data *hw_data; argument
81 static const struct hw_data intel_chipset_data = {
126 static const struct hw_data nvidia_chipset_data = {
151 hw_data = &intel_chipset_data; in apple_bl_add()
153 hw_data = &nvidia_chipset_data; in apple_bl_add()
157 if (!hw_data) { in apple_bl_add()
164 intensity = hw_data->backlight_ops.get_brightness(NULL); in apple_bl_add()
167 hw_data->set_brightness(1); in apple_bl_add()
168 if (!hw_data->backlight_ops.get_brightness(NULL)) in apple_bl_add()
[all …]
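
apple_bl.c uses the same identifier for something unrelated to QAT: here hw_data is a pointer to a per-chipset ops table chosen once at probe time, and every later call goes through it. Below is a minimal sketch of that dispatch pattern, with placeholder intel/nvidia callbacks rather than the driver's real register accesses, and the nested backlight_ops structure flattened for brevity.

/* Sketch of the apple_bl dispatch pattern: pick a per-chipset ops table,
 * then call through it. The callback bodies are placeholders. */
#include <stdio.h>

struct hw_data {
	int  (*get_brightness)(void);
	void (*set_brightness)(int level);
};

static int  intel_get(void)       { return 42; }
static void intel_set(int level)  { printf("intel: %d\n", level); }
static int  nvidia_get(void)      { return 7; }
static void nvidia_set(int level) { printf("nvidia: %d\n", level); }

static const struct hw_data intel_chipset_data  = { intel_get, intel_set };
static const struct hw_data nvidia_chipset_data = { nvidia_get, nvidia_set };

static const struct hw_data *hw_data;

enum chipset { CHIPSET_INTEL, CHIPSET_NVIDIA, CHIPSET_UNKNOWN };

static int bl_add(enum chipset c)
{
	if (c == CHIPSET_INTEL)
		hw_data = &intel_chipset_data;
	else if (c == CHIPSET_NVIDIA)
		hw_data = &nvidia_chipset_data;

	if (!hw_data)
		return -1;	/* no supported chipset found */

	/* As in apple_bl_add(): if the backlight reads back as off, poke it
	 * once and check again before trusting the hardware. */
	if (!hw_data->get_brightness()) {
		hw_data->set_brightness(1);
		if (!hw_data->get_brightness())
			return -1;
	}
	return 0;
}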
/linux-6.1.9/drivers/crypto/qat/qat_common/
adf_init.c
63 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_dev_init() local
66 if (!hw_data) { in adf_dev_init()
83 if (hw_data->init_device && hw_data->init_device(accel_dev)) { in adf_dev_init()
88 if (hw_data->init_admin_comms && hw_data->init_admin_comms(accel_dev)) { in adf_dev_init()
93 if (hw_data->init_arb && hw_data->init_arb(accel_dev)) { in adf_dev_init()
112 if (hw_data->alloc_irq(accel_dev)) { in adf_dev_init()
118 hw_data->enable_ints(accel_dev); in adf_dev_init()
119 hw_data->enable_error_correction(accel_dev); in adf_dev_init()
121 ret = hw_data->pfvf_ops.enable_comms(accel_dev); in adf_dev_init()
163 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_dev_start() local
[all …]
adf_hw_arbiter.c
20 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_init_arb() local
22 unsigned long ae_mask = hw_data->ae_mask; in adf_init_arb()
28 hw_data->get_arb_info(&info); in adf_init_arb()
39 thd_2_arb_cfg = hw_data->get_arb_mapping(); in adf_init_arb()
41 for_each_set_bit(i, &ae_mask, hw_data->num_engines) in adf_init_arb()
51 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_update_ring_arb() local
53 u32 tx_ring_mask = hw_data->tx_rings_mask; in adf_update_ring_arb()
54 u32 shift = hw_data->tx_rx_gap; in adf_update_ring_arb()
75 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_exit_arb() local
82 hw_data->get_arb_info(&info); in adf_exit_arb()
[all …]
adf_isr.c
24 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_msix() local
25 u32 msix_num_entries = hw_data->num_banks + 1; in adf_enable_msix()
28 if (hw_data->set_msix_rttable) in adf_enable_msix()
29 hw_data->set_msix_rttable(accel_dev); in adf_enable_msix()
126 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_handle_pm_int() local
128 if (hw_data->handle_pm_interrupt && in adf_handle_pm_int()
129 hw_data->handle_pm_interrupt(accel_dev)) in adf_handle_pm_int()
157 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_free_irqs() local
160 int clust_irq = hw_data->num_banks; in adf_free_irqs()
164 for (i = 0; i < hw_data->num_banks; i++) { in adf_free_irqs()
[all …]
adf_gen2_hw_data.c
28 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_gen2_enable_error_correction() local
30 unsigned long accel_mask = hw_data->accel_mask; in adf_gen2_enable_error_correction()
31 unsigned long ae_mask = hw_data->ae_mask; in adf_gen2_enable_error_correction()
35 for_each_set_bit(i, &ae_mask, hw_data->num_engines) { in adf_gen2_enable_error_correction()
45 for_each_set_bit(i, &accel_mask, hw_data->num_accel) { in adf_gen2_enable_error_correction()
213 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_gen2_get_accel_cap() local
215 u32 straps = hw_data->straps; in adf_gen2_get_accel_cap()
216 u32 fuses = hw_data->fuses; in adf_gen2_get_accel_cap()
253 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_gen2_set_ssm_wdtimer() local
257 unsigned long accel_mask = hw_data->accel_mask; in adf_gen2_set_ssm_wdtimer()
[all …]
adf_ctl_drv.c
349 struct adf_hw_device_data *hw_data; in adf_ctl_ioctl_get_status() local
363 hw_data = accel_dev->hw_device; in adf_ctl_ioctl_get_status()
365 dev_info.num_ae = hw_data->get_num_aes(hw_data); in adf_ctl_ioctl_get_status()
366 dev_info.num_accel = hw_data->get_num_accels(hw_data); in adf_ctl_ioctl_get_status()
367 dev_info.num_logical_accel = hw_data->num_logical_accel; in adf_ctl_ioctl_get_status()
368 dev_info.banks_per_accel = hw_data->num_banks in adf_ctl_ioctl_get_status()
369 / hw_data->num_logical_accel; in adf_ctl_ioctl_get_status()
370 strscpy(dev_info.name, hw_data->dev_class->name, sizeof(dev_info.name)); in adf_ctl_ioctl_get_status()
371 dev_info.instance_id = hw_data->instance_id; in adf_ctl_ioctl_get_status()
372 dev_info.type = hw_data->dev_class->type; in adf_ctl_ioctl_get_status()
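
The qat_common hits are the consumer side of the adf_hw_device_data table populated by the per-device drivers above: generic code such as adf_dev_init() NULL-checks each optional hook before calling it, so a device only has to implement what it supports. A simplified sketch of that guarded-hook style, with stand-in types rather than the kernel's:

/* Sketch of the qat_common consumer side: optional hooks in hw_data are
 * NULL-checked before use, mirroring the adf_dev_init() hits above. */
struct accel_dev;

struct hw_device_data {
	int  (*init_device)(struct accel_dev *dev);
	int  (*init_admin_comms)(struct accel_dev *dev);
	int  (*init_arb)(struct accel_dev *dev);
	void (*enable_ints)(struct accel_dev *dev);
};

struct accel_dev {
	struct hw_device_data *hw_device;
};

static int dev_init(struct accel_dev *dev)
{
	struct hw_device_data *hw_data = dev->hw_device;

	if (!hw_data)
		return -1;

	/* Each optional hook is guarded before it is invoked. */
	if (hw_data->init_device && hw_data->init_device(dev))
		return -1;
	if (hw_data->init_admin_comms && hw_data->init_admin_comms(dev))
		return -1;
	if (hw_data->init_arb && hw_data->init_arb(dev))
		return -1;

	if (hw_data->enable_ints)
		hw_data->enable_ints(dev);
	return 0;
}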
/linux-6.1.9/drivers/clk/
clk-lan966x.c
206 struct clk_hw_onecell_data *hw_data, in lan966x_gate_clk_register() argument
214 hw_data->hws[i] = in lan966x_gate_clk_register()
220 if (IS_ERR(hw_data->hws[i])) in lan966x_gate_clk_register()
221 return dev_err_probe(dev, PTR_ERR(hw_data->hws[i]), in lan966x_gate_clk_register()
231 struct clk_hw_onecell_data *hw_data; in lan966x_clk_probe() local
237 hw_data = devm_kzalloc(dev, struct_size(hw_data, hws, N_CLOCKS), in lan966x_clk_probe()
239 if (!hw_data) in lan966x_clk_probe()
248 hw_data->num = GCK_GATE_UHPHS; in lan966x_clk_probe()
252 hw_data->hws[i] = lan966x_gck_clk_register(dev, i); in lan966x_clk_probe()
253 if (IS_ERR(hw_data->hws[i])) { in lan966x_clk_probe()
[all …]
/linux-6.1.9/net/core/
drop_monitor.c
300 net_dm_hw_reset_per_cpu_data(struct per_cpu_dm_data *hw_data) in net_dm_hw_reset_per_cpu_data() argument
312 mod_timer(&hw_data->send_timer, jiffies + HZ / 10); in net_dm_hw_reset_per_cpu_data()
315 spin_lock_irqsave(&hw_data->lock, flags); in net_dm_hw_reset_per_cpu_data()
316 swap(hw_data->hw_entries, hw_entries); in net_dm_hw_reset_per_cpu_data()
317 spin_unlock_irqrestore(&hw_data->lock, flags); in net_dm_hw_reset_per_cpu_data()
408 struct per_cpu_dm_data *hw_data; in net_dm_hw_summary_work() local
412 hw_data = container_of(work, struct per_cpu_dm_data, dm_alert_work); in net_dm_hw_summary_work()
414 hw_entries = net_dm_hw_reset_per_cpu_data(hw_data); in net_dm_hw_summary_work()
441 struct per_cpu_dm_data *hw_data; in net_dm_hw_trap_summary_probe() local
448 hw_data = this_cpu_ptr(&dm_hw_cpu_data); in net_dm_hw_trap_summary_probe()
[all …]
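
In drop_monitor.c, hw_data names a per-CPU bookkeeping structure: the probe path accumulates drop entries into it, and the deferred work handler swaps the entry buffer out under the lock, then reports it outside the lock. A rough userspace sketch of that swap-under-lock idea, using a pthread mutex in place of the kernel spinlock and a placeholder entries structure:

/* Sketch of the drop_monitor pattern: swap the buffer under the lock,
 * process it outside. Userspace stand-ins for kernel primitives. */
#include <pthread.h>
#include <stdlib.h>

struct hw_entries { int count; /* ... accumulated drop records ... */ };

struct per_cpu_dm_data {
	pthread_mutex_t lock;
	struct hw_entries *hw_entries;
};

static struct hw_entries *reset_per_cpu_data(struct per_cpu_dm_data *hw_data)
{
	struct hw_entries *fresh = calloc(1, sizeof(*fresh));
	struct hw_entries *old;

	pthread_mutex_lock(&hw_data->lock);
	old = hw_data->hw_entries;	/* the swap() in the kernel code */
	hw_data->hw_entries = fresh;
	pthread_mutex_unlock(&hw_data->lock);

	return old;			/* caller reports and frees it */
}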
/linux-6.1.9/include/sound/
pcm-indirect.h
17 unsigned int hw_data; /* Offset to next dst (or src) in hw ring buffer */ member
53 unsigned int hw_to_end = rec->hw_buffer_size - rec->hw_data; in snd_pcm_indirect_playback_transfer()
65 rec->hw_data += bytes; in snd_pcm_indirect_playback_transfer()
66 if (rec->hw_data == rec->hw_buffer_size) in snd_pcm_indirect_playback_transfer()
67 rec->hw_data = 0; in snd_pcm_indirect_playback_transfer()
121 size_t hw_to_end = rec->hw_buffer_size - rec->hw_data; in snd_pcm_indirect_capture_transfer()
133 rec->hw_data += bytes; in snd_pcm_indirect_capture_transfer()
134 if ((int)rec->hw_data == rec->hw_buffer_size) in snd_pcm_indirect_capture_transfer()
135 rec->hw_data = 0; in snd_pcm_indirect_capture_transfer()
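
In pcm-indirect.h, hw_data is simply a byte offset into a fixed-size hardware ring buffer: each transfer is clamped to the distance left before the end of the ring, and the offset wraps back to zero when it reaches hw_buffer_size. A small sketch of that bookkeeping, with the actual copy elided and simplified field types:

/* Sketch of the pcm-indirect hw_data bookkeeping: advance a ring offset
 * by a contiguous chunk and wrap at the end, as in the header above. */
#include <stddef.h>

struct indirect_rec {
	size_t hw_buffer_size;	/* total ring size in bytes */
	size_t hw_data;		/* offset of the next chunk in the ring */
};

static size_t advance_hw_data(struct indirect_rec *rec, size_t bytes)
{
	size_t hw_to_end = rec->hw_buffer_size - rec->hw_data;

	if (bytes > hw_to_end)
		bytes = hw_to_end;	/* clamp to a contiguous chunk */

	/* ... copy `bytes` at offset rec->hw_data here ... */

	rec->hw_data += bytes;
	if (rec->hw_data == rec->hw_buffer_size)
		rec->hw_data = 0;	/* wrap back to the start */

	return bytes;
}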
/linux-6.1.9/drivers/clk/uniphier/
clk-uniphier-core.c
45 struct clk_hw_onecell_data *hw_data; in uniphier_clk_probe() local
67 hw_data = devm_kzalloc(dev, struct_size(hw_data, hws, clk_num), in uniphier_clk_probe()
69 if (!hw_data) in uniphier_clk_probe()
72 hw_data->num = clk_num; in uniphier_clk_probe()
76 hw_data->hws[clk_num] = ERR_PTR(-EINVAL); in uniphier_clk_probe()
87 hw_data->hws[p->idx] = hw; in uniphier_clk_probe()
91 hw_data); in uniphier_clk_probe()
/linux-6.1.9/drivers/clk/keystone/
syscon-clk.c
95 struct clk_hw_onecell_data *hw_data; in ti_syscon_gate_clk_probe() local
116 hw_data = devm_kzalloc(dev, struct_size(hw_data, hws, num_clks), in ti_syscon_gate_clk_probe()
118 if (!hw_data) in ti_syscon_gate_clk_probe()
121 hw_data->num = num_clks; in ti_syscon_gate_clk_probe()
124 hw_data->hws[i] = ti_syscon_gate_clk_register(dev, regmap, in ti_syscon_gate_clk_probe()
126 if (IS_ERR(hw_data->hws[i])) in ti_syscon_gate_clk_probe()
132 hw_data); in ti_syscon_gate_clk_probe()
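
The three clk drivers above (lan966x, uniphier, keystone syscon) all use hw_data for a clk_hw_onecell_data table: it is allocated with struct_size() so the flexible hws[] array holds one handle per clock, filled during probe, and then handed to the clock framework (the real drivers pass it to of_clk_add_hw_provider()). A simplified sketch of that allocation pattern with stand-in types:

/* Sketch of the onecell allocation pattern: a flexible-array table sized
 * for num_clks handles, to be filled by the caller. Stand-in types only. */
#include <stddef.h>
#include <stdlib.h>

struct clk_hw;	/* opaque per-clock handle */

struct hw_onecell_data {
	unsigned int num;
	struct clk_hw *hws[];	/* flexible array, one slot per clock */
};

static struct hw_onecell_data *alloc_onecell(unsigned int num_clks)
{
	/* Equivalent of struct_size(hw_data, hws, num_clks). */
	size_t bytes = sizeof(struct hw_onecell_data) +
		       num_clks * sizeof(struct clk_hw *);
	struct hw_onecell_data *hw_data = calloc(1, bytes);

	if (!hw_data)
		return NULL;

	hw_data->num = num_clks;
	/* Caller fills hw_data->hws[i] with registered clocks before
	 * handing the table to the clock framework. */
	return hw_data;
}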
