
Searched refs:entries (Results 1 – 25 of 1659) sorted by relevance


/linux-6.6.21/drivers/net/ethernet/engleder/
tsnep_selftests.c
357 qopt = kzalloc(struct_size(qopt, entries, 255), GFP_KERNEL); in tsnep_test_taprio()
361 qopt->entries[i].command = TC_TAPRIO_CMD_SET_GATES; in tsnep_test_taprio()
367 qopt->entries[0].gate_mask = 0x02; in tsnep_test_taprio()
368 qopt->entries[0].interval = 200000; in tsnep_test_taprio()
369 qopt->entries[1].gate_mask = 0x03; in tsnep_test_taprio()
370 qopt->entries[1].interval = 800000; in tsnep_test_taprio()
371 qopt->entries[2].gate_mask = 0x07; in tsnep_test_taprio()
372 qopt->entries[2].interval = 240000; in tsnep_test_taprio()
373 qopt->entries[3].gate_mask = 0x01; in tsnep_test_taprio()
374 qopt->entries[3].interval = 80000; in tsnep_test_taprio()
[all …]
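
These tsnep selftest hits show the common kernel shape of a header struct that ends in a flexible array member, allocated as one block sized with struct_size() from <linux/overflow.h> (which also checks the size arithmetic for overflow). Below is a minimal userspace sketch of the same shape; sched_entry, sched_qopt and qopt_alloc are invented names for illustration, and plain sizeof arithmetic stands in for struct_size().

/* Illustrative sketch only: one zeroed allocation covering the header
 * plus n trailing entries, mirroring the kzalloc(struct_size(...)) call
 * in the hits above. */
#include <stdio.h>
#include <stdlib.h>

struct sched_entry {
        unsigned int command;
        unsigned int gate_mask;
        unsigned int interval;
};

struct sched_qopt {
        size_t num_entries;
        struct sched_entry entries[];   /* flexible array member */
};

static struct sched_qopt *qopt_alloc(size_t n)
{
        struct sched_qopt *qopt;

        qopt = calloc(1, sizeof(*qopt) + n * sizeof(qopt->entries[0]));
        if (qopt)
                qopt->num_entries = n;
        return qopt;
}

int main(void)
{
        struct sched_qopt *qopt = qopt_alloc(4);

        if (!qopt)
                return 1;
        qopt->entries[0].gate_mask = 0x02;
        qopt->entries[0].interval = 200000;
        printf("gate_mask=%#x interval=%u\n",
               qopt->entries[0].gate_mask, qopt->entries[0].interval);
        free(qopt);
        return 0;
}
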
/linux-6.6.21/lib/
stackdepot.c
63 unsigned long entries[]; /* Variable-sized array of frames */ member
126 unsigned long entries = 0; in stack_depot_early_init() local
149 entries = 1UL << stack_bucket_number_order; in stack_depot_early_init()
153 entries, in stack_depot_early_init()
173 unsigned long entries; in stack_depot_init() local
186 entries = 1UL << stack_bucket_number_order; in stack_depot_init()
190 entries = nr_free_buffer_pages(); in stack_depot_init()
191 entries = roundup_pow_of_two(entries); in stack_depot_init()
194 entries >>= (scale - PAGE_SHIFT); in stack_depot_init()
196 entries <<= (PAGE_SHIFT - scale); in stack_depot_init()
[all …]
hashtable_test.c
125 struct hashtable_test_entry entries[3]; in hashtable_test_hash_for_each() local
132 entries[i].key = i; in hashtable_test_hash_for_each()
133 entries[i].data = i + 10; in hashtable_test_hash_for_each()
134 entries[i].visited = 0; in hashtable_test_hash_for_each()
135 hash_add(hash, &entries[i].node, entries[i].key); in hashtable_test_hash_for_each()
149 KUNIT_EXPECT_EQ(test, entries[j].visited, 1); in hashtable_test_hash_for_each()
154 struct hashtable_test_entry entries[3]; in hashtable_test_hash_for_each_safe() local
162 entries[i].key = i; in hashtable_test_hash_for_each_safe()
163 entries[i].data = i + 10; in hashtable_test_hash_for_each_safe()
164 entries[i].visited = 0; in hashtable_test_hash_for_each_safe()
[all …]
list-test.c
387 struct list_head entries[3], *cur; in list_test_list_cut_position() local
392 list_add_tail(&entries[0], &list1); in list_test_list_cut_position()
393 list_add_tail(&entries[1], &list1); in list_test_list_cut_position()
394 list_add_tail(&entries[2], &list1); in list_test_list_cut_position()
397 list_cut_position(&list2, &list1, &entries[1]); in list_test_list_cut_position()
401 KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]); in list_test_list_cut_position()
408 KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]); in list_test_list_cut_position()
415 struct list_head entries[3], *cur; in list_test_list_cut_before() local
420 list_add_tail(&entries[0], &list1); in list_test_list_cut_before()
421 list_add_tail(&entries[1], &list1); in list_test_list_cut_before()
[all …]
test_rhashtable.c
73 unsigned int entries; member
138 unsigned int entries) in test_rht_lookup() argument
142 for (i = 0; i < entries; i++) { in test_rht_lookup()
175 static void test_bucket_stats(struct rhashtable *ht, unsigned int entries) in test_bucket_stats() argument
202 total, atomic_read(&ht->nelems), entries, chain_len); in test_bucket_stats()
204 if (total != atomic_read(&ht->nelems) || total != entries) in test_bucket_stats()
209 unsigned int entries) in test_rhashtable() argument
220 pr_info(" Adding %d keys\n", entries); in test_rhashtable()
222 for (i = 0; i < entries; i++) { in test_rhashtable()
237 test_bucket_stats(ht, entries); in test_rhashtable()
[all …]
/linux-6.6.21/drivers/gpu/drm/amd/pm/powerplay/hwmgr/
smu_helper.c
224 vvalue = vol_table->entries[i].value; in phm_trim_voltage_table()
228 if (vvalue == table->entries[j].value) { in phm_trim_voltage_table()
235 table->entries[table->count].value = vvalue; in phm_trim_voltage_table()
236 table->entries[table->count].smio_low = in phm_trim_voltage_table()
237 vol_table->entries[i].smio_low; in phm_trim_voltage_table()
265 vol_table->entries[i].value = dep_table->entries[i].mvdd; in phm_get_svi2_mvdd_voltage_table()
266 vol_table->entries[i].smio_low = 0; in phm_get_svi2_mvdd_voltage_table()
293 vol_table->entries[i].value = dep_table->entries[i].vddci; in phm_get_svi2_vddci_voltage_table()
294 vol_table->entries[i].smio_low = 0; in phm_get_svi2_vddci_voltage_table()
321 vol_table->entries[i].value = lookup_table->entries[i].us_vdd; in phm_get_svi2_vdd_voltage_table()
[all …]
vega10_processpptables.c
319 (ATOM_Vega10_GFXCLK_Dependency_Record_V2 *)gfxclk_dep_table->entries; in init_over_drive_limits()
353 mm_table = kzalloc(struct_size(mm_table, entries, mm_dependency_table->ucNumEntries), in get_mm_clock_voltage_table()
361 mm_dependency_record = &mm_dependency_table->entries[i]; in get_mm_clock_voltage_table()
362 mm_table->entries[i].vddcInd = mm_dependency_record->ucVddcInd; in get_mm_clock_voltage_table()
363 mm_table->entries[i].samclock = in get_mm_clock_voltage_table()
365 mm_table->entries[i].eclk = le32_to_cpu(mm_dependency_record->ulEClk); in get_mm_clock_voltage_table()
366 mm_table->entries[i].vclk = le32_to_cpu(mm_dependency_record->ulVClk); in get_mm_clock_voltage_table()
367 mm_table->entries[i].dclk = le32_to_cpu(mm_dependency_record->ulDClk); in get_mm_clock_voltage_table()
576 clk_table = kzalloc(struct_size(clk_table, entries, clk_dep_table->ucNumEntries), in get_socclk_voltage_dependency_table()
584 clk_table->entries[i].vddInd = in get_socclk_voltage_dependency_table()
[all …]
/linux-6.6.21/kernel/events/
callchain.c
50 struct callchain_cpus_entries *entries; in release_callchain_buffers_rcu() local
53 entries = container_of(head, struct callchain_cpus_entries, rcu_head); in release_callchain_buffers_rcu()
56 kfree(entries->cpu_entries[cpu]); in release_callchain_buffers_rcu()
58 kfree(entries); in release_callchain_buffers_rcu()
63 struct callchain_cpus_entries *entries; in release_callchain_buffers() local
65 entries = callchain_cpus_entries; in release_callchain_buffers()
67 call_rcu(&entries->rcu_head, release_callchain_buffers_rcu); in release_callchain_buffers()
74 struct callchain_cpus_entries *entries; in alloc_callchain_buffers() local
83 entries = kzalloc(size, GFP_KERNEL); in alloc_callchain_buffers()
84 if (!entries) in alloc_callchain_buffers()
[all …]
/linux-6.6.21/tools/lib/api/fd/
array.c
15 fda->entries = NULL; in fdarray__init()
27 struct pollfd *entries = realloc(fda->entries, size); in fdarray__grow() local
29 if (entries == NULL) in fdarray__grow()
34 free(entries); in fdarray__grow()
38 memset(&entries[fda->nr_alloc], 0, sizeof(struct pollfd) * nr); in fdarray__grow()
42 fda->entries = entries; in fdarray__grow()
65 free(fda->entries); in fdarray__exit()
84 fda->entries[fda->nr].fd = fd; in fdarray__add()
85 fda->entries[fda->nr].events = revents; in fdarray__add()
99 entry = &from->entries[pos]; in fdarray__dup_entry_from()
[all …]
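
The fdarray hits above show the usual realloc-and-grow idiom for a struct pollfd array: realloc into a temporary so the old buffer survives failure, zero only the newly added tail, then commit the new pointer and capacity. The sketch below compresses that idiom under invented names (fd_array, fd_array_grow, fd_array_add), not the real fdarray API; the companion per-entry private array that the real helper also grows (which appears to be what the free(entries) failure path above handles) is left out.

#include <poll.h>
#include <stdlib.h>
#include <string.h>

struct fd_array {
        struct pollfd *entries;
        int nr;         /* slots in use */
        int nr_alloc;   /* slots allocated */
};

static int fd_array_grow(struct fd_array *fda, int nr)
{
        size_t size = sizeof(struct pollfd) * (fda->nr_alloc + nr);
        struct pollfd *entries = realloc(fda->entries, size);

        if (entries == NULL)
                return -1;      /* old fda->entries is still valid */

        /* Zero only the freshly added tail of the array. */
        memset(&entries[fda->nr_alloc], 0, sizeof(struct pollfd) * nr);

        fda->nr_alloc += nr;
        fda->entries = entries;
        return 0;
}

static int fd_array_add(struct fd_array *fda, int fd, short events)
{
        if (fda->nr == fda->nr_alloc && fd_array_grow(fda, 4) < 0)
                return -1;

        fda->entries[fda->nr].fd = fd;
        fda->entries[fda->nr].events = events;
        return fda->nr++;
}

int main(void)
{
        struct fd_array fda = { 0 };

        fd_array_add(&fda, 0, POLLIN);  /* watch stdin */
        free(fda.entries);
        return 0;
}
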
/linux-6.6.21/arch/powerpc/mm/book3s64/
iommu_api.c
34 u64 entries; /* number of entries in hpas/hpages[] */ member
57 unsigned long entries, unsigned long dev_hpa, in mm_iommu_do_alloc() argument
66 ret = account_locked_vm(mm, entries, true); in mm_iommu_do_alloc()
70 locked_entries = entries; in mm_iommu_do_alloc()
80 mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT)); in mm_iommu_do_alloc()
91 mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT)); in mm_iommu_do_alloc()
92 mem->hpas = vzalloc(array_size(entries, sizeof(mem->hpas[0]))); in mm_iommu_do_alloc()
102 chunk = min(chunk, entries); in mm_iommu_do_alloc()
103 for (entry = 0; entry < entries; entry += chunk) { in mm_iommu_do_alloc()
104 unsigned long n = min(entries - entry, chunk); in mm_iommu_do_alloc()
[all …]
/linux-6.6.21/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/
dcn315_clk_mgr.c
248 .entries = {
296 .entries = {
333 .entries = {
381 if (!bw_params->wm_table.entries[i].valid) in dcn315_build_watermark_ranges()
384 table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst; in dcn315_build_watermark_ranges()
385 table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type; in dcn315_build_watermark_ranges()
396 bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1; in dcn315_build_watermark_ranges()
399 bw_params->clk_table.entries[i].dcfclk_mhz; in dcn315_build_watermark_ranges()
479 …struct clk_limit_table_entry def_max = bw_params->clk_table.entries[bw_params->clk_table.num_entri… in dcn315_clk_mgr_helper_populate_bw_params()
496 if (bw_params->clk_table.entries[j].dcfclk_mhz <= clock_table->DcfClocks[i]) in dcn315_clk_mgr_helper_populate_bw_params()
[all …]
/linux-6.6.21/drivers/net/ethernet/netronome/nfp/nfpcore/
nfp_nsp_eth.c
281 union eth_table_entry *entries; in __nfp_eth_read_ports() local
285 entries = kzalloc(NSP_ETH_TABLE_SIZE, GFP_KERNEL); in __nfp_eth_read_ports()
286 if (!entries) in __nfp_eth_read_ports()
289 ret = nfp_nsp_read_eth_table(nsp, entries, NSP_ETH_TABLE_SIZE); in __nfp_eth_read_ports()
296 if (entries[i].port & NSP_ETH_PORT_LANES_MASK) in __nfp_eth_read_ports()
315 if (entries[i].port & NSP_ETH_PORT_LANES_MASK) in __nfp_eth_read_ports()
316 nfp_eth_port_translate(nsp, &entries[i], i, in __nfp_eth_read_ports()
325 kfree(entries); in __nfp_eth_read_ports()
330 kfree(entries); in __nfp_eth_read_ports()
336 union eth_table_entry *entries; in nfp_eth_config_start() local
[all …]
/linux-6.6.21/tools/perf/util/
mem2node.c
50 struct phys_entry *entries, *tmp_entries; in mem2node__init() local
62 entries = zalloc(sizeof(*entries) * max); in mem2node__init()
63 if (!entries) in mem2node__init()
84 struct phys_entry *prev = &entries[j - 1]; in mem2node__init()
93 phys_entry__init(&entries[j++], start, bsize, n->node); in mem2node__init()
98 tmp_entries = realloc(entries, sizeof(*entries) * j); in mem2node__init()
101 entries = tmp_entries; in mem2node__init()
105 entries[i].node, entries[i].start, entries[i].end); in mem2node__init()
107 phys_entry__insert(&entries[i], &map->root); in mem2node__init()
110 map->entries = entries; in mem2node__init()
[all …]
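
mem2node builds its table with an allocate-worst-case, fill, shrink-to-fit shape: reserve enough slots for the maximum, populate j of them while merging adjacent ranges, then realloc() down to j and keep the original buffer if the shrink fails. A small sketch of that shape follows; the phys_entry fields are reduced and build_entries is an invented name, not the perf API.

#include <stdlib.h>

struct phys_entry {
        unsigned long start;
        unsigned long end;
        int node;
};

static struct phys_entry *build_entries(size_t max, size_t *out_nr)
{
        struct phys_entry *entries, *tmp;
        size_t j = 0;

        entries = calloc(max, sizeof(*entries));
        if (!entries)
                return NULL;

        /* ... populate entries[0..j-1], merging adjacent ranges ... */
        entries[j++] = (struct phys_entry){ .start = 0, .end = 0x1000, .node = 0 };

        /* Trim the allocation down to what was actually used. */
        tmp = realloc(entries, j * sizeof(*entries));
        if (tmp)
                entries = tmp;

        *out_nr = j;
        return entries;
}

int main(void)
{
        size_t nr = 0;
        struct phys_entry *entries = build_entries(64, &nr);
        int ok = entries != NULL && nr == 1;

        free(entries);
        return ok ? 0 : 1;
}
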
arm64-frame-pointer-unwind-support.c
12 struct entries { struct
25 struct entries *entries = arg; in add_entry() local
27 entries->stack[entries->length++] = entry->ip; in add_entry()
34 struct entries entries = {}; in get_leaf_frame_caller_aarch64() local
56 ret = unwind__get_entries(add_entry, &entries, thread, sample, 2, true); in get_leaf_frame_caller_aarch64()
59 if (ret || entries.length != 2) in get_leaf_frame_caller_aarch64()
62 return callchain_param.order == ORDER_CALLER ? entries.stack[0] : entries.stack[1]; in get_leaf_frame_caller_aarch64()
rb_resort.h
72 struct rb_root entries; \
79 struct rb_node **p = &sorted->entries.rb_node, *parent = NULL; \
88 rb_insert_color(sorted_nd, &sorted->entries); \
92 struct rb_root *entries) \
96 for (nd = rb_first(entries); nd; nd = rb_next(nd)) { \
103 static struct __name##_sorted *__name##_sorted__new(struct rb_root *entries, \
109 sorted->entries = RB_ROOT; \
110 __name##_sorted__sort(sorted, entries); \
128 for (__nd = rb_first(&__name->entries); \
143 DECLARE_RESORT_RB(__name)(&__ilist->rblist.entries.rb_root, \
[all …]
pstack.c
18 void *entries[]; member
45 if (pstack->entries[i] == key) { in pstack__remove()
47 memmove(pstack->entries + i, in pstack__remove()
48 pstack->entries + i + 1, in pstack__remove()
63 pstack->entries[pstack->top++] = key; in pstack__push()
75 ret = pstack->entries[--pstack->top]; in pstack__pop()
76 pstack->entries[pstack->top] = NULL; in pstack__pop()
84 return pstack->entries[pstack->top - 1]; in pstack__peek()
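
pstack is a small fixed-capacity stack of pointers kept in a flexible array member, with remove-by-key compacting the array via memmove(). The self-contained sketch below mirrors that structure; ptr_stack and its helpers are invented names, not the perf pstack API.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct ptr_stack {
        unsigned short top;
        unsigned short max;
        void *entries[];        /* flexible array member */
};

static struct ptr_stack *ptr_stack_new(unsigned short max)
{
        struct ptr_stack *s = calloc(1, sizeof(*s) + max * sizeof(void *));

        if (s)
                s->max = max;
        return s;
}

static void ptr_stack_push(struct ptr_stack *s, void *key)
{
        if (s->top < s->max)
                s->entries[s->top++] = key;
}

static void *ptr_stack_pop(struct ptr_stack *s)
{
        void *ret;

        if (s->top == 0)
                return NULL;
        ret = s->entries[--s->top];
        s->entries[s->top] = NULL;
        return ret;
}

static void ptr_stack_remove(struct ptr_stack *s, void *key)
{
        unsigned short i;

        for (i = 0; i < s->top; i++) {
                if (s->entries[i] == key) {
                        /* Close the gap left by the removed entry. */
                        memmove(s->entries + i, s->entries + i + 1,
                                (s->top - i - 1) * sizeof(void *));
                        s->entries[--s->top] = NULL;
                        return;
                }
        }
}

int main(void)
{
        int a = 1, b = 2;
        struct ptr_stack *s = ptr_stack_new(8);

        ptr_stack_push(s, &a);
        ptr_stack_push(s, &b);
        ptr_stack_remove(s, &a);
        printf("popped %p (expected %p)\n", ptr_stack_pop(s), (void *)&b);
        free(s);
        return 0;
}
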
/linux-6.6.21/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/
dcn314_clk_mgr.c
359 .entries = {
396 .entries = {
444 if (!bw_params->wm_table.entries[i].valid) in dcn314_build_watermark_ranges()
447 table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst; in dcn314_build_watermark_ranges()
448 table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type; in dcn314_build_watermark_ranges()
459 bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1; in dcn314_build_watermark_ranges()
462 bw_params->clk_table.entries[i].dcfclk_mhz; in dcn314_build_watermark_ranges()
572 …struct clk_limit_table_entry def_max = bw_params->clk_table.entries[bw_params->clk_table.num_entri… in dcn314_clk_mgr_helper_populate_bw_params()
614 if (bw_params->clk_table.entries[j].dcfclk_mhz <= clock_table->DcfClocks[i]) in dcn314_clk_mgr_helper_populate_bw_params()
617 bw_params->clk_table.entries[i].phyclk_mhz = bw_params->clk_table.entries[j].phyclk_mhz; in dcn314_clk_mgr_helper_populate_bw_params()
[all …]
/linux-6.6.21/fs/nfs_common/
nfsacl.c
95 int entries = (acl && acl->a_count) ? max_t(int, acl->a_count, 4) : 0; in nfsacl_encode() local
99 .array_len = encode_entries ? entries : 0, in nfsacl_encode()
110 if (entries > NFS_ACL_MAX_ENTRIES || in nfsacl_encode()
111 xdr_encode_word(buf, base, entries)) in nfsacl_encode()
157 u32 entries = (acl && acl->a_count) ? max_t(int, acl->a_count, 4) : 0; in nfs_stream_encode_acl() local
161 .array_len = encode_entries ? entries : 0, in nfs_stream_encode_acl()
173 if (entries > NFS_ACL_MAX_ENTRIES) in nfs_stream_encode_acl()
175 if (xdr_stream_encode_u32(xdr, entries) < 0) in nfs_stream_encode_acl()
345 u32 entries; in nfsacl_decode() local
348 if (xdr_decode_word(buf, base, &entries) || in nfsacl_decode()
[all …]
/linux-6.6.21/drivers/gpu/drm/amd/display/dc/dml/dcn321/
dcn321_fpu.c
365 if (bw_params->clk_table.entries[i].dcfclk_mhz > max_clk_data.dcfclk_mhz) in build_synthetic_soc_states()
366 max_clk_data.dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz; in build_synthetic_soc_states()
367 if (bw_params->clk_table.entries[i].fclk_mhz > max_clk_data.fclk_mhz) in build_synthetic_soc_states()
368 max_clk_data.fclk_mhz = bw_params->clk_table.entries[i].fclk_mhz; in build_synthetic_soc_states()
369 if (bw_params->clk_table.entries[i].memclk_mhz > max_clk_data.memclk_mhz) in build_synthetic_soc_states()
370 max_clk_data.memclk_mhz = bw_params->clk_table.entries[i].memclk_mhz; in build_synthetic_soc_states()
371 if (bw_params->clk_table.entries[i].dispclk_mhz > max_clk_data.dispclk_mhz) in build_synthetic_soc_states()
372 max_clk_data.dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz; in build_synthetic_soc_states()
373 if (bw_params->clk_table.entries[i].dppclk_mhz > max_clk_data.dppclk_mhz) in build_synthetic_soc_states()
374 max_clk_data.dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz; in build_synthetic_soc_states()
[all …]
/linux-6.6.21/tools/perf/trace/beauty/
ioctl.c
41 if (nr < strarray__ioctl_tty_cmd.nr_entries && strarray__ioctl_tty_cmd.entries[nr] != NULL) in ioctl__scnprintf_tty_cmd()
42 return scnprintf(bf, size, "%s", strarray__ioctl_tty_cmd.entries[nr]); in ioctl__scnprintf_tty_cmd()
52 if (nr < strarray__drm_ioctl_cmds.nr_entries && strarray__drm_ioctl_cmds.entries[nr] != NULL) in ioctl__scnprintf_drm_cmd()
53 return scnprintf(bf, size, "DRM_%s", strarray__drm_ioctl_cmds.entries[nr]); in ioctl__scnprintf_drm_cmd()
63 …if (nr < strarray__sndrv_pcm_ioctl_cmds.nr_entries && strarray__sndrv_pcm_ioctl_cmds.entries[nr] !… in ioctl__scnprintf_sndrv_pcm_cmd()
64 return scnprintf(bf, size, "SNDRV_PCM_%s", strarray__sndrv_pcm_ioctl_cmds.entries[nr]); in ioctl__scnprintf_sndrv_pcm_cmd()
74 …if (nr < strarray__sndrv_ctl_ioctl_cmds.nr_entries && strarray__sndrv_ctl_ioctl_cmds.entries[nr] !… in ioctl__scnprintf_sndrv_ctl_cmd()
75 return scnprintf(bf, size, "SNDRV_CTL_%s", strarray__sndrv_ctl_ioctl_cmds.entries[nr]); in ioctl__scnprintf_sndrv_ctl_cmd()
85 if (nr < strarray__kvm_ioctl_cmds.nr_entries && strarray__kvm_ioctl_cmds.entries[nr] != NULL) in ioctl__scnprintf_kvm_cmd()
86 return scnprintf(bf, size, "KVM_%s", strarray__kvm_ioctl_cmds.entries[nr]); in ioctl__scnprintf_kvm_cmd()
[all …]
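
All of these ioctl formatters share one bounds-checked table lookup: if the decoded command number indexes a populated slot in the string table, print the symbolic name, otherwise fall back to the raw number. The sketch below shows that lookup with an invented demo table; the command names and indices are not the real ioctl tables, and snprintf stands in for the tools' scnprintf.

#include <stdio.h>

/* Hypothetical command table; a NULL slot means "number known but unnamed". */
static const char *demo_cmds[] = {
        [0] = "RESET",
        [1] = "START",
        /* index 2 intentionally left NULL */
        [3] = "STOP",
};

static int scnprintf_demo_cmd(char *bf, size_t size, unsigned int nr)
{
        const size_t nr_entries = sizeof(demo_cmds) / sizeof(demo_cmds[0]);

        if (nr < nr_entries && demo_cmds[nr] != NULL)
                return snprintf(bf, size, "DEMO_%s", demo_cmds[nr]);

        /* Unknown or unnamed command: fall back to the raw number. */
        return snprintf(bf, size, "(%#x)", nr);
}

int main(void)
{
        char buf[64];

        scnprintf_demo_cmd(buf, sizeof(buf), 1);
        puts(buf);      /* DEMO_START */
        scnprintf_demo_cmd(buf, sizeof(buf), 2);
        puts(buf);      /* (0x2) */
        return 0;
}
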
/linux-6.6.21/arch/x86/kernel/cpu/
intel.c
832 if (tlb_lli_4k[ENTRIES] < intel_tlb_table[k].entries) in intel_tlb_lookup()
833 tlb_lli_4k[ENTRIES] = intel_tlb_table[k].entries; in intel_tlb_lookup()
834 if (tlb_lld_4k[ENTRIES] < intel_tlb_table[k].entries) in intel_tlb_lookup()
835 tlb_lld_4k[ENTRIES] = intel_tlb_table[k].entries; in intel_tlb_lookup()
838 if (tlb_lli_4k[ENTRIES] < intel_tlb_table[k].entries) in intel_tlb_lookup()
839 tlb_lli_4k[ENTRIES] = intel_tlb_table[k].entries; in intel_tlb_lookup()
840 if (tlb_lld_4k[ENTRIES] < intel_tlb_table[k].entries) in intel_tlb_lookup()
841 tlb_lld_4k[ENTRIES] = intel_tlb_table[k].entries; in intel_tlb_lookup()
842 if (tlb_lli_2m[ENTRIES] < intel_tlb_table[k].entries) in intel_tlb_lookup()
843 tlb_lli_2m[ENTRIES] = intel_tlb_table[k].entries; in intel_tlb_lookup()
[all …]
/linux-6.6.21/drivers/net/dsa/sja1105/
sja1105_vl.c
27 if (list_empty(&gating_cfg->entries)) { in sja1105_insert_gate_entry()
28 list_add(&e->list, &gating_cfg->entries); in sja1105_insert_gate_entry()
32 list_for_each_entry(p, &gating_cfg->entries, list) { in sja1105_insert_gate_entry()
65 list_for_each_entry(e, &gating_cfg->entries, list) { in sja1105_gating_cfg_time_to_interval()
70 if (prev == &gating_cfg->entries) in sja1105_gating_cfg_time_to_interval()
76 last_e = list_last_entry(&gating_cfg->entries, in sja1105_gating_cfg_time_to_interval()
85 list_for_each_entry_safe(e, n, &gating_cfg->entries, list) { in sja1105_free_gating_config()
144 u8 gate_state = rule->vl.entries[i].gate_state; in sja1105_compose_gating_subschedule()
157 time += rule->vl.entries[i].interval; in sja1105_compose_gating_subschedule()
353 kfree(table->entries); in sja1105_init_virtual_links()
[all …]
/linux-6.6.21/drivers/misc/vmw_vmci/
vmci_handle_array.c
68 array->entries[array->size] = handle; in vmci_handle_arr_append_entry()
84 if (vmci_handle_is_equal(array->entries[i], entry_handle)) { in vmci_handle_arr_remove_entry()
85 handle = array->entries[i]; in vmci_handle_arr_remove_entry()
87 array->entries[i] = array->entries[array->size]; in vmci_handle_arr_remove_entry()
88 array->entries[array->size] = VMCI_INVALID_HANDLE; in vmci_handle_arr_remove_entry()
105 handle = array->entries[array->size]; in vmci_handle_arr_remove_tail()
106 array->entries[array->size] = VMCI_INVALID_HANDLE; in vmci_handle_arr_remove_tail()
121 return array->entries[index]; in vmci_handle_arr_get_entry()
130 if (vmci_handle_is_equal(array->entries[i], entry_handle)) in vmci_handle_arr_has_entry()
143 return array->entries; in vmci_handle_arr_get_handles()
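
vmci_handle_arr_remove_entry() uses the classic unordered-array removal seen above: after a linear search, the match is overwritten with the last element and the size is decremented, so the move itself is O(1) at the cost of element order, and the vacated tail slot is reset to VMCI_INVALID_HANDLE. A plain-int sketch of the same idiom follows; array_remove_entry is an invented name standing in for the handle-array API.

#include <stdio.h>
#include <stddef.h>

static int array_remove_entry(int *entries, size_t *size, int victim)
{
        for (size_t i = 0; i < *size; i++) {
                if (entries[i] == victim) {
                        (*size)--;
                        entries[i] = entries[*size];    /* move last entry into the hole */
                        return 0;
                }
        }
        return -1;      /* not found */
}

int main(void)
{
        int entries[] = { 10, 20, 30, 40 };
        size_t size = 4;

        array_remove_entry(entries, &size, 20);
        for (size_t i = 0; i < size; i++)
                printf("%d ", entries[i]);      /* 10 40 30 */
        printf("\n");
        return 0;
}
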
/linux-6.6.21/drivers/gpu/drm/amd/display/dc/dml/dcn302/
dcn302_fpu.c
220 if (bw_params->clk_table.entries[0].memclk_mhz) { in dcn302_fpu_update_bw_bounding_box()
224 if (bw_params->clk_table.entries[i].dcfclk_mhz > max_dcfclk_mhz) in dcn302_fpu_update_bw_bounding_box()
225 max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz; in dcn302_fpu_update_bw_bounding_box()
226 if (bw_params->clk_table.entries[i].dispclk_mhz > max_dispclk_mhz) in dcn302_fpu_update_bw_bounding_box()
227 max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz; in dcn302_fpu_update_bw_bounding_box()
228 if (bw_params->clk_table.entries[i].dppclk_mhz > max_dppclk_mhz) in dcn302_fpu_update_bw_bounding_box()
229 max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz; in dcn302_fpu_update_bw_bounding_box()
230 if (bw_params->clk_table.entries[i].phyclk_mhz > max_phyclk_mhz) in dcn302_fpu_update_bw_bounding_box()
231 max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz; in dcn302_fpu_update_bw_bounding_box()
262 dcn302_get_optimal_dcfclk_fclk_for_uclk(bw_params->clk_table.entries[i].memclk_mhz * 16, in dcn302_fpu_update_bw_bounding_box()
[all …]
/linux-6.6.21/drivers/gpu/drm/i915/display/
intel_ddi_buf_trans.c
31 .entries = _hsw_trans_dp,
48 .entries = _hsw_trans_fdi,
69 .entries = _hsw_trans_hdmi,
87 .entries = _bdw_trans_edp,
104 .entries = _bdw_trans_dp,
121 .entries = _bdw_trans_fdi,
140 .entries = _bdw_trans_hdmi,
159 .entries = _skl_trans_dp,
177 .entries = _skl_u_trans_dp,
195 .entries = _skl_y_trans_dp,
[all …]
