
Searched refs:iommu (Results 1 – 25 of 87) sorted by relevance


/linux-3.4.99/drivers/iommu/
amd_iommu_init.c
197 extern void iommu_flush_all_caches(struct amd_iommu *iommu);
217 static u32 iommu_read_l1(struct amd_iommu *iommu, u16 l1, u8 address) in iommu_read_l1() argument
221 pci_write_config_dword(iommu->dev, 0xf8, (address | l1 << 16)); in iommu_read_l1()
222 pci_read_config_dword(iommu->dev, 0xfc, &val); in iommu_read_l1()
226 static void iommu_write_l1(struct amd_iommu *iommu, u16 l1, u8 address, u32 val) in iommu_write_l1() argument
228 pci_write_config_dword(iommu->dev, 0xf8, (address | l1 << 16 | 1 << 31)); in iommu_write_l1()
229 pci_write_config_dword(iommu->dev, 0xfc, val); in iommu_write_l1()
230 pci_write_config_dword(iommu->dev, 0xf8, (address | l1 << 16)); in iommu_write_l1()
233 static u32 iommu_read_l2(struct amd_iommu *iommu, u8 address) in iommu_read_l2() argument
237 pci_write_config_dword(iommu->dev, 0xf0, address); in iommu_read_l2()
[all …]
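
The amd_iommu_init.c lines above show AMD's indirect window into the IOMMU's L1 configuration registers: the target address is written to PCI config offset 0xf8 (with bit 31 set when a write is requested) and the data moves through offset 0xfc. A minimal read-modify-write sketch built on the two accessors shown; iommu_set_l1_bits() is hypothetical and not part of the tree:

static void iommu_set_l1_bits(struct amd_iommu *iommu, u16 l1, u8 address, u32 bits)
{
        u32 val;

        /* Latch the L1 address via 0xf8, then read the value back from 0xfc. */
        val = iommu_read_l1(iommu, l1, address);
        /* The write path re-latches the address with bit 31 set before
         * pushing the new value through 0xfc. */
        iommu_write_l1(iommu, l1, address, val | bits);
}
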
intr_remapping.c
77 *entry = *(irq_iommu->iommu->ir_table->base + index); in get_irte()
83 int alloc_irte(struct intel_iommu *iommu, int irq, u16 count) in alloc_irte() argument
85 struct ir_table *table = iommu->ir_table; in alloc_irte()
105 if (mask > ecap_max_handle_mask(iommu->ecap)) { in alloc_irte()
109 ecap_max_handle_mask(iommu->ecap)); in alloc_irte()
134 irq_iommu->iommu = iommu; in alloc_irte()
144 static int qi_flush_iec(struct intel_iommu *iommu, int index, int mask) in qi_flush_iec() argument
152 return qi_submit_sync(&desc, iommu); in qi_flush_iec()
171 int set_irte_irq(int irq, struct intel_iommu *iommu, u16 index, u16 subhandle) in set_irte_irq() argument
181 irq_iommu->iommu = iommu; in set_irte_irq()
[all …]
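
Taken together, alloc_irte() reserves a power-of-two block of interrupt-remapping table entries for an irq and set_irte_irq() binds further vectors to sub-handles inside that block, while qi_flush_iec() pushes the update out through the invalidation queue. A rough sketch of how a multi-vector caller could use the first two (the wrapper function and loop shape are illustrative, not the exact MSI code; locking and teardown omitted):

/* Hypothetical caller: remap nvec vectors through one contiguous IRTE block. */
static int example_remap_vector_block(struct intel_iommu *iommu, int irq, int nvec)
{
        int index, i;

        index = alloc_irte(iommu, irq, nvec);           /* reserve nvec contiguous entries */
        if (index < 0)
                return index;
        for (i = 1; i < nvec; i++)                      /* extra vectors use sub-handles 1..nvec-1 */
                set_irte_irq(irq + i, iommu, index, i);
        return index;
}
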
dmar.c
234 drhd->iommu->node = node; in dmar_parse_one_rhsa()
571 x86_init.iommu.iommu_init = intel_iommu_init; in detect_intel_iommu()
583 struct intel_iommu *iommu; in alloc_iommu() local
595 iommu = kzalloc(sizeof(*iommu), GFP_KERNEL); in alloc_iommu()
596 if (!iommu) in alloc_iommu()
599 iommu->seq_id = iommu_allocated++; in alloc_iommu()
600 sprintf (iommu->name, "dmar%d", iommu->seq_id); in alloc_iommu()
602 iommu->reg = ioremap(drhd->reg_base_addr, VTD_PAGE_SIZE); in alloc_iommu()
603 if (!iommu->reg) { in alloc_iommu()
607 iommu->cap = dmar_readq(iommu->reg + DMAR_CAP_REG); in alloc_iommu()
[all …]
intel-iommu.c
399 struct intel_iommu *iommu; /* IOMMU used by this device */ member
538 static int __iommu_calculate_agaw(struct intel_iommu *iommu, int max_gaw) in __iommu_calculate_agaw() argument
543 sagaw = cap_sagaw(iommu->cap); in __iommu_calculate_agaw()
556 int iommu_calculate_max_sagaw(struct intel_iommu *iommu) in iommu_calculate_max_sagaw() argument
558 return __iommu_calculate_agaw(iommu, MAX_AGAW_WIDTH); in iommu_calculate_max_sagaw()
566 int iommu_calculate_agaw(struct intel_iommu *iommu) in iommu_calculate_agaw() argument
568 return __iommu_calculate_agaw(iommu, DEFAULT_DOMAIN_ADDRESS_WIDTH); in iommu_calculate_agaw()
620 struct intel_iommu *iommu = NULL; in domain_update_iommu_superpage() local
629 for_each_active_iommu(iommu, drhd) { in domain_update_iommu_superpage()
630 mask &= cap_super_page_val(iommu->cap); in domain_update_iommu_superpage()
[all …]
amd_iommu.c
174 return dev->archdata.iommu; in get_dev_data()
265 if (dev->archdata.iommu) in iommu_init_device()
287 struct amd_iommu *iommu; in iommu_init_device() local
289 iommu = amd_iommu_rlookup_table[dev_data->devid]; in iommu_init_device()
290 dev_data->iommu_v2 = iommu->is_iommu_v2; in iommu_init_device()
293 dev->archdata.iommu = dev_data; in iommu_init_device()
453 static void iommu_print_event(struct amd_iommu *iommu, void *__evt) in iommu_print_event() argument
532 static void iommu_poll_events(struct amd_iommu *iommu) in iommu_poll_events() argument
537 spin_lock_irqsave(&iommu->lock, flags); in iommu_poll_events()
546 status = readl(iommu->mmio_base + MMIO_STATUS_OFFSET); in iommu_poll_events()
[all …]
Makefile
1 obj-$(CONFIG_IOMMU_API) += iommu.o
6 obj-$(CONFIG_INTEL_IOMMU) += iova.o intel-iommu.o
8 obj-$(CONFIG_OMAP_IOMMU) += omap-iommu.o
10 obj-$(CONFIG_OMAP_IOMMU_DEBUG) += omap-iommu-debug.o
amd_iommu_proto.h
29 extern void amd_iommu_reset_cmd_buffer(struct amd_iommu *iommu);
70 static inline bool iommu_feature(struct amd_iommu *iommu, u64 f) in iommu_feature() argument
72 if (!(iommu->cap & (1 << IOMMU_CAP_EFR))) in iommu_feature()
75 return !!(iommu->features & f); in iommu_feature()
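
iommu_feature() only consults the extended feature word once the capability header advertises an EFR (the IOMMU_CAP_EFR check at line 72). A hedged example of the call pattern; FEATURE_PPR is the PPR-logging flag used elsewhere in the AMD driver, and the reporting function itself is made up:

static void example_report_ppr(struct amd_iommu *iommu)
{
        /* False unless the EFR exists and the requested feature bit is set. */
        if (iommu_feature(iommu, FEATURE_PPR))
                pr_info("AMD-Vi: this IOMMU supports PPR logging\n");
}
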
intr_remapping.h
4 struct intel_iommu *iommu; member
11 struct intel_iommu *iommu; member
/linux-3.4.99/arch/sparc/kernel/
iommu.c
47 static void iommu_flushall(struct iommu *iommu) in iommu_flushall() argument
49 if (iommu->iommu_flushinv) { in iommu_flushall()
50 iommu_write(iommu->iommu_flushinv, ~(u64)0); in iommu_flushall()
55 tag = iommu->iommu_tags; in iommu_flushall()
62 (void) iommu_read(iommu->write_complete_reg); in iommu_flushall()
76 #define IOPTE_IS_DUMMY(iommu, iopte) \ argument
77 ((iopte_val(*iopte) & IOPTE_PAGE) == (iommu)->dummy_page_pa)
79 static inline void iopte_make_dummy(struct iommu *iommu, iopte_t *iopte) in iopte_make_dummy() argument
84 val |= iommu->dummy_page_pa; in iopte_make_dummy()
97 struct iommu *iommu, in iommu_range_alloc() argument
[all …]
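
Two things are visible in the sparc64 iommu.c hits: iommu_flushall() prefers a single write of ~0 to a flush-invalidate-all register when the hardware provides one, and freed mappings are not zeroed but redirected to a dummy page so a stale DMA access lands somewhere harmless. A sketch of the second idea, assuming the caller holds iommu->lock and has the first IOPTE of the freed range (the loop and function are illustrative; see the unmap paths in this file for the real code):

static void example_mark_range_dummy(struct iommu *iommu, iopte_t *base,
                                     unsigned long npages)
{
        unsigned long i;

        /* Point every freed IOPTE at the dummy page instead of clearing it. */
        for (i = 0; i < npages; i++)
                iopte_make_dummy(iommu, base + i);
}
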
sbus.c
60 struct iommu *iommu = dev->archdata.iommu; in sbus_set_sbus64() local
75 cfg_reg = iommu->write_complete_reg; in sbus_set_sbus64()
210 struct iommu *iommu = op->dev.archdata.iommu; in sbus_build_irq() local
211 unsigned long reg_base = iommu->write_complete_reg - 0x2000UL; in sbus_build_irq()
272 struct iommu *iommu = op->dev.archdata.iommu; in sysio_ue_handler() local
273 unsigned long reg_base = iommu->write_complete_reg - 0x2000UL; in sysio_ue_handler()
346 struct iommu *iommu = op->dev.archdata.iommu; in sysio_ce_handler() local
347 unsigned long reg_base = iommu->write_complete_reg - 0x2000UL; in sysio_ce_handler()
425 struct iommu *iommu = op->dev.archdata.iommu; in sysio_sbus_error_handler() local
430 reg_base = iommu->write_complete_reg - 0x2000UL; in sysio_sbus_error_handler()
[all …]
pci_sun4v.c
135 struct iommu *iommu; in dma_4v_alloc_coherent() local
156 iommu = dev->archdata.iommu; in dma_4v_alloc_coherent()
158 spin_lock_irqsave(&iommu->lock, flags); in dma_4v_alloc_coherent()
159 entry = iommu_range_alloc(dev, iommu, npages, NULL); in dma_4v_alloc_coherent()
160 spin_unlock_irqrestore(&iommu->lock, flags); in dma_4v_alloc_coherent()
165 *dma_addrp = (iommu->page_table_map_base + in dma_4v_alloc_coherent()
192 spin_lock(&iommu->lock); in dma_4v_alloc_coherent()
193 iommu_range_free(iommu, *dma_addrp, npages); in dma_4v_alloc_coherent()
194 spin_unlock_irqrestore(&iommu->lock, flags); in dma_4v_alloc_coherent()
205 struct iommu *iommu; in dma_4v_free_coherent() local
[all …]
psycho_common.c
206 struct iommu *iommu = pbm->iommu; in psycho_check_iommu_error() local
209 spin_lock_irqsave(&iommu->lock, flags); in psycho_check_iommu_error()
210 control = upa_readq(iommu->iommu_control); in psycho_check_iommu_error()
215 upa_writeq(control, iommu->iommu_control); in psycho_check_iommu_error()
245 spin_unlock_irqrestore(&iommu->lock, flags); in psycho_check_iommu_error()
402 struct iommu *iommu = pbm->iommu; in psycho_iommu_init() local
406 iommu->iommu_control = pbm->controller_regs + PSYCHO_IOMMU_CONTROL; in psycho_iommu_init()
407 iommu->iommu_tsbbase = pbm->controller_regs + PSYCHO_IOMMU_TSBBASE; in psycho_iommu_init()
408 iommu->iommu_flush = pbm->controller_regs + PSYCHO_IOMMU_FLUSH; in psycho_iommu_init()
409 iommu->iommu_tags = pbm->controller_regs + PSYCHO_IOMMU_TAG; in psycho_iommu_init()
[all …]
pci_fire.c
30 struct iommu *iommu = pbm->iommu; in pci_fire_pbm_iommu_init() local
42 iommu->iommu_control = pbm->pbm_regs + FIRE_IOMMU_CONTROL; in pci_fire_pbm_iommu_init()
43 iommu->iommu_tsbbase = pbm->pbm_regs + FIRE_IOMMU_TSBBASE; in pci_fire_pbm_iommu_init()
44 iommu->iommu_flush = pbm->pbm_regs + FIRE_IOMMU_FLUSH; in pci_fire_pbm_iommu_init()
45 iommu->iommu_flushinv = pbm->pbm_regs + FIRE_IOMMU_FLUSHINV; in pci_fire_pbm_iommu_init()
50 iommu->write_complete_reg = pbm->controller_regs + 0x410000UL; in pci_fire_pbm_iommu_init()
55 upa_writeq(~(u64)0, iommu->iommu_flushinv); in pci_fire_pbm_iommu_init()
57 err = iommu_table_init(iommu, tsbsize * 8 * 1024, vdma[0], dma_mask, in pci_fire_pbm_iommu_init()
62 upa_writeq(__pa(iommu->page_table) | 0x7UL, iommu->iommu_tsbbase); in pci_fire_pbm_iommu_init()
64 control = upa_readq(iommu->iommu_control); in pci_fire_pbm_iommu_init()
[all …]
pci_schizo.c
237 struct iommu *iommu = pbm->iommu; in schizo_check_iommu_error_pbm() local
244 spin_lock_irqsave(&iommu->lock, flags); in schizo_check_iommu_error_pbm()
245 control = upa_readq(iommu->iommu_control); in schizo_check_iommu_error_pbm()
252 upa_writeq(control, iommu->iommu_control); in schizo_check_iommu_error_pbm()
283 iommu->iommu_control); in schizo_check_iommu_error_pbm()
299 upa_writeq(control, iommu->iommu_control); in schizo_check_iommu_error_pbm()
341 spin_unlock_irqrestore(&iommu->lock, flags); in schizo_check_iommu_error_pbm()
1136 struct iommu *iommu = pbm->iommu; in schizo_pbm_iommu_init() local
1169 iommu->iommu_control = pbm->pbm_regs + SCHIZO_IOMMU_CONTROL; in schizo_pbm_iommu_init()
1170 iommu->iommu_tsbbase = pbm->pbm_regs + SCHIZO_IOMMU_TSBBASE; in schizo_pbm_iommu_init()
[all …]
iommu_common.h
52 struct iommu *iommu,
55 extern void iommu_range_free(struct iommu *iommu,
ldc.c
141 struct ldc_iommu iommu; member
1008 struct ldc_iommu *iommu = &lp->iommu; in ldc_iommu_init() local
1016 spin_lock_init(&iommu->lock); in ldc_iommu_init()
1020 iommu->arena.map = kzalloc(sz, GFP_KERNEL); in ldc_iommu_init()
1021 if (!iommu->arena.map) { in ldc_iommu_init()
1026 iommu->arena.limit = num_tsb_entries; in ldc_iommu_init()
1041 iommu->page_table = table; in ldc_iommu_init()
1053 iommu->page_table = NULL; in ldc_iommu_init()
1056 kfree(iommu->arena.map); in ldc_iommu_init()
1057 iommu->arena.map = NULL; in ldc_iommu_init()
[all …]
pci_psycho.c
512 struct iommu *iommu; in psycho_probe() local
527 iommu = pbm->sibling->iommu; in psycho_probe()
529 iommu = kzalloc(sizeof(struct iommu), GFP_KERNEL); in psycho_probe()
530 if (!iommu) { in psycho_probe()
536 pbm->iommu = iommu; in psycho_probe()
587 kfree(pbm->iommu); in psycho_probe()
pci_sabre.c
464 struct iommu *iommu; in sabre_probe() local
490 iommu = kzalloc(sizeof(*iommu), GFP_KERNEL); in sabre_probe()
491 if (!iommu) { in sabre_probe()
496 pbm->iommu = iommu; in sabre_probe()
579 kfree(pbm->iommu); in sabre_probe()
/linux-3.4.99/arch/sparc/mm/
iommu.c
61 struct iommu_struct *iommu; in sbus_iommu_init() local
66 iommu = kmalloc(sizeof(struct iommu_struct), GFP_KERNEL); in sbus_iommu_init()
67 if (!iommu) { in sbus_iommu_init()
72 iommu->regs = of_ioremap(&op->resource[0], 0, PAGE_SIZE * 3, in sbus_iommu_init()
74 if (!iommu->regs) { in sbus_iommu_init()
78 impl = (iommu->regs->control & IOMMU_CTRL_IMPL) >> 28; in sbus_iommu_init()
79 vers = (iommu->regs->control & IOMMU_CTRL_VERS) >> 24; in sbus_iommu_init()
80 tmp = iommu->regs->control; in sbus_iommu_init()
83 iommu->regs->control = tmp; in sbus_iommu_init()
84 iommu_invalidate(iommu->regs); in sbus_iommu_init()
[all …]
io-unit.c
62 op->dev.archdata.iommu = iounit; in iounit_iommu_init()
141 struct iounit_struct *iounit = dev->archdata.iommu; in iounit_get_scsi_one()
152 struct iounit_struct *iounit = dev->archdata.iommu; in iounit_get_scsi_sgl()
168 struct iounit_struct *iounit = dev->archdata.iommu; in iounit_release_scsi_one()
182 struct iounit_struct *iounit = dev->archdata.iommu; in iounit_release_scsi_sgl()
202 struct iounit_struct *iounit = dev->archdata.iommu; in iounit_map_dma_area()
/linux-3.4.99/arch/powerpc/platforms/cell/
iommu.c
114 struct cbe_iommu *iommu; member
141 static void invalidate_tce_cache(struct cbe_iommu *iommu, unsigned long *pte, in invalidate_tce_cache() argument
148 reg = iommu->xlate_regs + IOC_IOPT_CacheInvd; in invalidate_tce_cache()
205 invalidate_tce_cache(window->iommu, io_pte, npages); in tce_build_cell()
228 __pa(window->iommu->pad_page) | in tce_free_cell()
239 invalidate_tce_cache(window->iommu, io_pte, npages); in tce_free_cell()
245 struct cbe_iommu *iommu = data; in ioc_interrupt() local
247 stat = in_be64(iommu->xlate_regs + IOC_IO_ExcpStat); in ioc_interrupt()
263 out_be64(iommu->xlate_regs + IOC_IO_ExcpStat, stat); in ioc_interrupt()
308 static void cell_iommu_setup_stab(struct cbe_iommu *iommu, in cell_iommu_setup_stab() argument
[all …]
/linux-3.4.99/include/linux/
dma_remapping.h
30 extern void free_dmar_iommu(struct intel_iommu *iommu);
31 extern int iommu_calculate_agaw(struct intel_iommu *iommu);
32 extern int iommu_calculate_max_sagaw(struct intel_iommu *iommu);
36 static inline int iommu_calculate_agaw(struct intel_iommu *iommu) in iommu_calculate_agaw() argument
40 static inline int iommu_calculate_max_sagaw(struct intel_iommu *iommu) in iommu_calculate_max_sagaw() argument
44 static inline void free_dmar_iommu(struct intel_iommu *iommu) in free_dmar_iommu() argument
intel-iommu.h
215 #define IOMMU_WAIT_OP(iommu, offset, op, cond, sts) \ argument
219 sts = op(iommu->reg + offset); \
295 void (*flush_context)(struct intel_iommu *iommu, u16 did, u16 sid,
297 void (*flush_iotlb)(struct intel_iommu *iommu, u16 did, u64 addr,
339 struct intel_iommu *iommu, void *addr, int size) in __iommu_flush_cache() argument
341 if (!ecap_coherent(iommu->ecap)) in __iommu_flush_cache()
349 extern void free_iommu(struct intel_iommu *iommu);
350 extern int dmar_enable_qi(struct intel_iommu *iommu);
351 extern void dmar_disable_qi(struct intel_iommu *iommu);
352 extern int dmar_reenable_qi(struct intel_iommu *iommu);
[all …]
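
IOMMU_WAIT_OP() at line 215 is the polling helper used after poking a VT-d global command: it keeps re-reading a register with the supplied accessor until the condition is true (a timeout check lives inside the macro). A sketch of the usual call shape, along the lines of the translation-enable path in intel-iommu.c (register locking omitted; the wrapper function here is illustrative):

static void example_enable_translation(struct intel_iommu *iommu)
{
        u32 sts;

        /* Set the Translation Enable bit, then wait for the status
         * register to reflect it. */
        iommu->gcmd |= DMA_GCMD_TE;
        writel(iommu->gcmd, iommu->reg + DMAR_GCMD_REG);
        IOMMU_WAIT_OP(iommu, DMAR_GSTS_REG, readl,
                      (sts & DMA_GSTS_TES), sts);
}
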
dmar.h
47 struct intel_iommu *iommu; member
57 if (i=drhd->iommu, drhd->ignored) {} else
61 if (i=drhd->iommu, 0) {} else
126 extern int alloc_irte(struct intel_iommu *iommu, int irq, u16 count);
127 extern int set_irte_irq(int irq, struct intel_iommu *iommu, u16 index,
139 static inline int alloc_irte(struct intel_iommu *iommu, int irq, u16 count) in alloc_irte() argument
155 static inline int set_irte_irq(int irq, struct intel_iommu *iommu, u16 index, in set_irte_irq() argument
215 extern int dmar_set_interrupt(struct intel_iommu *iommu);
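
The fragments at lines 57 and 61 are the tails of the for_each_active_iommu()/for_each_iommu() iterator macros: the comma expression loads drhd->iommu into the loop variable, and the second operand decides whether the body runs (with drhd->ignored set, the unit falls into the empty if-branch and is skipped). A minimal usage sketch, assuming ecap_qis() from intel-iommu.h as the queued-invalidation capability test and no error handling:

static void example_enable_qi_everywhere(void)
{
        struct dmar_drhd_unit *drhd;
        struct intel_iommu *iommu;

        /* Visits only non-ignored DRHD units; iommu is set by the macro. */
        for_each_active_iommu(iommu, drhd) {
                if (ecap_qis(iommu->ecap))
                        dmar_enable_qi(iommu);
        }
}
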
/linux-3.4.99/arch/sparc/include/asm/
iommu_64.h
26 struct iommu { struct
29 void (*flush_all)(struct iommu *); argument
61 extern int iommu_table_init(struct iommu *iommu, int tsbsize, argument
