Lines matching refs: ram
(Cross-reference listing for nouveau's NV50 VRAM reclocking code. Each entry gives the line number in the source file, the matching line, and the enclosing function; "argument" and "local" mark matches that are a function parameter or a local variable.)
73 nv50_ram_timing_calc(struct nv50_ram *ram, u32 *timing) in nv50_ram_timing_calc() argument
75 struct nvbios_ramcfg *cfg = &ram->base.target.bios; in nv50_ram_timing_calc()
76 struct nvkm_subdev *subdev = &ram->base.fb->subdev; in nv50_ram_timing_calc()
86 switch ((!T(CWL)) * ram->base.type) { in nv50_ram_timing_calc()
97 unkt3b = 0x19 + ram->base.next->bios.rammap_00_16_40; in nv50_ram_timing_calc()
99 ram->base.next->bios.rammap_00_16_40) << 16 | in nv50_ram_timing_calc()
133 if (ram->base.type == NVKM_RAM_TYPE_DDR2) { in nv50_ram_timing_calc()
137 if (ram->base.type == NVKM_RAM_TYPE_GDDR3) { in nv50_ram_timing_calc()
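The switch at line 86 uses a compact idiom worth unpacking: multiplying the boolean `!T(CWL)` by the memory type yields 0 (matching no case) whenever the BIOS already supplied a CAS write latency, and yields the type itself (dispatching on it) only when CWL must be derived from CL. A minimal standalone sketch of the idiom; the type values and derivation rules below are illustrative stand-ins, not the driver's own definitions:

    #include <stdio.h>

    enum ram_type { RAM_TYPE_UNKNOWN = 0, RAM_TYPE_DDR2 = 1, RAM_TYPE_GDDR3 = 2 };

    static int derive_cwl(int cwl, enum ram_type type, int cl)
    {
        /* If the BIOS supplied CWL (cwl != 0), !cwl is 0, the switch
         * matches no case, and cwl is kept as-is.  Only when cwl is 0
         * does the expression collapse to the memory type. */
        switch ((!cwl) * type) {
        case RAM_TYPE_DDR2:
            cwl = cl - 1;               /* DDR2 rule, as we read the driver */
            break;
        case RAM_TYPE_GDDR3:
            cwl = ((cl + 3) / 4) - 1;   /* GDDR3 rule; treat as illustrative */
            break;
        }
        return cwl;
    }

    int main(void)
    {
        printf("%d\n", derive_cwl(0, RAM_TYPE_DDR2, 5));  /* derived: 4 */
        printf("%d\n", derive_cwl(7, RAM_TYPE_DDR2, 5));  /* kept: 7 */
        return 0;
    }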
151 nv50_ram_timing_read(struct nv50_ram *ram, u32 *timing) in nv50_ram_timing_read() argument
154 struct nvbios_ramcfg *cfg = &ram->base.target.bios; in nv50_ram_timing_read()
155 struct nvkm_subdev *subdev = &ram->base.fb->subdev; in nv50_ram_timing_read()
165 switch (ram->base.type) { in nv50_ram_timing_read()
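nv50_ram_timing_read() recovers timings from the hardware rather than computing them from BIOS tables, and the switch on ram->base.type at line 165 selects the per-type register layout. A hypothetical sketch of that shape; the field positions below are invented purely for illustration and are not NV50's actual layout:

    #include <stdint.h>
    #include <stdio.h>

    enum ram_type { RAM_DDR2, RAM_GDDR3 };

    /* Decode CL from a packed timing word; bit positions are made up
     * to illustrate the per-type switch, they are NOT NV50's. */
    static unsigned decode_cl(enum ram_type type, uint32_t timing0)
    {
        switch (type) {
        case RAM_DDR2:
            return (timing0 >> 8) & 0x0f;   /* hypothetical field */
        case RAM_GDDR3:
            return (timing0 >> 4) & 0x1f;   /* hypothetical field */
        }
        return 0;
    }

    int main(void)
    {
        printf("CL=%u\n", decode_cl(RAM_DDR2, 0x00000a00));  /* CL=10 */
        return 0;
    }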
220 struct nv50_ram *ram = nv50_ram(base); in nv50_ram_calc() local
221 struct nv50_ramseq *hwsq = &ram->hwsq; in nv50_ram_calc()
222 struct nvkm_subdev *subdev = &ram->base.fb->subdev; in nv50_ram_calc()
234 next = &ram->base.target; in nv50_ram_calc()
236 ram->base.next = next; in nv50_ram_calc()
276 nv50_ram_timing_calc(ram, timing); in nv50_ram_calc()
278 nv50_ram_timing_read(ram, timing); in nv50_ram_calc()
286 ram->base.mr[0] = ram_rd32(hwsq, mr[0]); in nv50_ram_calc()
287 ram->base.mr[1] = ram_rd32(hwsq, mr[1]); in nv50_ram_calc()
288 ram->base.mr[2] = ram_rd32(hwsq, mr[2]); in nv50_ram_calc()
290 switch (ram->base.type) { in nv50_ram_calc()
292 ret = nvkm_gddr3_calc(&ram->base); in nv50_ram_calc()
374 switch (ram->base.type) { in nv50_ram_calc()
381 ram_wr32(hwsq, mr[1], ram->base.mr[1]); in nv50_ram_calc()
383 ram_wr32(hwsq, mr[0], ram->base.mr[0]); in nv50_ram_calc()
453 ram_mask(hwsq, mr[1], 0xffffffff, ram->base.mr[1]); in nv50_ram_calc()
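Lines 286-288 snapshot the current mode registers before the per-type calc (e.g. nvkm_gddr3_calc() at line 292) recomputes them, and line 453 writes one back as a masked update. With an all-ones mask, ram_mask() degenerates to a plain write. A standalone sketch of the masked read-modify-write pattern; the helper below is illustrative, not the hwsq API:

    #include <stdint.h>
    #include <stdio.h>

    /* Keep the bits outside `mask`, replace the bits inside it.
     * mask == 0xffffffff is a full overwrite, which is what
     * ram_mask(hwsq, mr[1], 0xffffffff, ...) amounts to. */
    static uint32_t reg_mask(uint32_t old, uint32_t mask, uint32_t data)
    {
        return (old & ~mask) | (data & mask);
    }

    int main(void)
    {
        uint32_t mr1 = 0x00000044;

        /* update only the low nibble, preserve everything else */
        mr1 = reg_mask(mr1, 0x0000000f, 0x00000002);
        printf("mr1=0x%08x\n", mr1);   /* 0x00000042 */

        /* full-mask update == plain write */
        mr1 = reg_mask(mr1, 0xffffffff, 0x00000850);
        printf("mr1=0x%08x\n", mr1);   /* 0x00000850 */
        return 0;
    }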
482 struct nv50_ram *ram = nv50_ram(base); in nv50_ram_prog() local
483 struct nvkm_device *device = ram->base.fb->subdev.device; in nv50_ram_prog()
484 ram_exec(&ram->hwsq, nvkm_boolopt(device->cfgopt, "NvMemExec", true)); in nv50_ram_prog()
491 struct nv50_ram *ram = nv50_ram(base); in nv50_ram_tidy() local
492 ram_exec(&ram->hwsq, false); in nv50_ram_tidy()
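nv50_ram_prog() and nv50_ram_tidy() share one mechanism: ram_exec() either runs the hwsq script that nv50_ram_calc() queued up or throws it away, so tidy is simply "exec with false", and the "NvMemExec" config option (default true, line 484) lets a user dry-run reclocking. A minimal sketch of that commit-or-discard shape, with a toy script queue standing in for hwsq:

    #include <stdbool.h>
    #include <stdio.h>

    /* Toy stand-in for a queued register-write script (hwsq). */
    struct script {
        int nr_ops;
    };

    /* Run the queued ops if exec is true; reset the queue either way,
     * mirroring how prog executes and tidy discards. */
    static void script_exec(struct script *s, bool exec)
    {
        if (exec)
            printf("executing %d queued ops\n", s->nr_ops);
        else
            printf("discarding %d queued ops\n", s->nr_ops);
        s->nr_ops = 0;
    }

    int main(void)
    {
        struct script s = { .nr_ops = 42 };
        bool nv_mem_exec = true;   /* cf. nvkm_boolopt(..., "NvMemExec", true) */

        script_exec(&s, nv_mem_exec);  /* prog path */
        script_exec(&s, false);        /* tidy path */
        return 0;
    }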
503 nv50_fb_vram_rblock(struct nvkm_ram *ram) in nv50_fb_vram_rblock() argument
505 struct nvkm_subdev *subdev = &ram->fb->subdev; in nv50_fb_vram_rblock()
522 rowsize = ram->parts * banks * (1 << colbits) * 8; in nv50_fb_vram_rblock()
527 if (predicted != ram->size) { in nv50_fb_vram_rblock()
529 (u32)(ram->size >> 20)); in nv50_fb_vram_rblock()
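nv50_fb_vram_rblock() sanity-checks the detected VRAM size against geometry read from the memory controller: line 522 computes bytes per row as partitions × banks × 2^colbits × 8, that figure is then scaled up by the row-address width, and line 527 warns when the prediction disagrees with the reported size. A worked example under assumed geometry (8 partitions, 4 banks, 10 column bits, 12 row bits; values chosen for round numbers, not read from real hardware):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* assumed geometry, purely illustrative */
        uint64_t parts   = 8;
        uint64_t banks   = 4;
        unsigned colbits = 10;
        unsigned rowbits = 12;

        /* bytes per row across all partitions (cf. line 522) */
        uint64_t rowsize = parts * banks * (1ull << colbits) * 8;

        /* total size once the row address width is applied */
        uint64_t predicted = rowsize << rowbits;

        printf("rowsize   = %llu KiB\n", (unsigned long long)(rowsize >> 10));
        printf("predicted = %llu MiB\n", (unsigned long long)(predicted >> 20));
        /* prints: rowsize = 256 KiB, predicted = 1024 MiB */
        return 0;
    }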
542 struct nvkm_fb *fb, struct nvkm_ram *ram) in nv50_ram_ctor() argument
569 ret = nvkm_ram_ctor(func, fb, type, size, ram); in nv50_ram_ctor()
573 ram->part_mask = (nvkm_rd32(device, 0x001540) & 0x00ff0000) >> 16; in nv50_ram_ctor()
574 ram->parts = hweight8(ram->part_mask); in nv50_ram_ctor()
575 ram->ranks = (nvkm_rd32(device, 0x100200) & 0x4) ? 2 : 1; in nv50_ram_ctor()
576 nvkm_mm_fini(&ram->vram); in nv50_ram_ctor()
578 return nvkm_mm_init(&ram->vram, NVKM_RAM_MM_NORMAL, in nv50_ram_ctor()
581 nv50_fb_vram_rblock(ram) >> NVKM_RAM_MM_SHIFT); in nv50_ram_ctor()
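nv50_ram_ctor() derives the partition count by extracting a mask from register 0x001540 (bits 16-23, line 573) and popcounting it with hweight8() (line 574); a set bit 2 in register 0x100200 means two ranks (line 575). A standalone sketch of the decode; the register values are made-up samples:

    #include <stdint.h>
    #include <stdio.h>

    /* Portable stand-in for the kernel's hweight8(): count set bits. */
    static unsigned hweight8(uint8_t v)
    {
        unsigned n = 0;
        while (v) {
            n += v & 1;
            v >>= 1;
        }
        return n;
    }

    int main(void)
    {
        uint32_t r001540 = 0x003f0000;   /* made-up sample value */
        uint32_t r100200 = 0x00000004;   /* made-up sample value */

        uint8_t  part_mask = (r001540 & 0x00ff0000) >> 16;  /* 0x3f */
        unsigned parts     = hweight8(part_mask);           /* 6 */
        unsigned ranks     = (r100200 & 0x4) ? 2 : 1;       /* 2 */

        printf("part_mask=0x%02x parts=%u ranks=%u\n",
               part_mask, parts, ranks);
        return 0;
    }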
587 struct nv50_ram *ram; in nv50_ram_new() local
590 if (!(ram = kzalloc(sizeof(*ram), GFP_KERNEL))) in nv50_ram_new()
592 *pram = &ram->base; in nv50_ram_new()
594 ret = nv50_ram_ctor(&nv50_ram_func, fb, &ram->base); in nv50_ram_new()
598 ram->hwsq.r_0x002504 = hwsq_reg(0x002504); in nv50_ram_new()
599 ram->hwsq.r_0x00c040 = hwsq_reg(0x00c040); in nv50_ram_new()
600 ram->hwsq.r_0x004008 = hwsq_reg(0x004008); in nv50_ram_new()
601 ram->hwsq.r_0x00400c = hwsq_reg(0x00400c); in nv50_ram_new()
602 ram->hwsq.r_0x100200 = hwsq_reg(0x100200); in nv50_ram_new()
603 ram->hwsq.r_0x100210 = hwsq_reg(0x100210); in nv50_ram_new()
604 ram->hwsq.r_0x10021c = hwsq_reg(0x10021c); in nv50_ram_new()
605 ram->hwsq.r_0x1002d0 = hwsq_reg(0x1002d0); in nv50_ram_new()
606 ram->hwsq.r_0x1002d4 = hwsq_reg(0x1002d4); in nv50_ram_new()
607 ram->hwsq.r_0x1002dc = hwsq_reg(0x1002dc); in nv50_ram_new()
608 ram->hwsq.r_0x10053c = hwsq_reg(0x10053c); in nv50_ram_new()
609 ram->hwsq.r_0x1005a0 = hwsq_reg(0x1005a0); in nv50_ram_new()
610 ram->hwsq.r_0x1005a4 = hwsq_reg(0x1005a4); in nv50_ram_new()
611 ram->hwsq.r_0x100710 = hwsq_reg(0x100710); in nv50_ram_new()
612 ram->hwsq.r_0x100714 = hwsq_reg(0x100714); in nv50_ram_new()
613 ram->hwsq.r_0x100718 = hwsq_reg(0x100718); in nv50_ram_new()
614 ram->hwsq.r_0x10071c = hwsq_reg(0x10071c); in nv50_ram_new()
615 ram->hwsq.r_0x100da0 = hwsq_stride(0x100da0, 4, ram->base.part_mask); in nv50_ram_new()
616 ram->hwsq.r_0x100e20 = hwsq_reg(0x100e20); in nv50_ram_new()
617 ram->hwsq.r_0x100e24 = hwsq_reg(0x100e24); in nv50_ram_new()
618 ram->hwsq.r_0x611200 = hwsq_reg(0x611200); in nv50_ram_new()
621 ram->hwsq.r_timing[i] = hwsq_reg(0x100220 + (i * 0x04)); in nv50_ram_new()
623 if (ram->base.ranks > 1) { in nv50_ram_new()
624 ram->hwsq.r_mr[0] = hwsq_reg2(0x1002c0, 0x1002c8); in nv50_ram_new()
625 ram->hwsq.r_mr[1] = hwsq_reg2(0x1002c4, 0x1002cc); in nv50_ram_new()
626 ram->hwsq.r_mr[2] = hwsq_reg2(0x1002e0, 0x1002e8); in nv50_ram_new()
627 ram->hwsq.r_mr[3] = hwsq_reg2(0x1002e4, 0x1002ec); in nv50_ram_new()
629 ram->hwsq.r_mr[0] = hwsq_reg(0x1002c0); in nv50_ram_new()
630 ram->hwsq.r_mr[1] = hwsq_reg(0x1002c4); in nv50_ram_new()
631 ram->hwsq.r_mr[2] = hwsq_reg(0x1002e0); in nv50_ram_new()
632 ram->hwsq.r_mr[3] = hwsq_reg(0x1002e4); in nv50_ram_new()
635 ram->hwsq.r_gpio[0] = hwsq_reg(0x00e104); in nv50_ram_new()
636 ram->hwsq.r_gpio[1] = hwsq_reg(0x00e108); in nv50_ram_new()
637 ram->hwsq.r_gpio[2] = hwsq_reg(0x00e120); in nv50_ram_new()
638 ram->hwsq.r_gpio[3] = hwsq_reg(0x00e124); in nv50_ram_new()
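The mode-register setup at lines 623-632 is the interesting branch of nv50_ram_new(): on dual-rank boards each MR appears to exist once per rank (0x1002c0/0x1002c8 and so on), so hwsq_reg2() binds one logical register to two MMIO addresses and a single write to mr[n] lands on both copies, while single-rank boards fall back to plain hwsq_reg(). A minimal sketch of that one-logical-to-many-physical idea; the struct and helpers are illustrative, not the hwsq implementation:

    #include <stdint.h>
    #include <stdio.h>

    /* One logical register backed by up to two physical addresses. */
    struct reg {
        unsigned nr;
        uint32_t addr[2];
    };

    static struct reg reg1(uint32_t a)             { return (struct reg){ 1, { a, 0 } }; }
    static struct reg reg2(uint32_t a, uint32_t b) { return (struct reg){ 2, { a, b } }; }

    /* A write to the logical register fans out to every backing
     * address, like writing mr[0] on a dual-rank board. */
    static void reg_wr32(const struct reg *r, uint32_t data)
    {
        for (unsigned i = 0; i < r->nr; i++)
            printf("wr32(0x%06x, 0x%08x)\n", r->addr[i], data);
    }

    int main(void)
    {
        int ranks = 2;   /* assumed dual-rank board */
        struct reg mr0 = (ranks > 1) ? reg2(0x1002c0, 0x1002c8)
                                     : reg1(0x1002c0);
        reg_wr32(&mr0, 0x00000852);   /* hits both rank copies */
        return 0;
    }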