Lines Matching refs:r

235 static int ps3_mm_region_create(struct mem_region *r, unsigned long size)  in ps3_mm_region_create()  argument
240 r->size = _ALIGN_DOWN(size, 1 << PAGE_SHIFT_16M); in ps3_mm_region_create()
243 DBG("%s:%d actual %llxh\n", __func__, __LINE__, r->size); in ps3_mm_region_create()
245 size - r->size, (size - r->size) / 1024 / 1024); in ps3_mm_region_create()
247 if (r->size == 0) { in ps3_mm_region_create()
253 result = lv1_allocate_memory(r->size, PAGE_SHIFT_16M, 0, in ps3_mm_region_create()
254 ALLOCATE_MEMORY_TRY_ALT_UNIT, &r->base, &muid); in ps3_mm_region_create()
256 if (result || r->base < map.rm.size) { in ps3_mm_region_create()
262 r->offset = r->base - map.rm.size; in ps3_mm_region_create()
266 r->size = r->base = r->offset = 0; in ps3_mm_region_create()
275 static void ps3_mm_region_destroy(struct mem_region *r) in ps3_mm_region_destroy() argument
279 DBG("%s:%d: r->base = %llxh\n", __func__, __LINE__, r->base); in ps3_mm_region_destroy()
280 if (r->base) { in ps3_mm_region_destroy()
281 result = lv1_release_memory(r->base); in ps3_mm_region_destroy()
283 r->size = r->base = r->offset = 0; in ps3_mm_region_destroy()
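
ps3_mm_region_create() trims the requested size down to whole 16 MiB hypervisor pages before calling lv1_allocate_memory(), logs what was lost to rounding, and bails out if nothing is left. A user-space sketch of that round-down, assuming PAGE_SHIFT_16M is 24 (16 MiB pages) and using a plain mask macro in place of _ALIGN_DOWN:

#include <assert.h>
#include <stdio.h>

/* Sketch only: PAGE_SHIFT_16M assumed to be 24 (16 MiB pages), and
 * ALIGN_DOWN stands in for the kernel's _ALIGN_DOWN mask macro. */
#define PAGE_SHIFT_16M 24
#define ALIGN_DOWN(x, a) ((x) & ~((unsigned long)(a) - 1))

int main(void)
{
    unsigned long size = 200UL * 1024 * 1024;   /* requested bytes */
    unsigned long actual = ALIGN_DOWN(size, 1UL << PAGE_SHIFT_16M);

    /* 200 MiB rounds down to 192 MiB; the trailing 8 MiB is unused. */
    printf("actual %lxh, wasted %lu MiB\n", actual,
           (size - actual) / 1024 / 1024);
    assert(actual != 0);    /* a region below 16 MiB would be rejected */
    return 0;
}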
343 static unsigned long dma_sb_lpar_to_bus(struct ps3_dma_region *r, in dma_sb_lpar_to_bus() argument
348 BUG_ON(lpar_addr < r->offset); in dma_sb_lpar_to_bus()
349 BUG_ON(lpar_addr >= r->offset + r->len); in dma_sb_lpar_to_bus()
350 return r->bus_addr + lpar_addr - r->offset; in dma_sb_lpar_to_bus()
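
dma_sb_lpar_to_bus() is a linear remap: after the BUG_ON() bounds checks, the bus address is the region's bus base plus the lpar address's offset into the region. A self-contained sketch of the same arithmetic; the struct and sample values below are illustrative, not the kernel's definitions:

#include <assert.h>
#include <stdio.h>

/* Illustrative stand-in for the three fields the translation uses. */
struct region_sketch {
    unsigned long bus_addr; /* region base in bus (DMA) space */
    unsigned long offset;   /* region base in lpar space */
    unsigned long len;      /* region length in bytes */
};

static unsigned long lpar_to_bus(const struct region_sketch *r,
                                 unsigned long lpar_addr)
{
    /* Same checks as the BUG_ON()s, then a linear offset remap. */
    assert(lpar_addr >= r->offset);
    assert(lpar_addr < r->offset + r->len);
    return r->bus_addr + lpar_addr - r->offset;
}

int main(void)
{
    struct region_sketch r = {
        .bus_addr = 0x08000000, .offset = 0x10000000,
        .len = 0x01000000,
    };

    /* lpar 0x10001000 maps to bus 0x08001000. */
    printf("%lxh\n", lpar_to_bus(&r, 0x10001000));
    return 0;
}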
354 static void __maybe_unused _dma_dump_region(const struct ps3_dma_region *r, in _dma_dump_region() argument
357 DBG("%s:%d: dev %llu:%llu\n", func, line, r->dev->bus_id, in _dma_dump_region()
358 r->dev->dev_id); in _dma_dump_region()
359 DBG("%s:%d: page_size %u\n", func, line, r->page_size); in _dma_dump_region()
360 DBG("%s:%d: bus_addr %lxh\n", func, line, r->bus_addr); in _dma_dump_region()
361 DBG("%s:%d: len %lxh\n", func, line, r->len); in _dma_dump_region()
362 DBG("%s:%d: offset %lxh\n", func, line, r->offset); in _dma_dump_region()
403 static struct dma_chunk * dma_find_chunk(struct ps3_dma_region *r, in dma_find_chunk() argument
407 unsigned long aligned_bus = _ALIGN_DOWN(bus_addr, 1 << r->page_size); in dma_find_chunk()
409 1 << r->page_size); in dma_find_chunk()
411 list_for_each_entry(c, &r->chunk_list.head, link) { in dma_find_chunk()
432 static struct dma_chunk *dma_find_chunk_lpar(struct ps3_dma_region *r, in dma_find_chunk_lpar() argument
436 unsigned long aligned_lpar = _ALIGN_DOWN(lpar_addr, 1 << r->page_size); in dma_find_chunk_lpar()
438 1 << r->page_size); in dma_find_chunk_lpar()
440 list_for_each_entry(c, &r->chunk_list.head, link) { in dma_find_chunk_lpar()
482 struct ps3_dma_region *r = c->region; in dma_ioc0_free_chunk() local
485 for (iopage = 0; iopage < (c->len >> r->page_size); iopage++) { in dma_ioc0_free_chunk()
486 offset = (1 << r->page_size) * iopage; in dma_ioc0_free_chunk()
491 r->ioid, in dma_ioc0_free_chunk()
496 r->ioid); in dma_ioc0_free_chunk()
519 static int dma_sb_map_pages(struct ps3_dma_region *r, unsigned long phys_addr, in dma_sb_map_pages() argument
532 c->region = r; in dma_sb_map_pages()
534 c->bus_addr = dma_sb_lpar_to_bus(r, c->lpar_addr); in dma_sb_map_pages()
547 list_add(&c->link, &r->chunk_list.head); in dma_sb_map_pages()
560 static int dma_ioc0_map_pages(struct ps3_dma_region *r, unsigned long phys_addr, in dma_ioc0_map_pages() argument
578 c->region = r; in dma_ioc0_map_pages()
582 if (list_empty(&r->chunk_list.head)) { in dma_ioc0_map_pages()
584 c->bus_addr = r->bus_addr; in dma_ioc0_map_pages()
587 last = list_entry(r->chunk_list.head.next, in dma_ioc0_map_pages()
597 pages = len >> r->page_size; in dma_ioc0_map_pages()
599 r->page_size, r->len, pages, iopte_flag); in dma_ioc0_map_pages()
601 offset = (1 << r->page_size) * iopage; in dma_ioc0_map_pages()
605 r->ioid, in dma_ioc0_map_pages()
615 r->ioid); in dma_ioc0_map_pages()
619 list_add(&c->link, &r->chunk_list.head); in dma_ioc0_map_pages()
630 r->ioid, in dma_ioc0_map_pages()
647 static int dma_sb_region_create(struct ps3_dma_region *r) in dma_sb_region_create() argument
654 BUG_ON(!r); in dma_sb_region_create()
656 if (!r->dev->bus_id) { in dma_sb_region_create()
658 r->dev->bus_id, r->dev->dev_id); in dma_sb_region_create()
663 __LINE__, r->len, r->page_size, r->offset); in dma_sb_region_create()
665 BUG_ON(!r->len); in dma_sb_region_create()
666 BUG_ON(!r->page_size); in dma_sb_region_create()
667 BUG_ON(!r->region_ops); in dma_sb_region_create()
669 INIT_LIST_HEAD(&r->chunk_list.head); in dma_sb_region_create()
670 spin_lock_init(&r->chunk_list.lock); in dma_sb_region_create()
672 result = lv1_allocate_device_dma_region(r->dev->bus_id, r->dev->dev_id, in dma_sb_region_create()
673 roundup_pow_of_two(r->len), r->page_size, r->region_type, in dma_sb_region_create()
675 r->bus_addr = bus_addr; in dma_sb_region_create()
680 r->len = r->bus_addr = 0; in dma_sb_region_create()
686 static int dma_ioc0_region_create(struct ps3_dma_region *r) in dma_ioc0_region_create() argument
691 INIT_LIST_HEAD(&r->chunk_list.head); in dma_ioc0_region_create()
692 spin_lock_init(&r->chunk_list.lock); in dma_ioc0_region_create()
695 r->len, in dma_ioc0_region_create()
696 r->page_size, in dma_ioc0_region_create()
698 r->bus_addr = bus_addr; in dma_ioc0_region_create()
702 r->len = r->bus_addr = 0; in dma_ioc0_region_create()
705 r->len, r->page_size, r->bus_addr); in dma_ioc0_region_create()
717 static int dma_sb_region_free(struct ps3_dma_region *r) in dma_sb_region_free() argument
723 BUG_ON(!r); in dma_sb_region_free()
725 if (!r->dev->bus_id) { in dma_sb_region_free()
727 r->dev->bus_id, r->dev->dev_id); in dma_sb_region_free()
731 list_for_each_entry_safe(c, tmp, &r->chunk_list.head, link) { in dma_sb_region_free()
736 result = lv1_free_device_dma_region(r->dev->bus_id, r->dev->dev_id, in dma_sb_region_free()
737 r->bus_addr); in dma_sb_region_free()
743 r->bus_addr = 0; in dma_sb_region_free()
748 static int dma_ioc0_region_free(struct ps3_dma_region *r) in dma_ioc0_region_free() argument
754 list_for_each_entry_safe(c, n, &r->chunk_list.head, link) { in dma_ioc0_region_free()
759 result = lv1_release_io_segment(0, r->bus_addr); in dma_ioc0_region_free()
765 r->bus_addr = 0; in dma_ioc0_region_free()
782 static int dma_sb_map_area(struct ps3_dma_region *r, unsigned long virt_addr, in dma_sb_map_area() argument
791 unsigned long aligned_phys = _ALIGN_DOWN(phys_addr, 1 << r->page_size); in dma_sb_map_area()
793 1 << r->page_size); in dma_sb_map_area()
794 *bus_addr = dma_sb_lpar_to_bus(r, ps3_mm_phys_to_lpar(phys_addr)); in dma_sb_map_area()
810 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_sb_map_area()
811 c = dma_find_chunk(r, *bus_addr, len); in dma_sb_map_area()
817 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_map_area()
821 result = dma_sb_map_pages(r, aligned_phys, aligned_len, &c, iopte_flag); in dma_sb_map_area()
827 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_map_area()
833 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_map_area()
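
dma_sb_map_area() is a find-or-map routine: it takes the chunk_list lock, reuses any chunk that already covers the aligned bus range, and calls dma_sb_map_pages() only when none is found. A heavily simplified user-space sketch of that pattern, with a pthread mutex in place of the spinlock, a hand-rolled list in place of chunk_list, and the page alignment omitted:

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

struct chunk {
    unsigned long bus_addr, len;
    struct chunk *next;
};

struct region {
    pthread_mutex_t lock;   /* stands in for chunk_list.lock */
    struct chunk *chunks;   /* stands in for chunk_list.head */
};

/* Return an existing chunk that covers [bus, bus + len), if any. */
static struct chunk *find_chunk(struct region *r, unsigned long bus,
                                unsigned long len)
{
    for (struct chunk *c = r->chunks; c; c = c->next)
        if (bus >= c->bus_addr && bus + len <= c->bus_addr + c->len)
            return c;
    return NULL;
}

/* Find-or-map: returns 1 if a new chunk had to be created, 0 on reuse. */
static int map_area(struct region *r, unsigned long bus, unsigned long len)
{
    int created = 0;

    pthread_mutex_lock(&r->lock);
    if (!find_chunk(r, bus, len)) {
        /* No covering chunk: this is where dma_sb_map_pages() would
         * program the hypervisor IOMMU; here we only record the range. */
        struct chunk *c = calloc(1, sizeof(*c));
        if (!c) {
            pthread_mutex_unlock(&r->lock);
            return -1;
        }
        c->bus_addr = bus;
        c->len = len;
        c->next = r->chunks;
        r->chunks = c;
        created = 1;
    }
    pthread_mutex_unlock(&r->lock);
    return created;
}

int main(void)
{
    struct region r = { .lock = PTHREAD_MUTEX_INITIALIZER };

    printf("first  map created=%d\n", map_area(&r, 0x1000, 0x2000));
    printf("second map created=%d\n", map_area(&r, 0x1800, 0x0200));
    return 0;
}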
837 static int dma_ioc0_map_area(struct ps3_dma_region *r, unsigned long virt_addr, in dma_ioc0_map_area() argument
846 unsigned long aligned_phys = _ALIGN_DOWN(phys_addr, 1 << r->page_size); in dma_ioc0_map_area()
848 1 << r->page_size); in dma_ioc0_map_area()
855 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
856 c = dma_find_chunk_lpar(r, ps3_mm_phys_to_lpar(phys_addr), len); in dma_ioc0_map_area()
863 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
867 result = dma_ioc0_map_pages(r, aligned_phys, aligned_len, &c, in dma_ioc0_map_area()
874 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
882 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
895 static int dma_sb_unmap_area(struct ps3_dma_region *r, dma_addr_t bus_addr, in dma_sb_unmap_area() argument
901 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_sb_unmap_area()
902 c = dma_find_chunk(r, bus_addr, len); in dma_sb_unmap_area()
906 1 << r->page_size); in dma_sb_unmap_area()
908 - aligned_bus, 1 << r->page_size); in dma_sb_unmap_area()
927 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_unmap_area()
931 static int dma_ioc0_unmap_area(struct ps3_dma_region *r, in dma_ioc0_unmap_area() argument
938 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_ioc0_unmap_area()
939 c = dma_find_chunk(r, bus_addr, len); in dma_ioc0_unmap_area()
943 1 << r->page_size); in dma_ioc0_unmap_area()
946 1 << r->page_size); in dma_ioc0_unmap_area()
965 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_unmap_area()
978 static int dma_sb_region_create_linear(struct ps3_dma_region *r) in dma_sb_region_create_linear() argument
984 if (r->len > 16*1024*1024) { /* FIXME: need proper fix */ in dma_sb_region_create_linear()
986 if (r->page_size != PS3_DMA_16M) { in dma_sb_region_create_linear()
989 r->page_size = PS3_DMA_16M; in dma_sb_region_create_linear()
990 r->len = _ALIGN_UP(r->len, 1 << r->page_size); in dma_sb_region_create_linear()
994 result = dma_sb_region_create(r); in dma_sb_region_create_linear()
997 if (r->offset < map.rm.size) { in dma_sb_region_create_linear()
999 virt_addr = map.rm.base + r->offset; in dma_sb_region_create_linear()
1000 len = map.rm.size - r->offset; in dma_sb_region_create_linear()
1001 if (len > r->len) in dma_sb_region_create_linear()
1002 len = r->len; in dma_sb_region_create_linear()
1003 result = dma_sb_map_area(r, virt_addr, len, &tmp, in dma_sb_region_create_linear()
1008 if (r->offset + r->len > map.rm.size) { in dma_sb_region_create_linear()
1011 len = r->len; in dma_sb_region_create_linear()
1012 if (r->offset >= map.rm.size) in dma_sb_region_create_linear()
1013 virt_addr += r->offset - map.rm.size; in dma_sb_region_create_linear()
1015 len -= map.rm.size - r->offset; in dma_sb_region_create_linear()
1016 result = dma_sb_map_area(r, virt_addr, len, &tmp, in dma_sb_region_create_linear()
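
The linear variant may need two dma_sb_map_area() calls because a region can straddle the boundary between the real-mode area (map.rm) and the extended memory area: the slice below map.rm.size is mapped from the real-mode base, the remainder from the second region. A sketch of how the two slices are computed, using only a real-mode size and a region [offset, offset + len) with made-up values:

#include <stdio.h>

/* Split a region [offset, offset + len) at the real-mode boundary
 * rm_size; all values are illustrative. */
int main(void)
{
    unsigned long rm_size = 0x08000000;     /* e.g. 128 MiB real-mode area */
    unsigned long offset = 0x07000000, len = 0x03000000;

    if (offset < rm_size) {
        /* First map: the slice that lies inside the real-mode area. */
        unsigned long first = rm_size - offset;
        if (first > len)
            first = len;
        printf("rm slice:  offset %lxh len %lxh\n", offset, first);
    }

    if (offset + len > rm_size) {
        /* Second map: whatever extends past the boundary, expressed
         * as an offset into the extended area. */
        unsigned long ext_off = offset >= rm_size ? offset - rm_size : 0;
        unsigned long rest = len;
        if (offset < rm_size)
            rest -= rm_size - offset;
        printf("ext slice: ext offset %lxh len %lxh\n", ext_off, rest);
    }
    return 0;
}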
1031 static int dma_sb_region_free_linear(struct ps3_dma_region *r) in dma_sb_region_free_linear() argument
1037 if (r->offset < map.rm.size) { in dma_sb_region_free_linear()
1039 lpar_addr = map.rm.base + r->offset; in dma_sb_region_free_linear()
1040 len = map.rm.size - r->offset; in dma_sb_region_free_linear()
1041 if (len > r->len) in dma_sb_region_free_linear()
1042 len = r->len; in dma_sb_region_free_linear()
1043 bus_addr = dma_sb_lpar_to_bus(r, lpar_addr); in dma_sb_region_free_linear()
1044 result = dma_sb_unmap_area(r, bus_addr, len); in dma_sb_region_free_linear()
1048 if (r->offset + r->len > map.rm.size) { in dma_sb_region_free_linear()
1051 len = r->len; in dma_sb_region_free_linear()
1052 if (r->offset >= map.rm.size) in dma_sb_region_free_linear()
1053 lpar_addr += r->offset - map.rm.size; in dma_sb_region_free_linear()
1055 len -= map.rm.size - r->offset; in dma_sb_region_free_linear()
1056 bus_addr = dma_sb_lpar_to_bus(r, lpar_addr); in dma_sb_region_free_linear()
1057 result = dma_sb_unmap_area(r, bus_addr, len); in dma_sb_region_free_linear()
1061 result = dma_sb_region_free(r); in dma_sb_region_free_linear()
1079 static int dma_sb_map_area_linear(struct ps3_dma_region *r, in dma_sb_map_area_linear() argument
1085 *bus_addr = dma_sb_lpar_to_bus(r, ps3_mm_phys_to_lpar(phys_addr)); in dma_sb_map_area_linear()
1098 static int dma_sb_unmap_area_linear(struct ps3_dma_region *r, in dma_sb_unmap_area_linear() argument
1126 struct ps3_dma_region *r, enum ps3_dma_page_size page_size, in ps3_dma_region_init() argument
1133 r->dev = dev; in ps3_dma_region_init()
1134 r->page_size = page_size; in ps3_dma_region_init()
1135 r->region_type = region_type; in ps3_dma_region_init()
1136 r->offset = lpar_addr; in ps3_dma_region_init()
1137 if (r->offset >= map.rm.size) in ps3_dma_region_init()
1138 r->offset -= map.r1.offset; in ps3_dma_region_init()
1139 r->len = len ? len : _ALIGN_UP(map.total, 1 << r->page_size); in ps3_dma_region_init()
1143 r->region_ops = (USE_DYNAMIC_DMA) in ps3_dma_region_init()
1148 r->region_ops = &ps3_dma_ioc0_region_ops; in ps3_dma_region_init()
1158 int ps3_dma_region_create(struct ps3_dma_region *r) in ps3_dma_region_create() argument
1160 BUG_ON(!r); in ps3_dma_region_create()
1161 BUG_ON(!r->region_ops); in ps3_dma_region_create()
1162 BUG_ON(!r->region_ops->create); in ps3_dma_region_create()
1163 return r->region_ops->create(r); in ps3_dma_region_create()
1167 int ps3_dma_region_free(struct ps3_dma_region *r) in ps3_dma_region_free() argument
1169 BUG_ON(!r); in ps3_dma_region_free()
1170 BUG_ON(!r->region_ops); in ps3_dma_region_free()
1171 BUG_ON(!r->region_ops->free); in ps3_dma_region_free()
1172 return r->region_ops->free(r); in ps3_dma_region_free()
1176 int ps3_dma_map(struct ps3_dma_region *r, unsigned long virt_addr, in ps3_dma_map() argument
1180 return r->region_ops->map(r, virt_addr, len, bus_addr, iopte_flag); in ps3_dma_map()
1183 int ps3_dma_unmap(struct ps3_dma_region *r, dma_addr_t bus_addr, in ps3_dma_unmap() argument
1186 return r->region_ops->unmap(r, bus_addr, len); in ps3_dma_unmap()
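
The exported entry points only sanity-check the region and dispatch through r->region_ops, which ps3_dma_region_init() points at either the southbridge (sb) or the ioc0 implementation. A compact, runnable sketch of that ops-table dispatch; the names below are hypothetical stand-ins, not the kernel's:

#include <stdio.h>

struct region;

/* Ops table: one set of callbacks per region flavour. */
struct region_ops {
    int (*create)(struct region *r);
    int (*free)(struct region *r);
};

struct region {
    const struct region_ops *ops;
    const char *name;
};

/* Two hypothetical implementations standing in for the sb and ioc0 ops. */
static int sb_create(struct region *r)   { printf("%s: sb create\n", r->name); return 0; }
static int sb_free(struct region *r)     { printf("%s: sb free\n", r->name); return 0; }
static int ioc0_create(struct region *r) { printf("%s: ioc0 create\n", r->name); return 0; }
static int ioc0_free(struct region *r)   { printf("%s: ioc0 free\n", r->name); return 0; }

static const struct region_ops sb_ops   = { .create = sb_create,   .free = sb_free };
static const struct region_ops ioc0_ops = { .create = ioc0_create, .free = ioc0_free };

/* The wrappers just validate and dispatch, like ps3_dma_region_create(). */
static int region_create(struct region *r) { return r->ops->create(r); }
static int region_free(struct region *r)   { return r->ops->free(r); }

int main(void)
{
    struct region a = { .ops = &sb_ops,   .name = "sb-region" };
    struct region b = { .ops = &ioc0_ops, .name = "ioc0-region" };

    region_create(&a);
    region_create(&b);
    region_free(&a);
    region_free(&b);
    return 0;
}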