Lines matching refs: r (each entry shows the source line number, the matching line, and the enclosing function)

214 static int ps3_mm_get_repository_highmem(struct mem_region *r)  in ps3_mm_get_repository_highmem()  argument
220 result = ps3_repository_read_highmem_info(0, &r->base, &r->size); in ps3_mm_get_repository_highmem()
225 if (!r->base || !r->size) { in ps3_mm_get_repository_highmem()
230 r->offset = r->base - map.rm.size; in ps3_mm_get_repository_highmem()
233 __func__, __LINE__, r->base, r->size); in ps3_mm_get_repository_highmem()
240 r->size = r->base = r->offset = 0; in ps3_mm_get_repository_highmem()
244 static int ps3_mm_set_repository_highmem(const struct mem_region *r) in ps3_mm_set_repository_highmem() argument
248 return r ? ps3_repository_write_highmem_info(0, r->base, r->size) : in ps3_mm_set_repository_highmem()
261 static int ps3_mm_region_create(struct mem_region *r, unsigned long size) in ps3_mm_region_create() argument
266 r->size = _ALIGN_DOWN(size, 1 << PAGE_SHIFT_16M); in ps3_mm_region_create()
269 DBG("%s:%d actual %llxh\n", __func__, __LINE__, r->size); in ps3_mm_region_create()
271 size - r->size, (size - r->size) / 1024 / 1024); in ps3_mm_region_create()
273 if (r->size == 0) { in ps3_mm_region_create()
279 result = lv1_allocate_memory(r->size, PAGE_SHIFT_16M, 0, in ps3_mm_region_create()
280 ALLOCATE_MEMORY_TRY_ALT_UNIT, &r->base, &muid); in ps3_mm_region_create()
282 if (result || r->base < map.rm.size) { in ps3_mm_region_create()
288 r->destroy = 1; in ps3_mm_region_create()
289 r->offset = r->base - map.rm.size; in ps3_mm_region_create()
293 r->size = r->base = r->offset = 0; in ps3_mm_region_create()
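
The create path above rounds the requested size down to whole 16 MiB pages before calling lv1_allocate_memory(), and records r->offset as the region's LPAR base minus the size of the real-mode region. A minimal userspace sketch of just that arithmetic follows; struct mem_region_sketch and the example rm_size value are illustrative stand-ins, not the kernel's definitions.

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT_16M 24          /* 16 MiB pages, as in the listing */
#define ALIGN_DOWN(x, a) ((x) & ~((uint64_t)(a) - 1))

/* Simplified stand-in for struct mem_region: base/size/offset only. */
struct mem_region_sketch {
    uint64_t base;   /* LPAR address returned by the allocator */
    uint64_t size;   /* usable size, a multiple of 16 MiB */
    uint64_t offset; /* base minus the real-mode region size */
};

int main(void)
{
    uint64_t requested = 200 * 1024 * 1024; /* example request */
    uint64_t rm_size   = 128 * 1024 * 1024; /* example real-mode size */
    struct mem_region_sketch r;

    r.size = ALIGN_DOWN(requested, 1ULL << PAGE_SHIFT_16M);
    printf("requested %llxh, actual %llxh, wasted %llu KiB\n",
           (unsigned long long)requested, (unsigned long long)r.size,
           (unsigned long long)((requested - r.size) / 1024));

    /* Pretend the allocator placed the region just above real mode. */
    r.base = rm_size;
    r.offset = r.base - rm_size;
    printf("base %llxh offset %llxh\n",
           (unsigned long long)r.base, (unsigned long long)r.offset);
    return 0;
}
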
302 static void ps3_mm_region_destroy(struct mem_region *r) in ps3_mm_region_destroy() argument
306 if (!r->destroy) { in ps3_mm_region_destroy()
308 __func__, __LINE__, r->base, r->size); in ps3_mm_region_destroy()
312 DBG("%s:%d: r->base = %llxh\n", __func__, __LINE__, r->base); in ps3_mm_region_destroy()
314 if (r->base) { in ps3_mm_region_destroy()
315 result = lv1_release_memory(r->base); in ps3_mm_region_destroy()
317 r->size = r->base = r->offset = 0; in ps3_mm_region_destroy()
333 static unsigned long dma_sb_lpar_to_bus(struct ps3_dma_region *r, in dma_sb_lpar_to_bus() argument
338 BUG_ON(lpar_addr < r->offset); in dma_sb_lpar_to_bus()
339 BUG_ON(lpar_addr >= r->offset + r->len); in dma_sb_lpar_to_bus()
340 return r->bus_addr + lpar_addr - r->offset; in dma_sb_lpar_to_bus()
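
dma_sb_lpar_to_bus() converts an LPAR address inside the region to a bus address by a fixed offset, bus_addr + (lpar_addr - offset), after checking that the address lies in [offset, offset + len). A standalone sketch of that translation, with a simplified stand-in for struct ps3_dma_region:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in: only the fields the translation needs. */
struct ps3_dma_region_sketch {
    uint64_t bus_addr; /* start of the region in bus space */
    uint64_t offset;   /* start of the region in LPAR space */
    uint64_t len;      /* region length in bytes */
};

static uint64_t lpar_to_bus(const struct ps3_dma_region_sketch *r,
                            uint64_t lpar_addr)
{
    /* Same bounds checks as the BUG_ON()s in the listing. */
    assert(lpar_addr >= r->offset);
    assert(lpar_addr < r->offset + r->len);
    return r->bus_addr + lpar_addr - r->offset;
}

int main(void)
{
    struct ps3_dma_region_sketch r = {
        .bus_addr = 0x08000000, .offset = 0x10000000, .len = 0x01000000,
    };
    printf("bus %llxh\n", (unsigned long long)lpar_to_bus(&r, 0x10002000));
    return 0;
}
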
344 static void __maybe_unused _dma_dump_region(const struct ps3_dma_region *r, in _dma_dump_region() argument
347 DBG("%s:%d: dev %llu:%llu\n", func, line, r->dev->bus_id, in _dma_dump_region()
348 r->dev->dev_id); in _dma_dump_region()
349 DBG("%s:%d: page_size %u\n", func, line, r->page_size); in _dma_dump_region()
350 DBG("%s:%d: bus_addr %lxh\n", func, line, r->bus_addr); in _dma_dump_region()
351 DBG("%s:%d: len %lxh\n", func, line, r->len); in _dma_dump_region()
352 DBG("%s:%d: offset %lxh\n", func, line, r->offset); in _dma_dump_region()
393 static struct dma_chunk * dma_find_chunk(struct ps3_dma_region *r, in dma_find_chunk() argument
397 unsigned long aligned_bus = _ALIGN_DOWN(bus_addr, 1 << r->page_size); in dma_find_chunk()
399 1 << r->page_size); in dma_find_chunk()
401 list_for_each_entry(c, &r->chunk_list.head, link) { in dma_find_chunk()
422 static struct dma_chunk *dma_find_chunk_lpar(struct ps3_dma_region *r, in dma_find_chunk_lpar() argument
426 unsigned long aligned_lpar = _ALIGN_DOWN(lpar_addr, 1 << r->page_size); in dma_find_chunk_lpar()
428 1 << r->page_size); in dma_find_chunk_lpar()
430 list_for_each_entry(c, &r->chunk_list.head, link) { in dma_find_chunk_lpar()
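
Both chunk-lookup helpers first widen the request to whole region pages: the start address is aligned down to 1 << page_size and the length is aligned up so the span covers the original request, and only then is chunk_list walked. A small userspace sketch of that alignment arithmetic; the ALIGN_DOWN_/ALIGN_UP_ macros here are the usual power-of-two forms written out for illustration, not the kernel's _ALIGN_DOWN/_ALIGN_UP:

#include <stdint.h>
#include <stdio.h>

/* Usual power-of-two alignment helpers, written out for illustration. */
#define ALIGN_DOWN_(x, a) ((x) & ~((uint64_t)(a) - 1))
#define ALIGN_UP_(x, a)   ALIGN_DOWN_((uint64_t)(x) + (uint64_t)(a) - 1, a)

int main(void)
{
    unsigned int page_size = 12;             /* log2 of the region page size */
    uint64_t bus_addr = 0x12345, len = 0x10; /* an unaligned request */

    uint64_t aligned_bus = ALIGN_DOWN_(bus_addr, 1ULL << page_size);
    uint64_t aligned_len = ALIGN_UP_(len + bus_addr - aligned_bus,
                                     1ULL << page_size);

    printf("request %llxh+%llxh -> page span %llxh+%llxh\n",
           (unsigned long long)bus_addr, (unsigned long long)len,
           (unsigned long long)aligned_bus, (unsigned long long)aligned_len);
    return 0;
}
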
472 struct ps3_dma_region *r = c->region; in dma_ioc0_free_chunk() local
475 for (iopage = 0; iopage < (c->len >> r->page_size); iopage++) { in dma_ioc0_free_chunk()
476 offset = (1 << r->page_size) * iopage; in dma_ioc0_free_chunk()
481 r->ioid, in dma_ioc0_free_chunk()
486 r->ioid); in dma_ioc0_free_chunk()
509 static int dma_sb_map_pages(struct ps3_dma_region *r, unsigned long phys_addr, in dma_sb_map_pages() argument
521 c->region = r; in dma_sb_map_pages()
523 c->bus_addr = dma_sb_lpar_to_bus(r, c->lpar_addr); in dma_sb_map_pages()
536 list_add(&c->link, &r->chunk_list.head); in dma_sb_map_pages()
549 static int dma_ioc0_map_pages(struct ps3_dma_region *r, unsigned long phys_addr, in dma_ioc0_map_pages() argument
566 c->region = r; in dma_ioc0_map_pages()
570 if (list_empty(&r->chunk_list.head)) { in dma_ioc0_map_pages()
572 c->bus_addr = r->bus_addr; in dma_ioc0_map_pages()
575 last = list_entry(r->chunk_list.head.next, in dma_ioc0_map_pages()
585 pages = len >> r->page_size; in dma_ioc0_map_pages()
587 r->page_size, r->len, pages, iopte_flag); in dma_ioc0_map_pages()
589 offset = (1 << r->page_size) * iopage; in dma_ioc0_map_pages()
593 r->ioid, in dma_ioc0_map_pages()
602 r->ioid); in dma_ioc0_map_pages()
606 list_add(&c->link, &r->chunk_list.head); in dma_ioc0_map_pages()
617 r->ioid, in dma_ioc0_map_pages()
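
dma_ioc0_map_pages() installs the chunk one region page at a time: pages = len >> page_size, and each iteration adds offset = (1 << page_size) * iopage to both the bus-side and LPAR-side addresses handed to the hypervisor. A hedged sketch of just that iteration, with the per-page lv1 iopte call replaced by a printf since the hypervisor interface is not reproduced here:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    unsigned int page_size = 12;        /* log2 of the region page size */
    uint64_t bus_addr  = 0x08000000;    /* chunk start, bus side */
    uint64_t lpar_addr = 0x10000000;    /* chunk start, LPAR side */
    uint64_t len       = 3ULL << page_size;

    uint64_t pages = len >> page_size;
    for (uint64_t iopage = 0; iopage < pages; iopage++) {
        uint64_t offset = (1ULL << page_size) * iopage;
        /* In the kernel this is one hypervisor iopte update per page;
         * here we only show the per-page address arithmetic. */
        printf("iopte: bus %llxh -> lpar %llxh\n",
               (unsigned long long)(bus_addr + offset),
               (unsigned long long)(lpar_addr + offset));
    }
    return 0;
}
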
634 static int dma_sb_region_create(struct ps3_dma_region *r) in dma_sb_region_create() argument
641 BUG_ON(!r); in dma_sb_region_create()
643 if (!r->dev->bus_id) { in dma_sb_region_create()
645 r->dev->bus_id, r->dev->dev_id); in dma_sb_region_create()
650 __LINE__, r->len, r->page_size, r->offset); in dma_sb_region_create()
652 BUG_ON(!r->len); in dma_sb_region_create()
653 BUG_ON(!r->page_size); in dma_sb_region_create()
654 BUG_ON(!r->region_ops); in dma_sb_region_create()
656 INIT_LIST_HEAD(&r->chunk_list.head); in dma_sb_region_create()
657 spin_lock_init(&r->chunk_list.lock); in dma_sb_region_create()
659 result = lv1_allocate_device_dma_region(r->dev->bus_id, r->dev->dev_id, in dma_sb_region_create()
660 roundup_pow_of_two(r->len), r->page_size, r->region_type, in dma_sb_region_create()
662 r->bus_addr = bus_addr; in dma_sb_region_create()
667 r->len = r->bus_addr = 0; in dma_sb_region_create()
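
dma_sb_region_create() hands roundup_pow_of_two(r->len) to lv1_allocate_device_dma_region(), so the device DMA window is always a power-of-two size even when r->len is not. A minimal illustration of that rounding; the loop below is for illustration only and is not the kernel's roundup_pow_of_two() implementation:

#include <stdint.h>
#include <stdio.h>

/* Round v up to the next power of two; plain loop, illustration only. */
static uint64_t roundup_pow_of_two_sketch(uint64_t v)
{
    uint64_t p = 1;
    while (p < v)
        p <<= 1;
    return p;
}

int main(void)
{
    uint64_t len = 0x01a00000; /* example 26 MiB region length */
    printf("len %llxh -> dma window %llxh\n",
           (unsigned long long)len,
           (unsigned long long)roundup_pow_of_two_sketch(len));
    return 0;
}
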
673 static int dma_ioc0_region_create(struct ps3_dma_region *r) in dma_ioc0_region_create() argument
678 INIT_LIST_HEAD(&r->chunk_list.head); in dma_ioc0_region_create()
679 spin_lock_init(&r->chunk_list.lock); in dma_ioc0_region_create()
682 r->len, in dma_ioc0_region_create()
683 r->page_size, in dma_ioc0_region_create()
685 r->bus_addr = bus_addr; in dma_ioc0_region_create()
689 r->len = r->bus_addr = 0; in dma_ioc0_region_create()
692 r->len, r->page_size, r->bus_addr); in dma_ioc0_region_create()
704 static int dma_sb_region_free(struct ps3_dma_region *r) in dma_sb_region_free() argument
710 BUG_ON(!r); in dma_sb_region_free()
712 if (!r->dev->bus_id) { in dma_sb_region_free()
714 r->dev->bus_id, r->dev->dev_id); in dma_sb_region_free()
718 list_for_each_entry_safe(c, tmp, &r->chunk_list.head, link) { in dma_sb_region_free()
723 result = lv1_free_device_dma_region(r->dev->bus_id, r->dev->dev_id, in dma_sb_region_free()
724 r->bus_addr); in dma_sb_region_free()
730 r->bus_addr = 0; in dma_sb_region_free()
735 static int dma_ioc0_region_free(struct ps3_dma_region *r) in dma_ioc0_region_free() argument
741 list_for_each_entry_safe(c, n, &r->chunk_list.head, link) { in dma_ioc0_region_free()
746 result = lv1_release_io_segment(0, r->bus_addr); in dma_ioc0_region_free()
752 r->bus_addr = 0; in dma_ioc0_region_free()
769 static int dma_sb_map_area(struct ps3_dma_region *r, unsigned long virt_addr, in dma_sb_map_area() argument
778 unsigned long aligned_phys = _ALIGN_DOWN(phys_addr, 1 << r->page_size); in dma_sb_map_area()
780 1 << r->page_size); in dma_sb_map_area()
781 *bus_addr = dma_sb_lpar_to_bus(r, ps3_mm_phys_to_lpar(phys_addr)); in dma_sb_map_area()
797 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_sb_map_area()
798 c = dma_find_chunk(r, *bus_addr, len); in dma_sb_map_area()
804 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_map_area()
808 result = dma_sb_map_pages(r, aligned_phys, aligned_len, &c, iopte_flag); in dma_sb_map_area()
814 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_map_area()
820 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_map_area()
824 static int dma_ioc0_map_area(struct ps3_dma_region *r, unsigned long virt_addr, in dma_ioc0_map_area() argument
833 unsigned long aligned_phys = _ALIGN_DOWN(phys_addr, 1 << r->page_size); in dma_ioc0_map_area()
835 1 << r->page_size); in dma_ioc0_map_area()
842 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
843 c = dma_find_chunk_lpar(r, ps3_mm_phys_to_lpar(phys_addr), len); in dma_ioc0_map_area()
850 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
854 result = dma_ioc0_map_pages(r, aligned_phys, aligned_len, &c, in dma_ioc0_map_area()
861 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
869 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_map_area()
882 static int dma_sb_unmap_area(struct ps3_dma_region *r, dma_addr_t bus_addr, in dma_sb_unmap_area() argument
888 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_sb_unmap_area()
889 c = dma_find_chunk(r, bus_addr, len); in dma_sb_unmap_area()
893 1 << r->page_size); in dma_sb_unmap_area()
895 - aligned_bus, 1 << r->page_size); in dma_sb_unmap_area()
914 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_sb_unmap_area()
918 static int dma_ioc0_unmap_area(struct ps3_dma_region *r, in dma_ioc0_unmap_area() argument
925 spin_lock_irqsave(&r->chunk_list.lock, flags); in dma_ioc0_unmap_area()
926 c = dma_find_chunk(r, bus_addr, len); in dma_ioc0_unmap_area()
930 1 << r->page_size); in dma_ioc0_unmap_area()
933 1 << r->page_size); in dma_ioc0_unmap_area()
952 spin_unlock_irqrestore(&r->chunk_list.lock, flags); in dma_ioc0_unmap_area()
965 static int dma_sb_region_create_linear(struct ps3_dma_region *r) in dma_sb_region_create_linear() argument
971 if (r->len > 16*1024*1024) { /* FIXME: need proper fix */ in dma_sb_region_create_linear()
973 if (r->page_size != PS3_DMA_16M) { in dma_sb_region_create_linear()
976 r->page_size = PS3_DMA_16M; in dma_sb_region_create_linear()
977 r->len = _ALIGN_UP(r->len, 1 << r->page_size); in dma_sb_region_create_linear()
981 result = dma_sb_region_create(r); in dma_sb_region_create_linear()
984 if (r->offset < map.rm.size) { in dma_sb_region_create_linear()
986 virt_addr = map.rm.base + r->offset; in dma_sb_region_create_linear()
987 len = map.rm.size - r->offset; in dma_sb_region_create_linear()
988 if (len > r->len) in dma_sb_region_create_linear()
989 len = r->len; in dma_sb_region_create_linear()
990 result = dma_sb_map_area(r, virt_addr, len, &tmp, in dma_sb_region_create_linear()
996 if (r->offset + r->len > map.rm.size) { in dma_sb_region_create_linear()
999 len = r->len; in dma_sb_region_create_linear()
1000 if (r->offset >= map.rm.size) in dma_sb_region_create_linear()
1001 virt_addr += r->offset - map.rm.size; in dma_sb_region_create_linear()
1003 len -= map.rm.size - r->offset; in dma_sb_region_create_linear()
1004 result = dma_sb_map_area(r, virt_addr, len, &tmp, in dma_sb_region_create_linear()
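
The linear create path maps the region in at most two pieces: the part that overlaps the real-mode area, clipped to r->len, and the part above the map.rm.size boundary. A standalone sketch of how those two sub-ranges fall out of offset, len, and the real-mode size; the example values are arbitrary, and the starting virt_addr of the second piece (not visible in this listing) is left out:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t rm_size = 0x08000000; /* example real-mode size (128 MiB) */
    uint64_t offset  = 0x07000000; /* example r->offset */
    uint64_t len     = 0x04000000; /* example r->len */

    /* Piece that overlaps the real-mode area, clipped to the region length. */
    if (offset < rm_size) {
        uint64_t low_len = rm_size - offset;
        if (low_len > len)
            low_len = len;
        printf("low piece: offset %llxh, len %llxh\n",
               (unsigned long long)offset, (unsigned long long)low_len);
    }

    /* Piece above the real-mode boundary, if any. */
    if (offset + len > rm_size) {
        uint64_t high_len = len;
        uint64_t above = 0; /* distance of the piece above the boundary */
        if (offset >= rm_size)
            above = offset - rm_size;
        else
            high_len -= rm_size - offset;
        printf("high piece: %llxh above rm.size, len %llxh\n",
               (unsigned long long)above, (unsigned long long)high_len);
    }
    return 0;
}
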
1020 static int dma_sb_region_free_linear(struct ps3_dma_region *r) in dma_sb_region_free_linear() argument
1026 if (r->offset < map.rm.size) { in dma_sb_region_free_linear()
1028 lpar_addr = map.rm.base + r->offset; in dma_sb_region_free_linear()
1029 len = map.rm.size - r->offset; in dma_sb_region_free_linear()
1030 if (len > r->len) in dma_sb_region_free_linear()
1031 len = r->len; in dma_sb_region_free_linear()
1032 bus_addr = dma_sb_lpar_to_bus(r, lpar_addr); in dma_sb_region_free_linear()
1033 result = dma_sb_unmap_area(r, bus_addr, len); in dma_sb_region_free_linear()
1037 if (r->offset + r->len > map.rm.size) { in dma_sb_region_free_linear()
1040 len = r->len; in dma_sb_region_free_linear()
1041 if (r->offset >= map.rm.size) in dma_sb_region_free_linear()
1042 lpar_addr += r->offset - map.rm.size; in dma_sb_region_free_linear()
1044 len -= map.rm.size - r->offset; in dma_sb_region_free_linear()
1045 bus_addr = dma_sb_lpar_to_bus(r, lpar_addr); in dma_sb_region_free_linear()
1046 result = dma_sb_unmap_area(r, bus_addr, len); in dma_sb_region_free_linear()
1050 result = dma_sb_region_free(r); in dma_sb_region_free_linear()
1068 static int dma_sb_map_area_linear(struct ps3_dma_region *r, in dma_sb_map_area_linear() argument
1074 *bus_addr = dma_sb_lpar_to_bus(r, ps3_mm_phys_to_lpar(phys_addr)); in dma_sb_map_area_linear()
1087 static int dma_sb_unmap_area_linear(struct ps3_dma_region *r, in dma_sb_unmap_area_linear() argument
1115 struct ps3_dma_region *r, enum ps3_dma_page_size page_size, in ps3_dma_region_init() argument
1122 r->dev = dev; in ps3_dma_region_init()
1123 r->page_size = page_size; in ps3_dma_region_init()
1124 r->region_type = region_type; in ps3_dma_region_init()
1125 r->offset = lpar_addr; in ps3_dma_region_init()
1126 if (r->offset >= map.rm.size) in ps3_dma_region_init()
1127 r->offset -= map.r1.offset; in ps3_dma_region_init()
1128 r->len = len ? len : _ALIGN_UP(map.total, 1 << r->page_size); in ps3_dma_region_init()
1132 r->region_ops = (USE_DYNAMIC_DMA) in ps3_dma_region_init()
1137 r->region_ops = &ps3_dma_ioc0_region_ops; in ps3_dma_region_init()
1147 int ps3_dma_region_create(struct ps3_dma_region *r) in ps3_dma_region_create() argument
1149 BUG_ON(!r); in ps3_dma_region_create()
1150 BUG_ON(!r->region_ops); in ps3_dma_region_create()
1151 BUG_ON(!r->region_ops->create); in ps3_dma_region_create()
1152 return r->region_ops->create(r); in ps3_dma_region_create()
1156 int ps3_dma_region_free(struct ps3_dma_region *r) in ps3_dma_region_free() argument
1158 BUG_ON(!r); in ps3_dma_region_free()
1159 BUG_ON(!r->region_ops); in ps3_dma_region_free()
1160 BUG_ON(!r->region_ops->free); in ps3_dma_region_free()
1161 return r->region_ops->free(r); in ps3_dma_region_free()
1165 int ps3_dma_map(struct ps3_dma_region *r, unsigned long virt_addr, in ps3_dma_map() argument
1169 return r->region_ops->map(r, virt_addr, len, bus_addr, iopte_flag); in ps3_dma_map()
1172 int ps3_dma_unmap(struct ps3_dma_region *r, dma_addr_t bus_addr, in ps3_dma_unmap() argument
1175 return r->region_ops->unmap(r, bus_addr, len); in ps3_dma_unmap()
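
Taken together, ps3_dma_region_create(), ps3_dma_map(), ps3_dma_unmap() and ps3_dma_region_free() are the region life cycle a PS3 system-bus driver goes through. Below is a hedged, kernel-flavoured sketch of that sequence using only the signatures visible in this listing; the buffer, length and IOPTE flag value are placeholders, error handling is trimmed, and the region is assumed to have already been initialised, since the full ps3_dma_region_init() parameter list is only partly visible above.

#include <linux/types.h>
#include <asm/ps3.h>

/* Hedged sketch of one DMA round trip over an already-initialised region. */
static int example_dma_round_trip(struct ps3_dma_region *r,
                                  void *buf, unsigned long len,
                                  u64 iopte_flag /* placeholder flags */)
{
        dma_addr_t bus_addr;
        int result;

        result = ps3_dma_region_create(r);          /* set up the region */
        if (result)
                return result;

        result = ps3_dma_map(r, (unsigned long)buf, len, &bus_addr,
                             iopte_flag);           /* map buf for DMA */
        if (result)
                goto free_region;

        /* ... program the device with bus_addr, wait for completion ... */

        ps3_dma_unmap(r, bus_addr, len);            /* tear down the mapping */
free_region:
        ps3_dma_region_free(r);                     /* release the region */
        return result;
}

The sketch ignores the return value of ps3_dma_unmap() and frees the region unconditionally; a real driver would keep the region alive across many map/unmap cycles and only free it on teardown.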