• Home
  • Raw
  • Download

Lines Matching +full:uniphier +full:- +full:system +full:- +full:cache

2  * Copyright (C) 2015-2016 Socionext Inc.
16 #define pr_fmt(fmt) "uniphier: " fmt
24 #include <asm/hardware/cache-uniphier.h>
30 #define UNIPHIER_SSCC_ACT BIT(19) /* Inst-Data separate */
32 #define UNIPHIER_SSCC_PRD BIT(17) /* enable pre-fetch */
33 #define UNIPHIER_SSCC_ON BIT(0) /* enable cache */
41 #define UNIPHIER_SSCOPE 0x244 /* Cache Operation Primitive Entry */
46 #define UNIPHIER_SSCOPE_CM_FLUSH_PREFETCH 0x9 /* flush p-fetch buf */
47 #define UNIPHIER_SSCOQM 0x248 /* Cache Operation Queue Mode */
55 #define UNIPHIER_SSCOQAD 0x24c /* Cache Operation Queue Address */
56 #define UNIPHIER_SSCOQSZ 0x250 /* Cache Operation Queue Size */
57 #define UNIPHIER_SSCOPPQSEF 0x25c /* Cache Operation Queue Set Complete */
60 #define UNIPHIER_SSCOLPQS 0x260 /* Cache Operation Queue Status */
70 * uniphier_cache_data - UniPhier outer cache specific data
79 * @list: list node to include this level in the whole cache hierarchy
94 * List of the whole outer cache hierarchy. This list is only modified during
100 * __uniphier_cache_sync - perform a sync point for a particular cache level
102 * @data: cache controller specific data
108 data->op_base + UNIPHIER_SSCOPE); in __uniphier_cache_sync()
110 readl_relaxed(data->op_base + UNIPHIER_SSCOPE); in __uniphier_cache_sync()
114 * __uniphier_cache_maint_common - run a queue operation for a particular level
116 * @data: cache controller specific data
119 * @operation: flags to specify the desired cache operation
131 * [1] This outer cache controller is able to accept maintenance in __uniphier_cache_maint_common()
132 * operations from multiple CPUs at a time in an SMP system; if a in __uniphier_cache_maint_common()
141 * [2] The cache command registers, UNIPHIER_{SSCOQM, SSCOQAD, SSCOQSZ, in __uniphier_cache_maint_common()
154 writel_relaxed(UNIPHIER_SSCOLPQS_EF, data->op_base + UNIPHIER_SSCOLPQS); in __uniphier_cache_maint_common()
157 /* set cache operation */ in __uniphier_cache_maint_common()
159 data->op_base + UNIPHIER_SSCOQM); in __uniphier_cache_maint_common()
163 writel_relaxed(start, data->op_base + UNIPHIER_SSCOQAD); in __uniphier_cache_maint_common()
164 writel_relaxed(size, data->op_base + UNIPHIER_SSCOQSZ); in __uniphier_cache_maint_common()
166 } while (unlikely(readl_relaxed(data->op_base + UNIPHIER_SSCOPPQSEF) & in __uniphier_cache_maint_common()
170 while (likely(readl_relaxed(data->op_base + UNIPHIER_SSCOLPQS) != in __uniphier_cache_maint_common()
194 * perform a cache operation for the first cache-line in __uniphier_cache_maint_range()
196 start = start & ~(data->line_size - 1); in __uniphier_cache_maint_range()
198 size = end - start; in __uniphier_cache_maint_range()
200 if (unlikely(size >= (unsigned long)(-data->line_size))) { in __uniphier_cache_maint_range()
201 /* this means cache operation for all range */ in __uniphier_cache_maint_range()
208 * perform a cache operation for the last cache-line in __uniphier_cache_maint_range()
210 size = ALIGN(size, data->line_size); in __uniphier_cache_maint_range()
214 data->range_op_max_size); in __uniphier_cache_maint_range()
220 size -= chunk_size; in __uniphier_cache_maint_range()
233 writel_relaxed(val, data->ctrl_base + UNIPHIER_SSCC); in __uniphier_cache_enable()
242 writel_relaxed(data->way_mask, data->way_ctrl_base + 4 * cpu); in __uniphier_cache_set_active_ways()
318 { .compatible = "socionext,uniphier-system-cache" },
331 pr_err("L%d: not compatible with uniphier cache\n", in __uniphier_cache_init()
333 return -EINVAL; in __uniphier_cache_init()
336 if (of_property_read_u32(np, "cache-level", &level)) { in __uniphier_cache_init()
337 pr_err("L%d: cache-level is not specified\n", *cache_level); in __uniphier_cache_init()
338 return -EINVAL; in __uniphier_cache_init()
342 pr_err("L%d: cache-level is unexpected value %d\n", in __uniphier_cache_init()
344 return -EINVAL; in __uniphier_cache_init()
347 if (!of_property_read_bool(np, "cache-unified")) { in __uniphier_cache_init()
348 pr_err("L%d: cache-unified is not specified\n", *cache_level); in __uniphier_cache_init()
349 return -EINVAL; in __uniphier_cache_init()
354 return -ENOMEM; in __uniphier_cache_init()
356 if (of_property_read_u32(np, "cache-line-size", &data->line_size) || in __uniphier_cache_init()
357 !is_power_of_2(data->line_size)) { in __uniphier_cache_init()
358 pr_err("L%d: cache-line-size is unspecified or invalid\n", in __uniphier_cache_init()
360 ret = -EINVAL; in __uniphier_cache_init()
364 if (of_property_read_u32(np, "cache-sets", &data->nsets) || in __uniphier_cache_init()
365 !is_power_of_2(data->nsets)) { in __uniphier_cache_init()
366 pr_err("L%d: cache-sets is unspecified or invalid\n", in __uniphier_cache_init()
368 ret = -EINVAL; in __uniphier_cache_init()
372 if (of_property_read_u32(np, "cache-size", &cache_size) || in __uniphier_cache_init()
373 cache_size == 0 || cache_size % (data->nsets * data->line_size)) { in __uniphier_cache_init()
374 pr_err("L%d: cache-size is unspecified or invalid\n", in __uniphier_cache_init()
376 ret = -EINVAL; in __uniphier_cache_init()
380 data->way_mask = GENMASK(cache_size / data->nsets / data->line_size - 1, in __uniphier_cache_init()
383 data->ctrl_base = of_iomap(np, 0); in __uniphier_cache_init()
384 if (!data->ctrl_base) { in __uniphier_cache_init()
386 ret = -ENOMEM; in __uniphier_cache_init()
390 data->rev_base = of_iomap(np, 1); in __uniphier_cache_init()
391 if (!data->rev_base) { in __uniphier_cache_init()
393 ret = -ENOMEM; in __uniphier_cache_init()
397 data->op_base = of_iomap(np, 2); in __uniphier_cache_init()
398 if (!data->op_base) { in __uniphier_cache_init()
400 ret = -ENOMEM; in __uniphier_cache_init()
404 data->way_ctrl_base = data->ctrl_base + 0xc00; in __uniphier_cache_init()
407 u32 revision = readl(data->rev_base + UNIPHIER_SSCID); in __uniphier_cache_init()
410 * for PH-sLD8 or older SoCs. in __uniphier_cache_init()
413 data->range_op_max_size = (u32)1 << 22; in __uniphier_cache_init()
421 data->way_ctrl_base = data->ctrl_base + 0x870; in __uniphier_cache_init()
425 data->way_ctrl_base = data->ctrl_base + 0x840; in __uniphier_cache_init()
432 data->range_op_max_size -= data->line_size; in __uniphier_cache_init()
434 INIT_LIST_HEAD(&data->list); in __uniphier_cache_init()
435 list_add_tail(&data->list, &uniphier_cache_list); /* no mutex */ in __uniphier_cache_init()
439 * level cache. Do not roll back even if the initialization of the in __uniphier_cache_init()
440 * next level cache fails because we want to continue with available in __uniphier_cache_init()
441 * cache levels. in __uniphier_cache_init()
452 iounmap(data->op_base); in __uniphier_cache_init()
453 iounmap(data->rev_base); in __uniphier_cache_init()
454 iounmap(data->ctrl_base); in __uniphier_cache_init()
466 /* look for level 2 cache */ in uniphier_cache_init()
468 if (!of_property_read_u32(np, "cache-level", &cache_level) && in uniphier_cache_init()
473 return -ENODEV; in uniphier_cache_init()
484 pr_err("failed to initialize L2 cache\n"); in uniphier_cache_init()
488 cache_level--; in uniphier_cache_init()
501 pr_info("enabled outer cache (cache level: %d)\n", cache_level); in uniphier_cache_init()