
Lines matching full:dsi — each entry gives the source line number within the DSI driver, the matched code, and the enclosing function (plus the identifier-kind tag) reported by the cross-referencer.

18 #define DSS_SUBSYS_NAME "DSI"
58 /* DSI Protocol Engine */
122 #define REG_GET(dsi, idx, start, end) \ argument
123 FLD_GET(dsi_read_reg(dsi, idx), start, end)
125 #define REG_FLD_MOD(dsi, idx, val, start, end) \ argument
126 dsi_write_reg(dsi, idx, FLD_MOD(dsi_read_reg(dsi, idx), val, start, end))
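REG_GET() and REG_FLD_MOD() above combine dsi_read_reg()/dsi_write_reg() with the DSS bit-field helpers. The helper definitions are not part of this match listing; as a sketch of the convention they follow ('start' is the high bit, 'end' the low bit), they look roughly like:

/* Assumed shape of the DSS bit-field helpers used by REG_GET/REG_FLD_MOD;
 * the exact definitions live elsewhere in the DSS headers. */
#define FLD_MASK(start, end)	(((1 << ((start) - (end) + 1)) - 1) << (end))
#define FLD_VAL(val, start, end) (((val) << (end)) & FLD_MASK(start, end))
#define FLD_GET(val, start, end) (((val) & FLD_MASK(start, end)) >> (end))
#define FLD_MOD(orig, val, start, end) \
	(((orig) & ~FLD_MASK(start, end)) | FLD_VAL(val, start, end))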
218 static int dsi_display_init_dispc(struct dsi_data *dsi);
219 static void dsi_display_uninit_dispc(struct dsi_data *dsi);
221 static int dsi_vc_send_null(struct dsi_data *dsi, int channel);
223 /* DSI PLL HSDIV indices */
284 struct dsi_data *dsi; member
313 DSI_QUIRK_PLL_PWR_BUG = (1 << 0), /* DSI-PLL power command 0x3 is not working */
431 struct dsi_data *dsi; member
466 static inline void dsi_write_reg(struct dsi_data *dsi, in dsi_write_reg() argument
472 case DSI_PROTO: base = dsi->proto_base; break; in dsi_write_reg()
473 case DSI_PHY: base = dsi->phy_base; break; in dsi_write_reg()
474 case DSI_PLL: base = dsi->pll_base; break; in dsi_write_reg()
481 static inline u32 dsi_read_reg(struct dsi_data *dsi, const struct dsi_reg idx) in dsi_read_reg() argument
486 case DSI_PROTO: base = dsi->proto_base; break; in dsi_read_reg()
487 case DSI_PHY: base = dsi->phy_base; break; in dsi_read_reg()
488 case DSI_PLL: base = dsi->pll_base; break; in dsi_read_reg()
497 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_bus_lock() local
499 down(&dsi->bus_lock); in dsi_bus_lock()
504 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_bus_unlock() local
506 up(&dsi->bus_lock); in dsi_bus_unlock()
509 static bool dsi_bus_is_locked(struct dsi_data *dsi) in dsi_bus_is_locked() argument
511 return dsi->bus_lock.count == 0; in dsi_bus_is_locked()
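dsi_bus_lock()/dsi_bus_unlock() serialize access to the DSI command bus with a semaphore embedded in struct dsi_data, and dsi_bus_is_locked() only peeks at the count so that several of the dsi_vc_*() helpers below can WARN_ON(!dsi_bus_is_locked(dsi)). A hypothetical caller illustrating the expected pairing (the transfer call is just one of the listed entry points; semaphore initialization happens elsewhere and is not in this listing):

/* Hypothetical caller sketch: hold the bus lock around any DSI transfer. */
static int example_send_command(struct omap_dss_device *dssdev, int channel)
{
	int r;

	dsi_bus_lock(dssdev);
	r = dsi_vc_send_bta_sync(dssdev, channel);	/* any listed transfer helper */
	dsi_bus_unlock(dssdev);

	return r;
}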
519 static inline bool wait_for_bit_change(struct dsi_data *dsi, in wait_for_bit_change() argument
530 if (REG_GET(dsi, idx, bitnum, bitnum) == value) in wait_for_bit_change()
537 if (REG_GET(dsi, idx, bitnum, bitnum) == value) in wait_for_bit_change()
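wait_for_bit_change() polls one register bit through REG_GET() until it reads the requested value. Only the two polling checks appear in this listing; the retry counts and sleep lengths below are therefore assumptions, shown just to illustrate the spin-then-sleep structure:

static inline bool wait_for_bit_change_sketch(struct dsi_data *dsi,
					      const struct dsi_reg idx,
					      int bitnum, int value)
{
	unsigned long timeout;
	int t;

	/* a short busy-wait first, for bits that flip almost immediately */
	for (t = 0; t < 100; t++)
		if (REG_GET(dsi, idx, bitnum, bitnum) == value)
			return true;

	/* then poll with sleeps until a coarse timeout expires */
	timeout = jiffies + msecs_to_jiffies(500);
	while (time_before(jiffies, timeout)) {
		if (REG_GET(dsi, idx, bitnum, bitnum) == value)
			return true;
		usleep_range(1000, 1500);
	}

	return false;
}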
565 static void dsi_perf_mark_setup(struct dsi_data *dsi) in dsi_perf_mark_setup() argument
567 dsi->perf_setup_time = ktime_get(); in dsi_perf_mark_setup()
570 static void dsi_perf_mark_start(struct dsi_data *dsi) in dsi_perf_mark_start() argument
572 dsi->perf_start_time = ktime_get(); in dsi_perf_mark_start()
575 static void dsi_perf_show(struct dsi_data *dsi, const char *name) in dsi_perf_show() argument
586 setup_time = ktime_sub(dsi->perf_start_time, dsi->perf_setup_time); in dsi_perf_show()
591 trans_time = ktime_sub(t, dsi->perf_start_time); in dsi_perf_show()
598 total_bytes = dsi->update_bytes; in dsi_perf_show()
600 pr_info("DSI(%s): %u us + %u us = %u us (%uHz), %u bytes, %u kbytes/sec\n", in dsi_perf_show()
610 static inline void dsi_perf_mark_setup(struct dsi_data *dsi) in dsi_perf_mark_setup() argument
614 static inline void dsi_perf_mark_start(struct dsi_data *dsi) in dsi_perf_mark_start() argument
618 static inline void dsi_perf_show(struct dsi_data *dsi, const char *name) in dsi_perf_show() argument
635 pr_debug("DSI IRQ: 0x%x: %s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s\n", in print_irq_status()
667 pr_debug("DSI VC(%d) IRQ 0x%x: %s%s%s%s%s%s%s%s%s\n", in print_irq_status_vc()
689 pr_debug("DSI CIO IRQ 0x%x: %s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s\n", in print_irq_status_cio()
715 static void dsi_collect_irq_stats(struct dsi_data *dsi, u32 irqstatus, in dsi_collect_irq_stats() argument
720 spin_lock(&dsi->irq_stats_lock); in dsi_collect_irq_stats()
722 dsi->irq_stats.irq_count++; in dsi_collect_irq_stats()
723 dss_collect_irq_stats(irqstatus, dsi->irq_stats.dsi_irqs); in dsi_collect_irq_stats()
726 dss_collect_irq_stats(vcstatus[i], dsi->irq_stats.vc_irqs[i]); in dsi_collect_irq_stats()
728 dss_collect_irq_stats(ciostatus, dsi->irq_stats.cio_irqs); in dsi_collect_irq_stats()
730 spin_unlock(&dsi->irq_stats_lock); in dsi_collect_irq_stats()
733 #define dsi_collect_irq_stats(dsi, irqstatus, vcstatus, ciostatus) argument
738 static void dsi_handle_irq_errors(struct dsi_data *dsi, u32 irqstatus, in dsi_handle_irq_errors() argument
744 DSSERR("DSI error, irqstatus %x\n", irqstatus); in dsi_handle_irq_errors()
746 spin_lock(&dsi->errors_lock); in dsi_handle_irq_errors()
747 dsi->errors |= irqstatus & DSI_IRQ_ERROR_MASK; in dsi_handle_irq_errors()
748 spin_unlock(&dsi->errors_lock); in dsi_handle_irq_errors()
755 DSSERR("DSI VC(%d) error, vc irqstatus %x\n", in dsi_handle_irq_errors()
764 DSSERR("DSI CIO error, cio irqstatus %x\n", ciostatus); in dsi_handle_irq_errors()
809 struct dsi_data *dsi = arg; in omap_dsi_irq_handler() local
813 if (!dsi->is_enabled) in omap_dsi_irq_handler()
816 spin_lock(&dsi->irq_lock); in omap_dsi_irq_handler()
818 irqstatus = dsi_read_reg(dsi, DSI_IRQSTATUS); in omap_dsi_irq_handler()
822 spin_unlock(&dsi->irq_lock); in omap_dsi_irq_handler()
826 dsi_write_reg(dsi, DSI_IRQSTATUS, irqstatus & ~DSI_IRQ_CHANNEL_MASK); in omap_dsi_irq_handler()
828 dsi_read_reg(dsi, DSI_IRQSTATUS); in omap_dsi_irq_handler()
836 vcstatus[i] = dsi_read_reg(dsi, DSI_VC_IRQSTATUS(i)); in omap_dsi_irq_handler()
838 dsi_write_reg(dsi, DSI_VC_IRQSTATUS(i), vcstatus[i]); in omap_dsi_irq_handler()
840 dsi_read_reg(dsi, DSI_VC_IRQSTATUS(i)); in omap_dsi_irq_handler()
844 ciostatus = dsi_read_reg(dsi, DSI_COMPLEXIO_IRQ_STATUS); in omap_dsi_irq_handler()
846 dsi_write_reg(dsi, DSI_COMPLEXIO_IRQ_STATUS, ciostatus); in omap_dsi_irq_handler()
848 dsi_read_reg(dsi, DSI_COMPLEXIO_IRQ_STATUS); in omap_dsi_irq_handler()
855 del_timer(&dsi->te_timer); in omap_dsi_irq_handler()
860 memcpy(&dsi->isr_tables_copy, &dsi->isr_tables, in omap_dsi_irq_handler()
861 sizeof(dsi->isr_tables)); in omap_dsi_irq_handler()
863 spin_unlock(&dsi->irq_lock); in omap_dsi_irq_handler()
865 dsi_handle_isrs(&dsi->isr_tables_copy, irqstatus, vcstatus, ciostatus); in omap_dsi_irq_handler()
867 dsi_handle_irq_errors(dsi, irqstatus, vcstatus, ciostatus); in omap_dsi_irq_handler()
869 dsi_collect_irq_stats(dsi, irqstatus, vcstatus, ciostatus); in omap_dsi_irq_handler()
874 /* dsi->irq_lock has to be locked by the caller */
875 static void _omap_dsi_configure_irqs(struct dsi_data *dsi, in _omap_dsi_configure_irqs() argument
898 old_mask = dsi_read_reg(dsi, enable_reg); in _omap_dsi_configure_irqs()
900 dsi_write_reg(dsi, status_reg, (mask ^ old_mask) & mask); in _omap_dsi_configure_irqs()
901 dsi_write_reg(dsi, enable_reg, mask); in _omap_dsi_configure_irqs()
904 dsi_read_reg(dsi, enable_reg); in _omap_dsi_configure_irqs()
905 dsi_read_reg(dsi, status_reg); in _omap_dsi_configure_irqs()
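_omap_dsi_configure_irqs() is shared by the main, per-VC, and CIO interrupt paths. From the matched lines, its sequence is: read the old enable mask, acknowledge the status bits of interrupts that are about to be newly enabled ((mask ^ old_mask) & mask), write the new enable mask, and read both registers back to flush the posted writes before the caller drops irq_lock. A condensed sketch; how 'mask' is accumulated from the ISR tables is not shown in the listing and is left out here:

/* Sketch of the ack/enable/flush ordering taken from the matched lines. */
static void configure_irqs_sketch(struct dsi_data *dsi, u32 mask,
				  const struct dsi_reg enable_reg,
				  const struct dsi_reg status_reg)
{
	u32 old_mask = dsi_read_reg(dsi, enable_reg);

	/* clear stale status only for bits that are being newly enabled */
	dsi_write_reg(dsi, status_reg, (mask ^ old_mask) & mask);
	dsi_write_reg(dsi, enable_reg, mask);

	/* read back to flush posted writes */
	dsi_read_reg(dsi, enable_reg);
	dsi_read_reg(dsi, status_reg);
}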
908 /* dsi->irq_lock has to be locked by the caller */
909 static void _omap_dsi_set_irqs(struct dsi_data *dsi) in _omap_dsi_set_irqs() argument
915 _omap_dsi_configure_irqs(dsi, dsi->isr_tables.isr_table, in _omap_dsi_set_irqs()
916 ARRAY_SIZE(dsi->isr_tables.isr_table), mask, in _omap_dsi_set_irqs()
920 /* dsi->irq_lock has to be locked by the caller */
921 static void _omap_dsi_set_irqs_vc(struct dsi_data *dsi, int vc) in _omap_dsi_set_irqs_vc() argument
923 _omap_dsi_configure_irqs(dsi, dsi->isr_tables.isr_table_vc[vc], in _omap_dsi_set_irqs_vc()
924 ARRAY_SIZE(dsi->isr_tables.isr_table_vc[vc]), in _omap_dsi_set_irqs_vc()
929 /* dsi->irq_lock has to be locked by the caller */
930 static void _omap_dsi_set_irqs_cio(struct dsi_data *dsi) in _omap_dsi_set_irqs_cio() argument
932 _omap_dsi_configure_irqs(dsi, dsi->isr_tables.isr_table_cio, in _omap_dsi_set_irqs_cio()
933 ARRAY_SIZE(dsi->isr_tables.isr_table_cio), in _omap_dsi_set_irqs_cio()
938 static void _dsi_initialize_irq(struct dsi_data *dsi) in _dsi_initialize_irq() argument
943 spin_lock_irqsave(&dsi->irq_lock, flags); in _dsi_initialize_irq()
945 memset(&dsi->isr_tables, 0, sizeof(dsi->isr_tables)); in _dsi_initialize_irq()
947 _omap_dsi_set_irqs(dsi); in _dsi_initialize_irq()
949 _omap_dsi_set_irqs_vc(dsi, vc); in _dsi_initialize_irq()
950 _omap_dsi_set_irqs_cio(dsi); in _dsi_initialize_irq()
952 spin_unlock_irqrestore(&dsi->irq_lock, flags); in _dsi_initialize_irq()
1011 static int dsi_register_isr(struct dsi_data *dsi, omap_dsi_isr_t isr, in dsi_register_isr() argument
1017 spin_lock_irqsave(&dsi->irq_lock, flags); in dsi_register_isr()
1019 r = _dsi_register_isr(isr, arg, mask, dsi->isr_tables.isr_table, in dsi_register_isr()
1020 ARRAY_SIZE(dsi->isr_tables.isr_table)); in dsi_register_isr()
1023 _omap_dsi_set_irqs(dsi); in dsi_register_isr()
1025 spin_unlock_irqrestore(&dsi->irq_lock, flags); in dsi_register_isr()
1030 static int dsi_unregister_isr(struct dsi_data *dsi, omap_dsi_isr_t isr, in dsi_unregister_isr() argument
1036 spin_lock_irqsave(&dsi->irq_lock, flags); in dsi_unregister_isr()
1038 r = _dsi_unregister_isr(isr, arg, mask, dsi->isr_tables.isr_table, in dsi_unregister_isr()
1039 ARRAY_SIZE(dsi->isr_tables.isr_table)); in dsi_unregister_isr()
1042 _omap_dsi_set_irqs(dsi); in dsi_unregister_isr()
1044 spin_unlock_irqrestore(&dsi->irq_lock, flags); in dsi_unregister_isr()
1049 static int dsi_register_isr_vc(struct dsi_data *dsi, int channel, in dsi_register_isr_vc() argument
1055 spin_lock_irqsave(&dsi->irq_lock, flags); in dsi_register_isr_vc()
1058 dsi->isr_tables.isr_table_vc[channel], in dsi_register_isr_vc()
1059 ARRAY_SIZE(dsi->isr_tables.isr_table_vc[channel])); in dsi_register_isr_vc()
1062 _omap_dsi_set_irqs_vc(dsi, channel); in dsi_register_isr_vc()
1064 spin_unlock_irqrestore(&dsi->irq_lock, flags); in dsi_register_isr_vc()
1069 static int dsi_unregister_isr_vc(struct dsi_data *dsi, int channel, in dsi_unregister_isr_vc() argument
1075 spin_lock_irqsave(&dsi->irq_lock, flags); in dsi_unregister_isr_vc()
1078 dsi->isr_tables.isr_table_vc[channel], in dsi_unregister_isr_vc()
1079 ARRAY_SIZE(dsi->isr_tables.isr_table_vc[channel])); in dsi_unregister_isr_vc()
1082 _omap_dsi_set_irqs_vc(dsi, channel); in dsi_unregister_isr_vc()
1084 spin_unlock_irqrestore(&dsi->irq_lock, flags); in dsi_unregister_isr_vc()
1089 static int dsi_register_isr_cio(struct dsi_data *dsi, omap_dsi_isr_t isr, in dsi_register_isr_cio() argument
1095 spin_lock_irqsave(&dsi->irq_lock, flags); in dsi_register_isr_cio()
1097 r = _dsi_register_isr(isr, arg, mask, dsi->isr_tables.isr_table_cio, in dsi_register_isr_cio()
1098 ARRAY_SIZE(dsi->isr_tables.isr_table_cio)); in dsi_register_isr_cio()
1101 _omap_dsi_set_irqs_cio(dsi); in dsi_register_isr_cio()
1103 spin_unlock_irqrestore(&dsi->irq_lock, flags); in dsi_register_isr_cio()
1108 static int dsi_unregister_isr_cio(struct dsi_data *dsi, omap_dsi_isr_t isr, in dsi_unregister_isr_cio() argument
1114 spin_lock_irqsave(&dsi->irq_lock, flags); in dsi_unregister_isr_cio()
1116 r = _dsi_unregister_isr(isr, arg, mask, dsi->isr_tables.isr_table_cio, in dsi_unregister_isr_cio()
1117 ARRAY_SIZE(dsi->isr_tables.isr_table_cio)); in dsi_unregister_isr_cio()
1120 _omap_dsi_set_irqs_cio(dsi); in dsi_unregister_isr_cio()
1122 spin_unlock_irqrestore(&dsi->irq_lock, flags); in dsi_unregister_isr_cio()
1127 static u32 dsi_get_errors(struct dsi_data *dsi) in dsi_get_errors() argument
1132 spin_lock_irqsave(&dsi->errors_lock, flags); in dsi_get_errors()
1133 e = dsi->errors; in dsi_get_errors()
1134 dsi->errors = 0; in dsi_get_errors()
1135 spin_unlock_irqrestore(&dsi->errors_lock, flags); in dsi_get_errors()
1139 static int dsi_runtime_get(struct dsi_data *dsi) in dsi_runtime_get() argument
1145 r = pm_runtime_get_sync(dsi->dev); in dsi_runtime_get()
1150 static void dsi_runtime_put(struct dsi_data *dsi) in dsi_runtime_put() argument
1156 r = pm_runtime_put_sync(dsi->dev); in dsi_runtime_put()
1160 static int dsi_regulator_init(struct dsi_data *dsi) in dsi_regulator_init() argument
1164 if (dsi->vdds_dsi_reg != NULL) in dsi_regulator_init()
1167 vdds_dsi = devm_regulator_get(dsi->dev, "vdd"); in dsi_regulator_init()
1171 DSSERR("can't get DSI VDD regulator\n"); in dsi_regulator_init()
1175 dsi->vdds_dsi_reg = vdds_dsi; in dsi_regulator_init()
1180 static void _dsi_print_reset_status(struct dsi_data *dsi) in _dsi_print_reset_status() argument
1186 * required after DSIPHY reset to complete the reset of the DSI complex in _dsi_print_reset_status()
1188 l = dsi_read_reg(dsi, DSI_DSIPHY_CFG5); in _dsi_print_reset_status()
1190 if (dsi->data->quirks & DSI_QUIRK_REVERSE_TXCLKESC) { in _dsi_print_reset_status()
1201 FLD_GET(dsi_read_reg(dsi, DSI_##fld), start, end) in _dsi_print_reset_status()
1203 pr_debug("DSI resets: PLL (%d) CIO (%d) PHY (%x%x%x, %d, %d, %d)\n", in _dsi_print_reset_status()
1216 static inline int dsi_if_enable(struct dsi_data *dsi, bool enable) in dsi_if_enable() argument
1221 REG_FLD_MOD(dsi, DSI_CTRL, enable, 0, 0); /* IF_EN */ in dsi_if_enable()
1223 if (!wait_for_bit_change(dsi, DSI_CTRL, 0, enable)) { in dsi_if_enable()
1231 static unsigned long dsi_get_pll_hsdiv_dispc_rate(struct dsi_data *dsi) in dsi_get_pll_hsdiv_dispc_rate() argument
1233 return dsi->pll.cinfo.clkout[HSDIV_DISPC]; in dsi_get_pll_hsdiv_dispc_rate()
1236 static unsigned long dsi_get_pll_hsdiv_dsi_rate(struct dsi_data *dsi) in dsi_get_pll_hsdiv_dsi_rate() argument
1238 return dsi->pll.cinfo.clkout[HSDIV_DSI]; in dsi_get_pll_hsdiv_dsi_rate()
1241 static unsigned long dsi_get_txbyteclkhs(struct dsi_data *dsi) in dsi_get_txbyteclkhs() argument
1243 return dsi->pll.cinfo.clkdco / 16; in dsi_get_txbyteclkhs()
1246 static unsigned long dsi_fclk_rate(struct dsi_data *dsi) in dsi_fclk_rate() argument
1251 source = dss_get_dsi_clk_source(dsi->dss, dsi->module_id); in dsi_fclk_rate()
1253 /* DSI FCLK source is DSS_CLK_FCK */ in dsi_fclk_rate()
1254 r = clk_get_rate(dsi->dss_clk); in dsi_fclk_rate()
1256 /* DSI FCLK source is dsi_pll_hsdiv_dsi_clk */ in dsi_fclk_rate()
1257 r = dsi_get_pll_hsdiv_dsi_rate(dsi); in dsi_fclk_rate()
1282 static int dsi_set_lp_clk_divisor(struct dsi_data *dsi) in dsi_set_lp_clk_divisor() argument
1287 unsigned int lpdiv_max = dsi->data->max_pll_lpdiv; in dsi_set_lp_clk_divisor()
1290 lp_clk_div = dsi->user_lp_cinfo.lp_clk_div; in dsi_set_lp_clk_divisor()
1295 dsi_fclk = dsi_fclk_rate(dsi); in dsi_set_lp_clk_divisor()
1300 dsi->current_lp_cinfo.lp_clk = lp_clk; in dsi_set_lp_clk_divisor()
1301 dsi->current_lp_cinfo.lp_clk_div = lp_clk_div; in dsi_set_lp_clk_divisor()
1304 REG_FLD_MOD(dsi, DSI_CLK_CTRL, lp_clk_div, 12, 0); in dsi_set_lp_clk_divisor()
1307 REG_FLD_MOD(dsi, DSI_CLK_CTRL, dsi_fclk > 30000000 ? 1 : 0, 21, 21); in dsi_set_lp_clk_divisor()
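dsi_set_lp_clk_divisor() derives the LP escape clock from the DSI functional clock using the user-supplied divider, programs the divider into DSI_CLK_CTRL, and sets bit 21 depending on whether fclk exceeds 30 MHz. The lp_clk computation itself is not among the matched lines; the usual relation (LP clock = fclk / 2 / divider, the escape clock running at half the functional clock) is assumed in this sketch:

/* Assumed LP clock computation; only the max-divider lookup and the
 * register writes appear in the listing. */
static unsigned long dsi_lp_clk_sketch(struct dsi_data *dsi)
{
	unsigned long dsi_fclk = dsi_fclk_rate(dsi);
	unsigned int lp_clk_div = dsi->user_lp_cinfo.lp_clk_div;

	/* LP/escape clock: half of fclk, further divided by LP_CLK_DIVISOR */
	return dsi_fclk / 2 / lp_clk_div;
}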
1312 static void dsi_enable_scp_clk(struct dsi_data *dsi) in dsi_enable_scp_clk() argument
1314 if (dsi->scp_clk_refcount++ == 0) in dsi_enable_scp_clk()
1315 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 1, 14, 14); /* CIO_CLK_ICG */ in dsi_enable_scp_clk()
1318 static void dsi_disable_scp_clk(struct dsi_data *dsi) in dsi_disable_scp_clk() argument
1320 WARN_ON(dsi->scp_clk_refcount == 0); in dsi_disable_scp_clk()
1321 if (--dsi->scp_clk_refcount == 0) in dsi_disable_scp_clk()
1322 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 14, 14); /* CIO_CLK_ICG */ in dsi_disable_scp_clk()
1332 static int dsi_pll_power(struct dsi_data *dsi, enum dsi_pll_power_state state) in dsi_pll_power() argument
1336 /* DSI-PLL power command 0x3 is not working */ in dsi_pll_power()
1337 if ((dsi->data->quirks & DSI_QUIRK_PLL_PWR_BUG) && in dsi_pll_power()
1342 REG_FLD_MOD(dsi, DSI_CLK_CTRL, state, 31, 30); in dsi_pll_power()
1345 while (FLD_GET(dsi_read_reg(dsi, DSI_CLK_CTRL), 29, 28) != state) { in dsi_pll_power()
1347 DSSERR("Failed to set DSI PLL power mode to %d\n", in dsi_pll_power()
1358 static void dsi_pll_calc_dsi_fck(struct dsi_data *dsi, in dsi_pll_calc_dsi_fck() argument
1363 max_dsi_fck = dsi->data->max_fck_freq; in dsi_pll_calc_dsi_fck()
1371 struct dsi_data *dsi = container_of(pll, struct dsi_data, pll); in dsi_pll_enable() local
1376 r = dsi_regulator_init(dsi); in dsi_pll_enable()
1380 r = dsi_runtime_get(dsi); in dsi_pll_enable()
1387 dsi_enable_scp_clk(dsi); in dsi_pll_enable()
1389 r = regulator_enable(dsi->vdds_dsi_reg); in dsi_pll_enable()
1394 dispc_pck_free_enable(dsi->dss->dispc, 1); in dsi_pll_enable()
1396 if (!wait_for_bit_change(dsi, DSI_PLL_STATUS, 0, 1)) { in dsi_pll_enable()
1399 dispc_pck_free_enable(dsi->dss->dispc, 0); in dsi_pll_enable()
1405 dispc_pck_free_enable(dsi->dss->dispc, 0); in dsi_pll_enable()
1407 r = dsi_pll_power(dsi, DSI_PLL_POWER_ON_ALL); in dsi_pll_enable()
1416 regulator_disable(dsi->vdds_dsi_reg); in dsi_pll_enable()
1418 dsi_disable_scp_clk(dsi); in dsi_pll_enable()
1419 dsi_runtime_put(dsi); in dsi_pll_enable()
1425 struct dsi_data *dsi = container_of(pll, struct dsi_data, pll); in dsi_pll_disable() local
1427 dsi_pll_power(dsi, DSI_PLL_POWER_OFF); in dsi_pll_disable()
1429 regulator_disable(dsi->vdds_dsi_reg); in dsi_pll_disable()
1431 dsi_disable_scp_clk(dsi); in dsi_pll_disable()
1432 dsi_runtime_put(dsi); in dsi_pll_disable()
1437 static void dsi_dump_dsi_clocks(struct dsi_data *dsi, struct seq_file *s) in dsi_dump_dsi_clocks() argument
1439 struct dss_pll_clock_info *cinfo = &dsi->pll.cinfo; in dsi_dump_dsi_clocks()
1441 int dsi_module = dsi->module_id; in dsi_dump_dsi_clocks()
1442 struct dss_pll *pll = &dsi->pll; in dsi_dump_dsi_clocks()
1444 dispc_clk_src = dss_get_dispc_clk_source(dsi->dss); in dsi_dump_dsi_clocks()
1445 dsi_clk_src = dss_get_dsi_clk_source(dsi->dss, dsi_module); in dsi_dump_dsi_clocks()
1447 if (dsi_runtime_get(dsi)) in dsi_dump_dsi_clocks()
1450 seq_printf(s, "- DSI%d PLL -\n", dsi_module + 1); in dsi_dump_dsi_clocks()
1452 seq_printf(s, "dsi pll clkin\t%lu\n", clk_get_rate(pll->clkin)); in dsi_dump_dsi_clocks()
1477 seq_printf(s, "- DSI%d -\n", dsi_module + 1); in dsi_dump_dsi_clocks()
1479 seq_printf(s, "dsi fclk source = %s\n", in dsi_dump_dsi_clocks()
1482 seq_printf(s, "DSI_FCLK\t%lu\n", dsi_fclk_rate(dsi)); in dsi_dump_dsi_clocks()
1487 seq_printf(s, "TxByteClkHS\t%lu\n", dsi_get_txbyteclkhs(dsi)); in dsi_dump_dsi_clocks()
1489 seq_printf(s, "LP_CLK\t\t%lu\n", dsi->current_lp_cinfo.lp_clk); in dsi_dump_dsi_clocks()
1491 dsi_runtime_put(dsi); in dsi_dump_dsi_clocks()
1496 struct dsi_data *dsi; in dsi_dump_clocks() local
1500 dsi = dsi_get_dsi_from_id(i); in dsi_dump_clocks()
1501 if (dsi) in dsi_dump_clocks()
1502 dsi_dump_dsi_clocks(dsi, s); in dsi_dump_clocks()
1507 static void dsi_dump_dsi_irqs(struct dsi_data *dsi, struct seq_file *s) in dsi_dump_dsi_irqs() argument
1512 spin_lock_irqsave(&dsi->irq_stats_lock, flags); in dsi_dump_dsi_irqs()
1514 stats = dsi->irq_stats; in dsi_dump_dsi_irqs()
1515 memset(&dsi->irq_stats, 0, sizeof(dsi->irq_stats)); in dsi_dump_dsi_irqs()
1516 dsi->irq_stats.last_reset = jiffies; in dsi_dump_dsi_irqs()
1518 spin_unlock_irqrestore(&dsi->irq_stats_lock, flags); in dsi_dump_dsi_irqs()
1527 seq_printf(s, "-- DSI%d interrupts --\n", dsi->module_id + 1); in dsi_dump_dsi_irqs()
1596 struct dsi_data *dsi = dsi_get_dsi_from_id(0); in dsi1_dump_irqs() local
1598 dsi_dump_dsi_irqs(dsi, s); in dsi1_dump_irqs()
1604 struct dsi_data *dsi = dsi_get_dsi_from_id(1); in dsi2_dump_irqs() local
1606 dsi_dump_dsi_irqs(dsi, s); in dsi2_dump_irqs()
1611 static void dsi_dump_dsi_regs(struct dsi_data *dsi, struct seq_file *s) in dsi_dump_dsi_regs() argument
1613 #define DUMPREG(r) seq_printf(s, "%-35s %08x\n", #r, dsi_read_reg(dsi, r)) in dsi_dump_dsi_regs()
1615 if (dsi_runtime_get(dsi)) in dsi_dump_dsi_regs()
1617 dsi_enable_scp_clk(dsi); in dsi_dump_dsi_regs()
1689 dsi_disable_scp_clk(dsi); in dsi_dump_dsi_regs()
1690 dsi_runtime_put(dsi); in dsi_dump_dsi_regs()
1696 struct dsi_data *dsi = dsi_get_dsi_from_id(0); in dsi1_dump_regs() local
1698 dsi_dump_dsi_regs(dsi, s); in dsi1_dump_regs()
1704 struct dsi_data *dsi = dsi_get_dsi_from_id(1); in dsi2_dump_regs() local
1706 dsi_dump_dsi_regs(dsi, s); in dsi2_dump_regs()
1716 static int dsi_cio_power(struct dsi_data *dsi, enum dsi_cio_power_state state) in dsi_cio_power() argument
1721 REG_FLD_MOD(dsi, DSI_COMPLEXIO_CFG1, state, 28, 27); in dsi_cio_power()
1724 while (FLD_GET(dsi_read_reg(dsi, DSI_COMPLEXIO_CFG1), in dsi_cio_power()
1737 static unsigned int dsi_get_line_buf_size(struct dsi_data *dsi) in dsi_get_line_buf_size() argument
1745 if (!(dsi->data->quirks & DSI_QUIRK_GNQ)) in dsi_get_line_buf_size()
1748 val = REG_GET(dsi, DSI_GNQ, 14, 12); /* VP1_LINE_BUFFER_SIZE */ in dsi_get_line_buf_size()
1771 static int dsi_set_lane_config(struct dsi_data *dsi) in dsi_set_lane_config() argument
1784 r = dsi_read_reg(dsi, DSI_COMPLEXIO_CFG1); in dsi_set_lane_config()
1786 for (i = 0; i < dsi->num_lanes_used; ++i) { in dsi_set_lane_config()
1791 for (t = 0; t < dsi->num_lanes_supported; ++t) in dsi_set_lane_config()
1792 if (dsi->lanes[t].function == functions[i]) in dsi_set_lane_config()
1795 if (t == dsi->num_lanes_supported) in dsi_set_lane_config()
1799 polarity = dsi->lanes[t].polarity; in dsi_set_lane_config()
1806 for (; i < dsi->num_lanes_supported; ++i) { in dsi_set_lane_config()
1813 dsi_write_reg(dsi, DSI_COMPLEXIO_CFG1, r); in dsi_set_lane_config()
1818 static inline unsigned int ns2ddr(struct dsi_data *dsi, unsigned int ns) in ns2ddr() argument
1821 unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4; in ns2ddr()
1826 static inline unsigned int ddr2ns(struct dsi_data *dsi, unsigned int ddr) in ddr2ns() argument
1828 unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4; in ddr2ns()
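ns2ddr()/ddr2ns() convert between nanoseconds and DDR clock cycles; the listing shows the DDR clock being the PLL DCO output divided by four. The rounding of the real helpers is not visible here, so the arithmetic below is a plain unit-conversion sketch:

static inline unsigned int ns2ddr_sketch(struct dsi_data *dsi, unsigned int ns)
{
	unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4;	/* Hz */

	/* ns -> DDR cycles: ns * (cycles per microsecond) / 1000 */
	return (ns * (ddr_clk / 1000 / 1000)) / 1000;
}

static inline unsigned int ddr2ns_sketch(struct dsi_data *dsi, unsigned int ddr)
{
	unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4;

	/* DDR cycles -> ns */
	return ddr * 1000 * 1000 / (ddr_clk / 1000);
}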
1833 static void dsi_cio_timings(struct dsi_data *dsi) in dsi_cio_timings() argument
1845 ths_prepare = ns2ddr(dsi, 70) + 2; in dsi_cio_timings()
1848 ths_prepare_ths_zero = ns2ddr(dsi, 175) + 2; in dsi_cio_timings()
1851 ths_trail = ns2ddr(dsi, 60) + 5; in dsi_cio_timings()
1854 ths_exit = ns2ddr(dsi, 145); in dsi_cio_timings()
1857 tlpx_half = ns2ddr(dsi, 25); in dsi_cio_timings()
1860 tclk_trail = ns2ddr(dsi, 60) + 2; in dsi_cio_timings()
1863 tclk_prepare = ns2ddr(dsi, 65); in dsi_cio_timings()
1866 tclk_zero = ns2ddr(dsi, 260); in dsi_cio_timings()
1869 ths_prepare, ddr2ns(dsi, ths_prepare), in dsi_cio_timings()
1870 ths_prepare_ths_zero, ddr2ns(dsi, ths_prepare_ths_zero)); in dsi_cio_timings()
1872 ths_trail, ddr2ns(dsi, ths_trail), in dsi_cio_timings()
1873 ths_exit, ddr2ns(dsi, ths_exit)); in dsi_cio_timings()
1877 tlpx_half, ddr2ns(dsi, tlpx_half), in dsi_cio_timings()
1878 tclk_trail, ddr2ns(dsi, tclk_trail), in dsi_cio_timings()
1879 tclk_zero, ddr2ns(dsi, tclk_zero)); in dsi_cio_timings()
1881 tclk_prepare, ddr2ns(dsi, tclk_prepare)); in dsi_cio_timings()
1885 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG0); in dsi_cio_timings()
1890 dsi_write_reg(dsi, DSI_DSIPHY_CFG0, r); in dsi_cio_timings()
1892 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG1); in dsi_cio_timings()
1897 if (dsi->data->quirks & DSI_QUIRK_PHY_DCC) { in dsi_cio_timings()
1903 dsi_write_reg(dsi, DSI_DSIPHY_CFG1, r); in dsi_cio_timings()
1905 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG2); in dsi_cio_timings()
1907 dsi_write_reg(dsi, DSI_DSIPHY_CFG2, r); in dsi_cio_timings()
1911 static void dsi_cio_enable_lane_override(struct dsi_data *dsi, in dsi_cio_enable_lane_override() argument
1917 u8 lptxscp_start = dsi->num_lanes_supported == 3 ? 22 : 26; in dsi_cio_enable_lane_override()
1921 for (i = 0; i < dsi->num_lanes_supported; ++i) { in dsi_cio_enable_lane_override()
1922 unsigned int p = dsi->lanes[i].polarity; in dsi_cio_enable_lane_override()
1943 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, l, lptxscp_start, 17); in dsi_cio_enable_lane_override()
1948 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, 1, 27, 27); in dsi_cio_enable_lane_override()
1951 static void dsi_cio_disable_lane_override(struct dsi_data *dsi) in dsi_cio_disable_lane_override() argument
1954 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, 0, 27, 27); /* ENLPTXSCPDAT */ in dsi_cio_disable_lane_override()
1957 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, 0, 22, 17); in dsi_cio_disable_lane_override()
1960 static int dsi_cio_wait_tx_clk_esc_reset(struct dsi_data *dsi) in dsi_cio_wait_tx_clk_esc_reset() argument
1968 if (dsi->data->quirks & DSI_QUIRK_REVERSE_TXCLKESC) in dsi_cio_wait_tx_clk_esc_reset()
1973 for (i = 0; i < dsi->num_lanes_supported; ++i) in dsi_cio_wait_tx_clk_esc_reset()
1974 in_use[i] = dsi->lanes[i].function != DSI_LANE_UNUSED; in dsi_cio_wait_tx_clk_esc_reset()
1981 l = dsi_read_reg(dsi, DSI_DSIPHY_CFG5); in dsi_cio_wait_tx_clk_esc_reset()
1984 for (i = 0; i < dsi->num_lanes_supported; ++i) { in dsi_cio_wait_tx_clk_esc_reset()
1989 if (ok == dsi->num_lanes_supported) in dsi_cio_wait_tx_clk_esc_reset()
1993 for (i = 0; i < dsi->num_lanes_supported; ++i) { in dsi_cio_wait_tx_clk_esc_reset()
2008 static unsigned int dsi_get_lane_mask(struct dsi_data *dsi) in dsi_get_lane_mask() argument
2013 for (i = 0; i < dsi->num_lanes_supported; ++i) { in dsi_get_lane_mask()
2014 if (dsi->lanes[i].function != DSI_LANE_UNUSED) in dsi_get_lane_mask()
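dsi_get_lane_mask() walks the lane table and builds a bitmask of the lanes actually in use, which dsi_enable_pads() then hands to the OMAP4/OMAP5 pad-mux helpers below. The line that sets each bit is not part of this listing; a one-bit-per-lane mask is assumed:

static unsigned int dsi_get_lane_mask_sketch(struct dsi_data *dsi)
{
	unsigned int mask = 0;
	int i;

	for (i = 0; i < dsi->num_lanes_supported; ++i) {
		if (dsi->lanes[i].function != DSI_LANE_UNUSED)
			mask |= 1 << i;	/* assumed: one bit per physical lane */
	}

	return mask;
}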
2033 static int dsi_omap4_mux_pads(struct dsi_data *dsi, unsigned int lanes) in dsi_omap4_mux_pads() argument
2038 if (dsi->module_id == 0) { in dsi_omap4_mux_pads()
2043 } else if (dsi->module_id == 1) { in dsi_omap4_mux_pads()
2052 return regmap_update_bits(dsi->syscon, OMAP4_DSIPHY_SYSCON_OFFSET, in dsi_omap4_mux_pads()
2065 static int dsi_omap5_mux_pads(struct dsi_data *dsi, unsigned int lanes) in dsi_omap5_mux_pads() argument
2069 if (dsi->module_id == 0) in dsi_omap5_mux_pads()
2071 else if (dsi->module_id == 1) in dsi_omap5_mux_pads()
2076 return regmap_update_bits(dsi->syscon, OMAP5_DSIPHY_SYSCON_OFFSET, in dsi_omap5_mux_pads()
2081 static int dsi_enable_pads(struct dsi_data *dsi, unsigned int lane_mask) in dsi_enable_pads() argument
2083 if (dsi->data->model == DSI_MODEL_OMAP4) in dsi_enable_pads()
2084 return dsi_omap4_mux_pads(dsi, lane_mask); in dsi_enable_pads()
2085 if (dsi->data->model == DSI_MODEL_OMAP5) in dsi_enable_pads()
2086 return dsi_omap5_mux_pads(dsi, lane_mask); in dsi_enable_pads()
2090 static void dsi_disable_pads(struct dsi_data *dsi) in dsi_disable_pads() argument
2092 if (dsi->data->model == DSI_MODEL_OMAP4) in dsi_disable_pads()
2093 dsi_omap4_mux_pads(dsi, 0); in dsi_disable_pads()
2094 else if (dsi->data->model == DSI_MODEL_OMAP5) in dsi_disable_pads()
2095 dsi_omap5_mux_pads(dsi, 0); in dsi_disable_pads()
2098 static int dsi_cio_init(struct dsi_data *dsi) in dsi_cio_init() argument
2103 DSSDBG("DSI CIO init starts"); in dsi_cio_init()
2105 r = dsi_enable_pads(dsi, dsi_get_lane_mask(dsi)); in dsi_cio_init()
2109 dsi_enable_scp_clk(dsi); in dsi_cio_init()
2112 * required after DSIPHY reset to complete the reset of the DSI complex in dsi_cio_init()
2114 dsi_read_reg(dsi, DSI_DSIPHY_CFG5); in dsi_cio_init()
2116 if (!wait_for_bit_change(dsi, DSI_DSIPHY_CFG5, 30, 1)) { in dsi_cio_init()
2122 r = dsi_set_lane_config(dsi); in dsi_cio_init()
2127 l = dsi_read_reg(dsi, DSI_TIMING1); in dsi_cio_init()
2132 dsi_write_reg(dsi, DSI_TIMING1, l); in dsi_cio_init()
2134 if (dsi->ulps_enabled) { in dsi_cio_init()
2151 for (i = 0; i < dsi->num_lanes_supported; ++i) { in dsi_cio_init()
2152 if (dsi->lanes[i].function == DSI_LANE_UNUSED) in dsi_cio_init()
2157 dsi_cio_enable_lane_override(dsi, mask_p, 0); in dsi_cio_init()
2160 r = dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_ON); in dsi_cio_init()
2164 if (!wait_for_bit_change(dsi, DSI_COMPLEXIO_CFG1, 29, 1)) { in dsi_cio_init()
2170 dsi_if_enable(dsi, true); in dsi_cio_init()
2171 dsi_if_enable(dsi, false); in dsi_cio_init()
2172 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 1, 20, 20); /* LP_CLK_ENABLE */ in dsi_cio_init()
2174 r = dsi_cio_wait_tx_clk_esc_reset(dsi); in dsi_cio_init()
2178 if (dsi->ulps_enabled) { in dsi_cio_init()
2179 /* Keep Mark-1 state for 1ms (as per DSI spec) */ in dsi_cio_init()
2186 dsi_cio_disable_lane_override(dsi); in dsi_cio_init()
2190 REG_FLD_MOD(dsi, DSI_TIMING1, 0, 15, 15); in dsi_cio_init()
2192 dsi_cio_timings(dsi); in dsi_cio_init()
2194 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) { in dsi_cio_init()
2196 REG_FLD_MOD(dsi, DSI_CLK_CTRL, in dsi_cio_init()
2197 dsi->vm_timings.ddr_clk_always_on, 13, 13); in dsi_cio_init()
2200 dsi->ulps_enabled = false; in dsi_cio_init()
2207 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 20, 20); /* LP_CLK_ENABLE */ in dsi_cio_init()
2209 dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_OFF); in dsi_cio_init()
2211 if (dsi->ulps_enabled) in dsi_cio_init()
2212 dsi_cio_disable_lane_override(dsi); in dsi_cio_init()
2214 dsi_disable_scp_clk(dsi); in dsi_cio_init()
2215 dsi_disable_pads(dsi); in dsi_cio_init()
2219 static void dsi_cio_uninit(struct dsi_data *dsi) in dsi_cio_uninit() argument
2222 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 13, 13); in dsi_cio_uninit()
2224 dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_OFF); in dsi_cio_uninit()
2225 dsi_disable_scp_clk(dsi); in dsi_cio_uninit()
2226 dsi_disable_pads(dsi); in dsi_cio_uninit()
2229 static void dsi_config_tx_fifo(struct dsi_data *dsi, in dsi_config_tx_fifo() argument
2237 dsi->vc[0].tx_fifo_size = size1; in dsi_config_tx_fifo()
2238 dsi->vc[1].tx_fifo_size = size2; in dsi_config_tx_fifo()
2239 dsi->vc[2].tx_fifo_size = size3; in dsi_config_tx_fifo()
2240 dsi->vc[3].tx_fifo_size = size4; in dsi_config_tx_fifo()
2244 int size = dsi->vc[i].tx_fifo_size; in dsi_config_tx_fifo()
2258 dsi_write_reg(dsi, DSI_TX_FIFO_VC_SIZE, r); in dsi_config_tx_fifo()
2261 static void dsi_config_rx_fifo(struct dsi_data *dsi, in dsi_config_rx_fifo() argument
2269 dsi->vc[0].rx_fifo_size = size1; in dsi_config_rx_fifo()
2270 dsi->vc[1].rx_fifo_size = size2; in dsi_config_rx_fifo()
2271 dsi->vc[2].rx_fifo_size = size3; in dsi_config_rx_fifo()
2272 dsi->vc[3].rx_fifo_size = size4; in dsi_config_rx_fifo()
2276 int size = dsi->vc[i].rx_fifo_size; in dsi_config_rx_fifo()
2290 dsi_write_reg(dsi, DSI_RX_FIFO_VC_SIZE, r); in dsi_config_rx_fifo()
2293 static int dsi_force_tx_stop_mode_io(struct dsi_data *dsi) in dsi_force_tx_stop_mode_io() argument
2297 r = dsi_read_reg(dsi, DSI_TIMING1); in dsi_force_tx_stop_mode_io()
2299 dsi_write_reg(dsi, DSI_TIMING1, r); in dsi_force_tx_stop_mode_io()
2301 if (!wait_for_bit_change(dsi, DSI_TIMING1, 15, 0)) { in dsi_force_tx_stop_mode_io()
2309 static bool dsi_vc_is_enabled(struct dsi_data *dsi, int channel) in dsi_vc_is_enabled() argument
2311 return REG_GET(dsi, DSI_VC_CTRL(channel), 0, 0); in dsi_vc_is_enabled()
2318 struct dsi_data *dsi = vp_data->dsi; in dsi_packet_sent_handler_vp() local
2319 const int channel = dsi->update_channel; in dsi_packet_sent_handler_vp()
2320 u8 bit = dsi->te_enabled ? 30 : 31; in dsi_packet_sent_handler_vp()
2322 if (REG_GET(dsi, DSI_VC_TE(channel), bit, bit) == 0) in dsi_packet_sent_handler_vp()
2326 static int dsi_sync_vc_vp(struct dsi_data *dsi, int channel) in dsi_sync_vc_vp() argument
2330 .dsi = dsi, in dsi_sync_vc_vp()
2336 bit = dsi->te_enabled ? 30 : 31; in dsi_sync_vc_vp()
2338 r = dsi_register_isr_vc(dsi, channel, dsi_packet_sent_handler_vp, in dsi_sync_vc_vp()
2344 if (REG_GET(dsi, DSI_VC_TE(channel), bit, bit)) { in dsi_sync_vc_vp()
2353 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_vp, in dsi_sync_vc_vp()
2358 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_vp, in dsi_sync_vc_vp()
2368 struct dsi_data *dsi = l4_data->dsi; in dsi_packet_sent_handler_l4() local
2369 const int channel = dsi->update_channel; in dsi_packet_sent_handler_l4()
2371 if (REG_GET(dsi, DSI_VC_CTRL(channel), 5, 5) == 0) in dsi_packet_sent_handler_l4()
2375 static int dsi_sync_vc_l4(struct dsi_data *dsi, int channel) in dsi_sync_vc_l4() argument
2379 .dsi = dsi, in dsi_sync_vc_l4()
2384 r = dsi_register_isr_vc(dsi, channel, dsi_packet_sent_handler_l4, in dsi_sync_vc_l4()
2390 if (REG_GET(dsi, DSI_VC_CTRL(channel), 5, 5)) { in dsi_sync_vc_l4()
2399 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_l4, in dsi_sync_vc_l4()
2404 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_l4, in dsi_sync_vc_l4()
2410 static int dsi_sync_vc(struct dsi_data *dsi, int channel) in dsi_sync_vc() argument
2412 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_sync_vc()
2416 if (!dsi_vc_is_enabled(dsi, channel)) in dsi_sync_vc()
2419 switch (dsi->vc[channel].source) { in dsi_sync_vc()
2421 return dsi_sync_vc_vp(dsi, channel); in dsi_sync_vc()
2423 return dsi_sync_vc_l4(dsi, channel); in dsi_sync_vc()
2430 static int dsi_vc_enable(struct dsi_data *dsi, int channel, bool enable) in dsi_vc_enable() argument
2437 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), enable, 0, 0); in dsi_vc_enable()
2439 if (!wait_for_bit_change(dsi, DSI_VC_CTRL(channel), 0, enable)) { in dsi_vc_enable()
2447 static void dsi_vc_initial_config(struct dsi_data *dsi, int channel) in dsi_vc_initial_config() argument
2453 r = dsi_read_reg(dsi, DSI_VC_CTRL(channel)); in dsi_vc_initial_config()
2466 if (dsi->data->quirks & DSI_QUIRK_VC_OCP_WIDTH) in dsi_vc_initial_config()
2472 dsi_write_reg(dsi, DSI_VC_CTRL(channel), r); in dsi_vc_initial_config()
2474 dsi->vc[channel].source = DSI_VC_SOURCE_L4; in dsi_vc_initial_config()
2477 static int dsi_vc_config_source(struct dsi_data *dsi, int channel, in dsi_vc_config_source() argument
2480 if (dsi->vc[channel].source == source) in dsi_vc_config_source()
2485 dsi_sync_vc(dsi, channel); in dsi_vc_config_source()
2487 dsi_vc_enable(dsi, channel, 0); in dsi_vc_config_source()
2490 if (!wait_for_bit_change(dsi, DSI_VC_CTRL(channel), 15, 0)) { in dsi_vc_config_source()
2496 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), source, 1, 1); in dsi_vc_config_source()
2499 if (dsi->data->quirks & DSI_QUIRK_DCS_CMD_CONFIG_VC) { in dsi_vc_config_source()
2501 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), enable, 30, 30); in dsi_vc_config_source()
2504 dsi_vc_enable(dsi, channel, 1); in dsi_vc_config_source()
2506 dsi->vc[channel].source = source; in dsi_vc_config_source()
2514 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_enable_hs() local
2518 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_vc_enable_hs()
2520 dsi_vc_enable(dsi, channel, 0); in dsi_vc_enable_hs()
2521 dsi_if_enable(dsi, 0); in dsi_vc_enable_hs()
2523 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), enable, 9, 9); in dsi_vc_enable_hs()
2525 dsi_vc_enable(dsi, channel, 1); in dsi_vc_enable_hs()
2526 dsi_if_enable(dsi, 1); in dsi_vc_enable_hs()
2528 dsi_force_tx_stop_mode_io(dsi); in dsi_vc_enable_hs()
2531 if (dsi->vm_timings.ddr_clk_always_on && enable) in dsi_vc_enable_hs()
2532 dsi_vc_send_null(dsi, channel); in dsi_vc_enable_hs()
2535 static void dsi_vc_flush_long_data(struct dsi_data *dsi, int channel) in dsi_vc_flush_long_data() argument
2537 while (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) { in dsi_vc_flush_long_data()
2539 val = dsi_read_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel)); in dsi_vc_flush_long_data()
2585 static u16 dsi_vc_flush_receive_data(struct dsi_data *dsi, int channel) in dsi_vc_flush_receive_data() argument
2588 while (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) { in dsi_vc_flush_receive_data()
2591 val = dsi_read_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel)); in dsi_vc_flush_receive_data()
2606 dsi_vc_flush_long_data(dsi, channel); in dsi_vc_flush_receive_data()
2614 static int dsi_vc_send_bta(struct dsi_data *dsi, int channel) in dsi_vc_send_bta() argument
2616 if (dsi->debug_write || dsi->debug_read) in dsi_vc_send_bta()
2619 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_vc_send_bta()
2622 if (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) { in dsi_vc_send_bta()
2624 dsi_vc_flush_receive_data(dsi, channel); in dsi_vc_send_bta()
2627 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), 1, 6, 6); /* BTA_EN */ in dsi_vc_send_bta()
2630 dsi_read_reg(dsi, DSI_VC_CTRL(channel)); in dsi_vc_send_bta()
2637 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_send_bta_sync() local
2642 r = dsi_register_isr_vc(dsi, channel, dsi_completion_handler, in dsi_vc_send_bta_sync()
2647 r = dsi_register_isr(dsi, dsi_completion_handler, &completion, in dsi_vc_send_bta_sync()
2652 r = dsi_vc_send_bta(dsi, channel); in dsi_vc_send_bta_sync()
2663 err = dsi_get_errors(dsi); in dsi_vc_send_bta_sync()
2670 dsi_unregister_isr(dsi, dsi_completion_handler, &completion, in dsi_vc_send_bta_sync()
2673 dsi_unregister_isr_vc(dsi, channel, dsi_completion_handler, in dsi_vc_send_bta_sync()
2679 static inline void dsi_vc_write_long_header(struct dsi_data *dsi, int channel, in dsi_vc_write_long_header() argument
2685 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_vc_write_long_header()
2687 data_id = data_type | dsi->vc[channel].vc_id << 6; in dsi_vc_write_long_header()
2692 dsi_write_reg(dsi, DSI_VC_LONG_PACKET_HEADER(channel), val); in dsi_vc_write_long_header()
2695 static inline void dsi_vc_write_long_payload(struct dsi_data *dsi, int channel, in dsi_vc_write_long_payload() argument
2705 dsi_write_reg(dsi, DSI_VC_LONG_PACKET_PAYLOAD(channel), val); in dsi_vc_write_long_payload()
2708 static int dsi_vc_send_long(struct dsi_data *dsi, int channel, u8 data_type, in dsi_vc_send_long() argument
2717 if (dsi->debug_write) in dsi_vc_send_long()
2721 if (dsi->vc[channel].tx_fifo_size * 32 * 4 < len + 4) { in dsi_vc_send_long()
2726 dsi_vc_config_source(dsi, channel, DSI_VC_SOURCE_L4); in dsi_vc_send_long()
2728 dsi_vc_write_long_header(dsi, channel, data_type, len, ecc); in dsi_vc_send_long()
2732 if (dsi->debug_write) in dsi_vc_send_long()
2740 dsi_vc_write_long_payload(dsi, channel, b1, b2, b3, b4); in dsi_vc_send_long()
2747 if (dsi->debug_write) in dsi_vc_send_long()
2765 dsi_vc_write_long_payload(dsi, channel, b1, b2, b3, 0); in dsi_vc_send_long()
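dsi_vc_send_long() streams a long packet through the L4 path: the header word packs the data ID (data_type with the virtual-channel ID shifted into bits 7:6), the 16-bit length/word count, and the ECC byte, and the payload is pushed four bytes per 32-bit FIFO word, with a shorter final word when len is not a multiple of four. The exact shift layout is only partly visible in the matched lines, so the packing below is a hedged sketch:

/* Assumed packing of the long-packet header and payload FIFO words. */
static inline void write_long_header_sketch(struct dsi_data *dsi, int channel,
					    u8 data_type, u16 len, u8 ecc)
{
	u8 data_id = data_type | dsi->vc[channel].vc_id << 6;
	u32 val = data_id | (u32)len << 8 | (u32)ecc << 24;

	dsi_write_reg(dsi, DSI_VC_LONG_PACKET_HEADER(channel), val);
}

static inline void write_long_payload_sketch(struct dsi_data *dsi, int channel,
					     u8 b1, u8 b2, u8 b3, u8 b4)
{
	u32 val = (u32)b4 << 24 | b3 << 16 | b2 << 8 | b1;	/* b1 goes out first */

	dsi_write_reg(dsi, DSI_VC_LONG_PACKET_PAYLOAD(channel), val);
}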
2771 static int dsi_vc_send_short(struct dsi_data *dsi, int channel, u8 data_type, in dsi_vc_send_short() argument
2777 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_vc_send_short()
2779 if (dsi->debug_write) in dsi_vc_send_short()
2784 dsi_vc_config_source(dsi, channel, DSI_VC_SOURCE_L4); in dsi_vc_send_short()
2786 if (FLD_GET(dsi_read_reg(dsi, DSI_VC_CTRL(channel)), 16, 16)) { in dsi_vc_send_short()
2791 data_id = data_type | dsi->vc[channel].vc_id << 6; in dsi_vc_send_short()
2795 dsi_write_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel), r); in dsi_vc_send_short()
2800 static int dsi_vc_send_null(struct dsi_data *dsi, int channel) in dsi_vc_send_null() argument
2802 return dsi_vc_send_long(dsi, channel, MIPI_DSI_NULL_PACKET, NULL, 0, 0); in dsi_vc_send_null()
2805 static int dsi_vc_write_nosync_common(struct dsi_data *dsi, int channel, in dsi_vc_write_nosync_common() argument
2813 r = dsi_vc_send_short(dsi, channel, in dsi_vc_write_nosync_common()
2816 r = dsi_vc_send_short(dsi, channel, in dsi_vc_write_nosync_common()
2821 r = dsi_vc_send_short(dsi, channel, in dsi_vc_write_nosync_common()
2827 r = dsi_vc_send_long(dsi, channel, in dsi_vc_write_nosync_common()
2839 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_dcs_write_nosync() local
2841 return dsi_vc_write_nosync_common(dsi, channel, data, len, in dsi_vc_dcs_write_nosync()
2848 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_generic_write_nosync() local
2850 return dsi_vc_write_nosync_common(dsi, channel, data, len, in dsi_vc_generic_write_nosync()
2858 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_write_common() local
2861 r = dsi_vc_write_nosync_common(dsi, channel, data, len, type); in dsi_vc_write_common()
2870 if (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) { in dsi_vc_write_common()
2872 dsi_vc_flush_receive_data(dsi, channel); in dsi_vc_write_common()
2898 static int dsi_vc_dcs_send_read_request(struct dsi_data *dsi, int channel, in dsi_vc_dcs_send_read_request() argument
2903 if (dsi->debug_read) in dsi_vc_dcs_send_read_request()
2907 r = dsi_vc_send_short(dsi, channel, MIPI_DSI_DCS_READ, dcs_cmd, 0); in dsi_vc_dcs_send_read_request()
2917 static int dsi_vc_generic_send_read_request(struct dsi_data *dsi, int channel, in dsi_vc_generic_send_read_request() argument
2924 if (dsi->debug_read) in dsi_vc_generic_send_read_request()
2942 r = dsi_vc_send_short(dsi, channel, data_type, data, 0); in dsi_vc_generic_send_read_request()
2952 static int dsi_vc_read_rx_fifo(struct dsi_data *dsi, int channel, u8 *buf, in dsi_vc_read_rx_fifo() argument
2960 if (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20) == 0) { in dsi_vc_read_rx_fifo()
2966 val = dsi_read_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel)); in dsi_vc_read_rx_fifo()
2967 if (dsi->debug_read) in dsi_vc_read_rx_fifo()
2980 if (dsi->debug_read) in dsi_vc_read_rx_fifo()
2997 if (dsi->debug_read) in dsi_vc_read_rx_fifo()
3016 if (dsi->debug_read) in dsi_vc_read_rx_fifo()
3029 val = dsi_read_reg(dsi, in dsi_vc_read_rx_fifo()
3031 if (dsi->debug_read) in dsi_vc_read_rx_fifo()
3063 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_dcs_read() local
3066 r = dsi_vc_dcs_send_read_request(dsi, channel, dcs_cmd); in dsi_vc_dcs_read()
3074 r = dsi_vc_read_rx_fifo(dsi, channel, buf, buflen, in dsi_vc_dcs_read()
3093 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_generic_read() local
3096 r = dsi_vc_generic_send_read_request(dsi, channel, reqdata, reqlen); in dsi_vc_generic_read()
3104 r = dsi_vc_read_rx_fifo(dsi, channel, buf, buflen, in dsi_vc_generic_read()
3120 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_vc_set_max_rx_packet_size() local
3122 return dsi_vc_send_short(dsi, channel, in dsi_vc_set_max_rx_packet_size()
3126 static int dsi_enter_ulps(struct dsi_data *dsi) in dsi_enter_ulps() argument
3134 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_enter_ulps()
3136 WARN_ON(dsi->ulps_enabled); in dsi_enter_ulps()
3138 if (dsi->ulps_enabled) in dsi_enter_ulps()
3142 if (REG_GET(dsi, DSI_CLK_CTRL, 13, 13)) { in dsi_enter_ulps()
3143 dsi_if_enable(dsi, 0); in dsi_enter_ulps()
3144 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 13, 13); in dsi_enter_ulps()
3145 dsi_if_enable(dsi, 1); in dsi_enter_ulps()
3148 dsi_sync_vc(dsi, 0); in dsi_enter_ulps()
3149 dsi_sync_vc(dsi, 1); in dsi_enter_ulps()
3150 dsi_sync_vc(dsi, 2); in dsi_enter_ulps()
3151 dsi_sync_vc(dsi, 3); in dsi_enter_ulps()
3153 dsi_force_tx_stop_mode_io(dsi); in dsi_enter_ulps()
3155 dsi_vc_enable(dsi, 0, false); in dsi_enter_ulps()
3156 dsi_vc_enable(dsi, 1, false); in dsi_enter_ulps()
3157 dsi_vc_enable(dsi, 2, false); in dsi_enter_ulps()
3158 dsi_vc_enable(dsi, 3, false); in dsi_enter_ulps()
3160 if (REG_GET(dsi, DSI_COMPLEXIO_CFG2, 16, 16)) { /* HS_BUSY */ in dsi_enter_ulps()
3165 if (REG_GET(dsi, DSI_COMPLEXIO_CFG2, 17, 17)) { /* LP_BUSY */ in dsi_enter_ulps()
3170 r = dsi_register_isr_cio(dsi, dsi_completion_handler, &completion, in dsi_enter_ulps()
3177 for (i = 0; i < dsi->num_lanes_supported; ++i) { in dsi_enter_ulps()
3178 if (dsi->lanes[i].function == DSI_LANE_UNUSED) in dsi_enter_ulps()
3184 REG_FLD_MOD(dsi, DSI_COMPLEXIO_CFG2, mask, 9, 5); in dsi_enter_ulps()
3187 dsi_read_reg(dsi, DSI_COMPLEXIO_CFG2); in dsi_enter_ulps()
3196 dsi_unregister_isr_cio(dsi, dsi_completion_handler, &completion, in dsi_enter_ulps()
3200 REG_FLD_MOD(dsi, DSI_COMPLEXIO_CFG2, 0, 9, 5); in dsi_enter_ulps()
3203 dsi_read_reg(dsi, DSI_COMPLEXIO_CFG2); in dsi_enter_ulps()
3205 dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_ULPS); in dsi_enter_ulps()
3207 dsi_if_enable(dsi, false); in dsi_enter_ulps()
3209 dsi->ulps_enabled = true; in dsi_enter_ulps()
3214 dsi_unregister_isr_cio(dsi, dsi_completion_handler, &completion, in dsi_enter_ulps()
3219 static void dsi_set_lp_rx_timeout(struct dsi_data *dsi, unsigned int ticks, in dsi_set_lp_rx_timeout() argument
3229 fck = dsi_fclk_rate(dsi); in dsi_set_lp_rx_timeout()
3231 r = dsi_read_reg(dsi, DSI_TIMING2); in dsi_set_lp_rx_timeout()
3236 dsi_write_reg(dsi, DSI_TIMING2, r); in dsi_set_lp_rx_timeout()
3246 static void dsi_set_ta_timeout(struct dsi_data *dsi, unsigned int ticks, in dsi_set_ta_timeout() argument
3256 fck = dsi_fclk_rate(dsi); in dsi_set_ta_timeout()
3258 r = dsi_read_reg(dsi, DSI_TIMING1); in dsi_set_ta_timeout()
3263 dsi_write_reg(dsi, DSI_TIMING1, r); in dsi_set_ta_timeout()
3273 static void dsi_set_stop_state_counter(struct dsi_data *dsi, unsigned int ticks, in dsi_set_stop_state_counter() argument
3283 fck = dsi_fclk_rate(dsi); in dsi_set_stop_state_counter()
3285 r = dsi_read_reg(dsi, DSI_TIMING1); in dsi_set_stop_state_counter()
3290 dsi_write_reg(dsi, DSI_TIMING1, r); in dsi_set_stop_state_counter()
3300 static void dsi_set_hs_tx_timeout(struct dsi_data *dsi, unsigned int ticks, in dsi_set_hs_tx_timeout() argument
3310 fck = dsi_get_txbyteclkhs(dsi); in dsi_set_hs_tx_timeout()
3312 r = dsi_read_reg(dsi, DSI_TIMING2); in dsi_set_hs_tx_timeout()
3317 dsi_write_reg(dsi, DSI_TIMING2, r); in dsi_set_hs_tx_timeout()
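The four timeout setters (stop-state counter, TA, LP RX, HS TX) share one shape: a tick count plus optional x4 and x16 prescaler flags programmed into DSI_TIMING1/DSI_TIMING2, with the HS TX variant clocked from TxByteClkHS instead of the DSI functional clock. The register field positions are not in the matched lines; the sketch below only shows how the effective timeout follows from the arguments:

/* Effective timeout in nanoseconds for a ticks/x4/x16 triple; purely
 * illustrative, the register field layout is not reproduced here. */
static unsigned long timeout_ns_sketch(unsigned long clk_hz,
				       unsigned int ticks, bool x4, bool x16)
{
	unsigned long total_ticks = ticks * (x16 ? 16 : 1) * (x4 ? 4 : 1);

	/* total_ticks cycles of clk_hz, expressed in nanoseconds */
	return total_ticks * 1000 / (clk_hz / 1000 / 1000);
}

Assuming a functional clock in the tens of MHz, the 0x1fff/x4/x16 settings used in dsi_proto_config() further down land these timeouts in the millisecond range.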
3327 static void dsi_config_vp_num_line_buffers(struct dsi_data *dsi) in dsi_config_vp_num_line_buffers() argument
3331 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) { in dsi_config_vp_num_line_buffers()
3332 int bpp = dsi_get_pixel_size(dsi->pix_fmt); in dsi_config_vp_num_line_buffers()
3333 struct videomode *vm = &dsi->vm; in dsi_config_vp_num_line_buffers()
3338 if (dsi->line_buffer_size <= vm->hactive * bpp / 8) in dsi_config_vp_num_line_buffers()
3348 REG_FLD_MOD(dsi, DSI_CTRL, num_line_buffers, 13, 12); in dsi_config_vp_num_line_buffers()
3351 static void dsi_config_vp_sync_events(struct dsi_data *dsi) in dsi_config_vp_sync_events() argument
3356 if (dsi->vm_timings.trans_mode == OMAP_DSS_DSI_PULSE_MODE) in dsi_config_vp_sync_events()
3361 r = dsi_read_reg(dsi, DSI_CTRL); in dsi_config_vp_sync_events()
3369 dsi_write_reg(dsi, DSI_CTRL, r); in dsi_config_vp_sync_events()
3372 static void dsi_config_blanking_modes(struct dsi_data *dsi) in dsi_config_blanking_modes() argument
3374 int blanking_mode = dsi->vm_timings.blanking_mode; in dsi_config_blanking_modes()
3375 int hfp_blanking_mode = dsi->vm_timings.hfp_blanking_mode; in dsi_config_blanking_modes()
3376 int hbp_blanking_mode = dsi->vm_timings.hbp_blanking_mode; in dsi_config_blanking_modes()
3377 int hsa_blanking_mode = dsi->vm_timings.hsa_blanking_mode; in dsi_config_blanking_modes()
3384 r = dsi_read_reg(dsi, DSI_CTRL); in dsi_config_blanking_modes()
3389 dsi_write_reg(dsi, DSI_CTRL, r); in dsi_config_blanking_modes()
3454 static void dsi_config_cmd_mode_interleaving(struct dsi_data *dsi) in dsi_config_cmd_mode_interleaving() argument
3462 struct videomode *vm = &dsi->vm; in dsi_config_cmd_mode_interleaving()
3463 int bpp = dsi_get_pixel_size(dsi->pix_fmt); in dsi_config_cmd_mode_interleaving()
3464 int ndl = dsi->num_lanes_used - 1; in dsi_config_cmd_mode_interleaving()
3465 int dsi_fclk_hsdiv = dsi->user_dsi_cinfo.mX[HSDIV_DSI] + 1; in dsi_config_cmd_mode_interleaving()
3472 r = dsi_read_reg(dsi, DSI_CTRL); in dsi_config_cmd_mode_interleaving()
3478 r = dsi_read_reg(dsi, DSI_VM_TIMING1); in dsi_config_cmd_mode_interleaving()
3483 r = dsi_read_reg(dsi, DSI_CLK_TIMING); in dsi_config_cmd_mode_interleaving()
3487 r = dsi_read_reg(dsi, DSI_VM_TIMING7); in dsi_config_cmd_mode_interleaving()
3491 r = dsi_read_reg(dsi, DSI_CLK_CTRL); in dsi_config_cmd_mode_interleaving()
3495 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG0); in dsi_config_cmd_mode_interleaving()
3498 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG1); in dsi_config_cmd_mode_interleaving()
3544 DSSDBG("DSI HS interleaving(TXBYTECLKHS) HSA %d, HFP %d, HBP %d, BLLP %d\n", in dsi_config_cmd_mode_interleaving()
3548 DSSDBG("DSI LP interleaving(bytes) HSA %d, HFP %d, HBP %d, BLLP %d\n", in dsi_config_cmd_mode_interleaving()
3552 r = dsi_read_reg(dsi, DSI_VM_TIMING4); in dsi_config_cmd_mode_interleaving()
3556 dsi_write_reg(dsi, DSI_VM_TIMING4, r); in dsi_config_cmd_mode_interleaving()
3558 r = dsi_read_reg(dsi, DSI_VM_TIMING5); in dsi_config_cmd_mode_interleaving()
3562 dsi_write_reg(dsi, DSI_VM_TIMING5, r); in dsi_config_cmd_mode_interleaving()
3564 r = dsi_read_reg(dsi, DSI_VM_TIMING6); in dsi_config_cmd_mode_interleaving()
3567 dsi_write_reg(dsi, DSI_VM_TIMING6, r); in dsi_config_cmd_mode_interleaving()
3570 static int dsi_proto_config(struct dsi_data *dsi) in dsi_proto_config() argument
3575 dsi_config_tx_fifo(dsi, DSI_FIFO_SIZE_32, in dsi_proto_config()
3580 dsi_config_rx_fifo(dsi, DSI_FIFO_SIZE_32, in dsi_proto_config()
3586 dsi_set_stop_state_counter(dsi, 0x1000, false, false); in dsi_proto_config()
3587 dsi_set_ta_timeout(dsi, 0x1fff, true, true); in dsi_proto_config()
3588 dsi_set_lp_rx_timeout(dsi, 0x1fff, true, true); in dsi_proto_config()
3589 dsi_set_hs_tx_timeout(dsi, 0x1fff, true, true); in dsi_proto_config()
3591 switch (dsi_get_pixel_size(dsi->pix_fmt)) { in dsi_proto_config()
3606 r = dsi_read_reg(dsi, DSI_CTRL); in dsi_proto_config()
3615 if (!(dsi->data->quirks & DSI_QUIRK_DCS_CMD_CONFIG_VC)) { in dsi_proto_config()
3621 dsi_write_reg(dsi, DSI_CTRL, r); in dsi_proto_config()
3623 dsi_config_vp_num_line_buffers(dsi); in dsi_proto_config()
3625 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) { in dsi_proto_config()
3626 dsi_config_vp_sync_events(dsi); in dsi_proto_config()
3627 dsi_config_blanking_modes(dsi); in dsi_proto_config()
3628 dsi_config_cmd_mode_interleaving(dsi); in dsi_proto_config()
3631 dsi_vc_initial_config(dsi, 0); in dsi_proto_config()
3632 dsi_vc_initial_config(dsi, 1); in dsi_proto_config()
3633 dsi_vc_initial_config(dsi, 2); in dsi_proto_config()
3634 dsi_vc_initial_config(dsi, 3); in dsi_proto_config()
3639 static void dsi_proto_timings(struct dsi_data *dsi) in dsi_proto_timings() argument
3648 int ndl = dsi->num_lanes_used - 1; in dsi_proto_timings()
3651 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG0); in dsi_proto_timings()
3658 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG1); in dsi_proto_timings()
3663 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG2); in dsi_proto_timings()
3669 tclk_post = ns2ddr(dsi, 60) + 26; in dsi_proto_timings()
3680 r = dsi_read_reg(dsi, DSI_CLK_TIMING); in dsi_proto_timings()
3683 dsi_write_reg(dsi, DSI_CLK_TIMING, r); in dsi_proto_timings()
3697 dsi_write_reg(dsi, DSI_VM_TIMING7, r); in dsi_proto_timings()
3702 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) { in dsi_proto_timings()
3704 int hsa = dsi->vm_timings.hsa; in dsi_proto_timings()
3705 int hfp = dsi->vm_timings.hfp; in dsi_proto_timings()
3706 int hbp = dsi->vm_timings.hbp; in dsi_proto_timings()
3707 int vsa = dsi->vm_timings.vsa; in dsi_proto_timings()
3708 int vfp = dsi->vm_timings.vfp; in dsi_proto_timings()
3709 int vbp = dsi->vm_timings.vbp; in dsi_proto_timings()
3710 int window_sync = dsi->vm_timings.window_sync; in dsi_proto_timings()
3712 struct videomode *vm = &dsi->vm; in dsi_proto_timings()
3713 int bpp = dsi_get_pixel_size(dsi->pix_fmt); in dsi_proto_timings()
3716 hsync_end = dsi->vm_timings.trans_mode == OMAP_DSS_DSI_PULSE_MODE; in dsi_proto_timings()
3731 r = dsi_read_reg(dsi, DSI_VM_TIMING1); in dsi_proto_timings()
3735 dsi_write_reg(dsi, DSI_VM_TIMING1, r); in dsi_proto_timings()
3737 r = dsi_read_reg(dsi, DSI_VM_TIMING2); in dsi_proto_timings()
3742 dsi_write_reg(dsi, DSI_VM_TIMING2, r); in dsi_proto_timings()
3744 r = dsi_read_reg(dsi, DSI_VM_TIMING3); in dsi_proto_timings()
3747 dsi_write_reg(dsi, DSI_VM_TIMING3, r); in dsi_proto_timings()
3754 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_configure_pins() local
3772 if (num_pins < 4 || num_pins > dsi->num_lanes_supported * 2 in dsi_configure_pins()
3788 if (dx < 0 || dx >= dsi->num_lanes_supported * 2) in dsi_configure_pins()
3791 if (dy < 0 || dy >= dsi->num_lanes_supported * 2) in dsi_configure_pins()
3811 memcpy(dsi->lanes, lanes, sizeof(dsi->lanes)); in dsi_configure_pins()
3812 dsi->num_lanes_used = num_lanes; in dsi_configure_pins()
3819 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_enable_video_output() local
3820 int bpp = dsi_get_pixel_size(dsi->pix_fmt); in dsi_enable_video_output()
3821 struct omap_dss_device *out = &dsi->output; in dsi_enable_video_output()
3831 r = dsi_display_init_dispc(dsi); in dsi_enable_video_output()
3835 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) { in dsi_enable_video_output()
3836 switch (dsi->pix_fmt) { in dsi_enable_video_output()
3854 dsi_if_enable(dsi, false); in dsi_enable_video_output()
3855 dsi_vc_enable(dsi, channel, false); in dsi_enable_video_output()
3858 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), 1, 4, 4); in dsi_enable_video_output()
3860 word_count = DIV_ROUND_UP(dsi->vm.hactive * bpp, 8); in dsi_enable_video_output()
3862 dsi_vc_write_long_header(dsi, channel, data_type, in dsi_enable_video_output()
3865 dsi_vc_enable(dsi, channel, true); in dsi_enable_video_output()
3866 dsi_if_enable(dsi, true); in dsi_enable_video_output()
3869 r = dss_mgr_enable(&dsi->output); in dsi_enable_video_output()
3876 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) { in dsi_enable_video_output()
3877 dsi_if_enable(dsi, false); in dsi_enable_video_output()
3878 dsi_vc_enable(dsi, channel, false); in dsi_enable_video_output()
3881 dsi_display_uninit_dispc(dsi); in dsi_enable_video_output()
3888 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_disable_video_output() local
3890 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) { in dsi_disable_video_output()
3891 dsi_if_enable(dsi, false); in dsi_disable_video_output()
3892 dsi_vc_enable(dsi, channel, false); in dsi_disable_video_output()
3895 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), 0, 4, 4); in dsi_disable_video_output()
3897 dsi_vc_enable(dsi, channel, true); in dsi_disable_video_output()
3898 dsi_if_enable(dsi, true); in dsi_disable_video_output()
3901 dss_mgr_disable(&dsi->output); in dsi_disable_video_output()
3903 dsi_display_uninit_dispc(dsi); in dsi_disable_video_output()
3906 static void dsi_update_screen_dispc(struct dsi_data *dsi) in dsi_update_screen_dispc() argument
3916 const unsigned channel = dsi->update_channel; in dsi_update_screen_dispc()
3917 const unsigned int line_buf_size = dsi->line_buffer_size; in dsi_update_screen_dispc()
3918 u16 w = dsi->vm.hactive; in dsi_update_screen_dispc()
3919 u16 h = dsi->vm.vactive; in dsi_update_screen_dispc()
3923 dsi_vc_config_source(dsi, channel, DSI_VC_SOURCE_VP); in dsi_update_screen_dispc()
3925 bytespp = dsi_get_pixel_size(dsi->pix_fmt) / 8; in dsi_update_screen_dispc()
3944 dsi_write_reg(dsi, DSI_VC_TE(channel), l); in dsi_update_screen_dispc()
3946 dsi_vc_write_long_header(dsi, channel, MIPI_DSI_DCS_LONG_WRITE, in dsi_update_screen_dispc()
3949 if (dsi->te_enabled) in dsi_update_screen_dispc()
3953 dsi_write_reg(dsi, DSI_VC_TE(channel), l); in dsi_update_screen_dispc()
3961 dispc_disable_sidle(dsi->dss->dispc); in dsi_update_screen_dispc()
3963 dsi_perf_mark_start(dsi); in dsi_update_screen_dispc()
3965 r = schedule_delayed_work(&dsi->framedone_timeout_work, in dsi_update_screen_dispc()
3969 dss_mgr_set_timings(&dsi->output, &dsi->vm); in dsi_update_screen_dispc()
3971 dss_mgr_start_update(&dsi->output); in dsi_update_screen_dispc()
3973 if (dsi->te_enabled) { in dsi_update_screen_dispc()
3976 REG_FLD_MOD(dsi, DSI_TIMING2, 0, 15, 15); /* LP_RX_TO */ in dsi_update_screen_dispc()
3978 dsi_vc_send_bta(dsi, channel); in dsi_update_screen_dispc()
3981 mod_timer(&dsi->te_timer, jiffies + msecs_to_jiffies(250)); in dsi_update_screen_dispc()
3993 static void dsi_handle_framedone(struct dsi_data *dsi, int error) in dsi_handle_framedone() argument
3996 dispc_enable_sidle(dsi->dss->dispc); in dsi_handle_framedone()
3998 if (dsi->te_enabled) { in dsi_handle_framedone()
4000 REG_FLD_MOD(dsi, DSI_TIMING2, 1, 15, 15); /* LP_RX_TO */ in dsi_handle_framedone()
4003 dsi->framedone_callback(error, dsi->framedone_data); in dsi_handle_framedone()
4006 dsi_perf_show(dsi, "DISPC"); in dsi_handle_framedone()
4011 struct dsi_data *dsi = container_of(work, struct dsi_data, in dsi_framedone_timeout_work_callback() local
4018 * DSI */ in dsi_framedone_timeout_work_callback()
4022 dsi_handle_framedone(dsi, -ETIMEDOUT); in dsi_framedone_timeout_work_callback()
4027 struct dsi_data *dsi = data; in dsi_framedone_irq_callback() local
4030 * turns itself off. However, DSI still has the pixels in its buffers, in dsi_framedone_irq_callback()
4034 cancel_delayed_work(&dsi->framedone_timeout_work); in dsi_framedone_irq_callback()
4036 dsi_handle_framedone(dsi, 0); in dsi_framedone_irq_callback()
4042 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_update() local
4045 dsi_perf_mark_setup(dsi); in dsi_update()
4047 dsi->update_channel = channel; in dsi_update()
4049 dsi->framedone_callback = callback; in dsi_update()
4050 dsi->framedone_data = data; in dsi_update()
4052 dw = dsi->vm.hactive; in dsi_update()
4053 dh = dsi->vm.vactive; in dsi_update()
4056 dsi->update_bytes = dw * dh * in dsi_update()
4057 dsi_get_pixel_size(dsi->pix_fmt) / 8; in dsi_update()
4059 dsi_update_screen_dispc(dsi); in dsi_update()
4066 static int dsi_configure_dispc_clocks(struct dsi_data *dsi) in dsi_configure_dispc_clocks() argument
4072 fck = dsi_get_pll_hsdiv_dispc_rate(dsi); in dsi_configure_dispc_clocks()
4074 dispc_cinfo.lck_div = dsi->user_dispc_cinfo.lck_div; in dsi_configure_dispc_clocks()
4075 dispc_cinfo.pck_div = dsi->user_dispc_cinfo.pck_div; in dsi_configure_dispc_clocks()
4077 r = dispc_calc_clock_rates(dsi->dss->dispc, fck, &dispc_cinfo); in dsi_configure_dispc_clocks()
4083 dsi->mgr_config.clock_info = dispc_cinfo; in dsi_configure_dispc_clocks()
4088 static int dsi_display_init_dispc(struct dsi_data *dsi) in dsi_display_init_dispc() argument
4090 enum omap_channel channel = dsi->output.dispc_channel; in dsi_display_init_dispc()
4093 dss_select_lcd_clk_source(dsi->dss, channel, dsi->module_id == 0 ? in dsi_display_init_dispc()
4097 if (dsi->mode == OMAP_DSS_DSI_CMD_MODE) { in dsi_display_init_dispc()
4098 r = dss_mgr_register_framedone_handler(&dsi->output, in dsi_display_init_dispc()
4099 dsi_framedone_irq_callback, dsi); in dsi_display_init_dispc()
4105 dsi->mgr_config.stallmode = true; in dsi_display_init_dispc()
4106 dsi->mgr_config.fifohandcheck = true; in dsi_display_init_dispc()
4108 dsi->mgr_config.stallmode = false; in dsi_display_init_dispc()
4109 dsi->mgr_config.fifohandcheck = false; in dsi_display_init_dispc()
4116 dsi->vm.flags &= ~DISPLAY_FLAGS_INTERLACED; in dsi_display_init_dispc()
4117 dsi->vm.flags &= ~DISPLAY_FLAGS_HSYNC_LOW; in dsi_display_init_dispc()
4118 dsi->vm.flags |= DISPLAY_FLAGS_HSYNC_HIGH; in dsi_display_init_dispc()
4119 dsi->vm.flags &= ~DISPLAY_FLAGS_VSYNC_LOW; in dsi_display_init_dispc()
4120 dsi->vm.flags |= DISPLAY_FLAGS_VSYNC_HIGH; in dsi_display_init_dispc()
4121 dsi->vm.flags &= ~DISPLAY_FLAGS_PIXDATA_NEGEDGE; in dsi_display_init_dispc()
4122 dsi->vm.flags |= DISPLAY_FLAGS_PIXDATA_POSEDGE; in dsi_display_init_dispc()
4123 dsi->vm.flags &= ~DISPLAY_FLAGS_DE_LOW; in dsi_display_init_dispc()
4124 dsi->vm.flags |= DISPLAY_FLAGS_DE_HIGH; in dsi_display_init_dispc()
4125 dsi->vm.flags &= ~DISPLAY_FLAGS_SYNC_POSEDGE; in dsi_display_init_dispc()
4126 dsi->vm.flags |= DISPLAY_FLAGS_SYNC_NEGEDGE; in dsi_display_init_dispc()
4128 dss_mgr_set_timings(&dsi->output, &dsi->vm); in dsi_display_init_dispc()
4130 r = dsi_configure_dispc_clocks(dsi); in dsi_display_init_dispc()
4134 dsi->mgr_config.io_pad_mode = DSS_IO_PAD_MODE_BYPASS; in dsi_display_init_dispc()
4135 dsi->mgr_config.video_port_width = in dsi_display_init_dispc()
4136 dsi_get_pixel_size(dsi->pix_fmt); in dsi_display_init_dispc()
4137 dsi->mgr_config.lcden_sig_polarity = 0; in dsi_display_init_dispc()
4139 dss_mgr_set_lcd_config(&dsi->output, &dsi->mgr_config); in dsi_display_init_dispc()
4143 if (dsi->mode == OMAP_DSS_DSI_CMD_MODE) in dsi_display_init_dispc()
4144 dss_mgr_unregister_framedone_handler(&dsi->output, in dsi_display_init_dispc()
4145 dsi_framedone_irq_callback, dsi); in dsi_display_init_dispc()
4147 dss_select_lcd_clk_source(dsi->dss, channel, DSS_CLK_SRC_FCK); in dsi_display_init_dispc()
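dsi_display_init_dispc() normalizes the video-timing flags before handing them to the overlay manager: for each polarity pair it clears one flag and sets the other, so the DISPC side always runs with known sync, pixel-edge and DE polarities regardless of what the panel reported. The clear-then-set idiom, with hypothetical flag bits standing in for the DISPLAY_FLAGS_* pairs:

    #include <stdio.h>

    /* Hypothetical flag bits; they only mirror the pairing, not real values. */
    #define FLAG_HSYNC_LOW   (1u << 0)
    #define FLAG_HSYNC_HIGH  (1u << 1)
    #define FLAG_VSYNC_LOW   (1u << 2)
    #define FLAG_VSYNC_HIGH  (1u << 3)

    /* Force each polarity pair into a single known state: clear one bit of
     * the pair, set the other, whatever the input was. */
    static unsigned int normalize(unsigned int flags)
    {
        flags &= ~FLAG_HSYNC_LOW;
        flags |= FLAG_HSYNC_HIGH;
        flags &= ~FLAG_VSYNC_LOW;
        flags |= FLAG_VSYNC_HIGH;
        return flags;
    }

    int main(void)
    {
        unsigned int flags = FLAG_HSYNC_LOW | FLAG_VSYNC_HIGH;
        printf("0x%x -> 0x%x\n", flags, normalize(flags));
        return 0;
    }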
4151 static void dsi_display_uninit_dispc(struct dsi_data *dsi) in dsi_display_uninit_dispc() argument
4153 enum omap_channel channel = dsi->output.dispc_channel; in dsi_display_uninit_dispc()
4155 if (dsi->mode == OMAP_DSS_DSI_CMD_MODE) in dsi_display_uninit_dispc()
4156 dss_mgr_unregister_framedone_handler(&dsi->output, in dsi_display_uninit_dispc()
4157 dsi_framedone_irq_callback, dsi); in dsi_display_uninit_dispc()
4159 dss_select_lcd_clk_source(dsi->dss, channel, DSS_CLK_SRC_FCK); in dsi_display_uninit_dispc()
4162 static int dsi_configure_dsi_clocks(struct dsi_data *dsi) in dsi_configure_dsi_clocks() argument
4167 cinfo = dsi->user_dsi_cinfo; in dsi_configure_dsi_clocks()
4169 r = dss_pll_set_config(&dsi->pll, &cinfo); in dsi_configure_dsi_clocks()
4171 DSSERR("Failed to set dsi clocks\n"); in dsi_configure_dsi_clocks()
4178 static int dsi_display_init_dsi(struct dsi_data *dsi) in dsi_display_init_dsi() argument
4182 r = dss_pll_enable(&dsi->pll); in dsi_display_init_dsi()
4186 r = dsi_configure_dsi_clocks(dsi); in dsi_display_init_dsi()
4190 dss_select_dsi_clk_source(dsi->dss, dsi->module_id, in dsi_display_init_dsi()
4191 dsi->module_id == 0 ? in dsi_display_init_dsi()
4196 if (!dsi->vdds_dsi_enabled) { in dsi_display_init_dsi()
4197 r = regulator_enable(dsi->vdds_dsi_reg); in dsi_display_init_dsi()
4201 dsi->vdds_dsi_enabled = true; in dsi_display_init_dsi()
4204 r = dsi_cio_init(dsi); in dsi_display_init_dsi()
4208 _dsi_print_reset_status(dsi); in dsi_display_init_dsi()
4210 dsi_proto_timings(dsi); in dsi_display_init_dsi()
4211 dsi_set_lp_clk_divisor(dsi); in dsi_display_init_dsi()
4214 _dsi_print_reset_status(dsi); in dsi_display_init_dsi()
4216 r = dsi_proto_config(dsi); in dsi_display_init_dsi()
4221 dsi_vc_enable(dsi, 0, 1); in dsi_display_init_dsi()
4222 dsi_vc_enable(dsi, 1, 1); in dsi_display_init_dsi()
4223 dsi_vc_enable(dsi, 2, 1); in dsi_display_init_dsi()
4224 dsi_vc_enable(dsi, 3, 1); in dsi_display_init_dsi()
4225 dsi_if_enable(dsi, 1); in dsi_display_init_dsi()
4226 dsi_force_tx_stop_mode_io(dsi); in dsi_display_init_dsi()
4230 dsi_cio_uninit(dsi); in dsi_display_init_dsi()
4232 regulator_disable(dsi->vdds_dsi_reg); in dsi_display_init_dsi()
4233 dsi->vdds_dsi_enabled = false; in dsi_display_init_dsi()
4235 dss_select_dsi_clk_source(dsi->dss, dsi->module_id, DSS_CLK_SRC_FCK); in dsi_display_init_dsi()
4237 dss_pll_disable(&dsi->pll); in dsi_display_init_dsi()
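dsi_display_init_dsi() brings the link up in a strict order: enable the DSI PLL, program its configuration, switch the DSS clock mux to the PLL HSDIV outputs, enable the VDDS_DSI regulator if needed, initialize the complex I/O (lanes), program the protocol-engine timings and LP clock divisor, configure the protocol engine, enable the four virtual channels and the interface, and finally force the lanes out of stop mode. Each failure label unwinds only the steps that already completed. A compile-checkable sketch of that goto-based unwinding shape, with hypothetical step functions and a reduced set of steps:

    #include <stdio.h>

    /* Hypothetical step functions; each init step returns 0 on success. */
    static int  pll_enable(void)   { puts("pll enable");          return 0; }
    static void pll_disable(void)  { puts("pll disable"); }
    static int  clk_select(void)   { puts("clk source -> HSDIV"); return 0; }
    static void clk_deselect(void) { puts("clk source -> FCK"); }
    static int  cio_init(void)     { puts("cio init");            return 0; }
    static void cio_uninit(void)   { puts("cio uninit"); }
    static int  proto_config(void) { puts("proto config");        return 0; }

    /* Same unwind shape as the error path above: each label undoes exactly
     * what had been set up before the failing step. */
    static int link_init(void)
    {
        int r;

        r = pll_enable();
        if (r)
            goto err0;
        r = clk_select();
        if (r)
            goto err1;
        r = cio_init();
        if (r)
            goto err2;
        r = proto_config();
        if (r)
            goto err3;
        return 0;

    err3:
        cio_uninit();
    err2:
        clk_deselect();
    err1:
        pll_disable();
    err0:
        return r;
    }

    int main(void)
    {
        return link_init();
    }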
4242 static void dsi_display_uninit_dsi(struct dsi_data *dsi, bool disconnect_lanes, in dsi_display_uninit_dsi() argument
4245 if (enter_ulps && !dsi->ulps_enabled) in dsi_display_uninit_dsi()
4246 dsi_enter_ulps(dsi); in dsi_display_uninit_dsi()
4249 dsi_if_enable(dsi, 0); in dsi_display_uninit_dsi()
4250 dsi_vc_enable(dsi, 0, 0); in dsi_display_uninit_dsi()
4251 dsi_vc_enable(dsi, 1, 0); in dsi_display_uninit_dsi()
4252 dsi_vc_enable(dsi, 2, 0); in dsi_display_uninit_dsi()
4253 dsi_vc_enable(dsi, 3, 0); in dsi_display_uninit_dsi()
4255 dss_select_dsi_clk_source(dsi->dss, dsi->module_id, DSS_CLK_SRC_FCK); in dsi_display_uninit_dsi()
4256 dsi_cio_uninit(dsi); in dsi_display_uninit_dsi()
4257 dss_pll_disable(&dsi->pll); in dsi_display_uninit_dsi()
4260 regulator_disable(dsi->vdds_dsi_reg); in dsi_display_uninit_dsi()
4261 dsi->vdds_dsi_enabled = false; in dsi_display_uninit_dsi()
4267 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_display_enable() local
4272 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_display_enable()
4274 mutex_lock(&dsi->lock); in dsi_display_enable()
4276 r = dsi_runtime_get(dsi); in dsi_display_enable()
4280 _dsi_initialize_irq(dsi); in dsi_display_enable()
4282 r = dsi_display_init_dsi(dsi); in dsi_display_enable()
4286 mutex_unlock(&dsi->lock); in dsi_display_enable()
4291 dsi_runtime_put(dsi); in dsi_display_enable()
4293 mutex_unlock(&dsi->lock); in dsi_display_enable()
4301 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_display_disable() local
4305 WARN_ON(!dsi_bus_is_locked(dsi)); in dsi_display_disable()
4307 mutex_lock(&dsi->lock); in dsi_display_disable()
4309 dsi_sync_vc(dsi, 0); in dsi_display_disable()
4310 dsi_sync_vc(dsi, 1); in dsi_display_disable()
4311 dsi_sync_vc(dsi, 2); in dsi_display_disable()
4312 dsi_sync_vc(dsi, 3); in dsi_display_disable()
4314 dsi_display_uninit_dsi(dsi, disconnect_lanes, enter_ulps); in dsi_display_disable()
4316 dsi_runtime_put(dsi); in dsi_display_disable()
4318 mutex_unlock(&dsi->lock); in dsi_display_disable()
4323 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_enable_te() local
4325 dsi->te_enabled = enable; in dsi_enable_te()
4444 return dispc_div_calc(ctx->dsi->dss->dispc, dispc, in dsi_cm_calc_hsdiv_cb()
4453 struct dsi_data *dsi = ctx->dsi; in dsi_cm_calc_pll_cb() local
4461 dsi->data->max_fck_freq, in dsi_cm_calc_pll_cb()
4465 static bool dsi_cm_calc(struct dsi_data *dsi, in dsi_cm_calc() argument
4474 clkin = clk_get_rate(dsi->pll.clkin); in dsi_cm_calc()
4476 ndl = dsi->num_lanes_used - 1; in dsi_cm_calc()
4489 ctx->dsi = dsi; in dsi_cm_calc()
4490 ctx->pll = &dsi->pll; in dsi_cm_calc()
4506 struct dsi_data *dsi = ctx->dsi; in dsi_vm_calc_blanking() local
4509 int ndl = dsi->num_lanes_used - 1; in dsi_vm_calc_blanking()
4543 * When there are no line buffers, DISPC and DSI must have the in dsi_vm_calc_blanking()
4544 * same tput. Otherwise DISPC tput needs to be higher than DSI's. in dsi_vm_calc_blanking()
4546 if (dsi->line_buffer_size < xres * bitspp / 8) { in dsi_vm_calc_blanking()
4554 /* DSI tput must be over the min requirement */ in dsi_vm_calc_blanking()
4558 /* When non-burst mode, DSI tput must be below max requirement. */ in dsi_vm_calc_blanking()
4575 /* DSI htot to match the panel's nominal pck */ in dsi_vm_calc_blanking()
4582 /* total DSI blanking needed to achieve panel's TL */ in dsi_vm_calc_blanking()
4585 /* DISPC htot to match the DSI TL */ in dsi_vm_calc_blanking()
4588 /* verify that the DSI and DISPC TLs are the same */ in dsi_vm_calc_blanking()
4594 /* setup DSI videomode */ in dsi_vm_calc_blanking()
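The blanking calculation above reasons in terms of throughput: when the line buffer cannot hold a full line, DISPC and the DSI link must run at the same throughput (otherwise DISPC must be faster), and in non-burst video mode the DSI throughput has to sit between the panel's minimum and maximum requirements. As a rough model, assumed here for illustration rather than lifted from the driver, each data lane carries one byte per byte clock, so link throughput in bits/s is byteclk x lanes x 8:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed DSI video-mode throughput model: one byte per lane per
     * byte clock, i.e. byteclk * ndl * 8 bits per second. */
    static uint64_t dsi_tput(uint64_t byteclk, int ndl)
    {
        return byteclk * ndl * 8;
    }

    /* Minimum throughput the panel needs: pixel clock times bits per pixel. */
    static uint64_t panel_tput(uint64_t pck, int bitspp)
    {
        return (uint64_t)pck * bitspp;
    }

    int main(void)
    {
        uint64_t link = dsi_tput(112500000ULL, 4);     /* 4 lanes, 112.5 MHz byteclk */
        uint64_t need = panel_tput(74250000ULL, 24);   /* e.g. 74.25 MHz at RGB888 */

        printf("link %llu bit/s, panel needs %llu bit/s -> %s\n",
               (unsigned long long)link, (unsigned long long)need,
               link >= need ? "ok" : "too slow");
        return 0;
    }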
4718 print_dsi_vm("dsi ", &ctx->dsi_vm); in dsi_vm_calc_dispc_cb()
4745 return dispc_div_calc(ctx->dsi->dss->dispc, dispc, in dsi_vm_calc_hsdiv_cb()
4754 struct dsi_data *dsi = ctx->dsi; in dsi_vm_calc_pll_cb() local
4762 dsi->data->max_fck_freq, in dsi_vm_calc_pll_cb()
4766 static bool dsi_vm_calc(struct dsi_data *dsi, in dsi_vm_calc() argument
4774 int ndl = dsi->num_lanes_used - 1; in dsi_vm_calc()
4778 clkin = clk_get_rate(dsi->pll.clkin); in dsi_vm_calc()
4781 ctx->dsi = dsi; in dsi_vm_calc()
4782 ctx->pll = &dsi->pll; in dsi_vm_calc()
4811 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_set_config() local
4816 mutex_lock(&dsi->lock); in dsi_set_config()
4818 dsi->pix_fmt = config->pixel_format; in dsi_set_config()
4819 dsi->mode = config->mode; in dsi_set_config()
4822 ok = dsi_vm_calc(dsi, config, &ctx); in dsi_set_config()
4824 ok = dsi_cm_calc(dsi, config, &ctx); in dsi_set_config()
4827 DSSERR("failed to find suitable DSI clock settings\n"); in dsi_set_config()
4832 dsi_pll_calc_dsi_fck(dsi, &ctx.dsi_cinfo); in dsi_set_config()
4835 config->lp_clk_min, config->lp_clk_max, &dsi->user_lp_cinfo); in dsi_set_config()
4837 DSSERR("failed to find suitable DSI LP clock settings\n"); in dsi_set_config()
4841 dsi->user_dsi_cinfo = ctx.dsi_cinfo; in dsi_set_config()
4842 dsi->user_dispc_cinfo = ctx.dispc_cinfo; in dsi_set_config()
4844 dsi->vm = ctx.vm; in dsi_set_config()
4845 dsi->vm_timings = ctx.dsi_vm; in dsi_set_config()
4847 mutex_unlock(&dsi->lock); in dsi_set_config()
4851 mutex_unlock(&dsi->lock); in dsi_set_config()
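dsi_set_config() appears to pick the clock search from the requested operating mode: video mode goes through dsi_vm_calc(), command mode through dsi_cm_calc(), and only if one of them finds workable settings are the LP clock divisor and the cached clock infos committed. A small sketch of that selection; the enum and helper names below are illustrative stand-ins:

    #include <stdbool.h>
    #include <stdio.h>

    enum dsi_mode { MODE_CMD, MODE_VIDEO };   /* stand-ins for the driver enum */

    static bool vm_calc(void) { puts("video-mode clock search");   return true; }
    static bool cm_calc(void) { puts("command-mode clock search"); return true; }

    /* Choose the calculation path from the mode and fail the config if
     * neither search finds suitable clock settings. */
    static int set_config(enum dsi_mode mode)
    {
        bool ok = (mode == MODE_VIDEO) ? vm_calc() : cm_calc();

        if (!ok) {
            fprintf(stderr, "no suitable DSI clock settings\n");
            return -1;
        }
        /* ...commit PLL, DISPC and LP-clock settings here... */
        return 0;
    }

    int main(void)
    {
        return set_config(MODE_CMD);
    }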
4857 * Return a hardcoded channel for the DSI output. This should work for
4862 static enum omap_channel dsi_get_channel(struct dsi_data *dsi) in dsi_get_channel() argument
4864 switch (dsi->data->model) { in dsi_get_channel()
4869 switch (dsi->module_id) { in dsi_get_channel()
4880 switch (dsi->module_id) { in dsi_get_channel()
4898 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_request_vc() local
4901 for (i = 0; i < ARRAY_SIZE(dsi->vc); i++) { in dsi_request_vc()
4902 if (!dsi->vc[i].dssdev) { in dsi_request_vc()
4903 dsi->vc[i].dssdev = dssdev; in dsi_request_vc()
4915 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_set_vc_id() local
4927 if (dsi->vc[channel].dssdev != dssdev) { in dsi_set_vc_id()
4933 dsi->vc[channel].vc_id = vc_id; in dsi_set_vc_id()
4940 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_release_vc() local
4943 dsi->vc[channel].dssdev == dssdev) { in dsi_release_vc()
4944 dsi->vc[channel].dssdev = NULL; in dsi_release_vc()
4945 dsi->vc[channel].vc_id = 0; in dsi_release_vc()
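The virtual-channel helpers treat dsi->vc[] as a small ownership table: dsi_request_vc() hands out the first slot whose dssdev is NULL, dsi_set_vc_id() only lets the owning device change the VC id, and dsi_release_vc() clears both fields. The same first-free-slot pattern in plain C, with a generic owner pointer standing in for the omap_dss_device:

    #include <stdio.h>

    #define NUM_VC 4

    struct vc_slot {
        void *owner;   /* stands in for the owning display device */
        int   vc_id;
    };

    static struct vc_slot vc[NUM_VC];

    /* Hand out the first unowned slot; return its index, or -1 if all busy. */
    static int request_vc(void *owner)
    {
        for (int i = 0; i < NUM_VC; i++) {
            if (!vc[i].owner) {
                vc[i].owner = owner;
                return i;
            }
        }
        return -1;
    }

    /* Only the owner may release; clearing both fields frees the slot. */
    static void release_vc(void *owner, int channel)
    {
        if (channel >= 0 && channel < NUM_VC && vc[channel].owner == owner) {
            vc[channel].owner = NULL;
            vc[channel].vc_id = 0;
        }
    }

    int main(void)
    {
        int dev;   /* token owner */
        int ch = request_vc(&dev);

        printf("got channel %d\n", ch);
        release_vc(&dev, ch);
        return 0;
    }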
4950 static int dsi_get_clocks(struct dsi_data *dsi) in dsi_get_clocks() argument
4954 clk = devm_clk_get(dsi->dev, "fck"); in dsi_get_clocks()
4960 dsi->dss_clk = clk; in dsi_get_clocks()
4968 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_connect() local
4971 r = dsi_regulator_init(dsi); in dsi_connect()
4975 r = dss_mgr_connect(&dsi->output, dssdev); in dsi_connect()
4983 dss_mgr_disconnect(&dsi->output, dssdev); in dsi_connect()
4993 struct dsi_data *dsi = to_dsi_data(dssdev); in dsi_disconnect() local
5002 dss_mgr_disconnect(&dsi->output, dssdev); in dsi_disconnect()
5044 static void dsi_init_output(struct dsi_data *dsi) in dsi_init_output() argument
5046 struct omap_dss_device *out = &dsi->output; in dsi_init_output()
5048 out->dev = dsi->dev; in dsi_init_output()
5049 out->id = dsi->module_id == 0 ? in dsi_init_output()
5053 out->name = dsi->module_id == 0 ? "dsi.0" : "dsi.1"; in dsi_init_output()
5054 out->dispc_channel = dsi_get_channel(dsi); in dsi_init_output()
5055 out->ops.dsi = &dsi_ops; in dsi_init_output()
5061 static void dsi_uninit_output(struct dsi_data *dsi) in dsi_uninit_output() argument
5063 struct omap_dss_device *out = &dsi->output; in dsi_uninit_output()
5068 static int dsi_probe_of(struct dsi_data *dsi) in dsi_probe_of() argument
5070 struct device_node *node = dsi->dev->of_node; in dsi_probe_of()
5084 dev_err(dsi->dev, "failed to find lane data\n"); in dsi_probe_of()
5092 num_pins > dsi->num_lanes_supported * 2) { in dsi_probe_of()
5093 dev_err(dsi->dev, "bad number of lanes\n"); in dsi_probe_of()
5100 dev_err(dsi->dev, "failed to read lane data\n"); in dsi_probe_of()
5108 r = dsi_configure_pins(&dsi->output, &pin_cfg); in dsi_probe_of()
5110 dev_err(dsi->dev, "failed to configure pins"); in dsi_probe_of()
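dsi_probe_of() reads the lane wiring from the device-tree node and rejects implausible pin counts; the visible check is the upper bound of twice the supported lane count (each lane, clock or data, uses two pins). The sketch below adds the further assumption that the count must be even and cover at least the clock lane plus one data lane, which is not shown in the listed lines:

    #include <stdbool.h>
    #include <stdio.h>

    /* Assumed rule: two pins per lane, at least clock + one data lane,
     * capped at num_lanes_supported lanes in total. */
    static bool lane_pins_valid(int num_pins, int num_lanes_supported)
    {
        return num_pins >= 4 &&
               num_pins % 2 == 0 &&
               num_pins <= num_lanes_supported * 2;
    }

    int main(void)
    {
        printf("%d\n", lane_pins_valid(6, 3));   /* clock + 2 data lanes: ok */
        printf("%d\n", lane_pins_valid(8, 3));   /* exceeds 3 lanes: rejected */
        return 0;
    }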
5210 static int dsi_init_pll_data(struct dss_device *dss, struct dsi_data *dsi) in dsi_init_pll_data() argument
5212 struct dss_pll *pll = &dsi->pll; in dsi_init_pll_data()
5216 clk = devm_clk_get(dsi->dev, "sys_clk"); in dsi_init_pll_data()
5222 pll->name = dsi->module_id == 0 ? "dsi0" : "dsi1"; in dsi_init_pll_data()
5223 pll->id = dsi->module_id == 0 ? DSS_PLL_DSI1 : DSS_PLL_DSI2; in dsi_init_pll_data()
5225 pll->base = dsi->pll_base; in dsi_init_pll_data()
5226 pll->hw = dsi->data->pll_hw; in dsi_init_pll_data()
5290 { .compatible = "ti,omap3-dsi", .data = &dsi_of_data_omap36xx, },
5291 { .compatible = "ti,omap4-dsi", .data = &dsi_of_data_omap4, },
5292 { .compatible = "ti,omap5-dsi", .data = &dsi_of_data_omap5, },
5310 struct dsi_data *dsi; in dsi_bind() local
5314 dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL); in dsi_bind()
5315 if (!dsi) in dsi_bind()
5318 dsi->dss = dss; in dsi_bind()
5319 dsi->dev = dev; in dsi_bind()
5320 dev_set_drvdata(dev, dsi); in dsi_bind()
5322 spin_lock_init(&dsi->irq_lock); in dsi_bind()
5323 spin_lock_init(&dsi->errors_lock); in dsi_bind()
5324 dsi->errors = 0; in dsi_bind()
5327 spin_lock_init(&dsi->irq_stats_lock); in dsi_bind()
5328 dsi->irq_stats.last_reset = jiffies; in dsi_bind()
5331 mutex_init(&dsi->lock); in dsi_bind()
5332 sema_init(&dsi->bus_lock, 1); in dsi_bind()
5334 INIT_DEFERRABLE_WORK(&dsi->framedone_timeout_work, in dsi_bind()
5338 timer_setup(&dsi->te_timer, dsi_te_timeout, 0); in dsi_bind()
5342 dsi->proto_base = devm_ioremap_resource(dev, dsi_mem); in dsi_bind()
5343 if (IS_ERR(dsi->proto_base)) in dsi_bind()
5344 return PTR_ERR(dsi->proto_base); in dsi_bind()
5347 dsi->phy_base = devm_ioremap_resource(dev, res); in dsi_bind()
5348 if (IS_ERR(dsi->phy_base)) in dsi_bind()
5349 return PTR_ERR(dsi->phy_base); in dsi_bind()
5352 dsi->pll_base = devm_ioremap_resource(dev, res); in dsi_bind()
5353 if (IS_ERR(dsi->pll_base)) in dsi_bind()
5354 return PTR_ERR(dsi->pll_base); in dsi_bind()
5356 dsi->irq = platform_get_irq(pdev, 0); in dsi_bind()
5357 if (dsi->irq < 0) { in dsi_bind()
5362 r = devm_request_irq(dev, dsi->irq, omap_dsi_irq_handler, in dsi_bind()
5363 IRQF_SHARED, dev_name(dev), dsi); in dsi_bind()
5371 dsi->data = soc->data; in dsi_bind()
5373 dsi->data = of_match_node(dsi_of_match, dev->of_node)->data; in dsi_bind()
5375 d = dsi->data->modules; in dsi_bind()
5380 DSSERR("unsupported DSI module\n"); in dsi_bind()
5384 dsi->module_id = d->id; in dsi_bind()
5386 if (dsi->data->model == DSI_MODEL_OMAP4 || in dsi_bind()
5387 dsi->data->model == DSI_MODEL_OMAP5) { in dsi_bind()
5395 dsi->data->model == DSI_MODEL_OMAP4 ? in dsi_bind()
5400 dsi->syscon = syscon_node_to_regmap(np); in dsi_bind()
5404 /* DSI VCs initialization */ in dsi_bind()
5405 for (i = 0; i < ARRAY_SIZE(dsi->vc); i++) { in dsi_bind()
5406 dsi->vc[i].source = DSI_VC_SOURCE_L4; in dsi_bind()
5407 dsi->vc[i].dssdev = NULL; in dsi_bind()
5408 dsi->vc[i].vc_id = 0; in dsi_bind()
5411 r = dsi_get_clocks(dsi); in dsi_bind()
5415 dsi_init_pll_data(dss, dsi); in dsi_bind()
5419 r = dsi_runtime_get(dsi); in dsi_bind()
5423 rev = dsi_read_reg(dsi, DSI_REVISION); in dsi_bind()
5424 dev_dbg(dev, "OMAP DSI rev %d.%d\n", in dsi_bind()
5427 /* DSI on OMAP3 doesn't have register DSI_GNQ, set number in dsi_bind()
5429 if (dsi->data->quirks & DSI_QUIRK_GNQ) in dsi_bind()
5431 dsi->num_lanes_supported = 1 + REG_GET(dsi, DSI_GNQ, 11, 9); in dsi_bind()
5433 dsi->num_lanes_supported = 3; in dsi_bind()
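On parts with the DSI_GNQ register, the supported lane count is read from a three-bit field (bits 11..9) and incremented by one, while OMAP3, which lacks DSI_GNQ, is hard-coded to three lanes. The field extraction written out without the register macros, using the same start/end bit convention as the listing:

    #include <stdint.h>
    #include <stdio.h>

    /* Extract bits [start..end] (start is the high bit) of a register value,
     * mirroring what the field-access macros do for DSI_GNQ bits 11..9. */
    static uint32_t fld_get(uint32_t val, int start, int end)
    {
        return (val >> end) & ((1u << (start - end + 1)) - 1);
    }

    int main(void)
    {
        uint32_t gnq = 0x00000400;   /* example: field value 2 -> 3 lanes */
        printf("num_lanes_supported = %u\n", 1 + fld_get(gnq, 11, 9));
        return 0;
    }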
5435 dsi->line_buffer_size = dsi_get_line_buf_size(dsi); in dsi_bind()
5437 dsi_init_output(dsi); in dsi_bind()
5439 r = dsi_probe_of(dsi); in dsi_bind()
5441 DSSERR("Invalid DSI DT data\n"); in dsi_bind()
5447 DSSERR("Failed to populate DSI child devices: %d\n", r); in dsi_bind()
5449 dsi_runtime_put(dsi); in dsi_bind()
5451 if (dsi->module_id == 0) in dsi_bind()
5452 dsi->debugfs.regs = dss_debugfs_create_file(dss, "dsi1_regs", in dsi_bind()
5454 &dsi); in dsi_bind()
5456 dsi->debugfs.regs = dss_debugfs_create_file(dss, "dsi2_regs", in dsi_bind()
5458 &dsi); in dsi_bind()
5460 if (dsi->module_id == 0) in dsi_bind()
5461 dsi->debugfs.irqs = dss_debugfs_create_file(dss, "dsi1_irqs", in dsi_bind()
5463 &dsi); in dsi_bind()
5465 dsi->debugfs.irqs = dss_debugfs_create_file(dss, "dsi2_irqs", in dsi_bind()
5467 &dsi); in dsi_bind()
5473 dsi_uninit_output(dsi); in dsi_bind()
5474 dsi_runtime_put(dsi); in dsi_bind()
5483 struct dsi_data *dsi = dev_get_drvdata(dev); in dsi_unbind() local
5485 dss_debugfs_remove_file(dsi->debugfs.irqs); in dsi_unbind()
5486 dss_debugfs_remove_file(dsi->debugfs.regs); in dsi_unbind()
5490 WARN_ON(dsi->scp_clk_refcount > 0); in dsi_unbind()
5492 dss_pll_unregister(&dsi->pll); in dsi_unbind()
5494 dsi_uninit_output(dsi); in dsi_unbind()
5498 if (dsi->vdds_dsi_reg != NULL && dsi->vdds_dsi_enabled) { in dsi_unbind()
5499 regulator_disable(dsi->vdds_dsi_reg); in dsi_unbind()
5500 dsi->vdds_dsi_enabled = false; in dsi_unbind()
5522 struct dsi_data *dsi = dev_get_drvdata(dev); in dsi_runtime_suspend() local
5524 dsi->is_enabled = false; in dsi_runtime_suspend()
5527 /* wait for current handler to finish before turning the DSI off */ in dsi_runtime_suspend()
5528 synchronize_irq(dsi->irq); in dsi_runtime_suspend()
5530 dispc_runtime_put(dsi->dss->dispc); in dsi_runtime_suspend()
5537 struct dsi_data *dsi = dev_get_drvdata(dev); in dsi_runtime_resume() local
5540 r = dispc_runtime_get(dsi->dss->dispc); in dsi_runtime_resume()
5544 dsi->is_enabled = true; in dsi_runtime_resume()