/*
 * Copyright (c) 2021-2023 HPMicro
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */


#ifndef HPM_DDRCTL_H
#define HPM_DDRCTL_H

#include <stdint.h> /* uint32_t/uint8_t register widths used below */

/* The __RW/__R register-attribute qualifiers used below are expected to be
 * provided by the SDK's common header (assumed to be included before this file). */

typedef struct {
    __RW uint32_t MSTR;                        /* 0x0: Description: Master Register */
    __R  uint32_t STAT;                        /* 0x4: Description: Operating Mode Status Register */
    __R  uint8_t  RESERVED0[8];                /* 0x8 - 0xF: Reserved */
    __RW uint32_t MRCTRL0;                     /* 0x10: Description: Mode Register Read/Write Control Register 0 */
    __RW uint32_t MRCTRL1;                     /* 0x14: Description: Mode Register Read/Write Control Register 1 */
    __R  uint32_t MRSTAT;                      /* 0x18: Description: Mode Register Read/Write Status Register */
    __R  uint8_t  RESERVED1[20];               /* 0x1C - 0x2F: Reserved */
    __RW uint32_t PWRCTL;                      /* 0x30: Description: Low Power Control Register */
    __RW uint32_t PWRTMG;                      /* 0x34: Description: Low Power Timing Register */
    __RW uint32_t HWLPCTL;                     /* 0x38: Description: Hardware Low Power Control Register */
    __R  uint8_t  RESERVED2[20];               /* 0x3C - 0x4F: Reserved */
    __RW uint32_t RFSHCTL0;                    /* 0x50: Description: Refresh Control Register 0 */
    __RW uint32_t RFSHCTL1;                    /* 0x54: Description: Refresh Control Register 1 */
    __R  uint8_t  RESERVED3[8];                /* 0x58 - 0x5F: Reserved */
    __RW uint32_t RFSHCTL3;                    /* 0x60: Description: Refresh Control Register 3 */
    __RW uint32_t RFSHTMG;                     /* 0x64: Description: Refresh Timing Register */
    __R  uint8_t  RESERVED4[60];               /* 0x68 - 0xA3: Reserved */
    __R  uint32_t ECCUADDR0;                   /* 0xA4: Description: ECC Uncorrected Error Address Register 0 */
    __R  uint8_t  RESERVED5[24];               /* 0xA8 - 0xBF: Reserved */
    __RW uint32_t CRCPARCTL0;                  /* 0xC0: Description: CRC Parity Control Register 0 */
    __R  uint8_t  RESERVED6[8];                /* 0xC4 - 0xCB: Reserved */
    __R  uint32_t CRCPARSTAT;                  /* 0xCC: Description: CRC Parity Status Register */
    __RW uint32_t INIT0;                       /* 0xD0: Description: SDRAM Initialization Register 0 */
    __RW uint32_t INIT1;                       /* 0xD4: Description: SDRAM Initialization Register 1 */
    __R  uint8_t  RESERVED7[4];                /* 0xD8 - 0xDB: Reserved */
    __RW uint32_t INIT3;                       /* 0xDC: Description: SDRAM Initialization Register 3 */
    __RW uint32_t INIT4;                       /* 0xE0: Description: SDRAM Initialization Register 4 */
    __RW uint32_t INIT5;                       /* 0xE4: Description: SDRAM Initialization Register 5 */
    __R  uint8_t  RESERVED8[8];                /* 0xE8 - 0xEF: Reserved */
    __RW uint32_t DIMMCTL;                     /* 0xF0: Description: DIMM Control Register */
    __RW uint32_t RANKCTL;                     /* 0xF4: Description: Rank Control Register */
    __R  uint8_t  RESERVED9[8];                /* 0xF8 - 0xFF: Reserved */
    __RW uint32_t DRAMTMG0;                    /* 0x100: Description: SDRAM Timing Register 0 */
    __RW uint32_t DRAMTMG1;                    /* 0x104: Description: SDRAM Timing Register 1 */
    __RW uint32_t DRAMTMG2;                    /* 0x108: Description: SDRAM Timing Register 2 */
    __RW uint32_t DRAMTMG3;                    /* 0x10C: Description: SDRAM Timing Register 3 */
    __RW uint32_t DRAMTMG4;                    /* 0x110: Description: SDRAM Timing Register 4 */
    __RW uint32_t DRAMTMG5;                    /* 0x114: Description: SDRAM Timing Register 5 */
    __R  uint8_t  RESERVED10[8];               /* 0x118 - 0x11F: Reserved */
    __RW uint32_t DRAMTMG8;                    /* 0x120: Description: SDRAM Timing Register 8 */
    __R  uint8_t  RESERVED11[92];              /* 0x124 - 0x17F: Reserved */
    __RW uint32_t ZQCTL0;                      /* 0x180: Description: ZQ Control Register 0 */
    __RW uint32_t ZQCTL1;                      /* 0x184: Description: ZQ Control Register 1 */
    __R  uint8_t  RESERVED12[4];               /* 0x188 - 0x18B: Reserved */
    __R  uint32_t ZQSTAT;                      /* 0x18C: Description: ZQ Status Register */
    __RW uint32_t DFITMG0;                     /* 0x190: Description: DFI Timing Register 0 */
    __RW uint32_t DFITMG1;                     /* 0x194: Description: DFI Timing Register 1 */
    __RW uint32_t DFILPCFG0;                   /* 0x198: Description: DFI Low Power Configuration Register 0 */
    __R  uint8_t  RESERVED13[4];               /* 0x19C - 0x19F: Reserved */
    __RW uint32_t DFIUPD0;                     /* 0x1A0: Description: DFI Update Register 0 */
    __RW uint32_t DFIUPD1;                     /* 0x1A4: Description: DFI Update Register 1 */
    __RW uint32_t DFIUPD2;                     /* 0x1A8: Description: DFI Update Register 2 */
    __RW uint32_t DFIUPD3;                     /* 0x1AC: Description: DFI Update Register 3 */
    __RW uint32_t DFIMISC;                     /* 0x1B0: Description: DFI Miscellaneous Control Register */
    __RW uint32_t DFITMG2;                     /* 0x1B4: Description: DFI Timing Register 2 */
    __R  uint8_t  RESERVED14[72];              /* 0x1B8 - 0x1FF: Reserved */
    __RW uint32_t ADDRMAP0;                    /* 0x200: Description: Address Map Register 0 */
    __RW uint32_t ADDRMAP1;                    /* 0x204: Description: Address Map Register 1 */
    __RW uint32_t ADDRMAP2;                    /* 0x208: Description: Address Map Register 2 */
    __RW uint32_t ADDRMAP3;                    /* 0x20C: Description: Address Map Register 3 */
    __RW uint32_t ADDRMAP4;                    /* 0x210: Description: Address Map Register 4 */
    __RW uint32_t ADDRMAP5;                    /* 0x214: Description: Address Map Register 5 */
    __RW uint32_t ADDRMAP6;                    /* 0x218: Description: Address Map Register 6 */
    __R  uint8_t  RESERVED15[36];              /* 0x21C - 0x23F: Reserved */
    __RW uint32_t ODTCFG;                      /* 0x240: Description: ODT Configuration Register */
    __RW uint32_t ODTMAP;                      /* 0x244: Description: ODT/Rank Map Register */
    __R  uint8_t  RESERVED16[8];               /* 0x248 - 0x24F: Reserved */
    __RW uint32_t SCHED;                       /* 0x250: Description: Scheduler Control Register */
    __RW uint32_t SCHED1;                      /* 0x254: Description: Scheduler Control Register 1 */
    __R  uint8_t  RESERVED17[4];               /* 0x258 - 0x25B: Reserved */
    __RW uint32_t PERFHPR1;                    /* 0x25C: Description: High Priority Read CAM Register 1 */
    __R  uint8_t  RESERVED18[4];               /* 0x260 - 0x263: Reserved */
    __RW uint32_t PERFLPR1;                    /* 0x264: Description: Low Priority Read CAM Register 1 */
    __R  uint8_t  RESERVED19[4];               /* 0x268 - 0x26B: Reserved */
    __RW uint32_t PERFWR1;                     /* 0x26C: Description: Write CAM Register 1 */
    __R  uint8_t  RESERVED20[4];               /* 0x270 - 0x273: Reserved */
    __RW uint32_t PERFVPR1;                    /* 0x274: Description: Variable Priority Read CAM Register 1 */
    __RW uint32_t PERFVPW1;                    /* 0x278: Description: Variable Priority Write CAM Register 1 */
    __R  uint8_t  RESERVED21[132];             /* 0x27C - 0x2FF: Reserved */
    __RW uint32_t DBG0;                        /* 0x300: Description: Debug Register 0 */
    __RW uint32_t DBG1;                        /* 0x304: Description: Debug Register 1 */
    __R  uint32_t DBGCAM;                      /* 0x308: Description: CAM Debug Register */
    __RW uint32_t DBGCMD;                      /* 0x30C: Description: Command Debug Register */
    __R  uint32_t DBGSTAT;                     /* 0x310: Description: Status Debug Register */
    __R  uint8_t  RESERVED22[232];             /* 0x314 - 0x3FB: Reserved */
    __R  uint32_t PSTAT;                       /* 0x3FC: Description: Port Status Register */
    __RW uint32_t PCCFG;                       /* 0x400: Description: Port Common Configuration Register */
    struct {
        __RW uint32_t R;                       /* 0x404: Description: Port n Configuration Read Register */
        __RW uint32_t W;                       /* 0x408: Description: Port n Configuration Write Register */
        __RW uint32_t C;                       /* 0x40C: Description: Port n Common Configuration Register */
        struct {
            __RW uint32_t MASKCH;              /* 0x410: Description: Port n Channel m Configuration ID Mask Register */
            __RW uint32_t VALUECH;             /* 0x414: Description: Port n Channel m Configuration ID Value Register */
        } ID[16];
        __RW uint32_t CTRL;                    /* 0x490: Description: Port n Control Register */
        __RW uint32_t QOS0;                    /* 0x494: Description: Port n Read QoS Configuration Register 0 */
        __RW uint32_t QOS1;                    /* 0x498: Description: Port n Read QoS Configuration Register 1 */
        __RW uint32_t WQOS0;                   /* 0x49C: Description: Port n Write QoS Configuration Register 0 */
        __RW uint32_t WQOS1;                   /* 0x4A0: Description: Port n Write QoS Configuration Register 1 */
        __R  uint8_t  RESERVED0[16];           /* 0x4A4 - 0x4B3: Reserved */
    } PCFG[16];
    struct {
        __RW uint32_t BASE;                    /* 0xF04: Description: SAR Base Address Register n */
        __RW uint32_t SIZE;                    /* 0xF08: Description: SAR Size Register n */
    } SAR[4];
    __RW uint32_t SBRCTL;                      /* 0xF24: Description: Scrubber Control Register */
    __R  uint32_t SBRSTAT;                     /* 0xF28: Description: Scrubber Status Register */
    __RW uint32_t SBRWDATA0;                   /* 0xF2C: Description: Scrubber Write Data Pattern 0 */
    __R  uint8_t  RESERVED23[4];               /* 0xF30 - 0xF33: Reserved */
} DDRCTL_Type;
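
/*
 * Access sketch (illustrative, not part of the register map): DDRCTL_Type is
 * intended to be overlaid on the controller's base address, which is device
 * specific. Assuming a hypothetical HPM_DDRCTL_BASE from the SoC memory map:
 *
 *   #define HPM_DDRCTL ((DDRCTL_Type *)HPM_DDRCTL_BASE)
 *
 *   uint32_t status = HPM_DDRCTL->STAT;
 */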


/* Bitfield definition for register: MSTR */
/*
 * ACTIVE_RANKS (R/W)
 *
 * Description: Only present for multi-rank configurations. Each bit represents one rank. For two-rank configurations, only bits[25:24] are present.
 * 1 - populated
 * 0 - unpopulated
 * LSB is the lowest rank number.
 * For 2 ranks the following combinations are legal:
 * 01 - One rank
 * 11 - Two ranks
 * Others - Reserved.
 * For 4 ranks the following combinations are legal:
 * 0001 - One rank
 * 0011 - Two ranks
 * 1111 - Four ranks
 * Value After Reset: "(MEMC_NUM_RANKS==4) ? 0xF : ((MEMC_NUM_RANKS==2) ? 0x3 : 0x1)"
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_MSTR_ACTIVE_RANKS_MASK (0xF000000UL)
#define DDRCTL_MSTR_ACTIVE_RANKS_SHIFT (24U)
#define DDRCTL_MSTR_ACTIVE_RANKS_SET(x) (((uint32_t)(x) << DDRCTL_MSTR_ACTIVE_RANKS_SHIFT) & DDRCTL_MSTR_ACTIVE_RANKS_MASK)
#define DDRCTL_MSTR_ACTIVE_RANKS_GET(x) (((uint32_t)(x) & DDRCTL_MSTR_ACTIVE_RANKS_MASK) >> DDRCTL_MSTR_ACTIVE_RANKS_SHIFT)

/*
 * BURST_RDWR (R/W)
 *
 * Description: SDRAM burst length used:
 * 0001 - Burst length of 2 (only supported for mDDR)
 * 0010 - Burst length of 4
 * 0100 - Burst length of 8
 * 1000 - Burst length of 16 (only supported for mDDR and LPDDR2)
 * All other values are reserved.
 * This controls the burst size used to access the SDRAM. This must match the burst length mode register setting in the SDRAM. Burst length of 2 is not supported with AXI ports when MEMC_BURST_LENGTH is 8.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_MSTR_BURST_RDWR_MASK (0xF0000UL)
#define DDRCTL_MSTR_BURST_RDWR_SHIFT (16U)
#define DDRCTL_MSTR_BURST_RDWR_SET(x) (((uint32_t)(x) << DDRCTL_MSTR_BURST_RDWR_SHIFT) & DDRCTL_MSTR_BURST_RDWR_MASK)
#define DDRCTL_MSTR_BURST_RDWR_GET(x) (((uint32_t)(x) & DDRCTL_MSTR_BURST_RDWR_MASK) >> DDRCTL_MSTR_BURST_RDWR_SHIFT)

/*
 * DLL_OFF_MODE (R/W)
 *
 * Description: Set to 1 when the uMCTL2 and the DRAM have to be put in DLL-off mode for low frequency operation.
 * Set to 0 to put the uMCTL2 and the DRAM in DLL-on mode for normal frequency operation.
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3_OR_4==1
 */
#define DDRCTL_MSTR_DLL_OFF_MODE_MASK (0x8000U)
#define DDRCTL_MSTR_DLL_OFF_MODE_SHIFT (15U)
#define DDRCTL_MSTR_DLL_OFF_MODE_SET(x) (((uint32_t)(x) << DDRCTL_MSTR_DLL_OFF_MODE_SHIFT) & DDRCTL_MSTR_DLL_OFF_MODE_MASK)
#define DDRCTL_MSTR_DLL_OFF_MODE_GET(x) (((uint32_t)(x) & DDRCTL_MSTR_DLL_OFF_MODE_MASK) >> DDRCTL_MSTR_DLL_OFF_MODE_SHIFT)

/*
 * DATA_BUS_WIDTH (R/W)
 *
 * Description: Selects proportion of DQ bus width that is used by the SDRAM
 * 00 - Full DQ bus width to SDRAM
 * 01 - Half DQ bus width to SDRAM
 * 10 - Quarter DQ bus width to SDRAM
 * 11 - Reserved.
 * Note that half bus width mode is only supported when the SDRAM bus width is a multiple of 16, and quarter bus width mode is only supported when the SDRAM bus width is a multiple of 32 and the configuration parameter MEMC_QBUS_SUPPORT is set. Bus width refers to DQ bus width (excluding any ECC width).
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_MSTR_DATA_BUS_WIDTH_MASK (0x3000U)
#define DDRCTL_MSTR_DATA_BUS_WIDTH_SHIFT (12U)
#define DDRCTL_MSTR_DATA_BUS_WIDTH_SET(x) (((uint32_t)(x) << DDRCTL_MSTR_DATA_BUS_WIDTH_SHIFT) & DDRCTL_MSTR_DATA_BUS_WIDTH_MASK)
#define DDRCTL_MSTR_DATA_BUS_WIDTH_GET(x) (((uint32_t)(x) & DDRCTL_MSTR_DATA_BUS_WIDTH_MASK) >> DDRCTL_MSTR_DATA_BUS_WIDTH_SHIFT)

/*
 * EN_2T_TIMING_MODE (R/W)
 *
 * Description: If 1, then uMCTL2 uses 2T timing. Otherwise, uses 1T timing. In 2T timing, all command signals (except chip select) are held for 2 clocks on the SDRAM bus. Chip select is asserted on the second cycle of the command.
 * Note: 2T timing is not supported in LPDDR2/LPDDR3 mode.
 * Note: 2T timing is not supported if the configuration parameter MEMC_CMD_RTN2IDLE is set.
 * Note: 2T timing is not supported in DDR4 geardown mode.
 * Value After Reset: 0x0
 * Exists: MEMC_CMD_RTN2IDLE==0
 */
#define DDRCTL_MSTR_EN_2T_TIMING_MODE_MASK (0x400U)
#define DDRCTL_MSTR_EN_2T_TIMING_MODE_SHIFT (10U)
#define DDRCTL_MSTR_EN_2T_TIMING_MODE_SET(x) (((uint32_t)(x) << DDRCTL_MSTR_EN_2T_TIMING_MODE_SHIFT) & DDRCTL_MSTR_EN_2T_TIMING_MODE_MASK)
#define DDRCTL_MSTR_EN_2T_TIMING_MODE_GET(x) (((uint32_t)(x) & DDRCTL_MSTR_EN_2T_TIMING_MODE_MASK) >> DDRCTL_MSTR_EN_2T_TIMING_MODE_SHIFT)

/*
 * BURSTCHOP (R/W)
 *
 * Description: When set, enable burst-chop in DDR3/DDR4. This is only supported in full bus width mode (MSTR.data_bus_width = 00). If DDR4 CRC/parity retry is enabled (CRCPARCTL1.crc_parity_retry_enable = 1), burst chop is not supported, and this bit must be set to '0'.
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1
 */
#define DDRCTL_MSTR_BURSTCHOP_MASK (0x200U)
#define DDRCTL_MSTR_BURSTCHOP_SHIFT (9U)
#define DDRCTL_MSTR_BURSTCHOP_SET(x) (((uint32_t)(x) << DDRCTL_MSTR_BURSTCHOP_SHIFT) & DDRCTL_MSTR_BURSTCHOP_MASK)
#define DDRCTL_MSTR_BURSTCHOP_GET(x) (((uint32_t)(x) & DDRCTL_MSTR_BURSTCHOP_MASK) >> DDRCTL_MSTR_BURSTCHOP_SHIFT)

/*
 * DDR3 (R/W)
 *
 * Description: Select DDR3 SDRAM
 * 1 - DDR3 SDRAM device in use
 * 0 - non-DDR3 SDRAM device in use
 * Only present in designs that support DDR3.
 * Value After Reset: "(MEMC_DDR3_EN==1) ? 0x1 : 0x0"
 * Exists: MEMC_DDR3==1
 */
#define DDRCTL_MSTR_DDR3_MASK (0x1U)
#define DDRCTL_MSTR_DDR3_SHIFT (0U)
#define DDRCTL_MSTR_DDR3_SET(x) (((uint32_t)(x) << DDRCTL_MSTR_DDR3_SHIFT) & DDRCTL_MSTR_DDR3_MASK)
#define DDRCTL_MSTR_DDR3_GET(x) (((uint32_t)(x) & DDRCTL_MSTR_DDR3_MASK) >> DDRCTL_MSTR_DDR3_SHIFT)
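
/*
 * Usage sketch for the SET/GET accessor pattern used throughout this header
 * (HPM_DDRCTL is the hypothetical instance pointer from the sketch above):
 * read-modify-write MSTR to select a burst length of 8 on the full DQ bus.
 *
 *   uint32_t mstr = HPM_DDRCTL->MSTR;
 *   mstr &= ~(DDRCTL_MSTR_BURST_RDWR_MASK | DDRCTL_MSTR_DATA_BUS_WIDTH_MASK);
 *   mstr |= DDRCTL_MSTR_BURST_RDWR_SET(0x4);      // 0100 - burst length of 8
 *   mstr |= DDRCTL_MSTR_DATA_BUS_WIDTH_SET(0x0);  // 00 - full DQ bus width
 *   HPM_DDRCTL->MSTR = mstr;
 */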

/* Bitfield definition for register: STAT */
/*
 * SELFREF_TYPE (R)
 *
 * Description: Flags if Self Refresh is entered and if it was under Automatic Self Refresh control only or not.
 * 00 - SDRAM is not in Self Refresh
 * 11 - SDRAM is in Self Refresh and Self Refresh was caused by Automatic Self Refresh only
 * 10 - SDRAM is in Self Refresh and Self Refresh was not caused solely under Automatic Self Refresh control. It could have been caused by Hardware Low Power Interface and/or Software (reg_ddrc_selfref_sw).
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_STAT_SELFREF_TYPE_MASK (0x30U)
#define DDRCTL_STAT_SELFREF_TYPE_SHIFT (4U)
#define DDRCTL_STAT_SELFREF_TYPE_GET(x) (((uint32_t)(x) & DDRCTL_STAT_SELFREF_TYPE_MASK) >> DDRCTL_STAT_SELFREF_TYPE_SHIFT)

/*
 * OPERATING_MODE (R)
 *
 * Description: Operating mode. This is 3-bits wide in configurations with mDDR/LPDDR2/LPDDR3/DDR4 support and 2-bits in all other configurations.
 * non-mDDR/LPDDR2/LPDDR3 and non-DDR4 designs:
 * 00 - Init
 * 01 - Normal
 * 10 - Power-down
 * 11 - Self refresh
 * mDDR/LPDDR2/LPDDR3 or DDR4 designs:
 * 000 - Init
 * 001 - Normal
 * 010 - Power-down
 * 011 - Self refresh
 * 1XX - Deep power-down / Maximum Power Saving Mode
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_STAT_OPERATING_MODE_MASK (0x7U)
#define DDRCTL_STAT_OPERATING_MODE_SHIFT (0U)
#define DDRCTL_STAT_OPERATING_MODE_GET(x) (((uint32_t)(x) & DDRCTL_STAT_OPERATING_MODE_MASK) >> DDRCTL_STAT_OPERATING_MODE_SHIFT)
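
/*
 * Usage sketch: after initialization, software can poll STAT until the
 * controller reports Normal mode (encoding 01/001 per the field description
 * above), using the hypothetical HPM_DDRCTL instance pointer:
 *
 *   while (DDRCTL_STAT_OPERATING_MODE_GET(HPM_DDRCTL->STAT) != 0x1) {
 *       // wait for Normal operating mode
 *   }
 */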

/* Bitfield definition for register: MRCTRL0 */
/*
 * MR_WR (R/W)
 *
 * Description: Setting this register bit to 1 triggers a mode register read or write operation. When the MR operation is complete, the uMCTL2 automatically clears this bit. The other register fields of this register must be written in a separate APB transaction, before setting this mr_wr bit. It is recommended NOT to set this signal if in Init, Deep power-down or MPSM operating modes.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_MRCTRL0_MR_WR_MASK (0x80000000UL)
#define DDRCTL_MRCTRL0_MR_WR_SHIFT (31U)
#define DDRCTL_MRCTRL0_MR_WR_SET(x) (((uint32_t)(x) << DDRCTL_MRCTRL0_MR_WR_SHIFT) & DDRCTL_MRCTRL0_MR_WR_MASK)
#define DDRCTL_MRCTRL0_MR_WR_GET(x) (((uint32_t)(x) & DDRCTL_MRCTRL0_MR_WR_MASK) >> DDRCTL_MRCTRL0_MR_WR_SHIFT)

/*
 * MR_ADDR (R/W)
 *
 * Description: Address of the mode register that is to be written to.
 * 0000 - MR0
 * 0001 - MR1
 * 0010 - MR2
 * 0011 - MR3
 * 0100 - MR4
 * 0101 - MR5
 * 0110 - MR6
 * 0111 - MR7
 * Don't Care for LPDDR2/LPDDR3 (see MRCTRL1.mr_data for mode register addressing in LPDDR2/LPDDR3)
 * This signal is also used for writing to control words of RDIMMs. In that case, it corresponds to the bank address bits sent to the RDIMM.
 * In case of DDR4, the bit[3:2] corresponds to the bank group bits. Therefore, the bit[3] as well as the bit[2:0] must be set to an appropriate value which is considered both the Address Mirroring of UDIMMs/RDIMMs and the Output Inversion of RDIMMs.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_MRCTRL0_MR_ADDR_MASK (0xF000U)
#define DDRCTL_MRCTRL0_MR_ADDR_SHIFT (12U)
#define DDRCTL_MRCTRL0_MR_ADDR_SET(x) (((uint32_t)(x) << DDRCTL_MRCTRL0_MR_ADDR_SHIFT) & DDRCTL_MRCTRL0_MR_ADDR_MASK)
#define DDRCTL_MRCTRL0_MR_ADDR_GET(x) (((uint32_t)(x) & DDRCTL_MRCTRL0_MR_ADDR_MASK) >> DDRCTL_MRCTRL0_MR_ADDR_SHIFT)

/*
 * MR_RANK (R/W)
 *
 * Description: Controls which rank is accessed by MRCTRL0.mr_wr. Normally, it is desired to access all ranks, so all bits should be set to 1. However, for multi-rank UDIMMs/RDIMMs which implement address mirroring, it may be necessary to access ranks individually.
 * Examples (assume uMCTL2 is configured for 4 ranks):
 * 0x1 - select rank 0 only
 * 0x2 - select rank 1 only
 * 0x5 - select ranks 0 and 2
 * 0xA - select ranks 1 and 3
 * 0xF - select ranks 0, 1, 2 and 3
 * Value After Reset: "(MEMC_NUM_RANKS==4) ? 0xF : ((MEMC_NUM_RANKS==2) ? 0x3 : 0x1)"
 * Exists: Always
 */
#define DDRCTL_MRCTRL0_MR_RANK_MASK (0xF0U)
#define DDRCTL_MRCTRL0_MR_RANK_SHIFT (4U)
#define DDRCTL_MRCTRL0_MR_RANK_SET(x) (((uint32_t)(x) << DDRCTL_MRCTRL0_MR_RANK_SHIFT) & DDRCTL_MRCTRL0_MR_RANK_MASK)
#define DDRCTL_MRCTRL0_MR_RANK_GET(x) (((uint32_t)(x) & DDRCTL_MRCTRL0_MR_RANK_MASK) >> DDRCTL_MRCTRL0_MR_RANK_SHIFT)

/* Bitfield definition for register: MRCTRL1 */
/*
 * MR_DATA (R/W)
 *
 * Description: Mode register write data for all non-LPDDR2/non-LPDDR3 modes.
 * For LPDDR2/LPDDR3, MRCTRL1[15:0] are interpreted as [15:8] MR Address and [7:0] MR data for writes, don't care for reads. This is 18-bits wide in configurations with DDR4 support and 16-bits in all other configurations.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_MRCTRL1_MR_DATA_MASK (0x3FFFFUL)
#define DDRCTL_MRCTRL1_MR_DATA_SHIFT (0U)
#define DDRCTL_MRCTRL1_MR_DATA_SET(x) (((uint32_t)(x) << DDRCTL_MRCTRL1_MR_DATA_SHIFT) & DDRCTL_MRCTRL1_MR_DATA_MASK)
#define DDRCTL_MRCTRL1_MR_DATA_GET(x) (((uint32_t)(x) & DDRCTL_MRCTRL1_MR_DATA_MASK) >> DDRCTL_MRCTRL1_MR_DATA_SHIFT)

/* Bitfield definition for register: MRSTAT */
/*
 * MR_WR_BUSY (R)
 *
 * Description: The SoC core may initiate a MR write operation only if this signal is low. This signal goes high in the clock after the uMCTL2 accepts the MRW/MRR request. It goes low when the MRW/MRR command is issued to the SDRAM. It is recommended not to perform MRW/MRR commands when 'MRSTAT.mr_wr_busy' is high.
 * 0 - Indicates that the SoC core can initiate a mode register write operation
 * 1 - Indicates that mode register write operation is in progress
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_MRSTAT_MR_WR_BUSY_MASK (0x1U)
#define DDRCTL_MRSTAT_MR_WR_BUSY_SHIFT (0U)
#define DDRCTL_MRSTAT_MR_WR_BUSY_GET(x) (((uint32_t)(x) & DDRCTL_MRSTAT_MR_WR_BUSY_MASK) >> DDRCTL_MRSTAT_MR_WR_BUSY_SHIFT)
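
/*
 * Mode-register write sketch, following the sequencing rules stated in the
 * MRCTRL0/MRSTAT descriptions above: wait for busy to clear, program the data
 * and address/rank fields in separate APB writes, then trigger mr_wr (which
 * the uMCTL2 clears when the operation completes). HPM_DDRCTL, mr_index and
 * mr_value are hypothetical names used for illustration only.
 *
 *   while (DDRCTL_MRSTAT_MR_WR_BUSY_GET(HPM_DDRCTL->MRSTAT)) {
 *   }
 *   HPM_DDRCTL->MRCTRL1 = DDRCTL_MRCTRL1_MR_DATA_SET(mr_value);
 *   HPM_DDRCTL->MRCTRL0 = DDRCTL_MRCTRL0_MR_ADDR_SET(mr_index)
 *                       | DDRCTL_MRCTRL0_MR_RANK_SET(0x1);  // rank 0 only
 *   HPM_DDRCTL->MRCTRL0 |= DDRCTL_MRCTRL0_MR_WR_SET(1);     // trigger; auto-clears
 *   while (DDRCTL_MRSTAT_MR_WR_BUSY_GET(HPM_DDRCTL->MRSTAT)) {
 *   }
 */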

/* Bitfield definition for register: PWRCTL */
/*
 * SELFREF_SW (R/W)
 *
 * Description: Writing 1 to this register causes the system to move to the Self Refresh state immediately, as long as it is not in INIT or DPD/MPSM operating_mode. This is referred to as Software Entry/Exit to Self Refresh.
 * 1 - Software Entry to Self Refresh
 * 0 - Software Exit from Self Refresh
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PWRCTL_SELFREF_SW_MASK (0x20U)
#define DDRCTL_PWRCTL_SELFREF_SW_SHIFT (5U)
#define DDRCTL_PWRCTL_SELFREF_SW_SET(x) (((uint32_t)(x) << DDRCTL_PWRCTL_SELFREF_SW_SHIFT) & DDRCTL_PWRCTL_SELFREF_SW_MASK)
#define DDRCTL_PWRCTL_SELFREF_SW_GET(x) (((uint32_t)(x) & DDRCTL_PWRCTL_SELFREF_SW_MASK) >> DDRCTL_PWRCTL_SELFREF_SW_SHIFT)

/*
 * EN_DFI_DRAM_CLK_DISABLE (R/W)
 *
 * Description: Enable the assertion of dfi_dram_clk_disable whenever a clock is not required by the SDRAM.
 * If set to 0, dfi_dram_clk_disable is never asserted. Assertion of dfi_dram_clk_disable is as follows:
 * In DDR2/DDR3, can only be asserted in Self Refresh. In DDR4, can be asserted in the following:
 * in Self Refresh
 * in Maximum Power Saving Mode
 * In mDDR/LPDDR2/LPDDR3, can be asserted in the following:
 * in Self Refresh
 * in Power Down
 * in Deep Power Down
 * during Normal operation (Clock Stop)
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_MASK (0x8U)
#define DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_SHIFT (3U)
#define DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_SET(x) (((uint32_t)(x) << DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_SHIFT) & DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_MASK)
#define DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_GET(x) (((uint32_t)(x) & DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_MASK) >> DDRCTL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE_SHIFT)

/*
 * POWERDOWN_EN (R/W)
 *
 * Description: If true then the uMCTL2 goes into power-down after a programmable number of cycles "maximum idle clocks before power down" (PWRTMG.powerdown_to_x32).
 * This register bit may be re-programmed during the course of normal operation.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PWRCTL_POWERDOWN_EN_MASK (0x2U)
#define DDRCTL_PWRCTL_POWERDOWN_EN_SHIFT (1U)
#define DDRCTL_PWRCTL_POWERDOWN_EN_SET(x) (((uint32_t)(x) << DDRCTL_PWRCTL_POWERDOWN_EN_SHIFT) & DDRCTL_PWRCTL_POWERDOWN_EN_MASK)
#define DDRCTL_PWRCTL_POWERDOWN_EN_GET(x) (((uint32_t)(x) & DDRCTL_PWRCTL_POWERDOWN_EN_MASK) >> DDRCTL_PWRCTL_POWERDOWN_EN_SHIFT)

/*
 * SELFREF_EN (R/W)
 *
 * Description: If true then the uMCTL2 puts the SDRAM into Self Refresh after a programmable number of cycles "maximum idle clocks before Self Refresh (PWRTMG.selfref_to_x32)". This register bit may be re-programmed during the course of normal operation.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PWRCTL_SELFREF_EN_MASK (0x1U)
#define DDRCTL_PWRCTL_SELFREF_EN_SHIFT (0U)
#define DDRCTL_PWRCTL_SELFREF_EN_SET(x) (((uint32_t)(x) << DDRCTL_PWRCTL_SELFREF_EN_SHIFT) & DDRCTL_PWRCTL_SELFREF_EN_MASK)
#define DDRCTL_PWRCTL_SELFREF_EN_GET(x) (((uint32_t)(x) & DDRCTL_PWRCTL_SELFREF_EN_MASK) >> DDRCTL_PWRCTL_SELFREF_EN_SHIFT)

/* Bitfield definition for register: PWRTMG */
/*
 * SELFREF_TO_X32 (R/W)
 *
 * Description: After this many clocks of NOP or deselect the uMCTL2 automatically puts the SDRAM into Self Refresh. This must be enabled in the PWRCTL.selfref_en.
 * Unit: Multiples of 32 clocks. FOR PERFORMANCE ONLY.
 * Value After Reset: 0x40
 * Exists: Always
 */
#define DDRCTL_PWRTMG_SELFREF_TO_X32_MASK (0xFF0000UL)
#define DDRCTL_PWRTMG_SELFREF_TO_X32_SHIFT (16U)
#define DDRCTL_PWRTMG_SELFREF_TO_X32_SET(x) (((uint32_t)(x) << DDRCTL_PWRTMG_SELFREF_TO_X32_SHIFT) & DDRCTL_PWRTMG_SELFREF_TO_X32_MASK)
#define DDRCTL_PWRTMG_SELFREF_TO_X32_GET(x) (((uint32_t)(x) & DDRCTL_PWRTMG_SELFREF_TO_X32_MASK) >> DDRCTL_PWRTMG_SELFREF_TO_X32_SHIFT)

/*
 * POWERDOWN_TO_X32 (R/W)
 *
 * Description: After this many clocks of NOP or deselect the uMCTL2 automatically puts the SDRAM into power-down. This must be enabled in the PWRCTL.powerdown_en.
 * Unit: Multiples of 32 clocks. FOR PERFORMANCE ONLY.
 * Value After Reset: 0x10
 * Exists: Always
 */
#define DDRCTL_PWRTMG_POWERDOWN_TO_X32_MASK (0x1FU)
#define DDRCTL_PWRTMG_POWERDOWN_TO_X32_SHIFT (0U)
#define DDRCTL_PWRTMG_POWERDOWN_TO_X32_SET(x) (((uint32_t)(x) << DDRCTL_PWRTMG_POWERDOWN_TO_X32_SHIFT) & DDRCTL_PWRTMG_POWERDOWN_TO_X32_MASK)
#define DDRCTL_PWRTMG_POWERDOWN_TO_X32_GET(x) (((uint32_t)(x) & DDRCTL_PWRTMG_POWERDOWN_TO_X32_MASK) >> DDRCTL_PWRTMG_POWERDOWN_TO_X32_SHIFT)
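
/*
 * Low-power configuration sketch: program the idle timeouts in PWRTMG (values
 * shown are the documented reset defaults, in units of 32 clocks), then enable
 * automatic power-down and automatic Self Refresh in PWRCTL. HPM_DDRCTL is the
 * hypothetical instance pointer used in the earlier sketches.
 *
 *   HPM_DDRCTL->PWRTMG = DDRCTL_PWRTMG_SELFREF_TO_X32_SET(0x40)
 *                      | DDRCTL_PWRTMG_POWERDOWN_TO_X32_SET(0x10);
 *   HPM_DDRCTL->PWRCTL |= DDRCTL_PWRCTL_POWERDOWN_EN_SET(1)
 *                       | DDRCTL_PWRCTL_SELFREF_EN_SET(1);
 */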

/* Bitfield definition for register: HWLPCTL */
/*
 * HW_LP_IDLE_X32 (R/W)
 *
 * Description: Hardware idle period. The cactive_ddrc output is driven low if the system is idle for hw_lp_idle * 32 cycles if not in INIT or DPD/MPSM operating_mode. The hardware idle function is disabled when hw_lp_idle_x32=0.
 * Unit: Multiples of 32 clocks. FOR PERFORMANCE ONLY.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_HWLPCTL_HW_LP_IDLE_X32_MASK (0xFFF0000UL)
#define DDRCTL_HWLPCTL_HW_LP_IDLE_X32_SHIFT (16U)
#define DDRCTL_HWLPCTL_HW_LP_IDLE_X32_SET(x) (((uint32_t)(x) << DDRCTL_HWLPCTL_HW_LP_IDLE_X32_SHIFT) & DDRCTL_HWLPCTL_HW_LP_IDLE_X32_MASK)
#define DDRCTL_HWLPCTL_HW_LP_IDLE_X32_GET(x) (((uint32_t)(x) & DDRCTL_HWLPCTL_HW_LP_IDLE_X32_MASK) >> DDRCTL_HWLPCTL_HW_LP_IDLE_X32_SHIFT)

/*
 * HW_LP_EXIT_IDLE_EN (R/W)
 *
 * Description: When this bit is programmed to 1 the cactive_in_ddrc pin of the DDRC can be used to exit from the automatic clock stop, automatic power down or automatic self-refresh modes. Note, it will not cause exit of Self-Refresh that was caused by Hardware Low Power Interface and/or Software (PWRCTL.selfref_sw).
 * Value After Reset: 0x1
 * Exists: Always
 */
#define DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_MASK (0x2U)
#define DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_SHIFT (1U)
#define DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_SET(x) (((uint32_t)(x) << DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_SHIFT) & DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_MASK)
#define DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_GET(x) (((uint32_t)(x) & DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_MASK) >> DDRCTL_HWLPCTL_HW_LP_EXIT_IDLE_EN_SHIFT)

/*
 * HW_LP_EN (R/W)
 *
 * Description: Enable for Hardware Low Power Interface.
 * Value After Reset: 0x1
 * Exists: Always
 */
#define DDRCTL_HWLPCTL_HW_LP_EN_MASK (0x1U)
#define DDRCTL_HWLPCTL_HW_LP_EN_SHIFT (0U)
#define DDRCTL_HWLPCTL_HW_LP_EN_SET(x) (((uint32_t)(x) << DDRCTL_HWLPCTL_HW_LP_EN_SHIFT) & DDRCTL_HWLPCTL_HW_LP_EN_MASK)
#define DDRCTL_HWLPCTL_HW_LP_EN_GET(x) (((uint32_t)(x) & DDRCTL_HWLPCTL_HW_LP_EN_MASK) >> DDRCTL_HWLPCTL_HW_LP_EN_SHIFT)

/* Bitfield definition for register: RFSHCTL0 */
/*
 * REFRESH_MARGIN (R/W)
 *
 * Description: Threshold value in number of clock cycles before the critical refresh or page timer expires. A critical refresh is to be issued before this threshold is reached. It is recommended that this not be changed from the default value, currently shown as 0x2. It must always be less than internally used t_rfc_nom_x32. Note that, in LPDDR2/LPDDR3, internally used t_rfc_nom_x32 may be equal to RFSHTMG.t_rfc_nom_x32>>2 if derating is enabled (DERATEEN.derate_enable=1). Otherwise, internally used t_rfc_nom_x32 will be equal to RFSHTMG.t_rfc_nom_x32.
 * Unit: Multiples of 32 clocks.
 * Value After Reset: 0x2
 * Exists: Always
 */
#define DDRCTL_RFSHCTL0_REFRESH_MARGIN_MASK (0xF00000UL)
#define DDRCTL_RFSHCTL0_REFRESH_MARGIN_SHIFT (20U)
#define DDRCTL_RFSHCTL0_REFRESH_MARGIN_SET(x) (((uint32_t)(x) << DDRCTL_RFSHCTL0_REFRESH_MARGIN_SHIFT) & DDRCTL_RFSHCTL0_REFRESH_MARGIN_MASK)
#define DDRCTL_RFSHCTL0_REFRESH_MARGIN_GET(x) (((uint32_t)(x) & DDRCTL_RFSHCTL0_REFRESH_MARGIN_MASK) >> DDRCTL_RFSHCTL0_REFRESH_MARGIN_SHIFT)

/*
 * REFRESH_TO_X32 (R/W)
 *
 * Description: If the refresh timer (tRFCnom, also known as tREFI) has expired at least once, but it has not expired (RFSHCTL0.refresh_burst+1) times yet, then a speculative refresh may be performed. A speculative refresh is a refresh performed at a time when refresh would be useful, but before it is absolutely required. When the SDRAM bus is idle for a period of time determined by this RFSHCTL0.refresh_to_x32 and the refresh timer has expired at least once since the last refresh, then a speculative refresh is performed. Speculative refreshes continue successively until there are no refreshes pending or until new reads or writes are issued to the uMCTL2.
 * FOR PERFORMANCE ONLY.
 * Value After Reset: 0x10
 * Exists: Always
 */
#define DDRCTL_RFSHCTL0_REFRESH_TO_X32_MASK (0x1F000UL)
#define DDRCTL_RFSHCTL0_REFRESH_TO_X32_SHIFT (12U)
#define DDRCTL_RFSHCTL0_REFRESH_TO_X32_SET(x) (((uint32_t)(x) << DDRCTL_RFSHCTL0_REFRESH_TO_X32_SHIFT) & DDRCTL_RFSHCTL0_REFRESH_TO_X32_MASK)
#define DDRCTL_RFSHCTL0_REFRESH_TO_X32_GET(x) (((uint32_t)(x) & DDRCTL_RFSHCTL0_REFRESH_TO_X32_MASK) >> DDRCTL_RFSHCTL0_REFRESH_TO_X32_SHIFT)

/*
 * REFRESH_BURST (R/W)
 *
 * Description: The programmed value + 1 is the number of refresh timeouts that is allowed to accumulate before traffic is blocked and the refreshes are forced to execute. Closing pages to perform a refresh is a one-time penalty that must be paid for each group of refreshes. Therefore, performing refreshes in a burst reduces the per-refresh penalty of these page closings. Higher numbers for RFSHCTL0.refresh_burst slightly increase utilization; lower numbers decrease the worst-case latency associated with refreshes.
 * 0 - single refresh
 * 1 - burst-of-2 refresh
 * 7 - burst-of-8 refresh
 * For information on burst refresh feature refer to section 3.9 of DDR2 JEDEC specification - JESD79-2F.pdf.
 * For DDR2/3, the refresh is always per-rank and not per-bank. The rank refresh can be accumulated over 8*tREFI cycles using the burst refresh feature. In DDR4 mode, according to Fine Granularity feature, 8 refreshes can be postponed in 1X mode, 16 refreshes in 2X mode and 32 refreshes in 4X mode. If using PHY-initiated updates, care must be taken in the setting of RFSHCTL0.refresh_burst, to ensure that tRFCmax is not violated due to a PHY-initiated update occurring shortly before a refresh burst was due. In this situation, the refresh burst will be delayed until the PHY-initiated update is complete.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_RFSHCTL0_REFRESH_BURST_MASK (0x1F0U)
#define DDRCTL_RFSHCTL0_REFRESH_BURST_SHIFT (4U)
#define DDRCTL_RFSHCTL0_REFRESH_BURST_SET(x) (((uint32_t)(x) << DDRCTL_RFSHCTL0_REFRESH_BURST_SHIFT) & DDRCTL_RFSHCTL0_REFRESH_BURST_MASK)
#define DDRCTL_RFSHCTL0_REFRESH_BURST_GET(x) (((uint32_t)(x) & DDRCTL_RFSHCTL0_REFRESH_BURST_MASK) >> DDRCTL_RFSHCTL0_REFRESH_BURST_SHIFT)

/* Bitfield definition for register: RFSHCTL1 */
/*
 * REFRESH_TIMER1_START_VALUE_X32 (R/W)
 *
 * Description: Refresh timer start for rank 1 (only present in multi-rank configurations). This is useful in staggering the refreshes to multiple ranks to help traffic to proceed. This is explained in Refresh Controls section of architecture chapter.
 * Unit: Multiples of 32 clocks. FOR PERFORMANCE ONLY.
 * Value After Reset: 0x0
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_MASK (0xFFF0000UL)
#define DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_SHIFT (16U)
#define DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_SET(x) (((uint32_t)(x) << DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_SHIFT) & DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_MASK)
#define DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_GET(x) (((uint32_t)(x) & DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_MASK) >> DDRCTL_RFSHCTL1_REFRESH_TIMER1_START_VALUE_X32_SHIFT)

/*
 * REFRESH_TIMER0_START_VALUE_X32 (R/W)
 *
 * Description: Refresh timer start for rank 0 (only present in multi-rank configurations). This is useful in staggering the refreshes to multiple ranks to help traffic to proceed. This is explained in Refresh Controls section of architecture chapter.
 * Unit: Multiples of 32 clocks. FOR PERFORMANCE ONLY.
 * Value After Reset: 0x0
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_MASK (0xFFFU)
#define DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_SHIFT (0U)
#define DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_SET(x) (((uint32_t)(x) << DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_SHIFT) & DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_MASK)
#define DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_GET(x) (((uint32_t)(x) & DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_MASK) >> DDRCTL_RFSHCTL1_REFRESH_TIMER0_START_VALUE_X32_SHIFT)

/* Bitfield definition for register: RFSHCTL3 */
/*
 * REFRESH_UPDATE_LEVEL (R/W)
 *
 * Description: Toggle this signal (either from 0 to 1 or from 1 to 0) to indicate that the refresh register(s) have been updated.
 * The value is automatically updated when exiting soft reset, so it does not need to be toggled initially.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_MASK (0x2U)
#define DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_SHIFT (1U)
#define DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_SET(x) (((uint32_t)(x) << DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_SHIFT) & DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_MASK)
#define DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_GET(x) (((uint32_t)(x) & DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_MASK) >> DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_SHIFT)

/*
 * DIS_AUTO_REFRESH (R/W)
 *
 * Description: When '1', disable auto-refresh generated by the uMCTL2. When auto-refresh is disabled, the SoC core must generate refreshes using the registers reg_ddrc_rank0_refresh, reg_ddrc_rank1_refresh, reg_ddrc_rank2_refresh and reg_ddrc_rank3_refresh.
 * When dis_auto_refresh transitions from 0 to 1, any pending refreshes are immediately scheduled by the uMCTL2.
 * If DDR4 CRC/parity retry is enabled (CRCPARCTL1.crc_parity_retry_enable = 1), disable auto-refresh is not supported, and this bit must be set to '0'.
 * This register field is changeable on the fly.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_MASK (0x1U)
#define DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_SHIFT (0U)
#define DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_SET(x) (((uint32_t)(x) << DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_SHIFT) & DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_MASK)
#define DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_GET(x) (((uint32_t)(x) & DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_MASK) >> DDRCTL_RFSHCTL3_DIS_AUTO_REFRESH_SHIFT)
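
/*
 * Sketch of the refresh-update handshake described above: after changing
 * refresh registers at runtime, toggle RFSHCTL3.refresh_update_level (0 to 1
 * or 1 to 0) so the controller picks up the new values (HPM_DDRCTL is the
 * hypothetical instance pointer from the earlier sketches):
 *
 *   HPM_DDRCTL->RFSHCTL3 ^= DDRCTL_RFSHCTL3_REFRESH_UPDATE_LEVEL_MASK;
 */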

/* Bitfield definition for register: RFSHTMG */
/*
 * T_RFC_NOM_X32 (R/W)
 *
 * Description: tREFI: Average time interval between refreshes per rank (specification: 7.8us for DDR2, DDR3 and DDR4. See JEDEC specification for mDDR, LPDDR2 and LPDDR3).
 * For LPDDR2/LPDDR3:
 * if using all-bank refreshes (RFSHCTL0.per_bank_refresh = 0), this register should be set to tREFIab
 * if using per-bank refreshes (RFSHCTL0.per_bank_refresh = 1), this register should be set to tREFIpb
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tREFI/2), no rounding up.
 * In DDR4 mode, tREFI value is different depending on the refresh mode. The user should program the appropriate value from the spec based on the value programmed in the refresh mode register.
 * Note that RFSHTMG.t_rfc_nom_x32 * 32 must be greater than RFSHTMG.t_rfc_min.
 * Unit: Multiples of 32 clocks.
 * Value After Reset: 0x62
 * Exists: Always
 */
#define DDRCTL_RFSHTMG_T_RFC_NOM_X32_MASK (0xFFF0000UL)
#define DDRCTL_RFSHTMG_T_RFC_NOM_X32_SHIFT (16U)
#define DDRCTL_RFSHTMG_T_RFC_NOM_X32_SET(x) (((uint32_t)(x) << DDRCTL_RFSHTMG_T_RFC_NOM_X32_SHIFT) & DDRCTL_RFSHTMG_T_RFC_NOM_X32_MASK)
#define DDRCTL_RFSHTMG_T_RFC_NOM_X32_GET(x) (((uint32_t)(x) & DDRCTL_RFSHTMG_T_RFC_NOM_X32_MASK) >> DDRCTL_RFSHTMG_T_RFC_NOM_X32_SHIFT)

/*
 * T_RFC_MIN (R/W)
 *
 * Description: tRFC (min): Minimum time from refresh to refresh or activate.
 * For LPDDR2/LPDDR3:
 * if using all-bank refreshes (RFSHCTL0.per_bank_refresh = 0), this register should be set to tRFCab
 * if using per-bank refreshes (RFSHCTL0.per_bank_refresh = 1), this register should be set to tRFCpb
 * For configurations with MEMC_FREQ_RATIO=2, program this to tRFC(min)/2 and round up to next integer value.
 * In DDR4 mode, tRFC(min) value is different depending on the refresh mode (fixed 1X,2X,4X) and the device density. The user should program the appropriate value from the spec based on the 'refresh_mode' and the device density that is used.
 * Unit: Clocks.
 * Value After Reset: 0x8c
 * Exists: Always
 */
#define DDRCTL_RFSHTMG_T_RFC_MIN_MASK (0x1FFU)
#define DDRCTL_RFSHTMG_T_RFC_MIN_SHIFT (0U)
#define DDRCTL_RFSHTMG_T_RFC_MIN_SET(x) (((uint32_t)(x) << DDRCTL_RFSHTMG_T_RFC_MIN_SHIFT) & DDRCTL_RFSHTMG_T_RFC_MIN_MASK)
#define DDRCTL_RFSHTMG_T_RFC_MIN_GET(x) (((uint32_t)(x) & DDRCTL_RFSHTMG_T_RFC_MIN_MASK) >> DDRCTL_RFSHTMG_T_RFC_MIN_SHIFT)
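
/*
 * Worked example for RFSHTMG (illustrative numbers, MEMC_FREQ_RATIO=1): with a
 * 400 MHz controller clock (2.5 ns period) and the DDR3 tREFI of 7.8 us,
 * tREFI = 7800 / 2.5 = 3120 clocks, so t_rfc_nom_x32 = 3120 / 32 = 97 (0x61),
 * truncated rather than rounded up so refreshes are never issued late. For a
 * hypothetical part with tRFC(min) = 160 ns, t_rfc_min = 160 / 2.5 = 64 clocks.
 * The constraint noted above holds: 97 * 32 = 3104 > 64.
 *
 *   HPM_DDRCTL->RFSHTMG = DDRCTL_RFSHTMG_T_RFC_NOM_X32_SET(97U)
 *                       | DDRCTL_RFSHTMG_T_RFC_MIN_SET(64U);
 */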

/* Bitfield definition for register: ECCUADDR0 */
/*
 * ECC_UNCORR_RANK (R)
 *
 * Description: Rank number of a read resulting in an uncorrected ECC error
 * Value After Reset: 0x0
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_ECCUADDR0_ECC_UNCORR_RANK_MASK (0x3000000UL)
#define DDRCTL_ECCUADDR0_ECC_UNCORR_RANK_SHIFT (24U)
#define DDRCTL_ECCUADDR0_ECC_UNCORR_RANK_GET(x) (((uint32_t)(x) & DDRCTL_ECCUADDR0_ECC_UNCORR_RANK_MASK) >> DDRCTL_ECCUADDR0_ECC_UNCORR_RANK_SHIFT)

/*
 * ECC_UNCORR_ROW (R)
 *
 * Description: Page/row number of a read resulting in an uncorrected ECC error. This is 18-bits wide in configurations with DDR4 support and 16-bits in all other configurations.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_ECCUADDR0_ECC_UNCORR_ROW_MASK (0x3FFFFUL)
#define DDRCTL_ECCUADDR0_ECC_UNCORR_ROW_SHIFT (0U)
#define DDRCTL_ECCUADDR0_ECC_UNCORR_ROW_GET(x) (((uint32_t)(x) & DDRCTL_ECCUADDR0_ECC_UNCORR_ROW_MASK) >> DDRCTL_ECCUADDR0_ECC_UNCORR_ROW_SHIFT)

/* Bitfield definition for register: CRCPARCTL0 */
/*
 * DFI_ALERT_ERR_CNT_CLR (R/W1C)
 *
 * Description: DFI alert error count clear. Clear bit for DFI alert error counter. Asserting this bit will clear the DFI alert error counter, CRCPARSTAT.dfi_alert_err_cnt. When the clear operation is complete, the uMCTL2 automatically clears this bit.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_MASK (0x4U)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_SHIFT (2U)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_SET(x) (((uint32_t)(x) << DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_SHIFT) & DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_MASK)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_GET(x) (((uint32_t)(x) & DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_MASK) >> DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_SHIFT)

/*
 * DFI_ALERT_ERR_INT_CLR (R/W1C)
 *
 * Description: Interrupt clear bit for DFI alert error. If this bit is set, the alert error interrupt on CRCPARSTAT.dfi_alert_err_int will be cleared. When the clear operation is complete, the uMCTL2 automatically clears this bit.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_MASK (0x2U)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_SHIFT (1U)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_SET(x) (((uint32_t)(x) << DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_SHIFT) & DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_MASK)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_GET(x) (((uint32_t)(x) & DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_MASK) >> DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_SHIFT)

/*
 * DFI_ALERT_ERR_INT_EN (R/W)
 *
 * Description: Interrupt enable bit for DFI alert error. If this bit is set, any parity/CRC error detected on the dfi_alert_n input will result in an interrupt being set on CRCPARSTAT.dfi_alert_err_int.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_MASK (0x1U)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_SHIFT (0U)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_SET(x) (((uint32_t)(x) << DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_SHIFT) & DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_MASK)
#define DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_GET(x) (((uint32_t)(x) & DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_MASK) >> DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_EN_SHIFT)

/* Bitfield definition for register: CRCPARSTAT */
/*
 * DFI_ALERT_ERR_INT (R)
 *
 * Description: DFI alert error interrupt.
 * If a parity/CRC error is detected on dfi_alert_n, and the interrupt is enabled by CRCPARCTL0.dfi_alert_err_int_en, this interrupt bit will be set. It will remain set until cleared by CRCPARCTL0.dfi_alert_err_int_clr.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_INT_MASK (0x10000UL)
#define DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_INT_SHIFT (16U)
#define DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_INT_GET(x) (((uint32_t)(x) & DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_INT_MASK) >> DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_INT_SHIFT)

/*
 * DFI_ALERT_ERR_CNT (R)
 *
 * Description: DFI alert error count.
 * If a parity/CRC error is detected on dfi_alert_n, this counter is incremented. This is independent of the setting of CRCPARCTL0.dfi_alert_err_int_en. It will saturate at 0xFFFF, and can be cleared by asserting CRCPARCTL0.dfi_alert_err_cnt_clr.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_CNT_MASK (0xFFFFU)
#define DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_CNT_SHIFT (0U)
#define DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_CNT_GET(x) (((uint32_t)(x) & DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_CNT_MASK) >> DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_CNT_SHIFT)
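
/*
 * Handling sketch for the DFI alert error logic above (the interrupt routing
 * itself is SoC-specific and hypothetical here): read the saturating error
 * count, then clear the counter and the interrupt through the W1C bits in
 * CRCPARCTL0, which the uMCTL2 clears again once the operation completes.
 *
 *   uint32_t errs = DDRCTL_CRCPARSTAT_DFI_ALERT_ERR_CNT_GET(HPM_DDRCTL->CRCPARSTAT);
 *   HPM_DDRCTL->CRCPARCTL0 |= DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_CNT_CLR_SET(1)
 *                           | DDRCTL_CRCPARCTL0_DFI_ALERT_ERR_INT_CLR_SET(1);
 */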

/* Bitfield definition for register: INIT0 */
/*
 * SKIP_DRAM_INIT (R/W)
 *
 * Description: If the lower bit is set, the SDRAM initialization routine is skipped. The upper bit decides what state the controller starts up in when reset is removed.
 * 00 - SDRAM Initialization routine is run after power-up
 * 01 - SDRAM Initialization routine is skipped after power-up. Controller starts up in Normal Mode
 * 11 - SDRAM Initialization routine is skipped after power-up. Controller starts up in Self-refresh Mode
 * 10 - SDRAM Initialization routine is run after power-up.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_INIT0_SKIP_DRAM_INIT_MASK (0xC0000000UL)
#define DDRCTL_INIT0_SKIP_DRAM_INIT_SHIFT (30U)
#define DDRCTL_INIT0_SKIP_DRAM_INIT_SET(x) (((uint32_t)(x) << DDRCTL_INIT0_SKIP_DRAM_INIT_SHIFT) & DDRCTL_INIT0_SKIP_DRAM_INIT_MASK)
#define DDRCTL_INIT0_SKIP_DRAM_INIT_GET(x) (((uint32_t)(x) & DDRCTL_INIT0_SKIP_DRAM_INIT_MASK) >> DDRCTL_INIT0_SKIP_DRAM_INIT_SHIFT)

/*
 * POST_CKE_X1024 (R/W)
 *
 * Description: Cycles to wait after driving CKE high to start the SDRAM initialization sequence.
 * Unit: 1024 clocks.
 * DDR2 typically requires a 400 ns delay, requiring this value to be programmed to 2 at all clock speeds.
 * LPDDR2/LPDDR3 typically requires this to be programmed for a delay of 200 us.
 * For configurations with MEMC_FREQ_RATIO=2, program this to JEDEC spec value divided by 2, and round it up to next integer value.
 * Value After Reset: 0x2
 * Exists: Always
 */
#define DDRCTL_INIT0_POST_CKE_X1024_MASK (0x3FF0000UL)
#define DDRCTL_INIT0_POST_CKE_X1024_SHIFT (16U)
#define DDRCTL_INIT0_POST_CKE_X1024_SET(x) (((uint32_t)(x) << DDRCTL_INIT0_POST_CKE_X1024_SHIFT) & DDRCTL_INIT0_POST_CKE_X1024_MASK)
#define DDRCTL_INIT0_POST_CKE_X1024_GET(x) (((uint32_t)(x) & DDRCTL_INIT0_POST_CKE_X1024_MASK) >> DDRCTL_INIT0_POST_CKE_X1024_SHIFT)

/*
 * PRE_CKE_X1024 (R/W)
 *
 * Description: Cycles to wait after reset before driving CKE high to start the SDRAM initialization sequence.
 * Unit: 1024 clock cycles.
 * DDR2 specifications typically require this to be programmed for a delay of >= 200 us.
 * LPDDR2/LPDDR3: tINIT1 of 100 ns (min)
 * For configurations with MEMC_FREQ_RATIO=2, program this to JEDEC spec value divided by 2, and round it up to next integer value.
 * Value After Reset: 0x4e
 * Exists: Always
 */
#define DDRCTL_INIT0_PRE_CKE_X1024_MASK (0x3FFU)
#define DDRCTL_INIT0_PRE_CKE_X1024_SHIFT (0U)
#define DDRCTL_INIT0_PRE_CKE_X1024_SET(x) (((uint32_t)(x) << DDRCTL_INIT0_PRE_CKE_X1024_SHIFT) & DDRCTL_INIT0_PRE_CKE_X1024_MASK)
#define DDRCTL_INIT0_PRE_CKE_X1024_GET(x) (((uint32_t)(x) & DDRCTL_INIT0_PRE_CKE_X1024_MASK) >> DDRCTL_INIT0_PRE_CKE_X1024_SHIFT)

/* Bitfield definition for register: INIT1 */
/*
 * DRAM_RSTN_X1024 (R/W)
 *
 * Description: Number of cycles to assert SDRAM reset signal during init sequence.
 * This is only present for designs supporting DDR3/DDR4 devices. For use with a Synopsys DDR PHY, this should be set to a minimum of 1.
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1
 */
#define DDRCTL_INIT1_DRAM_RSTN_X1024_MASK (0xFF0000UL)
#define DDRCTL_INIT1_DRAM_RSTN_X1024_SHIFT (16U)
#define DDRCTL_INIT1_DRAM_RSTN_X1024_SET(x) (((uint32_t)(x) << DDRCTL_INIT1_DRAM_RSTN_X1024_SHIFT) & DDRCTL_INIT1_DRAM_RSTN_X1024_MASK)
#define DDRCTL_INIT1_DRAM_RSTN_X1024_GET(x) (((uint32_t)(x) & DDRCTL_INIT1_DRAM_RSTN_X1024_MASK) >> DDRCTL_INIT1_DRAM_RSTN_X1024_SHIFT)

/*
 * FINAL_WAIT_X32 (R/W)
 *
 * Description: Cycles to wait after completing the SDRAM initialization sequence before starting the dynamic scheduler.
 * Unit: Counts of a global timer that pulses every 32 clock cycles.
 * There is no known specific requirement for this; it may be set to zero.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_INIT1_FINAL_WAIT_X32_MASK (0x7F00U)
#define DDRCTL_INIT1_FINAL_WAIT_X32_SHIFT (8U)
#define DDRCTL_INIT1_FINAL_WAIT_X32_SET(x) (((uint32_t)(x) << DDRCTL_INIT1_FINAL_WAIT_X32_SHIFT) & DDRCTL_INIT1_FINAL_WAIT_X32_MASK)
#define DDRCTL_INIT1_FINAL_WAIT_X32_GET(x) (((uint32_t)(x) & DDRCTL_INIT1_FINAL_WAIT_X32_MASK) >> DDRCTL_INIT1_FINAL_WAIT_X32_SHIFT)

/*
 * PRE_OCD_X32 (R/W)
 *
 * Description: Wait period before driving the OCD complete command to SDRAM.
 * Unit: Counts of a global timer that pulses every 32 clock cycles.
 * There is no known specific requirement for this; it may be set to zero.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_INIT1_PRE_OCD_X32_MASK (0xFU)
#define DDRCTL_INIT1_PRE_OCD_X32_SHIFT (0U)
#define DDRCTL_INIT1_PRE_OCD_X32_SET(x) (((uint32_t)(x) << DDRCTL_INIT1_PRE_OCD_X32_SHIFT) & DDRCTL_INIT1_PRE_OCD_X32_MASK)
#define DDRCTL_INIT1_PRE_OCD_X32_GET(x) (((uint32_t)(x) & DDRCTL_INIT1_PRE_OCD_X32_MASK) >> DDRCTL_INIT1_PRE_OCD_X32_SHIFT)

/* Bitfield definition for register: INIT3 */
/*
 * MR (R/W)
 *
 * Description: DDR2: Value to write to MR register. Bit 8 is for DLL and the setting here is ignored. The uMCTL2 sets this bit appropriately.
 * DDR3/DDR4: Value loaded into MR0 register.
 * mDDR: Value to write to MR register.
 * LPDDR2/LPDDR3: Value to write to MR1 register.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_INIT3_MR_MASK (0xFFFF0000UL)
#define DDRCTL_INIT3_MR_SHIFT (16U)
#define DDRCTL_INIT3_MR_SET(x) (((uint32_t)(x) << DDRCTL_INIT3_MR_SHIFT) & DDRCTL_INIT3_MR_MASK)
#define DDRCTL_INIT3_MR_GET(x) (((uint32_t)(x) & DDRCTL_INIT3_MR_MASK) >> DDRCTL_INIT3_MR_SHIFT)

/*
 * EMR (R/W)
 *
 * Description: DDR2: Value to write to EMR register. Bits 9:7 are for OCD and the setting in this register is ignored. The uMCTL2 sets those bits appropriately.
 * DDR3/DDR4: Value to write to MR1 register. Set bit 7 to 0. If PHY-evaluation mode training is enabled, this bit is set appropriately by the uMCTL2 during write leveling.
 * mDDR: Value to write to EMR register.
 * LPDDR2/LPDDR3: Value to write to MR2 register.
 * Value After Reset: 0x510
 * Exists: Always
 */
#define DDRCTL_INIT3_EMR_MASK (0xFFFFU)
#define DDRCTL_INIT3_EMR_SHIFT (0U)
#define DDRCTL_INIT3_EMR_SET(x) (((uint32_t)(x) << DDRCTL_INIT3_EMR_SHIFT) & DDRCTL_INIT3_EMR_MASK)
#define DDRCTL_INIT3_EMR_GET(x) (((uint32_t)(x) & DDRCTL_INIT3_EMR_MASK) >> DDRCTL_INIT3_EMR_SHIFT)

/* Bitfield definition for register: INIT4 */
/*
 * EMR2 (R/W)
 *
 * Description: DDR2: Value to write to EMR2 register.
 * DDR3/DDR4: Value to write to MR2 register.
 * LPDDR2/LPDDR3: Value to write to MR3 register.
 * mDDR: Unused.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_INIT4_EMR2_MASK (0xFFFF0000UL)
#define DDRCTL_INIT4_EMR2_SHIFT (16U)
#define DDRCTL_INIT4_EMR2_SET(x) (((uint32_t)(x) << DDRCTL_INIT4_EMR2_SHIFT) & DDRCTL_INIT4_EMR2_MASK)
#define DDRCTL_INIT4_EMR2_GET(x) (((uint32_t)(x) & DDRCTL_INIT4_EMR2_MASK) >> DDRCTL_INIT4_EMR2_SHIFT)

/*
 * EMR3 (R/W)
 *
 * Description: DDR2: Value to write to EMR3 register.
 * DDR3/DDR4: Value to write to MR3 register.
 * mDDR/LPDDR2/LPDDR3: Unused.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_INIT4_EMR3_MASK (0xFFFFU)
#define DDRCTL_INIT4_EMR3_SHIFT (0U)
#define DDRCTL_INIT4_EMR3_SET(x) (((uint32_t)(x) << DDRCTL_INIT4_EMR3_SHIFT) & DDRCTL_INIT4_EMR3_MASK)
#define DDRCTL_INIT4_EMR3_GET(x) (((uint32_t)(x) & DDRCTL_INIT4_EMR3_MASK) >> DDRCTL_INIT4_EMR3_SHIFT)

/* Bitfield definition for register: INIT5 */
/*
 * DEV_ZQINIT_X32 (R/W)
 *
 * Description: ZQ initial calibration, tZQINIT. Present only in designs configured to support DDR3 or DDR4 or LPDDR2/LPDDR3.
 * Unit: 32 clock cycles.
 * DDR3 typically requires 512 clocks. DDR4 requires 1024 clocks.
 * LPDDR2/LPDDR3 requires 1 us.
 * Value After Reset: 0x10
 * Exists: MEMC_DDR3==1 || MEMC_DDR4 == 1 || MEMC_LPDDR2==1
 */
#define DDRCTL_INIT5_DEV_ZQINIT_X32_MASK (0xFF0000UL)
#define DDRCTL_INIT5_DEV_ZQINIT_X32_SHIFT (16U)
#define DDRCTL_INIT5_DEV_ZQINIT_X32_SET(x) (((uint32_t)(x) << DDRCTL_INIT5_DEV_ZQINIT_X32_SHIFT) & DDRCTL_INIT5_DEV_ZQINIT_X32_MASK)
#define DDRCTL_INIT5_DEV_ZQINIT_X32_GET(x) (((uint32_t)(x) & DDRCTL_INIT5_DEV_ZQINIT_X32_MASK) >> DDRCTL_INIT5_DEV_ZQINIT_X32_SHIFT)

/* Bitfield definition for register: DIMMCTL */
/*
 * DIMM_ADDR_MIRR_EN (R/W)
 *
 * Description: Address Mirroring Enable (for multi-rank UDIMM implementations and multi-rank DDR4 RDIMM implementations).
 * Some UDIMMs and DDR4 RDIMMs implement address mirroring for odd ranks, which means that the following address, bank address and bank group bits are swapped: (A3, A4), (A5, A6), (A7, A8), (BA0, BA1) and also (A11, A13), (BG0, BG1) for the DDR4. Setting this bit ensures that, for mode register accesses during the automatic initialization routine, these bits are swapped within the uMCTL2 to compensate for this UDIMM/RDIMM swapping. In addition to the automatic initialization routine, in case of DDR4 UDIMM/RDIMM, they are swapped during the automatic MRS access to enable/disable of a particular DDR4 feature.
 * Note: This has no effect on the address of any other memory accesses, or of software-driven mode register accesses.
 * This is not supported for mDDR, LPDDR2 or LPDDR3 SDRAMs.
 * Note: In case of x16 DDR4 DIMMs, BG1 output of MRS for the odd ranks is same as BG0 because BG1 is invalid, hence dimm_dis_bg_mirroring register must be set to 1.
 * 1 - For odd ranks, implement address mirroring for MRS commands during initialization and for any automatic DDR4 MRS commands (to be used if UDIMM/RDIMM implements address mirroring)
878  * 0 - Do not implement address mirroring
879  * Value After Reset: 0x0
880  * Exists: Always
881  */
882 #define DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_MASK (0x2U)
883 #define DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_SHIFT (1U)
884 #define DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_SET(x) (((uint32_t)(x) << DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_SHIFT) & DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_MASK)
885 #define DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_GET(x) (((uint32_t)(x) & DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_MASK) >> DDRCTL_DIMMCTL_DIMM_ADDR_MIRR_EN_SHIFT)

/*
 * DIMM_STAGGER_CS_EN (R/W)
 *
 * Description: Staggering enable for multi-rank accesses (for multi-rank UDIMM and RDIMM implementations only). This is not supported for DDR4, mDDR, LPDDR2 or LPDDR3 SDRAMs.
 * 1 - Stagger accesses to even and odd ranks
 * 0 - Do not stagger accesses
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_MASK (0x1U)
#define DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_SHIFT (0U)
#define DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_SET(x) (((uint32_t)(x) << DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_SHIFT) & DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_MASK)
#define DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_GET(x) (((uint32_t)(x) & DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_MASK) >> DDRCTL_DIMMCTL_DIMM_STAGGER_CS_EN_SHIFT)

/* Bitfield definition for register: RANKCTL */
/*
 * DIFF_RANK_WR_GAP (R/W)
 *
 * Description: Only present for multi-rank configurations. Indicates the number of clocks of gap in data responses when performing consecutive writes to different ranks.
 * This is used to switch the delays in the PHY to match the rank requirements.
 * The value programmed in this register takes care of the ODT switch-off timing requirement when switching ranks during writes.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (N/2) and round it up to the next integer value, where N is the value required by the PHY, in terms of PHY clocks.
 * Value After Reset: 0x6
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_MASK (0xF00U)
#define DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_SHIFT (8U)
#define DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_SET(x) (((uint32_t)(x) << DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_SHIFT) & DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_MASK)
#define DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_GET(x) (((uint32_t)(x) & DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_MASK) >> DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_SHIFT)

/*
 * DIFF_RANK_RD_GAP (R/W)
 *
 * Description: Only present for multi-rank configurations. Indicates the number of clocks of gap in data responses when performing consecutive reads to different ranks.
 * This is used to switch the delays in the PHY to match the rank requirements.
 * The value programmed in this register takes care of the ODT switch-off timing requirement when switching ranks during reads.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (N/2) and round it up to the next integer value, where N is the value required by the PHY, in terms of PHY clocks.
 * Value After Reset: 0x6
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_MASK (0xF0U)
#define DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_SHIFT (4U)
#define DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_SET(x) (((uint32_t)(x) << DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_SHIFT) & DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_MASK)
#define DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_GET(x) (((uint32_t)(x) & DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_MASK) >> DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_SHIFT)

/*
 * MAX_RANK_RD (R/W)
 *
 * Description: Only present for multi-rank configurations. Background: Reads to the same rank can be performed back-to-back. Reads to different ranks require an additional gap dictated by the register RANKCTL.diff_rank_rd_gap. This is to avoid possible data bus contention as well as to give the PHY enough time to switch the delay when changing ranks. The uMCTL2 arbitrates for bus access on a cycle-by-cycle basis; therefore after a read is scheduled, there are a few clock cycles (determined by the value of the diff_rank_rd_gap register) in which only reads from the same rank are eligible to be scheduled. This prevents reads from other ranks from having fair access to the data bus.
 * This parameter represents the maximum number of reads that can be scheduled consecutively to the same rank. After this number is reached, a delay equal to RANKCTL.diff_rank_rd_gap is inserted by the scheduler to allow all ranks a fair opportunity to be scheduled. Higher numbers increase bandwidth utilization, lower numbers increase fairness.
 * This feature can be DISABLED by setting this register to 0. When set to 0, the Controller will stay on the same rank as long as commands are available for it.
 * Minimum programmable value is 0 (feature disabled) and maximum programmable value is 0xF.
 * Feature limitation: the max_rank_rd feature works as described only in the mode in which one command at the DDRC input results in one DFI command at the output. An example of this mode is: BL8 hardware configuration (MEMC_BURST_LENGTH=8), Full bus width mode (MSTR.data_bus_width=2'b00) and BL8 mode of operation (MSTR.burst_rdwr=4'b0100). In modes where a single HIF command results in multiple DFI commands (e.g. Half Bus Width, BL4, etc.), the same rank commands would be serviced for as long as they are available, which is equivalent to this feature being disabled.
 * FOR PERFORMANCE ONLY.
 * Value After Reset: 0xf
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_RANKCTL_MAX_RANK_RD_MASK (0xFU)
#define DDRCTL_RANKCTL_MAX_RANK_RD_SHIFT (0U)
#define DDRCTL_RANKCTL_MAX_RANK_RD_SET(x) (((uint32_t)(x) << DDRCTL_RANKCTL_MAX_RANK_RD_SHIFT) & DDRCTL_RANKCTL_MAX_RANK_RD_MASK)
#define DDRCTL_RANKCTL_MAX_RANK_RD_GET(x) (((uint32_t)(x) & DDRCTL_RANKCTL_MAX_RANK_RD_MASK) >> DDRCTL_RANKCTL_MAX_RANK_RD_SHIFT)
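
/*
 * Programming sketch (illustrative, placeholder values): with MEMC_FREQ_RATIO=2
 * and a hypothetical PHY requirement of N PHY clocks, each gap field takes
 * ceil(N/2) controller clocks. Assuming a hypothetical `DDRCTL_Type *ddrctl`:
 *
 *   uint32_t n = 11U;                  // N required by the PHY, in PHY clocks
 *   uint32_t gap = (n + 1U) / 2U;      // divide by 2, round up
 *   ddrctl->RANKCTL = DDRCTL_RANKCTL_DIFF_RANK_WR_GAP_SET(gap)
 *                   | DDRCTL_RANKCTL_DIFF_RANK_RD_GAP_SET(gap)
 *                   | DDRCTL_RANKCTL_MAX_RANK_RD_SET(0xFU);
 */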

/* Bitfield definition for register: DRAMTMG0 */
/*
 * WR2PRE (R/W)
 *
 * Description: Minimum time between write and precharge to same bank.
 * Unit: Clocks
 * Specifications: WL + BL/2 + tWR = approximately 8 cycles + 15 ns = 14 clocks at 400 MHz (and less for lower frequencies), where:
 * WL = write latency
 * BL = burst length. This must match the value programmed in the BL bit of the mode register to the SDRAM. BST (burst terminate) is not supported at present.
 * tWR = Write recovery time. This comes directly from the SDRAM specification.
 * Add one extra cycle for LPDDR2/LPDDR3 for this parameter.
 * For configurations with MEMC_FREQ_RATIO=2, 1T mode, divide the above value by 2. No rounding up.
 * For configurations with MEMC_FREQ_RATIO=2, 2T mode, divide the above value by 2 and add 1. No rounding up.
 * Value After Reset: 0xf
 * Exists: Always
 */
#define DDRCTL_DRAMTMG0_WR2PRE_MASK (0x7F000000UL)
#define DDRCTL_DRAMTMG0_WR2PRE_SHIFT (24U)
#define DDRCTL_DRAMTMG0_WR2PRE_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG0_WR2PRE_SHIFT) & DDRCTL_DRAMTMG0_WR2PRE_MASK)
#define DDRCTL_DRAMTMG0_WR2PRE_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG0_WR2PRE_MASK) >> DDRCTL_DRAMTMG0_WR2PRE_SHIFT)

/*
 * T_FAW (R/W)
 *
 * Description: tFAW: Valid only when 8 or more banks (or banks x bank groups) are present.
 * In an 8-bank design, at most 4 banks must be activated in a rolling window of tFAW cycles.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tFAW/2) and round up to next integer value.
 * In a 4-bank design, set this register to 0x1 independent of the MEMC_FREQ_RATIO configuration.
 * Unit: Clocks
 * Value After Reset: 0x10
 * Exists: Always
 */
#define DDRCTL_DRAMTMG0_T_FAW_MASK (0x3F0000UL)
#define DDRCTL_DRAMTMG0_T_FAW_SHIFT (16U)
#define DDRCTL_DRAMTMG0_T_FAW_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG0_T_FAW_SHIFT) & DDRCTL_DRAMTMG0_T_FAW_MASK)
#define DDRCTL_DRAMTMG0_T_FAW_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG0_T_FAW_MASK) >> DDRCTL_DRAMTMG0_T_FAW_SHIFT)

/*
 * T_RAS_MAX (R/W)
 *
 * Description: tRAS(max): Maximum time between activate and precharge to same bank. This is the maximum time that a page can be kept open.
 * Minimum value of this register is 1. Zero is invalid.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tRAS(max)-1)/2. No rounding up.
 * Unit: Multiples of 1024 clocks.
 * Value After Reset: 0x1b
 * Exists: Always
 */
#define DDRCTL_DRAMTMG0_T_RAS_MAX_MASK (0x7F00U)
#define DDRCTL_DRAMTMG0_T_RAS_MAX_SHIFT (8U)
#define DDRCTL_DRAMTMG0_T_RAS_MAX_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG0_T_RAS_MAX_SHIFT) & DDRCTL_DRAMTMG0_T_RAS_MAX_MASK)
#define DDRCTL_DRAMTMG0_T_RAS_MAX_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG0_T_RAS_MAX_MASK) >> DDRCTL_DRAMTMG0_T_RAS_MAX_SHIFT)

/*
 * T_RAS_MIN (R/W)
 *
 * Description: tRAS(min): Minimum time between activate and precharge to the same bank.
 * For configurations with MEMC_FREQ_RATIO=2, 1T mode, program this to tRAS(min)/2. No rounding up.
 * For configurations with MEMC_FREQ_RATIO=2, 2T mode, program this to (tRAS(min)/2 + 1). No rounding up of the division operation.
 * Unit: Clocks
 * Value After Reset: 0xf
 * Exists: Always
 */
#define DDRCTL_DRAMTMG0_T_RAS_MIN_MASK (0x3FU)
#define DDRCTL_DRAMTMG0_T_RAS_MIN_SHIFT (0U)
#define DDRCTL_DRAMTMG0_T_RAS_MIN_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG0_T_RAS_MIN_SHIFT) & DDRCTL_DRAMTMG0_T_RAS_MIN_MASK)
#define DDRCTL_DRAMTMG0_T_RAS_MIN_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG0_T_RAS_MIN_MASK) >> DDRCTL_DRAMTMG0_T_RAS_MIN_SHIFT)
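
/*
 * Worked sketch (illustrative, example numbers only; real values come from the
 * SDRAM datasheet): at 400 MHz full-rate with WL = 6, BL = 8 and tWR = 15 ns,
 * WR2PRE = WL + BL/2 + ceil(15 ns / 2.5 ns) = 6 + 4 + 6 = 16 clocks. The other
 * fields below are placeholders. Assuming a hypothetical `DDRCTL_Type *ddrctl`:
 *
 *   ddrctl->DRAMTMG0 = DDRCTL_DRAMTMG0_WR2PRE_SET(16U)
 *                    | DDRCTL_DRAMTMG0_T_FAW_SET(20U)
 *                    | DDRCTL_DRAMTMG0_T_RAS_MAX_SET(27U)
 *                    | DDRCTL_DRAMTMG0_T_RAS_MIN_SET(15U);
 */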

/* Bitfield definition for register: DRAMTMG1 */
/*
 * T_XP (R/W)
 *
 * Description: tXP: Minimum time after power-down exit to any operation. For DDR3, this should be programmed to tXPDLL if slow powerdown exit is selected in MR0[12].
 * If C/A parity for DDR4 is used, set to (tXP+PL) instead.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tXP/2) and round it up to the next integer value.
 * Units: Clocks
 * Value After Reset: 0x8
 * Exists: Always
 */
#define DDRCTL_DRAMTMG1_T_XP_MASK (0x1F0000UL)
#define DDRCTL_DRAMTMG1_T_XP_SHIFT (16U)
#define DDRCTL_DRAMTMG1_T_XP_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG1_T_XP_SHIFT) & DDRCTL_DRAMTMG1_T_XP_MASK)
#define DDRCTL_DRAMTMG1_T_XP_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG1_T_XP_MASK) >> DDRCTL_DRAMTMG1_T_XP_SHIFT)

/*
 * RD2PRE (R/W)
 *
 * Description: tRTP: Minimum time from read to precharge of same bank.
 * DDR2: tAL + BL/2 + max(tRTP, 2) - 2
 * DDR3: tAL + max(tRTP, 4)
 * DDR4: maximum of the following two equations: tAL + max(tRTP, 4), or RL + BL/2 - tRP.
 * mDDR: BL/2
 * LPDDR2: depends on whether the device is LPDDR2-S2 or LPDDR2-S4:
 * LPDDR2-S2: BL/2 + tRTP - 1.
 * LPDDR2-S4: BL/2 + max(tRTP, 2) - 2.
 * LPDDR3: BL/2 + max(tRTP, 4) - 4
 * For configurations with MEMC_FREQ_RATIO=2, 1T mode, divide the above value by 2. No rounding up.
 * For configurations with MEMC_FREQ_RATIO=2, 2T mode, divide the above value by 2 and add 1. No rounding up of division operation.
 * Unit: Clocks.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_DRAMTMG1_RD2PRE_MASK (0x1F00U)
#define DDRCTL_DRAMTMG1_RD2PRE_SHIFT (8U)
#define DDRCTL_DRAMTMG1_RD2PRE_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG1_RD2PRE_SHIFT) & DDRCTL_DRAMTMG1_RD2PRE_MASK)
#define DDRCTL_DRAMTMG1_RD2PRE_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG1_RD2PRE_MASK) >> DDRCTL_DRAMTMG1_RD2PRE_SHIFT)

/*
 * T_RC (R/W)
 *
 * Description: tRC: Minimum time between activates to same bank.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tRC/2) and round up to next integer value.
 * Unit: Clocks.
 * Value After Reset: 0x14
 * Exists: Always
 */
#define DDRCTL_DRAMTMG1_T_RC_MASK (0x7FU)
#define DDRCTL_DRAMTMG1_T_RC_SHIFT (0U)
#define DDRCTL_DRAMTMG1_T_RC_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG1_T_RC_SHIFT) & DDRCTL_DRAMTMG1_T_RC_MASK)
#define DDRCTL_DRAMTMG1_T_RC_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG1_T_RC_MASK) >> DDRCTL_DRAMTMG1_T_RC_SHIFT)

/* Bitfield definition for register: DRAMTMG2 */
/*
 * RD2WR (R/W)
 *
 * Description: Minimum time from read command to write command. Include time for bus turnaround and all per-bank, per-rank, and global constraints.
 * DDR2/3/mDDR: RL + BL/2 + 2 - WL
 * DDR4: RL + BL/2 + 1 + WR_PREAMBLE - WL
 * LPDDR2/LPDDR3: RL + BL/2 + RU(tDQSCKmax/tCK) + 1 - WL
 * Unit: Clocks. Where:
 * WL = write latency
 * BL = burst length. This must match the value programmed in the BL bit of the mode register to the SDRAM
 * RL = read latency = CAS latency
 * WR_PREAMBLE = write preamble. This is unique to DDR4.
 * For configurations with MEMC_FREQ_RATIO=2, divide the value calculated using the above equation by 2, and round it up to next integer.
 * Value After Reset: 0x6
 * Exists: Always
 */
#define DDRCTL_DRAMTMG2_RD2WR_MASK (0x1F00U)
#define DDRCTL_DRAMTMG2_RD2WR_SHIFT (8U)
#define DDRCTL_DRAMTMG2_RD2WR_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG2_RD2WR_SHIFT) & DDRCTL_DRAMTMG2_RD2WR_MASK)
#define DDRCTL_DRAMTMG2_RD2WR_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG2_RD2WR_MASK) >> DDRCTL_DRAMTMG2_RD2WR_SHIFT)

/*
 * WR2RD (R/W)
 *
 * Description: Minimum time from write command to read command; in DDR4 this applies to commands within the same bank group. Includes time for bus turnaround, recovery times, and all per-bank, per-rank, and global constraints.
 * DDR4: WL + BL/2 + tWTR_L
 * Others: WL + BL/2 + tWTR
 * Unit: Clocks. Where:
 * WL = write latency
 * BL = burst length. This must match the value programmed in the BL bit of the mode register to the SDRAM
 * tWTR_L = internal write to read command delay for same bank group. This comes directly from the SDRAM specification.
 * tWTR = internal write to read command delay. This comes directly from the SDRAM specification.
 * Add one extra cycle for LPDDR2/LPDDR3 operation.
 * For configurations with MEMC_FREQ_RATIO=2, divide the value calculated using the above equation by 2, and round it up to next integer.
 * Value After Reset: 0xd
 * Exists: Always
 */
#define DDRCTL_DRAMTMG2_WR2RD_MASK (0x3FU)
#define DDRCTL_DRAMTMG2_WR2RD_SHIFT (0U)
#define DDRCTL_DRAMTMG2_WR2RD_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG2_WR2RD_SHIFT) & DDRCTL_DRAMTMG2_WR2RD_MASK)
#define DDRCTL_DRAMTMG2_WR2RD_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG2_WR2RD_MASK) >> DDRCTL_DRAMTMG2_WR2RD_SHIFT)
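
/*
 * Half-rate rounding sketch (illustrative): many DRAMTMG fields ask for
 * "divide by 2 and round up" when MEMC_FREQ_RATIO=2. A hypothetical helper
 * macro (not part of this header) makes that explicit:
 *
 *   #define DDR_TIMING_DIV2_RU(t) (((t) + 1U) / 2U)   // ceil(t / 2)
 *
 *   // e.g. a full-rate WR2RD of 13 clocks becomes ceil(13/2) = 7:
 *   ddrctl->DRAMTMG2 = DDRCTL_DRAMTMG2_RD2WR_SET(6U)
 *                    | DDRCTL_DRAMTMG2_WR2RD_SET(DDR_TIMING_DIV2_RU(13U));
 */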

/* Bitfield definition for register: DRAMTMG3 */
/*
 * T_MRD (R/W)
 *
 * Description: tMRD: Cycles between load mode commands. If MEMC_DDR3_OR_4 = 0, this parameter is also used to define the cycles between load mode command and following non-load mode command.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tMRD/2) and round it up to the next integer value.
 * If C/A parity for DDR4 is used, set to tMRD_PAR (tMOD + PL) instead.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_DRAMTMG3_T_MRD_MASK (0x3F000UL)
#define DDRCTL_DRAMTMG3_T_MRD_SHIFT (12U)
#define DDRCTL_DRAMTMG3_T_MRD_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG3_T_MRD_SHIFT) & DDRCTL_DRAMTMG3_T_MRD_MASK)
#define DDRCTL_DRAMTMG3_T_MRD_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG3_T_MRD_MASK) >> DDRCTL_DRAMTMG3_T_MRD_SHIFT)

/*
 * T_MOD (R/W)
 *
 * Description: tMOD: Present if MEMC_DDR3_OR_4 = 1. Cycles between load mode command and following non-load mode command. This is required to be programmed even when a design that supports DDR3/4 is running in DDR2 mode.
 * If C/A parity for DDR4 is used, set to tMOD_PAR (tMOD + PL) instead.
 * Set to tMOD if MEMC_FREQ_RATIO=1, or tMOD/2 (rounded up to next integer) if MEMC_FREQ_RATIO=2. Note that if using RDIMM, depending on the PHY, it may be necessary to use a value of tMOD + 1 or (tMOD + 1)/2 to compensate for the extra cycle of latency applied to mode register writes by the RDIMM chip.
 * Value After Reset: "(MEMC_DDR3_EN==1 || MEMC_DDR4_EN==1 ) ? 0xc : 0x0"
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1
 */
#define DDRCTL_DRAMTMG3_T_MOD_MASK (0x3FFU)
#define DDRCTL_DRAMTMG3_T_MOD_SHIFT (0U)
#define DDRCTL_DRAMTMG3_T_MOD_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG3_T_MOD_SHIFT) & DDRCTL_DRAMTMG3_T_MOD_MASK)
#define DDRCTL_DRAMTMG3_T_MOD_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG3_T_MOD_MASK) >> DDRCTL_DRAMTMG3_T_MOD_SHIFT)

/* Bitfield definition for register: DRAMTMG4 */
/*
 * T_RCD (R/W)
 *
 * Description: tRCD - tAL: Minimum time from activate to read or write command to same bank.
 * For configurations with MEMC_FREQ_RATIO=2, program this to ((tRCD - tAL)/2) and round it up to the next integer value.
 * Minimum value allowed for this register is 1, which implies minimum (tRCD - tAL) value to be 2 in configurations with MEMC_FREQ_RATIO=2.
 * Unit: Clocks.
 * Value After Reset: 0x5
 * Exists: Always
 */
#define DDRCTL_DRAMTMG4_T_RCD_MASK (0x1F000000UL)
#define DDRCTL_DRAMTMG4_T_RCD_SHIFT (24U)
#define DDRCTL_DRAMTMG4_T_RCD_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG4_T_RCD_SHIFT) & DDRCTL_DRAMTMG4_T_RCD_MASK)
#define DDRCTL_DRAMTMG4_T_RCD_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG4_T_RCD_MASK) >> DDRCTL_DRAMTMG4_T_RCD_SHIFT)

/*
 * T_CCD (R/W)
 *
 * Description: DDR4: tCCD_L: This is the minimum time between two reads or two writes for same bank group. Others: tCCD: This is the minimum time between two reads or two writes.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tCCD_L/2 or tCCD/2) and round it up to the next integer value.
 * Unit: clocks.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_DRAMTMG4_T_CCD_MASK (0x70000UL)
#define DDRCTL_DRAMTMG4_T_CCD_SHIFT (16U)
#define DDRCTL_DRAMTMG4_T_CCD_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG4_T_CCD_SHIFT) & DDRCTL_DRAMTMG4_T_CCD_MASK)
#define DDRCTL_DRAMTMG4_T_CCD_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG4_T_CCD_MASK) >> DDRCTL_DRAMTMG4_T_CCD_SHIFT)

/*
 * T_RRD (R/W)
 *
 * Description: DDR4: tRRD_L: Minimum time between activates from bank "a" to bank "b" for same bank group. Others: tRRD: Minimum time between activates from bank "a" to bank "b".
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tRRD_L/2 or tRRD/2) and round it up to the next integer value.
 * Unit: Clocks.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_DRAMTMG4_T_RRD_MASK (0xF00U)
#define DDRCTL_DRAMTMG4_T_RRD_SHIFT (8U)
#define DDRCTL_DRAMTMG4_T_RRD_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG4_T_RRD_SHIFT) & DDRCTL_DRAMTMG4_T_RRD_MASK)
#define DDRCTL_DRAMTMG4_T_RRD_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG4_T_RRD_MASK) >> DDRCTL_DRAMTMG4_T_RRD_SHIFT)

/*
 * T_RP (R/W)
 *
 * Description: tRP: Minimum time from precharge to activate of same bank.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (tRP/2 + 1). No round up of the fraction.
 * Unit: Clocks.
 * Value After Reset: 0x5
 * Exists: Always
 */
#define DDRCTL_DRAMTMG4_T_RP_MASK (0x1FU)
#define DDRCTL_DRAMTMG4_T_RP_SHIFT (0U)
#define DDRCTL_DRAMTMG4_T_RP_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG4_T_RP_SHIFT) & DDRCTL_DRAMTMG4_T_RP_MASK)
#define DDRCTL_DRAMTMG4_T_RP_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG4_T_RP_MASK) >> DDRCTL_DRAMTMG4_T_RP_SHIFT)

/* Bitfield definition for register: DRAMTMG5 */
/*
 * T_CKSRX (R/W)
 *
 * Description: This is the time before Self Refresh Exit that CK is maintained as a valid clock before issuing SRX. Specifies the clock stable time before SRX.
 * Recommended settings:
 * mDDR: 1
 * LPDDR2: 2
 * LPDDR3: 2
 * DDR2: 1
 * DDR3: tCKSRX
 * DDR4: tCKSRX
 * For configurations with MEMC_FREQ_RATIO=2, program this to recommended value divided by two and round it up to next integer.
 * Value After Reset: 0x5
 * Exists: Always
 */
#define DDRCTL_DRAMTMG5_T_CKSRX_MASK (0xF000000UL)
#define DDRCTL_DRAMTMG5_T_CKSRX_SHIFT (24U)
#define DDRCTL_DRAMTMG5_T_CKSRX_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG5_T_CKSRX_SHIFT) & DDRCTL_DRAMTMG5_T_CKSRX_MASK)
#define DDRCTL_DRAMTMG5_T_CKSRX_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG5_T_CKSRX_MASK) >> DDRCTL_DRAMTMG5_T_CKSRX_SHIFT)

/*
 * T_CKSRE (R/W)
 *
 * Description: This is the time after Self Refresh Down Entry that CK is maintained as a valid clock. Specifies the clock disable delay after SRE.
 * Recommended settings:
 * mDDR: 0
 * LPDDR2: 2
 * LPDDR3: 2
 * DDR2: 1
 * DDR3: max (10 ns, 5 tCK)
 * DDR4: max (10 ns, 5 tCK)
 * For configurations with MEMC_FREQ_RATIO=2, program this to recommended value divided by two and round it up to next integer.
 * Value After Reset: 0x5
 * Exists: Always
 */
#define DDRCTL_DRAMTMG5_T_CKSRE_MASK (0xF0000UL)
#define DDRCTL_DRAMTMG5_T_CKSRE_SHIFT (16U)
#define DDRCTL_DRAMTMG5_T_CKSRE_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG5_T_CKSRE_SHIFT) & DDRCTL_DRAMTMG5_T_CKSRE_MASK)
#define DDRCTL_DRAMTMG5_T_CKSRE_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG5_T_CKSRE_MASK) >> DDRCTL_DRAMTMG5_T_CKSRE_SHIFT)

/*
 * T_CKESR (R/W)
 *
 * Description: Minimum CKE low width for Self Refresh entry to exit timing in memory clock cycles.
 * Recommended settings:
 * mDDR: tRFC
 * LPDDR2: tCKESR
 * LPDDR3: tCKESR
 * DDR2: tCKE
 * DDR3: tCKE + 1
 * DDR4: tCKE + 1
 * For configurations with MEMC_FREQ_RATIO=2, program this to recommended value divided by two and round it up to next integer.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_DRAMTMG5_T_CKESR_MASK (0x3F00U)
#define DDRCTL_DRAMTMG5_T_CKESR_SHIFT (8U)
#define DDRCTL_DRAMTMG5_T_CKESR_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG5_T_CKESR_SHIFT) & DDRCTL_DRAMTMG5_T_CKESR_MASK)
#define DDRCTL_DRAMTMG5_T_CKESR_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG5_T_CKESR_MASK) >> DDRCTL_DRAMTMG5_T_CKESR_SHIFT)

/*
 * T_CKE (R/W)
 *
 * Description: Minimum number of cycles of CKE HIGH/LOW during power-down and self refresh.
 * LPDDR2/LPDDR3 mode: Set this to the larger of tCKE or tCKESR.
 * Non-LPDDR2/non-LPDDR3 designs: Set this to the tCKE value.
 * For configurations with MEMC_FREQ_RATIO=2, program this to (value described above)/2 and round it up to the next integer value.
 * Unit: Clocks.
 * Value After Reset: 0x3
 * Exists: Always
 */
#define DDRCTL_DRAMTMG5_T_CKE_MASK (0x1FU)
#define DDRCTL_DRAMTMG5_T_CKE_SHIFT (0U)
#define DDRCTL_DRAMTMG5_T_CKE_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG5_T_CKE_SHIFT) & DDRCTL_DRAMTMG5_T_CKE_MASK)
#define DDRCTL_DRAMTMG5_T_CKE_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG5_T_CKE_MASK) >> DDRCTL_DRAMTMG5_T_CKE_SHIFT)

/* Bitfield definition for register: DRAMTMG8 */
/*
 * T_XS_DLL_X32 (R/W)
 *
 * Description: tXSDLL: Exit Self Refresh to commands requiring a locked DLL.
 * For configurations with MEMC_FREQ_RATIO=2, program this to the above value divided by 2 and round up to next integer value.
 * Unit: Multiples of 32 clocks.
 * Note: In LPDDR2/LPDDR3/Mobile DDR mode, t_xs_x32 and t_xs_dll_x32 must both be set to the same value, derived from tXSR.
 * Value After Reset: 0x44
 * Exists: Always
 */
#define DDRCTL_DRAMTMG8_T_XS_DLL_X32_MASK (0x7F00U)
#define DDRCTL_DRAMTMG8_T_XS_DLL_X32_SHIFT (8U)
#define DDRCTL_DRAMTMG8_T_XS_DLL_X32_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG8_T_XS_DLL_X32_SHIFT) & DDRCTL_DRAMTMG8_T_XS_DLL_X32_MASK)
#define DDRCTL_DRAMTMG8_T_XS_DLL_X32_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG8_T_XS_DLL_X32_MASK) >> DDRCTL_DRAMTMG8_T_XS_DLL_X32_SHIFT)

/*
 * T_XS_X32 (R/W)
 *
 * Description: tXS: Exit Self Refresh to commands not requiring a locked DLL.
 * For configurations with MEMC_FREQ_RATIO=2, program this to the above value divided by 2 and round up to next integer value.
 * Unit: Multiples of 32 clocks.
 * Note: In LPDDR2/LPDDR3/Mobile DDR mode, t_xs_x32 and t_xs_dll_x32 must both be set to the same value, derived from tXSR.
 * Value After Reset: 0x5
 * Exists: Always
 */
#define DDRCTL_DRAMTMG8_T_XS_X32_MASK (0x7FU)
#define DDRCTL_DRAMTMG8_T_XS_X32_SHIFT (0U)
#define DDRCTL_DRAMTMG8_T_XS_X32_SET(x) (((uint32_t)(x) << DDRCTL_DRAMTMG8_T_XS_X32_SHIFT) & DDRCTL_DRAMTMG8_T_XS_X32_MASK)
#define DDRCTL_DRAMTMG8_T_XS_X32_GET(x) (((uint32_t)(x) & DDRCTL_DRAMTMG8_T_XS_X32_MASK) >> DDRCTL_DRAMTMG8_T_XS_X32_SHIFT)
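
/*
 * Unit-conversion sketch (illustrative, example clock counts only): the
 * DRAMTMG8 fields are in multiples of 32 clocks, so a tXS of, say, 140 clocks
 * is programmed as ceil(140 / 32) = 5. A hypothetical helper macro:
 *
 *   #define DDR_CLOCKS_TO_X32(t) (((t) + 31U) / 32U)  // round up to x32 units
 *
 *   ddrctl->DRAMTMG8 = DDRCTL_DRAMTMG8_T_XS_DLL_X32_SET(DDR_CLOCKS_TO_X32(2176U))
 *                    | DDRCTL_DRAMTMG8_T_XS_X32_SET(DDR_CLOCKS_TO_X32(140U));
 */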

/* Bitfield definition for register: ZQCTL0 */
/*
 * DIS_AUTO_ZQ (R/W)
 *
 * Description:
 * 1 - Disable uMCTL2 generation of ZQCS command. Register reg_ddrc_zq_calib_short can be used instead to control ZQ calibration commands.
 * 0 - Internally generate ZQCS commands based on ZQCTL1.t_zq_short_interval_x1024.
 * This is only present for designs supporting DDR3/DDR4 or LPDDR2/LPDDR3 devices.
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1 || MEMC_LPDDR2==1
 */
#define DDRCTL_ZQCTL0_DIS_AUTO_ZQ_MASK (0x80000000UL)
#define DDRCTL_ZQCTL0_DIS_AUTO_ZQ_SHIFT (31U)
#define DDRCTL_ZQCTL0_DIS_AUTO_ZQ_SET(x) (((uint32_t)(x) << DDRCTL_ZQCTL0_DIS_AUTO_ZQ_SHIFT) & DDRCTL_ZQCTL0_DIS_AUTO_ZQ_MASK)
#define DDRCTL_ZQCTL0_DIS_AUTO_ZQ_GET(x) (((uint32_t)(x) & DDRCTL_ZQCTL0_DIS_AUTO_ZQ_MASK) >> DDRCTL_ZQCTL0_DIS_AUTO_ZQ_SHIFT)

/*
 * DIS_SRX_ZQCL (R/W)
 *
 * Description:
 * 1 - Disable issuing of ZQCL command at Self-Refresh exit. Only applicable when run in DDR3 or DDR4 or LPDDR2 or LPDDR3 mode.
 * 0 - Enable issuing of ZQCL command at Self-Refresh exit. Only applicable when run in DDR3 or DDR4 or LPDDR2 or LPDDR3 mode.
 * This is only present for designs supporting DDR3/DDR4 or LPDDR2/LPDDR3 devices.
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1 || MEMC_LPDDR2==1
 */
#define DDRCTL_ZQCTL0_DIS_SRX_ZQCL_MASK (0x40000000UL)
#define DDRCTL_ZQCTL0_DIS_SRX_ZQCL_SHIFT (30U)
#define DDRCTL_ZQCTL0_DIS_SRX_ZQCL_SET(x) (((uint32_t)(x) << DDRCTL_ZQCTL0_DIS_SRX_ZQCL_SHIFT) & DDRCTL_ZQCTL0_DIS_SRX_ZQCL_MASK)
#define DDRCTL_ZQCTL0_DIS_SRX_ZQCL_GET(x) (((uint32_t)(x) & DDRCTL_ZQCTL0_DIS_SRX_ZQCL_MASK) >> DDRCTL_ZQCTL0_DIS_SRX_ZQCL_SHIFT)

/*
 * ZQ_RESISTOR_SHARED (R/W)
 *
 * Description:
 * 1 - Denotes that the ZQ resistor is shared between ranks, meaning ZQinit/ZQCL/ZQCS commands are sent to one rank at a time with tZQinit/tZQCL/tZQCS timing met between commands so that commands to different ranks do not overlap.
 * 0 - ZQ resistor is not shared.
 * This is only present for designs supporting DDR3/DDR4 or LPDDR2/LPDDR3 devices.
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1 || MEMC_LPDDR2==1
 */
#define DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_MASK (0x20000000UL)
#define DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_SHIFT (29U)
#define DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_SET(x) (((uint32_t)(x) << DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_SHIFT) & DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_MASK)
#define DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_GET(x) (((uint32_t)(x) & DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_MASK) >> DDRCTL_ZQCTL0_ZQ_RESISTOR_SHARED_SHIFT)

/*
 * T_ZQ_LONG_NOP (R/W)
 *
 * Description: tZQoper for DDR3/DDR4, tZQCL for LPDDR2/LPDDR3: Number of cycles of NOP required after a ZQCL (ZQ calibration long) command is issued to SDRAM.
 * For configurations with MEMC_FREQ_RATIO=2: DDR3/DDR4: program this to tZQoper/2 and round it up to the next integer value.
 * LPDDR2/LPDDR3: program this to tZQCL/2 and round it up to the next integer value.
 * Unit: Clock cycles.
 * This is only present for designs supporting DDR3/DDR4 or LPDDR2/LPDDR3 devices.
 * Value After Reset: 0x200
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1 || MEMC_LPDDR2==1
 */
#define DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_MASK (0x3FF0000UL)
#define DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_SHIFT (16U)
#define DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_SET(x) (((uint32_t)(x) << DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_SHIFT) & DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_MASK)
#define DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_GET(x) (((uint32_t)(x) & DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_MASK) >> DDRCTL_ZQCTL0_T_ZQ_LONG_NOP_SHIFT)

/*
 * T_ZQ_SHORT_NOP (R/W)
 *
 * Description: tZQCS: Number of cycles of NOP required after a ZQCS (ZQ calibration short) command is issued to SDRAM.
 * For configurations with MEMC_FREQ_RATIO=2, program this to tZQCS/2 and round it up to the next integer value.
 * Unit: Clock cycles.
 * This is only present for designs supporting DDR3/DDR4 or LPDDR2/LPDDR3 devices.
 * Value After Reset: 0x40
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1 || MEMC_LPDDR2==1
 */
#define DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_MASK (0x3FFU)
#define DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_SHIFT (0U)
#define DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_SET(x) (((uint32_t)(x) << DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_SHIFT) & DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_MASK)
#define DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_GET(x) (((uint32_t)(x) & DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_MASK) >> DDRCTL_ZQCTL0_T_ZQ_SHORT_NOP_SHIFT)
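
/*
 * Usage sketch (illustrative): handing ZQ short calibration over to software
 * by disabling the controller-generated ZQCS commands, assuming a
 * hypothetical `DDRCTL_Type *ddrctl`:
 *
 *   uint32_t zqctl0 = ddrctl->ZQCTL0;
 *   zqctl0 |= DDRCTL_ZQCTL0_DIS_AUTO_ZQ_SET(1U);  // software issues ZQCS instead
 *   ddrctl->ZQCTL0 = zqctl0;
 */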

/* Bitfield definition for register: ZQCTL1 */
/*
 * T_ZQ_SHORT_INTERVAL_X1024 (R/W)
 *
 * Description: Average interval to wait between automatically issuing ZQCS (ZQ calibration short) commands to DDR3/DDR4/LPDDR2/LPDDR3 devices.
 * Meaningless if ZQCTL0.dis_auto_zq=1.
 * Unit: 1024 clock cycles.
 * This is only present for designs supporting DDR3/DDR4 or LPDDR2/LPDDR3 devices.
 * Value After Reset: 0x100
 * Exists: MEMC_DDR3==1 || MEMC_DDR4==1 || MEMC_LPDDR2==1
 */
#define DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_MASK (0xFFFFFUL)
#define DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_SHIFT (0U)
#define DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_SET(x) (((uint32_t)(x) << DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_SHIFT) & DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_MASK)
#define DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_GET(x) (((uint32_t)(x) & DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_MASK) >> DDRCTL_ZQCTL1_T_ZQ_SHORT_INTERVAL_X1024_SHIFT)

/* Bitfield definition for register: ZQSTAT */
/*
 * ZQ_RESET_BUSY (R)
 *
 * Description: SoC core may initiate a ZQ Reset operation only if this signal is low. This signal goes high in the clock after the uMCTL2 accepts the ZQ Reset request. It goes low when the ZQ Reset command is issued to the SDRAM and the associated NOP period is over. It is recommended not to perform ZQ Reset commands when this signal is high.
 * 0 - Indicates that the SoC core can initiate a ZQ Reset operation
 * 1 - Indicates that ZQ Reset operation is in progress
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_ZQSTAT_ZQ_RESET_BUSY_MASK (0x1U)
#define DDRCTL_ZQSTAT_ZQ_RESET_BUSY_SHIFT (0U)
#define DDRCTL_ZQSTAT_ZQ_RESET_BUSY_GET(x) (((uint32_t)(x) & DDRCTL_ZQSTAT_ZQ_RESET_BUSY_MASK) >> DDRCTL_ZQSTAT_ZQ_RESET_BUSY_SHIFT)
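
/*
 * Polling sketch (illustrative): software should wait for ZQ_RESET_BUSY to
 * clear before initiating another ZQ Reset, assuming a hypothetical
 * `DDRCTL_Type *ddrctl`:
 *
 *   while (DDRCTL_ZQSTAT_ZQ_RESET_BUSY_GET(ddrctl->ZQSTAT) != 0U) {
 *       // ZQ Reset still in progress; spin until it completes
 *   }
 */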

/* Bitfield definition for register: DFITMG0 */
/*
 * DFI_T_CTRL_DELAY (R/W)
 *
 * Description: Specifies the number of DFI clock cycles after an assertion or de-assertion of the DFI control signals that the control signals at the PHY-DRAM interface reflect the assertion or de-assertion.
 * If the DFI clock and the memory clock are not phase-aligned, this timing parameter should be rounded up to the next integer value.
 * Note that if using RDIMM, depending on the PHY, it may be necessary to increment this parameter by 1. This is to compensate for the extra cycle of latency through the RDIMM.
 * Value After Reset: 0x7
 * Exists: Always
 */
#define DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_MASK (0x1F000000UL)
#define DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_SHIFT (24U)
#define DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_SHIFT) & DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_MASK)
#define DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_MASK) >> DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_SHIFT)

/*
 * DFI_RDDATA_USE_SDR (R/W)
 *
 * Description: Defines whether dfi_rddata_en/dfi_rddata/dfi_rddata_valid is generated using HDR or SDR values, and selects whether the value in DFITMG0.dfi_t_rddata_en is in terms of SDR or HDR clock cycles:
 * 0 - in terms of HDR clock cycles
 * 1 - in terms of SDR clock cycles
 * Refer to PHY specification for correct value.
 * Value After Reset: 0x0
 * Exists: MEMC_FREQ_RATIO==2
 */
#define DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_MASK (0x800000UL)
#define DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_SHIFT (23U)
#define DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_SHIFT) & DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_MASK)
#define DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_MASK) >> DDRCTL_DFITMG0_DFI_RDDATA_USE_SDR_SHIFT)

/*
 * DFI_T_RDDATA_EN (R/W)
 *
 * Description: Time from the assertion of a read command on the DFI interface to the assertion of the dfi_rddata_en signal. Refer to PHY specification for correct value.
 * This corresponds to the DFI parameter trddata_en. Note that, depending on the PHY, if using RDIMM, it may be necessary to use the value (CL + 1) in the calculation of trddata_en. This is to compensate for the extra cycle of latency through the RDIMM.
 * Unit: Clocks
 * Value After Reset: 0x2
 * Exists: Always
 */
#define DDRCTL_DFITMG0_DFI_T_RDDATA_EN_MASK (0x3F0000UL)
#define DDRCTL_DFITMG0_DFI_T_RDDATA_EN_SHIFT (16U)
#define DDRCTL_DFITMG0_DFI_T_RDDATA_EN_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG0_DFI_T_RDDATA_EN_SHIFT) & DDRCTL_DFITMG0_DFI_T_RDDATA_EN_MASK)
#define DDRCTL_DFITMG0_DFI_T_RDDATA_EN_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG0_DFI_T_RDDATA_EN_MASK) >> DDRCTL_DFITMG0_DFI_T_RDDATA_EN_SHIFT)

/*
 * DFI_WRDATA_USE_SDR (R/W)
 *
 * Description: Defines whether dfi_wrdata_en/dfi_wrdata/dfi_wrdata_mask is generated using HDR or SDR values, and selects whether the values in DFITMG0.dfi_tphy_wrlat and DFITMG0.dfi_tphy_wrdata are in terms of SDR or HDR clock cycles:
 * 0 - in terms of HDR clock cycles
 * 1 - in terms of SDR clock cycles
 * Refer to PHY specification for correct value.
 * Value After Reset: 0x0
 * Exists: MEMC_FREQ_RATIO==2
 */
#define DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_MASK (0x8000U)
#define DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_SHIFT (15U)
#define DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_SHIFT) & DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_MASK)
#define DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_MASK) >> DDRCTL_DFITMG0_DFI_WRDATA_USE_SDR_SHIFT)

/*
 * DFI_TPHY_WRDATA (R/W)
 *
 * Description: Specifies the number of clock cycles between when dfi_wrdata_en is asserted to when the associated write data is driven on the dfi_wrdata signal. This corresponds to the DFI timing parameter tphy_wrdata. Refer to PHY specification for correct value. Note: the maximum supported value is 8.
 * Unit: Clocks
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFITMG0_DFI_TPHY_WRDATA_MASK (0x3F00U)
#define DDRCTL_DFITMG0_DFI_TPHY_WRDATA_SHIFT (8U)
#define DDRCTL_DFITMG0_DFI_TPHY_WRDATA_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG0_DFI_TPHY_WRDATA_SHIFT) & DDRCTL_DFITMG0_DFI_TPHY_WRDATA_MASK)
#define DDRCTL_DFITMG0_DFI_TPHY_WRDATA_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG0_DFI_TPHY_WRDATA_MASK) >> DDRCTL_DFITMG0_DFI_TPHY_WRDATA_SHIFT)

/*
 * DFI_TPHY_WRLAT (R/W)
 *
 * Description: Write latency.
 * Number of clocks from the write command to write data enable (dfi_wrdata_en). This corresponds to the DFI timing parameter tphy_wrlat. The minimum supported value is as follows:
 * 0 for configurations with MEMC_WL0 = 1
 * 1 for configurations with MEMC_WL0 = 0
 * Refer to PHY specification for correct value. Note that, depending on the PHY, if using RDIMM, it may be necessary to use the value (CL + 1) in the calculation of tphy_wrlat. This is to compensate for the extra cycle of latency through the RDIMM.
 * Value After Reset: 0x2
 * Exists: Always
 */
#define DDRCTL_DFITMG0_DFI_TPHY_WRLAT_MASK (0x3FU)
#define DDRCTL_DFITMG0_DFI_TPHY_WRLAT_SHIFT (0U)
#define DDRCTL_DFITMG0_DFI_TPHY_WRLAT_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG0_DFI_TPHY_WRLAT_SHIFT) & DDRCTL_DFITMG0_DFI_TPHY_WRLAT_MASK)
#define DDRCTL_DFITMG0_DFI_TPHY_WRLAT_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG0_DFI_TPHY_WRLAT_MASK) >> DDRCTL_DFITMG0_DFI_TPHY_WRLAT_SHIFT)
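
/*
 * Programming sketch (illustrative; all numbers are placeholders — the
 * correct values must come from the PHY databook, as noted above).
 * Assuming a hypothetical `DDRCTL_Type *ddrctl`:
 *
 *   ddrctl->DFITMG0 = DDRCTL_DFITMG0_DFI_T_CTRL_DELAY_SET(2U)
 *                   | DDRCTL_DFITMG0_DFI_T_RDDATA_EN_SET(5U)
 *                   | DDRCTL_DFITMG0_DFI_TPHY_WRDATA_SET(0U)
 *                   | DDRCTL_DFITMG0_DFI_TPHY_WRLAT_SET(4U);
 */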

/* Bitfield definition for register: DFITMG1 */
/*
 * DFI_T_WRDATA_DELAY (R/W)
 *
 * Description: Specifies the number of DFI clocks between when the dfi_wrdata_en signal is asserted and when the corresponding write data transfer is completed on the DRAM bus. This corresponds to the DFI timing parameter twrdata_delay. Refer to PHY specification for correct value.
 * For DFI 3.0 PHY, set to twrdata_delay, a new timing parameter introduced in DFI 3.0. For DFI 2.1 PHY, set to tphy_wrdata + (delay of DFI write data to the DRAM).
 * Value to be programmed is in terms of DFI clocks, not PHY clocks. In FREQ_RATIO=2, divide PHY's value by 2 and round up to next integer. If using DFITMG0.dfi_wrdata_use_sdr=1, add 1 to the value.
 * Unit: Clocks
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_MASK (0x1F0000UL)
#define DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_SHIFT (16U)
#define DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_SHIFT) & DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_MASK)
#define DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_MASK) >> DDRCTL_DFITMG1_DFI_T_WRDATA_DELAY_SHIFT)

/*
 * DFI_T_DRAM_CLK_DISABLE (R/W)
 *
 * Description: Specifies the number of DFI clock cycles from the assertion of the dfi_dram_clk_disable signal on the DFI until the clock to the DRAM memory devices, at the PHY-DRAM boundary, maintains a low value. If the DFI clock and the memory clock are not phase aligned, this timing parameter should be rounded up to the next integer value.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_MASK (0xF00U)
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_SHIFT (8U)
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_SHIFT) & DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_MASK)
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_MASK) >> DDRCTL_DFITMG1_DFI_T_DRAM_CLK_DISABLE_SHIFT)

/*
 * DFI_T_DRAM_CLK_ENABLE (R/W)
 *
 * Description: Specifies the number of DFI clock cycles from the de-assertion of the dfi_dram_clk_disable signal on the DFI until the first valid rising edge of the clock to the DRAM memory devices, at the PHY-DRAM boundary. If the DFI clock and the memory clock are not phase aligned, this timing parameter should be rounded up to the next integer value.
 * Value After Reset: 0x4
 * Exists: Always
 */
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_MASK (0xFU)
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_SHIFT (0U)
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_SHIFT) & DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_MASK)
#define DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_MASK) >> DDRCTL_DFITMG1_DFI_T_DRAM_CLK_ENABLE_SHIFT)

/* Bitfield definition for register: DFILPCFG0 */
/*
 * DFI_TLP_RESP (R/W)
 *
 * Description: Setting for the DFI's tlp_resp time.
 * The same value is used for Power Down, Self Refresh, Deep Power Down and Maximum Power Saving modes. From the DFI 2.1 specification onwards, a fixed value of 7 is recommended.
 * Value After Reset: 0x7
 * Exists: Always
 */
#define DDRCTL_DFILPCFG0_DFI_TLP_RESP_MASK (0xF000000UL)
#define DDRCTL_DFILPCFG0_DFI_TLP_RESP_SHIFT (24U)
#define DDRCTL_DFILPCFG0_DFI_TLP_RESP_SET(x) (((uint32_t)(x) << DDRCTL_DFILPCFG0_DFI_TLP_RESP_SHIFT) & DDRCTL_DFILPCFG0_DFI_TLP_RESP_MASK)
#define DDRCTL_DFILPCFG0_DFI_TLP_RESP_GET(x) (((uint32_t)(x) & DDRCTL_DFILPCFG0_DFI_TLP_RESP_MASK) >> DDRCTL_DFILPCFG0_DFI_TLP_RESP_SHIFT)

/*
 * DFI_LP_WAKEUP_SR (R/W)
 *
 * Description: Value to drive on dfi_lp_wakeup signal when Self Refresh mode is entered.
 * Determines the DFI's tlp_wakeup time:
 * 0x0 - 16 cycles
 * 0x1 - 32 cycles
 * 0x2 - 64 cycles
 * 0x3 - 128 cycles
 * 0x4 - 256 cycles
 * 0x5 - 512 cycles
 * 0x6 - 1024 cycles
 * 0x7 - 2048 cycles
 * 0x8 - 4096 cycles
 * 0x9 - 8192 cycles
 * 0xA - 16384 cycles
 * 0xB - 32768 cycles
 * 0xC - 65536 cycles
 * 0xD - 131072 cycles
 * 0xE - 262144 cycles
 * 0xF - Unlimited
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_MASK (0xF000U)
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_SHIFT (12U)
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_SET(x) (((uint32_t)(x) << DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_SHIFT) & DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_MASK)
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_GET(x) (((uint32_t)(x) & DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_MASK) >> DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_SHIFT)

/*
 * DFI_LP_EN_SR (R/W)
 *
 * Description: Enables DFI Low Power interface handshaking during Self Refresh Entry/Exit.
 * 0 - Disabled
 * 1 - Enabled
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFILPCFG0_DFI_LP_EN_SR_MASK (0x100U)
#define DDRCTL_DFILPCFG0_DFI_LP_EN_SR_SHIFT (8U)
#define DDRCTL_DFILPCFG0_DFI_LP_EN_SR_SET(x) (((uint32_t)(x) << DDRCTL_DFILPCFG0_DFI_LP_EN_SR_SHIFT) & DDRCTL_DFILPCFG0_DFI_LP_EN_SR_MASK)
#define DDRCTL_DFILPCFG0_DFI_LP_EN_SR_GET(x) (((uint32_t)(x) & DDRCTL_DFILPCFG0_DFI_LP_EN_SR_MASK) >> DDRCTL_DFILPCFG0_DFI_LP_EN_SR_SHIFT)

/*
 * DFI_LP_WAKEUP_PD (R/W)
 *
 * Description: Value to drive on dfi_lp_wakeup signal when Power Down mode is entered.
 * Determines the DFI's tlp_wakeup time:
 * 0x0 - 16 cycles
 * 0x1 - 32 cycles
 * 0x2 - 64 cycles
 * 0x3 - 128 cycles
 * 0x4 - 256 cycles
 * 0x5 - 512 cycles
 * 0x6 - 1024 cycles
 * 0x7 - 2048 cycles
 * 0x8 - 4096 cycles
 * 0x9 - 8192 cycles
 * 0xA - 16384 cycles
 * 0xB - 32768 cycles
 * 0xC - 65536 cycles
 * 0xD - 131072 cycles
 * 0xE - 262144 cycles
 * 0xF - Unlimited
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_MASK (0xF0U)
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_SHIFT (4U)
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_SET(x) (((uint32_t)(x) << DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_SHIFT) & DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_MASK)
#define DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_GET(x) (((uint32_t)(x) & DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_MASK) >> DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_SHIFT)

/*
 * DFI_LP_EN_PD (R/W)
 *
 * Description: Enables DFI Low Power interface handshaking during Power Down Entry/Exit.
 * 0 - Disabled
 * 1 - Enabled
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFILPCFG0_DFI_LP_EN_PD_MASK (0x1U)
#define DDRCTL_DFILPCFG0_DFI_LP_EN_PD_SHIFT (0U)
#define DDRCTL_DFILPCFG0_DFI_LP_EN_PD_SET(x) (((uint32_t)(x) << DDRCTL_DFILPCFG0_DFI_LP_EN_PD_SHIFT) & DDRCTL_DFILPCFG0_DFI_LP_EN_PD_MASK)
#define DDRCTL_DFILPCFG0_DFI_LP_EN_PD_GET(x) (((uint32_t)(x) & DDRCTL_DFILPCFG0_DFI_LP_EN_PD_MASK) >> DDRCTL_DFILPCFG0_DFI_LP_EN_PD_SHIFT)
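
/*
 * Usage sketch (illustrative): enabling the DFI low-power handshake for both
 * Power Down and Self Refresh with a 64-cycle wakeup time (encoding 0x2),
 * assuming a hypothetical `DDRCTL_Type *ddrctl`:
 *
 *   ddrctl->DFILPCFG0 = DDRCTL_DFILPCFG0_DFI_TLP_RESP_SET(7U)
 *                     | DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_SR_SET(0x2U)
 *                     | DDRCTL_DFILPCFG0_DFI_LP_EN_SR_SET(1U)
 *                     | DDRCTL_DFILPCFG0_DFI_LP_WAKEUP_PD_SET(0x2U)
 *                     | DDRCTL_DFILPCFG0_DFI_LP_EN_PD_SET(1U);
 */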

/* Bitfield definition for register: DFIUPD0 */
/*
 * DIS_AUTO_CTRLUPD (R/W)
 *
 * Description: When '1', disable the automatic dfi_ctrlupd_req generation by the uMCTL2. The core must issue the dfi_ctrlupd_req signal using register reg_ddrc_ctrlupd. This register field is changeable on the fly.
 * When '0', uMCTL2 issues dfi_ctrlupd_req periodically.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_MASK (0x80000000UL)
#define DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_SHIFT (31U)
#define DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_SHIFT) & DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_MASK)
#define DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_MASK) >> DDRCTL_DFIUPD0_DIS_AUTO_CTRLUPD_SHIFT)

/*
 * DFI_T_CTRLUP_MAX (R/W)
 *
 * Description: Specifies the maximum number of clock cycles that the dfi_ctrlupd_req signal can assert. Lowest value to assign to this variable is 0x40.
 * Unit: Clocks
 * Value After Reset: 0x40
 * Exists: Always
 */
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_MASK (0x3FF0000UL)
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_SHIFT (16U)
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_SHIFT) & DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_MASK)
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_MASK) >> DDRCTL_DFIUPD0_DFI_T_CTRLUP_MAX_SHIFT)

/*
 * DFI_T_CTRLUP_MIN (R/W)
 *
 * Description: Specifies the minimum number of clock cycles that the dfi_ctrlupd_req signal must be asserted. The uMCTL2 expects the PHY to respond within this time. If the PHY does not respond, the uMCTL2 will de-assert dfi_ctrlupd_req after dfi_t_ctrlup_min + 2 cycles. Lowest value to assign to this variable is 0x3.
 * Unit: Clocks
 * Value After Reset: 0x3
 * Exists: Always
 */
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_MASK (0x3FFU)
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_SHIFT (0U)
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_SHIFT) & DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_MASK)
#define DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_MASK) >> DDRCTL_DFIUPD0_DFI_T_CTRLUP_MIN_SHIFT)

/* Bitfield definition for register: DFIUPD1 */
/*
 * DFI_T_CTRLUPD_INTERVAL_MIN_X1024 (R/W)
 *
 * Description: This is the minimum amount of time between uMCTL2 initiated DFI update requests (which is executed whenever the uMCTL2 is idle). Set this number higher to reduce the frequency of update requests, which can have a small impact on the latency of the first read request when the uMCTL2 is idle.
 * Unit: 1024 clocks
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_MASK (0xFF0000UL)
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_SHIFT (16U)
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_SHIFT) & DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_MASK)
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_MASK) >> DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_SHIFT)

/*
 * DFI_T_CTRLUPD_INTERVAL_MAX_X1024 (R/W)
 *
 * Description: This is the maximum amount of time between uMCTL2 initiated DFI update requests. This timer resets with each update request; when the timer expires dfi_ctrlupd_req is sent and traffic is blocked until the dfi_ctrlupd_ack is received. The PHY can use this idle time to recalibrate the delay lines to the DLLs. The DFI controller update is also used to reset PHY FIFO pointers in case of data capture errors.
 * Updates are required to maintain calibration over PVT, but frequent updates may impact performance.
 * Note: Value programmed for DFIUPD1.dfi_t_ctrlupd_interval_max_x1024 must be greater than DFIUPD1.dfi_t_ctrlupd_interval_min_x1024.
 * Unit: 1024 clocks
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_MASK (0xFFU)
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_SHIFT (0U)
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_SHIFT) & DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_MASK)
#define DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_MASK) >> DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_SHIFT)
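
/*
 * Programming sketch (illustrative, placeholder values): the max interval must
 * be greater than the min interval, per the note above. Assuming a
 * hypothetical `DDRCTL_Type *ddrctl`:
 *
 *   ddrctl->DFIUPD1 = DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MIN_X1024_SET(0x10U)
 *                   | DDRCTL_DFIUPD1_DFI_T_CTRLUPD_INTERVAL_MAX_X1024_SET(0x80U);
 */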

/* Bitfield definition for register: DFIUPD2 */
/*
 * DFI_PHYUPD_EN (R/W)
 *
 * Description: Enables the support for acknowledging PHY-initiated updates:
 * 0 - Disabled
 * 1 - Enabled
 * Value After Reset: 0x1
 * Exists: Always
 */
#define DDRCTL_DFIUPD2_DFI_PHYUPD_EN_MASK (0x80000000UL)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_EN_SHIFT (31U)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_EN_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD2_DFI_PHYUPD_EN_SHIFT) & DDRCTL_DFIUPD2_DFI_PHYUPD_EN_MASK)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_EN_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD2_DFI_PHYUPD_EN_MASK) >> DDRCTL_DFIUPD2_DFI_PHYUPD_EN_SHIFT)

/*
 * DFI_PHYUPD_TYPE1 (R/W)
 *
 * Description: Specifies the maximum number of DFI clock cycles that the dfi_phyupd_req signal may remain asserted after the assertion of the dfi_phyupd_ack signal for dfi_phyupd_type = 2'b01. The dfi_phyupd_req signal may de-assert at any cycle after the assertion of the dfi_phyupd_ack signal.
 * Value After Reset: 0x10
 * Exists: Always
 */
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_MASK (0xFFF0000UL)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_SHIFT (16U)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_SHIFT) & DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_MASK)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_MASK) >> DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE1_SHIFT)

/*
 * DFI_PHYUPD_TYPE0 (R/W)
 *
 * Description: Specifies the maximum number of DFI clock cycles that the dfi_phyupd_req signal may remain asserted after the assertion of the dfi_phyupd_ack signal for dfi_phyupd_type = 2'b00. The dfi_phyupd_req signal may de-assert at any cycle after the assertion of the dfi_phyupd_ack signal.
 * Value After Reset: 0x10
 * Exists: Always
 */
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_MASK (0xFFFU)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_SHIFT (0U)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_SHIFT) & DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_MASK)
#define DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_MASK) >> DDRCTL_DFIUPD2_DFI_PHYUPD_TYPE0_SHIFT)

/* Bitfield definition for register: DFIUPD3 */
/*
 * DFI_PHYUPD_TYPE3 (R/W)
 *
 * Description: Specifies the maximum number of DFI clock cycles that the dfi_phyupd_req signal may remain asserted after the assertion of the dfi_phyupd_ack signal for dfi_phyupd_type = 2'b11. The dfi_phyupd_req signal may de-assert at any cycle after the assertion of the dfi_phyupd_ack signal.
 * Value After Reset: 0x10
 * Exists: Always
 */
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_MASK (0xFFF0000UL)
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_SHIFT (16U)
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_SHIFT) & DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_MASK)
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_MASK) >> DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE3_SHIFT)

/*
 * DFI_PHYUPD_TYPE2 (R/W)
 *
 * Description: Specifies the maximum number of DFI clock cycles that the dfi_phyupd_req signal may remain asserted after the assertion of the dfi_phyupd_ack signal for dfi_phyupd_type = 2'b10. The dfi_phyupd_req signal may de-assert at any cycle after the assertion of the dfi_phyupd_ack signal.
 * Value After Reset: 0x10
 * Exists: Always
 */
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_MASK (0xFFFU)
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_SHIFT (0U)
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_SET(x) (((uint32_t)(x) << DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_SHIFT) & DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_MASK)
#define DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_GET(x) (((uint32_t)(x) & DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_MASK) >> DDRCTL_DFIUPD3_DFI_PHYUPD_TYPE2_SHIFT)

/* Bitfield definition for register: DFIMISC */
/*
 * DFI_INIT_COMPLETE_EN (R/W)
 *
 * Description: PHY initialization complete enable signal. When asserted, the dfi_init_complete signal can be used to trigger SDRAM initialization.
 * Value After Reset: 0x1
 * Exists: Always
 */
#define DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_MASK (0x1U)
#define DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_SHIFT (0U)
#define DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_SET(x) (((uint32_t)(x) << DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_SHIFT) & DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_MASK)
#define DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_GET(x) (((uint32_t)(x) & DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_MASK) >> DDRCTL_DFIMISC_DFI_INIT_COMPLETE_EN_SHIFT)

/* Bitfield definition for register: DFITMG2 */
/*
 * DFI_TPHY_RDCSLAT (R/W)
 *
 * Description: Number of clocks between when a read command is sent on the DFI control interface and when the associated dfi_rddata_cs_n signal is asserted. This corresponds to the DFI timing parameter tphy_rdcslat. Refer to PHY specification for correct value.
 * Value After Reset: 0x2
 * Exists: Always
 */
#define DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_MASK (0x3F00U)
#define DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_SHIFT (8U)
#define DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_SHIFT) & DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_MASK)
#define DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_MASK) >> DDRCTL_DFITMG2_DFI_TPHY_RDCSLAT_SHIFT)

/*
 * DFI_TPHY_WRCSLAT (R/W)
 *
 * Description: Number of clocks between when a write command is sent on the DFI control interface and when the associated dfi_wrdata_cs_n signal is asserted. This corresponds to the DFI timing parameter tphy_wrcslat. The minimum supported value is as follows:
 * 0 for configurations with MEMC_WL0 = 1
 * 1 for configurations with MEMC_WL0 = 0
 * Refer to PHY specification for correct value.
 * Value After Reset: 0x2
 * Exists: Always
 */
#define DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_MASK (0x3FU)
#define DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_SHIFT (0U)
#define DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_SET(x) (((uint32_t)(x) << DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_SHIFT) & DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_MASK)
#define DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_GET(x) (((uint32_t)(x) & DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_MASK) >> DDRCTL_DFITMG2_DFI_TPHY_WRCSLAT_SHIFT)

/* Bitfield definition for register: ADDRMAP0 */
/*
 * ADDRMAP_CS_BIT0 (R/W)
 *
 * Description: Selects the HIF address bit used as rank address bit 0.
 * Valid Range: 0 to 27, and 31
 * Internal Base: 6
 * The selected HIF address bit is determined by adding the internal base to the value of this field.
 * If set to 31, rank address bit 0 is set to 0.
 * Value After Reset: 0x0
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_MASK (0x1FU)
#define DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_SHIFT (0U)
#define DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_SHIFT) & DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_MASK)
#define DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_MASK) >> DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_SHIFT)
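
/*
 * Address-map sketch (illustrative): each ADDRMAP field selects an HIF address
 * bit as (internal base + field value). For example, to use HIF address bit 10
 * as rank address bit 0 (internal base 6), program the field to 10 - 6 = 4.
 * Assuming a hypothetical `DDRCTL_Type *ddrctl`:
 *
 *   ddrctl->ADDRMAP0 = DDRCTL_ADDRMAP0_ADDRMAP_CS_BIT0_SET(10U - 6U);
 */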

/* Bitfield definition for register: ADDRMAP1 */
/*
 * ADDRMAP_BANK_B2 (R/W)
 *
 * Description: Selects the HIF address bit used as bank address bit 2.
 * Valid Range: 0 to 29 and 31
 * Internal Base: 4
 * The selected HIF address bit is determined by adding the internal base to the value of this field.
 * If set to 31, bank address bit 2 is set to 0.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_MASK (0x1F0000UL)
#define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_SHIFT (16U)
#define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_SHIFT) & DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_MASK)
#define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_MASK) >> DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_SHIFT)
1832 
1833 /*
1834  * ADDRMAP_BANK_B1 (R/W)
1835  *
 * Description: Selects the HIF address bit used as bank address bit 1.
 * Valid Range: 0 to 30
 * Internal Base: 3
 * The selected HIF address bit is determined by adding the internal base to the value of this field.
1840  * Value After Reset: 0x0
1841  * Exists: Always
1842  */
1843 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_MASK (0x1F00U)
1844 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_SHIFT (8U)
1845 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_SHIFT) & DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_MASK)
1846 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_MASK) >> DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_SHIFT)
1847 
1848 /*
1849  * ADDRMAP_BANK_B0 (R/W)
1850  *
 * Description: Selects the HIF address bit used as bank address bit 0.
 * Valid Range: 0 to 30
 * Internal Base: 2
 * The selected HIF address bit is determined by adding the internal base to the value of this field.
1855  * Value After Reset: 0x0
1856  * Exists: Always
1857  */
1858 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_MASK (0x1FU)
1859 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_SHIFT (0U)
1860 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_SHIFT) & DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_MASK)
1861 #define DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_MASK) >> DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_SHIFT)
1862 
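/*
 * Usage sketch (illustrative): because each bank field carries its own
 * internal base (2, 3 and 4), programming the same value into all three
 * keeps the bank bits contiguous - 0x6 below maps bank bits 0..2 to HIF
 * bits 8..10. `base` is a hypothetical register-block pointer.
 *
 *     base->ADDRMAP1 = DDRCTL_ADDRMAP1_ADDRMAP_BANK_B0_SET(6U)
 *                    | DDRCTL_ADDRMAP1_ADDRMAP_BANK_B1_SET(6U)
 *                    | DDRCTL_ADDRMAP1_ADDRMAP_BANK_B2_SET(6U);
 */
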
1863 /* Bitfield definition for register: ADDRMAP2 */
1864 /*
1865  * ADDRMAP_COL_B5 (R/W)
1866  *
1867  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 5 (if MEMC_BURST_LENGTH = 4) or 6 (if
1868  * MEMC_BURST_LENGTH = 8).
1869  * Half bus width mode: Selects the HIF address bit used as column address bit 6 (if MEMC_BURST_LENGTH = 4) or 7 (if MEMC_BURST_LENGTH = 8).
1870  * Quarter bus width mode: Selects the HIF address bit used as column address bit 7 (if MEMC_BURST_LENGTH = 4) or 8 (if MEMC_BURST_LENGTH = 8).
1871  * Valid Range: 0 to 7, and 15
1872  * Internal Base: 5
1873  * The selected HIF address bit is determined by adding the internal base to the value of this field. If set to 15, this column address bit is set to 0.
1874  * Value After Reset: 0x0
1875  * Exists: Always
1876  */
1877 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_MASK (0xF000000UL)
1878 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_SHIFT (24U)
1879 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_SHIFT) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_MASK)
1880 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_MASK) >> DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_SHIFT)
1881 
1882 /*
1883  * ADDRMAP_COL_B4 (R/W)
1884  *
1885  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 4 (if MEMC_BURST_LENGTH = 4) or 5 (if
1886  * MEMC_BURST_LENGTH = 8).
1887  * Half bus width mode: Selects the HIF address bit used as column address bit 5 (if MEMC_BURST_LENGTH = 4) or 6 (if MEMC_BURST_LENGTH = 8).
1888  * Quarter bus width mode: Selects the HIF address bit used as column address bit 6 (if MEMC_BURST_LENGTH = 4) or 7 (if MEMC_BURST_LENGTH = 8).
1889  * Valid Range: 0 to 7, and 15
1890  * Internal Base: 4
1891  * The selected HIF address bit is determined by adding the internal base to the value of this field. If set to 15, this column address bit is set to 0.
1892  * Value After Reset: 0x0
1893  * Exists: Always
1894  */
1895 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_MASK (0xF0000UL)
1896 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_SHIFT (16U)
1897 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_SHIFT) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_MASK)
1898 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_MASK) >> DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_SHIFT)
1899 
1900 /*
1901  * ADDRMAP_COL_B3 (R/W)
1902  *
1903  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 3 (if MEMC_BURST_LENGTH = 4) or 4 (if
1904  * MEMC_BURST_LENGTH = 8).
1905  * Half bus width mode: Selects the HIF address bit used as column address bit 4 (if MEMC_BURST_LENGTH = 4) or 5 (if MEMC_BURST_LENGTH = 8).
1906  * Quarter bus width mode: Selects the HIF address bit used as column address bit 5 (if MEMC_BURST_LENGTH = 4) or 6 (if MEMC_BURST_LENGTH = 8).
1907  * Valid Range: 0 to 7
1908  * Internal Base: 3
1909  * The selected HIF address bit is determined by adding the internal base to the value of this field.
1910  * Value After Reset: 0x0
1911  * Exists: Always
1912  */
1913 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_MASK (0xF00U)
1914 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_SHIFT (8U)
1915 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_SHIFT) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_MASK)
1916 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_MASK) >> DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_SHIFT)
1917 
1918 /*
1919  * ADDRMAP_COL_B2 (R/W)
1920  *
1921  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 2 (if MEMC_BURST_LENGTH = 4) or 3 (if
1922  * MEMC_BURST_LENGTH = 8).
1923  * Half bus width mode: Selects the HIF address bit used as column address bit 3 (if MEMC_BURST_LENGTH = 4) or 4 (if MEMC_BURST_LENGTH = 8).
1924  * Quarter bus width mode: Selects the HIF address bit used as column address bit 4 (if MEMC_BURST_LENGTH = 4) or 5 (if MEMC_BURST_LENGTH = 8).
1925  * Valid Range: 0 to 7
1926  * Internal Base: 2
1927  * The selected HIF address bit is determined by adding the internal base to the value of this field.
1928  * Value After Reset: 0x0
1929  * Exists: Always
1930  */
1931 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_MASK (0xFU)
1932 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_SHIFT (0U)
1933 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_SHIFT) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_MASK)
1934 #define DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_MASK) >> DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_SHIFT)
1935 
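/*
 * Worked example (illustrative): writing 0 to every ADDRMAP2 field gives
 * the identity column mapping - in full bus width, MEMC_BURST_LENGTH = 4
 * mode each column bit comes straight from HIF[internal base]:
 * COL_B2 = HIF[2], COL_B3 = HIF[3], COL_B4 = HIF[4], COL_B5 = HIF[5].
 * `base` is a hypothetical register-block pointer.
 *
 *     base->ADDRMAP2 = DDRCTL_ADDRMAP2_ADDRMAP_COL_B2_SET(0U)
 *                    | DDRCTL_ADDRMAP2_ADDRMAP_COL_B3_SET(0U)
 *                    | DDRCTL_ADDRMAP2_ADDRMAP_COL_B4_SET(0U)
 *                    | DDRCTL_ADDRMAP2_ADDRMAP_COL_B5_SET(0U);
 */
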
1936 /* Bitfield definition for register: ADDRMAP3 */
1937 /*
1938  * ADDRMAP_COL_B9 (R/W)
1939  *
 * Description: Full bus width mode: Selects the HIF address bit used as column address bit 9 (if MEMC_BURST_LENGTH = 4) or 11 (10 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 8).
 * Half bus width mode: Selects the HIF address bit used as column address bit 11 (10 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 4) or 13 (11 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 8).
 * Quarter bus width mode: Selects the HIF address bit used as column address bit 13 (11 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 4) or UNUSED (if MEMC_BURST_LENGTH = 8).
 * Valid Range: 0 to 7, and 15
 * Internal Base: 9
 * The selected HIF address bit is determined by adding the internal base to the value of this field.
 * If set to 15, this column address bit is set to 0.
 * Note: Per JEDEC DDR2/3/mDDR specification, column address bit 10 is reserved for indicating auto-precharge, and hence no source address bit can be mapped to column address bit 10.
 * In LPDDR2/LPDDR3, there is a dedicated bit for auto-precharge in the CA bus and hence column bit 10 is used.
 * Value After Reset: 0x0
1951  * Exists: Always
1952  */
1953 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_MASK (0xF000000UL)
1954 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_SHIFT (24U)
1955 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_SHIFT) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_MASK)
1956 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_MASK) >> DDRCTL_ADDRMAP3_ADDRMAP_COL_B9_SHIFT)
1957 
1958 /*
1959  * ADDRMAP_COL_B8 (R/W)
1960  *
1961  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 8 (if MEMC_BURST_LENGTH = 4) or 9 (if
1962  * MEMC_BURST_LENGTH = 8).
1963  * Half bus width mode: Selects the HIF address bit used as column address bit 9 (if MEMC_BURST_LENGTH = 4) or 11 (10 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 8).
1964  * Quarter bus width mode: Selects the HIF address bit used as column address bit 11 (10 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 4) or 13 (11 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 8).
1965  * Valid Range: 0 to 7, and 15
1966  * Internal Base: 8
1967  * The selected HIF address bit is determined by adding the internal base to the value of this field.
1968  * If set to 15, this column address bit is set to 0.
1969  * Note: Per JEDEC DDR2/3/mDDR specification, column address bit 10 is reserved for indicating auto-precharge, and hence no source address bit can be mapped to column address bit 10.
 * In LPDDR2/LPDDR3, there is a dedicated bit for auto-precharge in the CA bus and hence column bit 10 is used.
 * Value After Reset: 0x0
1971  * Exists: Always
1972  */
1973 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_MASK (0xF0000UL)
1974 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_SHIFT (16U)
1975 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_SHIFT) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_MASK)
1976 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_MASK) >> DDRCTL_ADDRMAP3_ADDRMAP_COL_B8_SHIFT)
1977 
1978 /*
1979  * ADDRMAP_COL_B7 (R/W)
1980  *
1981  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 7 (if MEMC_BURST_LENGTH = 4) or 8 (if
1982  * MEMC_BURST_LENGTH = 8).
1983  * Half bus width mode: Selects the HIF address bit used as column address bit 8 (if MEMC_BURST_LENGTH = 4) or 9 (if MEMC_BURST_LENGTH = 8).
1984  * Quarter bus width mode: Selects the HIF address bit used as column address bit 9 (if MEMC_BURST_LENGTH = 4) or 11 (10 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 8).
1985  * Valid Range: 0 to 7, and 15
1986  * Internal Base: 7
1987  * The selected HIF address bit is determined by adding the internal base to the value of this field. If set to 15, this column address bit is set to 0.
1988  * Note: Per JEDEC DDR2/3/mDDR specification, column address bit 10 is reserved for indicating auto-precharge and hence no source address bit can be mapped to column address bit 10.
 * In LPDDR2/LPDDR3, there is a dedicated bit for auto-precharge in the CA bus and hence column bit 10 is used.
 * Value After Reset: 0x0
1990  * Exists: Always
1991  */
1992 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_MASK (0xF00U)
1993 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_SHIFT (8U)
1994 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_SHIFT) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_MASK)
1995 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_MASK) >> DDRCTL_ADDRMAP3_ADDRMAP_COL_B7_SHIFT)
1996 
1997 /*
1998  * ADDRMAP_COL_B6 (R/W)
1999  *
2000  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 6 (if MEMC_BURST_LENGTH = 4) or 7 (if
2001  * MEMC_BURST_LENGTH = 8).
2002  * Half bus width mode: Selects the HIF address bit used as column address bit 7 (if MEMC_BURST_LENGTH = 4) or 8 (if MEMC_BURST_LENGTH = 8).
2003  * Quarter bus width mode: Selects the HIF address bit used as column address bit 8 (if MEMC_BURST_LENGTH = 4) or 9 (if MEMC_BURST_LENGTH = 8).
2004  * Valid Range: 0 to 7, and 15
2005  * Internal Base: 6
2006  * The selected HIF address bit is determined by adding the internal base to the value of this field. If set to 15, this column address bit is set to 0.
2007  * Value After Reset: 0x0
2008  * Exists: Always
2009  */
2010 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_MASK (0xFU)
2011 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_SHIFT (0U)
2012 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_SHIFT) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_MASK)
2013 #define DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_MASK) >> DDRCTL_ADDRMAP3_ADDRMAP_COL_B6_SHIFT)
2014 
2015 /* Bitfield definition for register: ADDRMAP4 */
2016 /*
2017  * ADDRMAP_COL_B11 (R/W)
2018  *
2019  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 13 (11 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 4) or UNUSED (if MEMC_BURST_LENGTH = 8).
 * Half bus width mode: Unused. To make it unused, this must be tied to 4'hF.
 * Quarter bus width mode: Unused. To make it unused, this must be tied to 4'hF.
2022  * Valid Range: 0 to 7, and 15
2023  * Internal Base: 11
2024  * The selected HIF address bit is determined by adding the internal base to the value of this field.
2025  * If set to 15, this column address bit is set to 0.
2026  * Note: Per JEDEC DDR2/3/mDDR specification, column address bit 10 is reserved for indicating auto-precharge, and hence no source address bit can be mapped to column address bit 10.
 * In LPDDR2/LPDDR3, there is a dedicated bit for auto-precharge in the CA bus and hence column bit 10 is used.
 * Value After Reset: 0x0
2028  * Exists: Always
2029  */
2030 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_MASK (0xF00U)
2031 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_SHIFT (8U)
2032 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_SHIFT) & DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_MASK)
2033 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_MASK) >> DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_SHIFT)
2034 
2035 /*
2036  * ADDRMAP_COL_B10 (R/W)
2037  *
2038  * Description: Full bus width mode: Selects the HIF address bit used as column address bit 11 (10 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 4) or 13 (11 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 8).
2039  * Half bus width mode: Selects the HIF address bit used as column address bit 13 (11 in LPDDR2/LPDDR3 mode) (if MEMC_BURST_LENGTH = 4) or UNUSED (if MEMC_BURST_LENGTH = 8)
2040  * Quarter bus width mode: UNUSED. To make it unused, this must be tied to 4'hF.
2041  * Valid Range: 0 to 7, and 15
2042  * Internal Base: 10
2043  * The selected HIF address bit is determined by adding the internal base to the value of this field.
2044  * If set to 15, this column address bit is set to 0.
2045  * Note: Per JEDEC DDR2/3/mDDR specification, column address bit 10 is reserved for indicating auto-precharge, and hence no source address bit can be mapped to column address bit 10.
 * In LPDDR2/LPDDR3, there is a dedicated bit for auto-precharge in the CA bus and hence column bit 10 is used.
 * Value After Reset: 0x0
2047  * Exists: Always
2048  */
2049 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_MASK (0xFU)
2050 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_SHIFT (0U)
2051 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_SHIFT) & DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_MASK)
2052 #define DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_MASK) >> DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_SHIFT)
2053 
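/*
 * Usage sketch (illustrative): when column bits 10 and 11 are not used
 * (smaller devices, or the narrower bus-width modes noted above), both
 * fields are tied to 15 (4'hF) so the corresponding column address bits
 * are forced to 0. `base` is a hypothetical register-block pointer.
 *
 *     base->ADDRMAP4 = DDRCTL_ADDRMAP4_ADDRMAP_COL_B10_SET(15U)
 *                    | DDRCTL_ADDRMAP4_ADDRMAP_COL_B11_SET(15U);
 */
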
2054 /* Bitfield definition for register: ADDRMAP5 */
2055 /*
2056  * ADDRMAP_ROW_B11 (R/W)
2057  *
2058  * Description: Selects the HIF address bit used as row address bit 11.
2059  * Valid Range: 0 to 11, and 15
2060  * Internal Base: 17
2061  * The selected HIF address bit is determined by adding the internal base to the value of this field.
2062  * If set to 15, row address bit 11 is set to 0.
2063  * Value After Reset: 0x0
2064  * Exists: Always
2065  */
2066 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_MASK (0xF000000UL)
2067 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_SHIFT (24U)
2068 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_SHIFT) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_MASK)
2069 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_MASK) >> DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_SHIFT)
2070 
2071 /*
2072  * ADDRMAP_ROW_B2_10 (R/W)
2073  *
2074  * Description: Selects the HIF address bits used as row address bits 2 to 10.
2075  * Valid Range: 0 to 11
 * Internal Base: 8 (for row address bit 2), 9 (for row address bit 3), 10 (for row address bit 4), etc., increasing to 16 (for row address bit 10)
2078  * The selected HIF address bit for each of the row address bits is determined by adding the internal base to the value of this field.
2079  * Value After Reset: 0x0
2080  * Exists: Always
2081  */
2082 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_MASK (0xF0000UL)
2083 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_SHIFT (16U)
2084 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_SHIFT) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_MASK)
2085 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_MASK) >> DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_SHIFT)
2086 
2087 /*
2088  * ADDRMAP_ROW_B1 (R/W)
2089  *
 * Description: Selects the HIF address bit used as row address bit 1.
 * Valid Range: 0 to 11
 * Internal Base: 7
 * The selected HIF address bit is determined by adding the internal base to the value of this field.
2094  * Value After Reset: 0x0
2095  * Exists: Always
2096  */
2097 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_MASK (0xF00U)
2098 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_SHIFT (8U)
2099 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_SHIFT) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_MASK)
2100 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_MASK) >> DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_SHIFT)
2101 
2102 /*
2103  * ADDRMAP_ROW_B0 (R/W)
2104  *
 * Description: Selects the HIF address bit used as row address bit 0.
 * Valid Range: 0 to 11
 * Internal Base: 6
 * The selected HIF address bit is determined by adding the internal base to the value of this field.
2109  * Value After Reset: 0x0
2110  * Exists: Always
2111  */
2112 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_MASK (0xFU)
2113 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_SHIFT (0U)
2114 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_SHIFT) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_MASK)
2115 #define DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_MASK) >> DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_SHIFT)
2116 
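/*
 * Worked example (illustrative): all-zero fields give the identity row
 * mapping - row bit 0 = HIF[6], row bit 1 = HIF[7], row bits 2..10 =
 * HIF[8..16], row bit 11 = HIF[17]; program ROW_B11 to 15 instead when
 * the SDRAM has fewer row bits. `base` is a hypothetical pointer.
 *
 *     base->ADDRMAP5 = DDRCTL_ADDRMAP5_ADDRMAP_ROW_B0_SET(0U)
 *                    | DDRCTL_ADDRMAP5_ADDRMAP_ROW_B1_SET(0U)
 *                    | DDRCTL_ADDRMAP5_ADDRMAP_ROW_B2_10_SET(0U)
 *                    | DDRCTL_ADDRMAP5_ADDRMAP_ROW_B11_SET(0U);
 */
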
2117 /* Bitfield definition for register: ADDRMAP6 */
2118 /*
2119  * ADDRMAP_ROW_B15 (R/W)
2120  *
2121  * Description: Selects the HIF address bit used as row address bit 15.
2122  * Valid Range: 0 to 11, and 15
2123  * Internal Base: 21
2124  * The selected HIF address bit is determined by adding the internal base to the value of this field.
2125  * If set to 15, row address bit 15 is set to 0.
2126  * Value After Reset: 0x0
2127  * Exists: Always
2128  */
2129 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_MASK (0xF000000UL)
2130 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_SHIFT (24U)
2131 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_SHIFT) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_MASK)
2132 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_MASK) >> DDRCTL_ADDRMAP6_ADDRMAP_ROW_B15_SHIFT)
2133 
2134 /*
2135  * ADDRMAP_ROW_B14 (R/W)
2136  *
2137  * Description: Selects the HIF address bit used as row address bit 14.
2138  * Valid Range: 0 to 11, and 15
2139  * Internal Base: 20
2140  * The selected HIF address bit is determined by adding the internal base to the value of this field.
2141  * If set to 15, row address bit 14 is set to 0.
2142  * Value After Reset: 0x0
2143  * Exists: Always
2144  */
2145 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_MASK (0xF0000UL)
2146 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_SHIFT (16U)
2147 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_SHIFT) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_MASK)
2148 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_MASK) >> DDRCTL_ADDRMAP6_ADDRMAP_ROW_B14_SHIFT)
2149 
2150 /*
2151  * ADDRMAP_ROW_B13 (R/W)
2152  *
2153  * Description: Selects the HIF address bit used as row address bit 13.
2154  * Valid Range: 0 to 11, and 15
2155  * Internal Base: 19
2156  * The selected HIF address bit is determined by adding the internal base to the value of this field.
2157  * If set to 15, row address bit 13 is set to 0.
2158  * Value After Reset: 0x0
2159  * Exists: Always
2160  */
2161 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_MASK (0xF00U)
2162 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_SHIFT (8U)
2163 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_SHIFT) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_MASK)
2164 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_MASK) >> DDRCTL_ADDRMAP6_ADDRMAP_ROW_B13_SHIFT)
2165 
2166 /*
2167  * ADDRMAP_ROW_B12 (R/W)
2168  *
2169  * Description: Selects the HIF address bit used as row address bit 12.
2170  * Valid Range: 0 to 11, and 15
2171  * Internal Base: 18
2172  * The selected HIF address bit is determined by adding the internal base to the value of this field.
2173  * If set to 15, row address bit 12 is set to 0.
2174  * Value After Reset: 0x0
2175  * Exists: Always
2176  */
2177 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_MASK (0xFU)
2178 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_SHIFT (0U)
2179 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_SET(x) (((uint32_t)(x) << DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_SHIFT) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_MASK)
2180 #define DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_GET(x) (((uint32_t)(x) & DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_MASK) >> DDRCTL_ADDRMAP6_ADDRMAP_ROW_B12_SHIFT)
2181 
2182 /* Bitfield definition for register: ODTCFG */
2183 /*
2184  * WR_ODT_HOLD (R/W)
2185  *
 * Description: Cycles to hold ODT for a write command. The minimum supported value is 2.
 * Recommended values: DDR2/DDR3/DDR4
2187  * BL8 - 0x6
2188  * BL4 - 0x4
2189  * LPDDR3 - RU(tDQSSmax/tCK) + 4
2190  * Value After Reset: 0x4
2191  * Exists: Always
2192  */
2193 #define DDRCTL_ODTCFG_WR_ODT_HOLD_MASK (0xF000000UL)
2194 #define DDRCTL_ODTCFG_WR_ODT_HOLD_SHIFT (24U)
2195 #define DDRCTL_ODTCFG_WR_ODT_HOLD_SET(x) (((uint32_t)(x) << DDRCTL_ODTCFG_WR_ODT_HOLD_SHIFT) & DDRCTL_ODTCFG_WR_ODT_HOLD_MASK)
2196 #define DDRCTL_ODTCFG_WR_ODT_HOLD_GET(x) (((uint32_t)(x) & DDRCTL_ODTCFG_WR_ODT_HOLD_MASK) >> DDRCTL_ODTCFG_WR_ODT_HOLD_SHIFT)
2197 
2198 /*
2199  * WR_ODT_DELAY (R/W)
2200  *
2201  * Description: The delay, in clock cycles, from issuing a write command to setting ODT values associated with that command. ODT setting must remain constant for the entire time that DQS is driven by the uMCTL2. ODT is used only in DDR2, DDR3, DDR4 and LPDDR3 designs.
2202  * Recommended values:
2203  * DDR2
2204  * If (CWL + AL < 3), then 0.
 * If (CWL + AL >= 3), then (CWL + AL - 3)
 * DDR3 - 0
2206  * DDR4 - DFITMG1.dfi_t_cmd_lat (to adjust for CAL mode)
2207  * LPDDR3 - (CWL - RU(tODToffmax/tCK))
2208  * Value After Reset: 0x0
2209  * Exists: Always
2210  */
2211 #define DDRCTL_ODTCFG_WR_ODT_DELAY_MASK (0x1F0000UL)
2212 #define DDRCTL_ODTCFG_WR_ODT_DELAY_SHIFT (16U)
2213 #define DDRCTL_ODTCFG_WR_ODT_DELAY_SET(x) (((uint32_t)(x) << DDRCTL_ODTCFG_WR_ODT_DELAY_SHIFT) & DDRCTL_ODTCFG_WR_ODT_DELAY_MASK)
2214 #define DDRCTL_ODTCFG_WR_ODT_DELAY_GET(x) (((uint32_t)(x) & DDRCTL_ODTCFG_WR_ODT_DELAY_MASK) >> DDRCTL_ODTCFG_WR_ODT_DELAY_SHIFT)
2215 
2216 /*
2217  * RD_ODT_HOLD (R/W)
2218  *
 * Description: Cycles to hold ODT for a read command. The minimum supported value is 2.
 * Recommended values: DDR2/DDR3
2220  * BL8 - 0x6
2221  * BL4 - 0x4
 * DDR4 - 0x6, but needs to be reduced to 0x5 in CAL mode to avoid overlap of read and write ODT
 * LPDDR3 - RU(tDQSCKmax/tCK) + 4 + 1
2223  * Value After Reset: 0x4
2224  * Exists: Always
2225  */
2226 #define DDRCTL_ODTCFG_RD_ODT_HOLD_MASK (0xF00U)
2227 #define DDRCTL_ODTCFG_RD_ODT_HOLD_SHIFT (8U)
2228 #define DDRCTL_ODTCFG_RD_ODT_HOLD_SET(x) (((uint32_t)(x) << DDRCTL_ODTCFG_RD_ODT_HOLD_SHIFT) & DDRCTL_ODTCFG_RD_ODT_HOLD_MASK)
2229 #define DDRCTL_ODTCFG_RD_ODT_HOLD_GET(x) (((uint32_t)(x) & DDRCTL_ODTCFG_RD_ODT_HOLD_MASK) >> DDRCTL_ODTCFG_RD_ODT_HOLD_SHIFT)
2230 
2231 /*
2232  * RD_ODT_DELAY (R/W)
2233  *
2234  * Description: The delay, in clock cycles, from issuing a read command to setting ODT values associated with that command. ODT setting must remain constant for the entire time that DQS is driven by the uMCTL2. ODT is used only in DDR2, DDR3, DDR4 and LPDDR3 designs.
2235  * Recommended values:
2236  * DDR2
2237  * If (CL + AL < 4), then 0.
 * If (CL + AL >= 4), then (CL + AL - 4)
 * DDR3 - (CL - CWL)
 * DDR4
 * If CAL mode is enabled, CL - CWL + DFITMG1.dfi_t_cmd_lat
 * If CAL mode is not enabled, CL - CWL - 1, or 0 if CL - CWL < 1
 * LPDDR3 (MEMC_FREQ_RATIO=2) - (CL - RU(tODToffmax/tCK))
 * Value After Reset: 0x0
 * Exists: Always
2244  */
2245 #define DDRCTL_ODTCFG_RD_ODT_DELAY_MASK (0x7CU)
2246 #define DDRCTL_ODTCFG_RD_ODT_DELAY_SHIFT (2U)
2247 #define DDRCTL_ODTCFG_RD_ODT_DELAY_SET(x) (((uint32_t)(x) << DDRCTL_ODTCFG_RD_ODT_DELAY_SHIFT) & DDRCTL_ODTCFG_RD_ODT_DELAY_MASK)
2248 #define DDRCTL_ODTCFG_RD_ODT_DELAY_GET(x) (((uint32_t)(x) & DDRCTL_ODTCFG_RD_ODT_DELAY_MASK) >> DDRCTL_ODTCFG_RD_ODT_DELAY_SHIFT)
2249 
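/*
 * Usage sketch (illustrative): applying the DDR3 recommendations above -
 * rd_odt_delay = (CL - CWL), wr_odt_delay = 0, and the BL8 hold counts.
 * The CL/CWL values are placeholders, not board-validated settings;
 * `base` is a hypothetical register-block pointer.
 *
 *     uint32_t cl = 6U, cwl = 5U;
 *     base->ODTCFG = DDRCTL_ODTCFG_RD_ODT_DELAY_SET(cl - cwl)
 *                  | DDRCTL_ODTCFG_RD_ODT_HOLD_SET(6U)      // BL8
 *                  | DDRCTL_ODTCFG_WR_ODT_DELAY_SET(0U)     // DDR3 - 0
 *                  | DDRCTL_ODTCFG_WR_ODT_HOLD_SET(6U);     // BL8
 */
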
2250 /* Bitfield definition for register: ODTMAP */
2251 /*
2252  * RANK1_RD_ODT (R/W)
2253  *
2254  * Description: Indicates which remote ODTs must be turned on during a read from rank 1.
2255  * Each rank has a remote ODT (in the SDRAM) which can be turned on by setting the appropriate bit here.
 * Rank 0 is controlled by the LSB; rank 1 is controlled by the bit next to the LSB, and so on.
 * For each rank, set its bit to 1 to enable its ODT.
 * Present only in configurations that have 2 or more ranks.
 * Value After Reset: "(MEMC_NUM_RANKS>1) ? 0x2 : 0x0"
 * Exists: MEMC_NUM_RANKS>1
2259  */
2260 #define DDRCTL_ODTMAP_RANK1_RD_ODT_MASK (0xF000U)
2261 #define DDRCTL_ODTMAP_RANK1_RD_ODT_SHIFT (12U)
2262 #define DDRCTL_ODTMAP_RANK1_RD_ODT_SET(x) (((uint32_t)(x) << DDRCTL_ODTMAP_RANK1_RD_ODT_SHIFT) & DDRCTL_ODTMAP_RANK1_RD_ODT_MASK)
2263 #define DDRCTL_ODTMAP_RANK1_RD_ODT_GET(x) (((uint32_t)(x) & DDRCTL_ODTMAP_RANK1_RD_ODT_MASK) >> DDRCTL_ODTMAP_RANK1_RD_ODT_SHIFT)
2264 
2265 /*
2266  * RANK1_WR_ODT (R/W)
2267  *
2268  * Description: Indicates which remote ODTs must be turned on during a write to rank 1.
2269  * Each rank has a remote ODT (in the SDRAM) which can be turned on by setting the appropriate bit here.
 * Rank 0 is controlled by the LSB; rank 1 is controlled by the bit next to the LSB, and so on.
 * For each rank, set its bit to 1 to enable its ODT.
 * Present only in configurations that have 2 or more ranks.
 * Value After Reset: "(MEMC_NUM_RANKS>1) ? 0x2 : 0x0"
 * Exists: MEMC_NUM_RANKS>1
2273  */
2274 #define DDRCTL_ODTMAP_RANK1_WR_ODT_MASK (0xF00U)
2275 #define DDRCTL_ODTMAP_RANK1_WR_ODT_SHIFT (8U)
2276 #define DDRCTL_ODTMAP_RANK1_WR_ODT_SET(x) (((uint32_t)(x) << DDRCTL_ODTMAP_RANK1_WR_ODT_SHIFT) & DDRCTL_ODTMAP_RANK1_WR_ODT_MASK)
2277 #define DDRCTL_ODTMAP_RANK1_WR_ODT_GET(x) (((uint32_t)(x) & DDRCTL_ODTMAP_RANK1_WR_ODT_MASK) >> DDRCTL_ODTMAP_RANK1_WR_ODT_SHIFT)
2278 
2279 /*
2280  * RANK0_RD_ODT (R/W)
2281  *
2282  * Description: Indicates which remote ODTs must be turned on during a read from rank 0.
2283  * Each rank has a remote ODT (in the SDRAM) which can be turned on by setting the appropriate bit here.
 * Rank 0 is controlled by the LSB; rank 1 is controlled by the bit next to the LSB, and so on.
2285  * For each rank, set its bit to 1 to enable its ODT.
2286  * Value After Reset: 0x1
2287  * Exists: Always
2288  */
2289 #define DDRCTL_ODTMAP_RANK0_RD_ODT_MASK (0xF0U)
2290 #define DDRCTL_ODTMAP_RANK0_RD_ODT_SHIFT (4U)
2291 #define DDRCTL_ODTMAP_RANK0_RD_ODT_SET(x) (((uint32_t)(x) << DDRCTL_ODTMAP_RANK0_RD_ODT_SHIFT) & DDRCTL_ODTMAP_RANK0_RD_ODT_MASK)
2292 #define DDRCTL_ODTMAP_RANK0_RD_ODT_GET(x) (((uint32_t)(x) & DDRCTL_ODTMAP_RANK0_RD_ODT_MASK) >> DDRCTL_ODTMAP_RANK0_RD_ODT_SHIFT)
2293 
2294 /*
2295  * RANK0_WR_ODT (R/W)
2296  *
2297  * Description: Indicates which remote ODTs must be turned on during a write to rank 0.
2298  * Each rank has a remote ODT (in the SDRAM) which can be turned on by setting the appropriate bit here.
 * Rank 0 is controlled by the LSB; rank 1 is controlled by the bit next to the LSB, and so on.
2300  * For each rank, set its bit to 1 to enable its ODT.
2301  * Value After Reset: 0x1
2302  * Exists: Always
2303  */
2304 #define DDRCTL_ODTMAP_RANK0_WR_ODT_MASK (0xFU)
2305 #define DDRCTL_ODTMAP_RANK0_WR_ODT_SHIFT (0U)
2306 #define DDRCTL_ODTMAP_RANK0_WR_ODT_SET(x) (((uint32_t)(x) << DDRCTL_ODTMAP_RANK0_WR_ODT_SHIFT) & DDRCTL_ODTMAP_RANK0_WR_ODT_MASK)
2307 #define DDRCTL_ODTMAP_RANK0_WR_ODT_GET(x) (((uint32_t)(x) & DDRCTL_ODTMAP_RANK0_WR_ODT_MASK) >> DDRCTL_ODTMAP_RANK0_WR_ODT_SHIFT)
2308 
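/*
 * Usage sketch (illustrative): each ODTMAP nibble is a per-rank bitmap
 * (bit 0 = rank 0's ODT, bit 1 = rank 1's ODT). A single-rank setup that
 * turns on only rank 0's ODT for its own reads and writes; `base` is a
 * hypothetical register-block pointer.
 *
 *     base->ODTMAP = DDRCTL_ODTMAP_RANK0_WR_ODT_SET(0x1U)
 *                  | DDRCTL_ODTMAP_RANK0_RD_ODT_SET(0x1U);
 */
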
2309 /* Bitfield definition for register: SCHED */
2310 /*
2311  * RDWR_IDLE_GAP (R/W)
2312  *
 * Description: When the preferred transaction store is empty for this many clock cycles, switch to the alternate transaction store if it is non-empty.
 * The read transaction store (both high and low priority) is the default preferred transaction store and the write transaction store is the alternative store.
 * When prefer write over read is set, this is reversed.
2316  * 0x0 is a legal value for this register. When set to 0x0, the transaction store switching will happen immediately when the switching conditions become true.
2317  * FOR PERFORMANCE ONLY
2318  * Value After Reset: 0x0
2319  * Exists: Always
2320  */
2321 #define DDRCTL_SCHED_RDWR_IDLE_GAP_MASK (0x7F000000UL)
2322 #define DDRCTL_SCHED_RDWR_IDLE_GAP_SHIFT (24U)
2323 #define DDRCTL_SCHED_RDWR_IDLE_GAP_SET(x) (((uint32_t)(x) << DDRCTL_SCHED_RDWR_IDLE_GAP_SHIFT) & DDRCTL_SCHED_RDWR_IDLE_GAP_MASK)
2324 #define DDRCTL_SCHED_RDWR_IDLE_GAP_GET(x) (((uint32_t)(x) & DDRCTL_SCHED_RDWR_IDLE_GAP_MASK) >> DDRCTL_SCHED_RDWR_IDLE_GAP_SHIFT)
2325 
2326 /*
2327  * GO2CRITICAL_HYSTERESIS (R/W)
2328  *
 * Description: UNUSED
 * Value After Reset: 0x0
 * Exists: Always
2330  */
2331 #define DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_MASK (0xFF0000UL)
2332 #define DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_SHIFT (16U)
2333 #define DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_SET(x) (((uint32_t)(x) << DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_SHIFT) & DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_MASK)
2334 #define DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_GET(x) (((uint32_t)(x) & DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_MASK) >> DDRCTL_SCHED_GO2CRITICAL_HYSTERESIS_SHIFT)
2335 
2336 /*
2337  * LPR_NUM_ENTRIES (R/W)
2338  *
2339  * Description: Number of entries in the low priority transaction store is this value + 1.
2340  * (MEMC_NO_OF_ENTRY - (SCHED.lpr_num_entries + 1)) is the number of entries available for the high priority transaction store.
2341  * Setting this to maximum value allocates all entries to low priority transaction store.
2342  * Setting this to 0 allocates 1 entry to low priority transaction store and the rest to high priority transaction store.
 * Note: In ECC configurations, the number of write and low-priority read credits issued is one less than in the non-ECC case. One entry each is reserved in the write and low-priority read CAMs for storing the RMW requests arising out of single-bit error correction RMW operations.
2344  * Value After Reset: "MEMC_NO_OF_ENTRY/2"
2345  * Exists: Always
2346  */
2347 #define DDRCTL_SCHED_LPR_NUM_ENTRIES_MASK (0x3F00U)
2348 #define DDRCTL_SCHED_LPR_NUM_ENTRIES_SHIFT (8U)
2349 #define DDRCTL_SCHED_LPR_NUM_ENTRIES_SET(x) (((uint32_t)(x) << DDRCTL_SCHED_LPR_NUM_ENTRIES_SHIFT) & DDRCTL_SCHED_LPR_NUM_ENTRIES_MASK)
2350 #define DDRCTL_SCHED_LPR_NUM_ENTRIES_GET(x) (((uint32_t)(x) & DDRCTL_SCHED_LPR_NUM_ENTRIES_MASK) >> DDRCTL_SCHED_LPR_NUM_ENTRIES_SHIFT)
2351 
2352 /*
2353  * PAGECLOSE (R/W)
2354  *
 * Description: If true, bank is kept open only until there are page hit transactions available in the CAM to that bank.
 * The last read or write command in the CAM with a bank and page hit will be executed with auto-precharge if SCHED1.pageclose_timer=0.
 * Even if this register is set to 1 and SCHED1.pageclose_timer is set to 0, explicit precharge (and not auto-precharge) may be issued in some cases where there is a mode switch between Write and Read or between LPR and HPR.
 * The Read and Write commands that are executed as part of the ECC scrub requests are also executed without auto-precharge.
 * If false, the bank remains open until there is a need to close it (to open a different page, or for page timeout or refresh timeout) - also known as open page policy.
 * The open page policy can be overridden by setting the per-command-autopre bit on the HIF interface (co_ih_rxcmd_autopre).
 * The pageclose feature provides a middle ground between the Open and Close page policies.
2357  * FOR PERFORMANCE ONLY.
2358  * Value After Reset: 0x1
2359  * Exists: Always
2360  */
2361 #define DDRCTL_SCHED_PAGECLOSE_MASK (0x4U)
2362 #define DDRCTL_SCHED_PAGECLOSE_SHIFT (2U)
2363 #define DDRCTL_SCHED_PAGECLOSE_SET(x) (((uint32_t)(x) << DDRCTL_SCHED_PAGECLOSE_SHIFT) & DDRCTL_SCHED_PAGECLOSE_MASK)
2364 #define DDRCTL_SCHED_PAGECLOSE_GET(x) (((uint32_t)(x) & DDRCTL_SCHED_PAGECLOSE_MASK) >> DDRCTL_SCHED_PAGECLOSE_SHIFT)
2365 
2366 /*
2367  * PREFER_WRITE (R/W)
2368  *
2369  * Description: If set then the bank selector prefers writes over reads.
2370  * FOR DEBUG ONLY.
2371  * Value After Reset: 0x0
2372  * Exists: Always
2373  */
2374 #define DDRCTL_SCHED_PREFER_WRITE_MASK (0x2U)
2375 #define DDRCTL_SCHED_PREFER_WRITE_SHIFT (1U)
2376 #define DDRCTL_SCHED_PREFER_WRITE_SET(x) (((uint32_t)(x) << DDRCTL_SCHED_PREFER_WRITE_SHIFT) & DDRCTL_SCHED_PREFER_WRITE_MASK)
2377 #define DDRCTL_SCHED_PREFER_WRITE_GET(x) (((uint32_t)(x) & DDRCTL_SCHED_PREFER_WRITE_MASK) >> DDRCTL_SCHED_PREFER_WRITE_SHIFT)
2378 
2379 /*
2380  * FORCE_LOW_PRI_N (R/W)
2381  *
2382  * Description: Active low signal. When asserted ('0'), all incoming transactions are forced to low priority. This implies that all High Priority Read (HPR) and Variable Priority Read commands (VPR) will be treated as Low Priority Read (LPR) commands. On the write side, all Variable Priority Write (VPW) commands will be treated as Normal Priority Write (NPW) commands. Forcing the incoming transactions to low priority implicitly turns off Bypass path for read commands.
2383  * FOR PERFORMANCE ONLY.
2384  * Value After Reset: 0x1
2385  * Exists: Always
2386  */
2387 #define DDRCTL_SCHED_FORCE_LOW_PRI_N_MASK (0x1U)
2388 #define DDRCTL_SCHED_FORCE_LOW_PRI_N_SHIFT (0U)
2389 #define DDRCTL_SCHED_FORCE_LOW_PRI_N_SET(x) (((uint32_t)(x) << DDRCTL_SCHED_FORCE_LOW_PRI_N_SHIFT) & DDRCTL_SCHED_FORCE_LOW_PRI_N_MASK)
2390 #define DDRCTL_SCHED_FORCE_LOW_PRI_N_GET(x) (((uint32_t)(x) & DDRCTL_SCHED_FORCE_LOW_PRI_N_MASK) >> DDRCTL_SCHED_FORCE_LOW_PRI_N_SHIFT)
2391 
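/*
 * Usage sketch (illustrative): read-modify-write of SCHED that splits a
 * 32-entry CAM evenly (lpr_num_entries = 15 leaves 16 entries for each
 * store) without disturbing the other policy bits. `base` is a
 * hypothetical register-block pointer.
 *
 *     uint32_t sched = base->SCHED;
 *     sched &= ~DDRCTL_SCHED_LPR_NUM_ENTRIES_MASK;
 *     sched |= DDRCTL_SCHED_LPR_NUM_ENTRIES_SET(15U);
 *     base->SCHED = sched;
 */
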
2392 /* Bitfield definition for register: SCHED1 */
2393 /*
2394  * PAGECLOSE_TIMER (R/W)
2395  *
 * Description: This field works in conjunction with SCHED.pageclose. It only has meaning if SCHED.pageclose==1.
 * If SCHED.pageclose==1 and pageclose_timer==0, then an auto-precharge may be scheduled for the last read or write command in the CAM with a bank and page hit.
 * Note, sometimes an explicit precharge is scheduled instead of the auto-precharge. See SCHED.pageclose for details of when this may happen.
 * If SCHED.pageclose==1 and pageclose_timer>0, then an auto-precharge is not scheduled for the last read or write command in the CAM with a bank and page hit.
 * Instead, a timer is started, with pageclose_timer as the initial value. There is a timer on a per-bank basis.
 * The timer decrements unless the next read or write in the CAM to a bank is a page hit; it is reset to the pageclose_timer value if the next read or write in the CAM to a bank is a page hit.
 * Once the timer has reached zero, an explicit precharge is attempted to be scheduled.
2399  * Value After Reset: 0x0
2400  * Exists: Always
2401  */
2402 #define DDRCTL_SCHED1_PAGECLOSE_TIMER_MASK (0xFFU)
2403 #define DDRCTL_SCHED1_PAGECLOSE_TIMER_SHIFT (0U)
2404 #define DDRCTL_SCHED1_PAGECLOSE_TIMER_SET(x) (((uint32_t)(x) << DDRCTL_SCHED1_PAGECLOSE_TIMER_SHIFT) & DDRCTL_SCHED1_PAGECLOSE_TIMER_MASK)
2405 #define DDRCTL_SCHED1_PAGECLOSE_TIMER_GET(x) (((uint32_t)(x) & DDRCTL_SCHED1_PAGECLOSE_TIMER_MASK) >> DDRCTL_SCHED1_PAGECLOSE_TIMER_SHIFT)
2406 
2407 /* Bitfield definition for register: PERFHPR1 */
2408 /*
2409  * HPR_XACT_RUN_LENGTH (R/W)
2410  *
2411  * Description: Number of transactions that are serviced once the HPR queue goes critical is the smaller of:
2412  * This number
 * Number of transactions available
 * Unit: Transaction.
2414  * FOR PERFORMANCE ONLY.
2415  * Value After Reset: 0xf
2416  * Exists: Always
2417  */
2418 #define DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_MASK (0xFF000000UL)
2419 #define DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_SHIFT (24U)
2420 #define DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_SET(x) (((uint32_t)(x) << DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_SHIFT) & DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_MASK)
2421 #define DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_GET(x) (((uint32_t)(x) & DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_MASK) >> DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_SHIFT)
2422 
2423 /*
2424  * HPR_MAX_STARVE (R/W)
2425  *
2426  * Description: Number of clocks that the HPR queue can be starved before it goes critical. The minimum valid functional value for this register is 0x1. Programming it to 0x0 will disable the starvation functionality; during normal operation, this function should not be disabled as it will cause excessive latencies.
2427  * Unit: Clock cycles.
2428  * FOR PERFORMANCE ONLY.
2429  * Value After Reset: 0x1
2430  * Exists: Always
2431  */
2432 #define DDRCTL_PERFHPR1_HPR_MAX_STARVE_MASK (0xFFFFU)
2433 #define DDRCTL_PERFHPR1_HPR_MAX_STARVE_SHIFT (0U)
2434 #define DDRCTL_PERFHPR1_HPR_MAX_STARVE_SET(x) (((uint32_t)(x) << DDRCTL_PERFHPR1_HPR_MAX_STARVE_SHIFT) & DDRCTL_PERFHPR1_HPR_MAX_STARVE_MASK)
2435 #define DDRCTL_PERFHPR1_HPR_MAX_STARVE_GET(x) (((uint32_t)(x) & DDRCTL_PERFHPR1_HPR_MAX_STARVE_MASK) >> DDRCTL_PERFHPR1_HPR_MAX_STARVE_SHIFT)
2436 
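/*
 * Usage sketch (illustrative): cap HPR starvation at 256 clocks and let a
 * critical run service up to 8 transactions; the SET() helpers keep each
 * value within its field. `base` is a hypothetical register-block pointer.
 *
 *     base->PERFHPR1 = DDRCTL_PERFHPR1_HPR_MAX_STARVE_SET(256U)
 *                    | DDRCTL_PERFHPR1_HPR_XACT_RUN_LENGTH_SET(8U);
 */
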
2437 /* Bitfield definition for register: PERFLPR1 */
2438 /*
2439  * LPR_XACT_RUN_LENGTH (R/W)
2440  *
2441  * Description: Number of transactions that are serviced once the LPR queue goes critical is the smaller of:
2442  * This number
 * Number of transactions available.
 * Unit: Transaction.
2444  * FOR PERFORMANCE ONLY.
2445  * Value After Reset: 0xf
2446  * Exists: Always
2447  */
2448 #define DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_MASK (0xFF000000UL)
2449 #define DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_SHIFT (24U)
2450 #define DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_SET(x) (((uint32_t)(x) << DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_SHIFT) & DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_MASK)
2451 #define DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_GET(x) (((uint32_t)(x) & DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_MASK) >> DDRCTL_PERFLPR1_LPR_XACT_RUN_LENGTH_SHIFT)
2452 
2453 /*
2454  * LPR_MAX_STARVE (R/W)
2455  *
2456  * Description: Number of clocks that the LPR queue can be starved before it goes critical. The minimum valid functional value for this register is 0x1. Programming it to 0x0 will disable the starvation functionality; during normal operation, this function should not be disabled as it will cause excessive latencies.
2457  * Unit: Clock cycles.
2458  * FOR PERFORMANCE ONLY.
2459  * Value After Reset: 0x7f
2460  * Exists: Always
2461  */
2462 #define DDRCTL_PERFLPR1_LPR_MAX_STARVE_MASK (0xFFFFU)
2463 #define DDRCTL_PERFLPR1_LPR_MAX_STARVE_SHIFT (0U)
2464 #define DDRCTL_PERFLPR1_LPR_MAX_STARVE_SET(x) (((uint32_t)(x) << DDRCTL_PERFLPR1_LPR_MAX_STARVE_SHIFT) & DDRCTL_PERFLPR1_LPR_MAX_STARVE_MASK)
2465 #define DDRCTL_PERFLPR1_LPR_MAX_STARVE_GET(x) (((uint32_t)(x) & DDRCTL_PERFLPR1_LPR_MAX_STARVE_MASK) >> DDRCTL_PERFLPR1_LPR_MAX_STARVE_SHIFT)
2466 
2467 /* Bitfield definition for register: PERFWR1 */
2468 /*
2469  * W_XACT_RUN_LENGTH (R/W)
2470  *
2471  * Description: Number of transactions that are serviced once the WR queue goes critical is the smaller of:
2472  * This number
 * Number of transactions available.
 * Unit: Transaction.
2474  * FOR PERFORMANCE ONLY.
2475  * Value After Reset: 0xf
2476  * Exists: Always
2477  */
2478 #define DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_MASK (0xFF000000UL)
2479 #define DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_SHIFT (24U)
2480 #define DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_SET(x) (((uint32_t)(x) << DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_SHIFT) & DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_MASK)
2481 #define DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_GET(x) (((uint32_t)(x) & DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_MASK) >> DDRCTL_PERFWR1_W_XACT_RUN_LENGTH_SHIFT)
2482 
2483 /*
2484  * W_MAX_STARVE (R/W)
2485  *
2486  * Description: Number of clocks that the WR queue can be starved before it goes critical. The minimum valid functional value for this register is 0x1. Programming it to 0x0 will disable the starvation functionality; during normal operation, this function should not be disabled as it will cause excessive latencies.
2487  * Unit: Clock cycles.
2488  * FOR PERFORMANCE ONLY.
2489  * Value After Reset: 0x7f
2490  * Exists: Always
2491  */
2492 #define DDRCTL_PERFWR1_W_MAX_STARVE_MASK (0xFFFFU)
2493 #define DDRCTL_PERFWR1_W_MAX_STARVE_SHIFT (0U)
2494 #define DDRCTL_PERFWR1_W_MAX_STARVE_SET(x) (((uint32_t)(x) << DDRCTL_PERFWR1_W_MAX_STARVE_SHIFT) & DDRCTL_PERFWR1_W_MAX_STARVE_MASK)
2495 #define DDRCTL_PERFWR1_W_MAX_STARVE_GET(x) (((uint32_t)(x) & DDRCTL_PERFWR1_W_MAX_STARVE_MASK) >> DDRCTL_PERFWR1_W_MAX_STARVE_SHIFT)
2496 
2497 /* Bitfield definition for register: PERFVPR1 */
2498 /*
2499  * VPR_TIMEOUT_RANGE (R/W)
2500  *
 * Description: Indicates the range of the timeout value that is used for grouping the expired VPR commands in the CAM in the DDRC. For example, if the register value is set to 0xF, then the priorities of all the VPR commands whose timeout counters are 15 or below are considered as expired-VPR commands when the timeout value of any of the VPR commands reaches 0. The expired-VPR commands, when present, are given higher priority than HPR commands. The VPR commands are expected to consist largely of page hit traffic, and by grouping them together the bus utilization is expected to increase. This register applies to transactions inside the DDRC only.
 * The Max value for this register is 0x7FF and the Min value is 0x0.
 * When programmed to the Max value of 0x7FF, all the VPR commands that come into the DDRC time out right away and are considered as expired-VPR.
 * When programmed to the Min value of 0x0, the timer of each command has to reach a value of 0 before it is considered as expired-VPR.
2505  * Unit: Clock cycles.
2506  * FOR PERFORMANCE ONLY.
2507  * Value After Reset: 0x0
2508  * Exists: UMCTL2_VPR_EN==1
2509  */
2510 #define DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_MASK (0x7FFU)
2511 #define DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_SHIFT (0U)
2512 #define DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_SET(x) (((uint32_t)(x) << DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_SHIFT) & DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_MASK)
2513 #define DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_GET(x) (((uint32_t)(x) & DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_MASK) >> DDRCTL_PERFVPR1_VPR_TIMEOUT_RANGE_SHIFT)
2514 
2515 /* Bitfield definition for register: PERFVPW1 */
2516 /*
2517  * VPW_TIMEOUT_RANGE (R/W)
2518  *
 * Description: Indicates the range of the timeout value that is used for grouping the expired VPW commands in the CAM in the DDRC. For example, if the register value is set to 0xF, then the priorities of all the VPW commands whose timeout counters are 15 or below are considered as expired-VPW commands when the timeout value of any of the VPW commands reaches 0. The expired-VPW commands, when present, are given higher priority than normal Write commands. The VPW commands are expected to consist largely of page hit traffic, and by grouping them together the bus utilization is expected to increase. This register applies to transactions inside the DDRC only.
 * The Max value for this register is 0x7FF and the Min value is 0x0.
 * When programmed to the Max value of 0x7FF, all the VPW commands that come into the DDRC time out right away and are considered as expired-VPW.
 * When programmed to the Min value of 0x0, the timer of each command has to reach a value of 0 before it is considered as expired-VPW.
2523  * Unit: Clock cycles.
2524  * FOR PERFORMANCE ONLY.
2525  * Value After Reset: 0x0
2526  * Exists: UMCTL2_VPW_EN==1
2527  */
2528 #define DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_MASK (0x7FFU)
2529 #define DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_SHIFT (0U)
2530 #define DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_SET(x) (((uint32_t)(x) << DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_SHIFT) & DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_MASK)
2531 #define DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_GET(x) (((uint32_t)(x) & DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_MASK) >> DDRCTL_PERFVPW1_VPW_TIMEOUT_RANGE_SHIFT)
2532 
2533 /* Bitfield definition for register: DBG0 */
2534 /*
2535  * DIS_COLLISION_PAGE_OPT (R/W)
2536  *
 * Description: When this is set to '1', auto-precharge is disabled for the flushed command in a collision case. Collision cases are write followed by read to the same address, read followed by write to the same address, or write followed by write to the same address with the DBG0.dis_wc bit = 1 (where same-address comparisons exclude the two address bits representing the critical word).
2538  * FOR DEBUG ONLY.
2539  * Value After Reset: 0x0
2540  * Exists: Always
2541  */
2542 #define DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_MASK (0x10U)
2543 #define DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_SHIFT (4U)
2544 #define DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_SET(x) (((uint32_t)(x) << DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_SHIFT) & DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_MASK)
2545 #define DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_GET(x) (((uint32_t)(x) & DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_MASK) >> DDRCTL_DBG0_DIS_COLLISION_PAGE_OPT_SHIFT)
2546 
2547 /*
2548  * DIS_ACT_BYPASS (R/W)
2549  *
2550  * Description: Only present in designs supporting activate bypass.
 * When 1, disable bypass path for high priority read activates.
 * FOR DEBUG ONLY.
2552  * Value After Reset: 0x0
2553  * Exists: MEMC_BYPASS==1
2554  */
2555 #define DDRCTL_DBG0_DIS_ACT_BYPASS_MASK (0x4U)
2556 #define DDRCTL_DBG0_DIS_ACT_BYPASS_SHIFT (2U)
2557 #define DDRCTL_DBG0_DIS_ACT_BYPASS_SET(x) (((uint32_t)(x) << DDRCTL_DBG0_DIS_ACT_BYPASS_SHIFT) & DDRCTL_DBG0_DIS_ACT_BYPASS_MASK)
2558 #define DDRCTL_DBG0_DIS_ACT_BYPASS_GET(x) (((uint32_t)(x) & DDRCTL_DBG0_DIS_ACT_BYPASS_MASK) >> DDRCTL_DBG0_DIS_ACT_BYPASS_SHIFT)
2559 
2560 /*
2561  * DIS_RD_BYPASS (R/W)
2562  *
2563  * Description: Only present in designs supporting read bypass.
 * When 1, disable bypass path for high priority read page hits.
 * FOR DEBUG ONLY.
2565  * Value After Reset: 0x0
2566  * Exists: MEMC_BYPASS==1
2567  */
2568 #define DDRCTL_DBG0_DIS_RD_BYPASS_MASK (0x2U)
2569 #define DDRCTL_DBG0_DIS_RD_BYPASS_SHIFT (1U)
2570 #define DDRCTL_DBG0_DIS_RD_BYPASS_SET(x) (((uint32_t)(x) << DDRCTL_DBG0_DIS_RD_BYPASS_SHIFT) & DDRCTL_DBG0_DIS_RD_BYPASS_MASK)
2571 #define DDRCTL_DBG0_DIS_RD_BYPASS_GET(x) (((uint32_t)(x) & DDRCTL_DBG0_DIS_RD_BYPASS_MASK) >> DDRCTL_DBG0_DIS_RD_BYPASS_SHIFT)
2572 
2573 /*
2574  * DIS_WC (R/W)
2575  *
 * Description: When 1, disable write combine.
 * FOR DEBUG ONLY.
2577  * Value After Reset: 0x0
2578  * Exists: Always
2579  */
2580 #define DDRCTL_DBG0_DIS_WC_MASK (0x1U)
2581 #define DDRCTL_DBG0_DIS_WC_SHIFT (0U)
2582 #define DDRCTL_DBG0_DIS_WC_SET(x) (((uint32_t)(x) << DDRCTL_DBG0_DIS_WC_SHIFT) & DDRCTL_DBG0_DIS_WC_MASK)
2583 #define DDRCTL_DBG0_DIS_WC_GET(x) (((uint32_t)(x) & DDRCTL_DBG0_DIS_WC_MASK) >> DDRCTL_DBG0_DIS_WC_SHIFT)
2584 
2585 /* Bitfield definition for register: DBG1 */
2586 /*
2587  * DIS_HIF (R/W)
2588  *
 * Description: When 1, uMCTL2 asserts the HIF stall signal ih_co_stall and ignores co_ih_rxcmd_valid and all other associated request signals.
2590  * This bit is intended to be switched on-the-fly.
2591  * Value After Reset: 0x0
2592  * Exists: Always
2593  */
2594 #define DDRCTL_DBG1_DIS_HIF_MASK (0x2U)
2595 #define DDRCTL_DBG1_DIS_HIF_SHIFT (1U)
2596 #define DDRCTL_DBG1_DIS_HIF_SET(x) (((uint32_t)(x) << DDRCTL_DBG1_DIS_HIF_SHIFT) & DDRCTL_DBG1_DIS_HIF_MASK)
2597 #define DDRCTL_DBG1_DIS_HIF_GET(x) (((uint32_t)(x) & DDRCTL_DBG1_DIS_HIF_MASK) >> DDRCTL_DBG1_DIS_HIF_SHIFT)
2598 
2599 /*
2600  * DIS_DQ (R/W)
2601  *
2602  * Description: When 1, uMCTL2 will not de-queue any transactions from the CAM. Bypass is also disabled. All transactions are queued in the CAM. No reads or writes are issued to SDRAM as long as this is asserted.
2603  * This bit may be used to prevent reads or writes being issued by the uMCTL2, which makes it safe to modify certain register fields associated with reads and writes (see User Guide for details). After setting this bit, it is strongly recommended to poll DBGCAM.wr_data_pipeline_empty and DBGCAM.rd_data_pipeline_empty, before making changes to any registers which affect reads and writes. This will ensure that the relevant logic in the DDRC is idle.
2604  * This bit is intended to be switched on-the-fly.
2605  * Value After Reset: 0x0
2606  * Exists: Always
2607  */
2608 #define DDRCTL_DBG1_DIS_DQ_MASK (0x1U)
2609 #define DDRCTL_DBG1_DIS_DQ_SHIFT (0U)
2610 #define DDRCTL_DBG1_DIS_DQ_SET(x) (((uint32_t)(x) << DDRCTL_DBG1_DIS_DQ_SHIFT) & DDRCTL_DBG1_DIS_DQ_MASK)
2611 #define DDRCTL_DBG1_DIS_DQ_GET(x) (((uint32_t)(x) & DDRCTL_DBG1_DIS_DQ_MASK) >> DDRCTL_DBG1_DIS_DQ_SHIFT)
2612 
2613 /* Bitfield definition for register: DBGCAM */
2614 /*
2615  * WR_DATA_PIPELINE_EMPTY (R)
2616  *
2617  * Description: This bit indicates that the write data pipeline on the DFI interface is empty. This register is intended to be polled after setting DBG1.dis_dq, to ensure that all remaining commands/data have completed.
2618  * Value After Reset: 0x0
2619  * Exists: Always
2620  */
2621 #define DDRCTL_DBGCAM_WR_DATA_PIPELINE_EMPTY_MASK (0x20000000UL)
2622 #define DDRCTL_DBGCAM_WR_DATA_PIPELINE_EMPTY_SHIFT (29U)
2623 #define DDRCTL_DBGCAM_WR_DATA_PIPELINE_EMPTY_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_WR_DATA_PIPELINE_EMPTY_MASK) >> DDRCTL_DBGCAM_WR_DATA_PIPELINE_EMPTY_SHIFT)
2624 
2625 /*
2626  * RD_DATA_PIPELINE_EMPTY (R)
2627  *
2628  * Description: This bit indicates that the read data pipeline on the DFI interface is empty. This register is intended to be polled after setting DBG1.dis_dq, to ensure that all remaining commands/data have completed.
2629  * Value After Reset: 0x0
2630  * Exists: Always
2631  */
2632 #define DDRCTL_DBGCAM_RD_DATA_PIPELINE_EMPTY_MASK (0x10000000UL)
2633 #define DDRCTL_DBGCAM_RD_DATA_PIPELINE_EMPTY_SHIFT (28U)
2634 #define DDRCTL_DBGCAM_RD_DATA_PIPELINE_EMPTY_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_RD_DATA_PIPELINE_EMPTY_MASK) >> DDRCTL_DBGCAM_RD_DATA_PIPELINE_EMPTY_SHIFT)
2635 
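/*
 * Usage sketch (illustrative): the quiesce sequence described for
 * DBG1.dis_dq - stop dequeuing, then poll both pipeline-empty flags before
 * modifying read/write-related registers. `base` is a hypothetical
 * register-block pointer.
 *
 *     base->DBG1 |= DDRCTL_DBG1_DIS_DQ_SET(1U);
 *     while (!DDRCTL_DBGCAM_WR_DATA_PIPELINE_EMPTY_GET(base->DBGCAM) ||
 *            !DDRCTL_DBGCAM_RD_DATA_PIPELINE_EMPTY_GET(base->DBGCAM)) {
 *         // wait for the DFI data pipelines to drain
 *     }
 */
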
2636 /*
2637  * DBG_WR_Q_EMPTY (R)
2638  *
 * Description: When 1, all the Write command queues and Write data buffers inside the DDRC are empty. This register is to be used for debug purposes.
 * An example use-case scenario: When the Controller enters Self-Refresh using the Low-Power entry sequence, the Controller is expected to have executed all the commands in its queues and drained the write and read data. Hence this register should be 1 at that time.
2641  * FOR DEBUG ONLY
2642  * Value After Reset: 0x0
2643  * Exists: Always
2644  */
2645 #define DDRCTL_DBGCAM_DBG_WR_Q_EMPTY_MASK (0x4000000UL)
2646 #define DDRCTL_DBGCAM_DBG_WR_Q_EMPTY_SHIFT (26U)
2647 #define DDRCTL_DBGCAM_DBG_WR_Q_EMPTY_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_DBG_WR_Q_EMPTY_MASK) >> DDRCTL_DBGCAM_DBG_WR_Q_EMPTY_SHIFT)
2648 
2649 /*
2650  * DBG_RD_Q_EMPTY (R)
2651  *
 * Description: When 1, all the Read command queues and Read data buffers inside the DDRC are empty. This register is to be used for debug purposes.
 * An example use-case scenario: When the Controller enters Self-Refresh using the Low-Power entry sequence, the Controller is expected to have executed all the commands in its queues and drained the write and read data. Hence this register should be 1 at that time.
2654  * FOR DEBUG ONLY
2655  * Value After Reset: 0x0
2656  * Exists: Always
2657  */
2658 #define DDRCTL_DBGCAM_DBG_RD_Q_EMPTY_MASK (0x2000000UL)
2659 #define DDRCTL_DBGCAM_DBG_RD_Q_EMPTY_SHIFT (25U)
2660 #define DDRCTL_DBGCAM_DBG_RD_Q_EMPTY_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_DBG_RD_Q_EMPTY_MASK) >> DDRCTL_DBGCAM_DBG_RD_Q_EMPTY_SHIFT)
2661 
2662 /*
2663  * DBG_STALL (R)
2664  *
2665  * Description: Stall FOR DEBUG ONLY
2666  * Value After Reset: 0x0
2667  * Exists: Always
2668  */
2669 #define DDRCTL_DBGCAM_DBG_STALL_MASK (0x1000000UL)
2670 #define DDRCTL_DBGCAM_DBG_STALL_SHIFT (24U)
2671 #define DDRCTL_DBGCAM_DBG_STALL_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_DBG_STALL_MASK) >> DDRCTL_DBGCAM_DBG_STALL_SHIFT)
2672 
2673 /*
2674  * DBG_W_Q_DEPTH (R)
2675  *
 * Description: Write queue depth.
 * Note: The width of this field is dependent on log(MEMC_NO_OF_ENTRY+1). For example, if CAM depth = 32, then register width is 6 bits and bit 22 is reserved.
 * FOR DEBUG ONLY
2679  * Value After Reset: 0x0
2680  * Exists: Always
2681  */
2682 #define DDRCTL_DBGCAM_DBG_W_Q_DEPTH_MASK (0x7F0000UL)
2683 #define DDRCTL_DBGCAM_DBG_W_Q_DEPTH_SHIFT (16U)
2684 #define DDRCTL_DBGCAM_DBG_W_Q_DEPTH_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_DBG_W_Q_DEPTH_MASK) >> DDRCTL_DBGCAM_DBG_W_Q_DEPTH_SHIFT)
2685 
2686 /*
2687  * DBG_LPR_Q_DEPTH (R)
2688  *
 * Description: Low priority read queue depth.
 * Note: The width of this field is dependent on log(MEMC_NO_OF_ENTRY+1). For example, if CAM depth = 32, then register width is 6 bits and bit 14 is reserved.
 * FOR DEBUG ONLY
2692  * Value After Reset: 0x0
2693  * Exists: Always
2694  */
2695 #define DDRCTL_DBGCAM_DBG_LPR_Q_DEPTH_MASK (0x7F00U)
2696 #define DDRCTL_DBGCAM_DBG_LPR_Q_DEPTH_SHIFT (8U)
2697 #define DDRCTL_DBGCAM_DBG_LPR_Q_DEPTH_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_DBG_LPR_Q_DEPTH_MASK) >> DDRCTL_DBGCAM_DBG_LPR_Q_DEPTH_SHIFT)
2698 
2699 /*
2700  * DBG_HPR_Q_DEPTH (R)
2701  *
 * Description: High priority read queue depth.
 * Note: The width of this field is dependent on log(MEMC_NO_OF_ENTRY+1). For example, if CAM depth = 32, then register width is 6 bits and bit 6 is reserved.
 * FOR DEBUG ONLY
2705  * Value After Reset: 0x0
2706  * Exists: Always
2707  */
2708 #define DDRCTL_DBGCAM_DBG_HPR_Q_DEPTH_MASK (0x7FU)
2709 #define DDRCTL_DBGCAM_DBG_HPR_Q_DEPTH_SHIFT (0U)
2710 #define DDRCTL_DBGCAM_DBG_HPR_Q_DEPTH_GET(x) (((uint32_t)(x) & DDRCTL_DBGCAM_DBG_HPR_Q_DEPTH_MASK) >> DDRCTL_DBGCAM_DBG_HPR_Q_DEPTH_SHIFT)
2711 
/* Bitfield definition for register: DBGCMD */
/*
 * CTRLUPD (R/WSC)
 *
 * Description: Setting this register bit to 1 indicates to the uMCTL2 to issue a dfi_ctrlupd_req to the PHY. When this request is stored in uMCTL2, the bit is automatically cleared. This operation must only be performed when DFIUPD0.dis_auto_ctrlupd=1.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DBGCMD_CTRLUPD_MASK (0x20U)
#define DDRCTL_DBGCMD_CTRLUPD_SHIFT (5U)
#define DDRCTL_DBGCMD_CTRLUPD_SET(x) (((uint32_t)(x) << DDRCTL_DBGCMD_CTRLUPD_SHIFT) & DDRCTL_DBGCMD_CTRLUPD_MASK)
#define DDRCTL_DBGCMD_CTRLUPD_GET(x) (((uint32_t)(x) & DDRCTL_DBGCMD_CTRLUPD_MASK) >> DDRCTL_DBGCMD_CTRLUPD_SHIFT)

/*
 * ZQ_CALIB_SHORT (R/WSC)
 *
 * Description: Setting this register bit to 1 indicates to the uMCTL2 to issue a ZQCS (ZQ calibration short) command to the SDRAM. When this request is stored in uMCTL2, the bit is automatically cleared. This operation can be performed only when ZQCTL0.dis_auto_zq=1. It is recommended NOT to set this register bit if in Init operating mode. This register bit is ignored when in Self-Refresh and Deep power-down operating modes.
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3_OR_4_OR_LPDDR2==1
 */
#define DDRCTL_DBGCMD_ZQ_CALIB_SHORT_MASK (0x10U)
#define DDRCTL_DBGCMD_ZQ_CALIB_SHORT_SHIFT (4U)
#define DDRCTL_DBGCMD_ZQ_CALIB_SHORT_SET(x) (((uint32_t)(x) << DDRCTL_DBGCMD_ZQ_CALIB_SHORT_SHIFT) & DDRCTL_DBGCMD_ZQ_CALIB_SHORT_MASK)
#define DDRCTL_DBGCMD_ZQ_CALIB_SHORT_GET(x) (((uint32_t)(x) & DDRCTL_DBGCMD_ZQ_CALIB_SHORT_MASK) >> DDRCTL_DBGCMD_ZQ_CALIB_SHORT_SHIFT)

/*
 * RANK1_REFRESH (R/WSC)
 *
 * Description: Setting this register bit to 1 indicates to the uMCTL2 to issue a refresh to rank 1. When this request is stored in uMCTL2, the bit is automatically cleared. This operation can be performed only when RFSHCTL3.dis_auto_refresh=1. It is recommended NOT to set this register bit if in Init or Deep power-down operating modes or Maximum Power Saving Mode.
 * Value After Reset: 0x0
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_DBGCMD_RANK1_REFRESH_MASK (0x2U)
#define DDRCTL_DBGCMD_RANK1_REFRESH_SHIFT (1U)
#define DDRCTL_DBGCMD_RANK1_REFRESH_SET(x) (((uint32_t)(x) << DDRCTL_DBGCMD_RANK1_REFRESH_SHIFT) & DDRCTL_DBGCMD_RANK1_REFRESH_MASK)
#define DDRCTL_DBGCMD_RANK1_REFRESH_GET(x) (((uint32_t)(x) & DDRCTL_DBGCMD_RANK1_REFRESH_MASK) >> DDRCTL_DBGCMD_RANK1_REFRESH_SHIFT)

/*
 * RANK0_REFRESH (R/WSC)
 *
 * Description: Setting this register bit to 1 indicates to the uMCTL2 to issue a refresh to rank 0. When this request is stored in uMCTL2, the bit is automatically cleared. This operation can be performed only when RFSHCTL3.dis_auto_refresh=1. It is recommended NOT to set this register bit if in Init or Deep power-down operating modes or Maximum Power Saving Mode.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DBGCMD_RANK0_REFRESH_MASK (0x1U)
#define DDRCTL_DBGCMD_RANK0_REFRESH_SHIFT (0U)
#define DDRCTL_DBGCMD_RANK0_REFRESH_SET(x) (((uint32_t)(x) << DDRCTL_DBGCMD_RANK0_REFRESH_SHIFT) & DDRCTL_DBGCMD_RANK0_REFRESH_MASK)
#define DDRCTL_DBGCMD_RANK0_REFRESH_GET(x) (((uint32_t)(x) & DDRCTL_DBGCMD_RANK0_REFRESH_MASK) >> DDRCTL_DBGCMD_RANK0_REFRESH_SHIFT)

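/*
 * Usage sketch (illustration only): request a manual DFI controller update.
 * Assumes "ptr" is a DDRCTL_Type * with a DBGCMD member; per the CTRLUPD
 * description above, this is only valid when DFIUPD0.dis_auto_ctrlupd=1.
 *
 *   ptr->DBGCMD = DDRCTL_DBGCMD_CTRLUPD_SET(1);
 */
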
/* Bitfield definition for register: DBGSTAT */
/*
 * CTRLUPD_BUSY (R)
 *
 * Description: SoC core may initiate a ctrlupd operation only if this signal is low. This signal goes high in the clock after the uMCTL2 accepts the ctrlupd request. It goes low when the ctrlupd operation is initiated in uMCTL2. It is recommended not to perform ctrlupd operations when this signal is high.
 * 0 - Indicates that the SoC core can initiate a ctrlupd operation
 * 1 - Indicates that ctrlupd operation has not been initiated yet in uMCTL2
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DBGSTAT_CTRLUPD_BUSY_MASK (0x20U)
#define DDRCTL_DBGSTAT_CTRLUPD_BUSY_SHIFT (5U)
#define DDRCTL_DBGSTAT_CTRLUPD_BUSY_GET(x) (((uint32_t)(x) & DDRCTL_DBGSTAT_CTRLUPD_BUSY_MASK) >> DDRCTL_DBGSTAT_CTRLUPD_BUSY_SHIFT)

/*
 * ZQ_CALIB_SHORT_BUSY (R)
 *
 * Description: SoC core may initiate a ZQCS (ZQ calibration short) operation only if this signal is low. This signal goes high in the clock after the uMCTL2 accepts the ZQCS request. It goes low when the ZQCS operation is initiated in uMCTL2. It is recommended not to perform ZQCS operations when this signal is high.
 * 0 - Indicates that the SoC core can initiate a ZQCS operation
 * 1 - Indicates that ZQCS operation has not been initiated yet in uMCTL2
 * Value After Reset: 0x0
 * Exists: MEMC_DDR3_OR_4_OR_LPDDR2==1
 */
#define DDRCTL_DBGSTAT_ZQ_CALIB_SHORT_BUSY_MASK (0x10U)
#define DDRCTL_DBGSTAT_ZQ_CALIB_SHORT_BUSY_SHIFT (4U)
#define DDRCTL_DBGSTAT_ZQ_CALIB_SHORT_BUSY_GET(x) (((uint32_t)(x) & DDRCTL_DBGSTAT_ZQ_CALIB_SHORT_BUSY_MASK) >> DDRCTL_DBGSTAT_ZQ_CALIB_SHORT_BUSY_SHIFT)

/*
 * RANK1_REFRESH_BUSY (R)
 *
 * Description: SoC core may initiate a rank1_refresh operation (refresh operation to rank 1) only if this signal is low. This signal goes high in the clock after DBGCMD.rank1_refresh is set to one. It goes low when the rank1_refresh operation is stored in uMCTL2. It is recommended not to perform rank1_refresh operations when this signal is high.
 * 0 - Indicates that the SoC core can initiate a rank1_refresh operation
 * 1 - Indicates that rank1_refresh operation has not been stored yet in uMCTL2
 * Value After Reset: 0x0
 * Exists: MEMC_NUM_RANKS>1
 */
#define DDRCTL_DBGSTAT_RANK1_REFRESH_BUSY_MASK (0x2U)
#define DDRCTL_DBGSTAT_RANK1_REFRESH_BUSY_SHIFT (1U)
#define DDRCTL_DBGSTAT_RANK1_REFRESH_BUSY_GET(x) (((uint32_t)(x) & DDRCTL_DBGSTAT_RANK1_REFRESH_BUSY_MASK) >> DDRCTL_DBGSTAT_RANK1_REFRESH_BUSY_SHIFT)

/*
 * RANK0_REFRESH_BUSY (R)
 *
 * Description: SoC core may initiate a rank0_refresh operation (refresh operation to rank 0) only if this signal is low. This signal goes high in the clock after DBGCMD.rank0_refresh is set to one. It goes low when the rank0_refresh operation is stored in uMCTL2. It is recommended not to perform rank0_refresh operations when this signal is high.
 * 0 - Indicates that the SoC core can initiate a rank0_refresh operation
 * 1 - Indicates that rank0_refresh operation has not been stored yet in uMCTL2
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_DBGSTAT_RANK0_REFRESH_BUSY_MASK (0x1U)
#define DDRCTL_DBGSTAT_RANK0_REFRESH_BUSY_SHIFT (0U)
#define DDRCTL_DBGSTAT_RANK0_REFRESH_BUSY_GET(x) (((uint32_t)(x) & DDRCTL_DBGSTAT_RANK0_REFRESH_BUSY_MASK) >> DDRCTL_DBGSTAT_RANK0_REFRESH_BUSY_SHIFT)

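/*
 * Usage sketch (illustration only): issue a software-triggered refresh to
 * rank 0 only after DBGSTAT reports that any previous request has been
 * accepted. Assumes "ptr" is a DDRCTL_Type * and that
 * RFSHCTL3.dis_auto_refresh has already been set to 1, as the RANK0_REFRESH
 * description above requires.
 *
 *   while (DDRCTL_DBGSTAT_RANK0_REFRESH_BUSY_GET(ptr->DBGSTAT)) {
 *   }
 *   ptr->DBGCMD = DDRCTL_DBGCMD_RANK0_REFRESH_SET(1);
 */
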
/* Bitfield definition for register: PSTAT */
/*
 * WR_PORT_BUSY_15 (R)
 *
 * Description: Indicates if there are outstanding writes for port 15.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_15==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_15_MASK (0x80000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_15_SHIFT (31U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_15_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_15_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_15_SHIFT)

/*
 * WR_PORT_BUSY_14 (R)
 *
 * Description: Indicates if there are outstanding writes for port 14.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_14==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_14_MASK (0x40000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_14_SHIFT (30U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_14_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_14_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_14_SHIFT)

/*
 * WR_PORT_BUSY_13 (R)
 *
 * Description: Indicates if there are outstanding writes for port 13.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_13==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_13_MASK (0x20000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_13_SHIFT (29U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_13_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_13_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_13_SHIFT)

/*
 * WR_PORT_BUSY_12 (R)
 *
 * Description: Indicates if there are outstanding writes for port 12.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_12==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_12_MASK (0x10000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_12_SHIFT (28U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_12_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_12_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_12_SHIFT)

/*
 * WR_PORT_BUSY_11 (R)
 *
 * Description: Indicates if there are outstanding writes for port 11.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_11==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_11_MASK (0x8000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_11_SHIFT (27U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_11_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_11_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_11_SHIFT)

/*
 * WR_PORT_BUSY_10 (R)
 *
 * Description: Indicates if there are outstanding writes for port 10.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_10==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_10_MASK (0x4000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_10_SHIFT (26U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_10_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_10_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_10_SHIFT)

/*
 * WR_PORT_BUSY_9 (R)
 *
 * Description: Indicates if there are outstanding writes for port 9.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_9==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_9_MASK (0x2000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_9_SHIFT (25U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_9_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_9_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_9_SHIFT)

/*
 * WR_PORT_BUSY_8 (R)
 *
 * Description: Indicates if there are outstanding writes for port 8.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_8==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_8_MASK (0x1000000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_8_SHIFT (24U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_8_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_8_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_8_SHIFT)

/*
 * WR_PORT_BUSY_7 (R)
 *
 * Description: Indicates if there are outstanding writes for port 7.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_7==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_7_MASK (0x800000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_7_SHIFT (23U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_7_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_7_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_7_SHIFT)

/*
 * WR_PORT_BUSY_6 (R)
 *
 * Description: Indicates if there are outstanding writes for port 6.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_6==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_6_MASK (0x400000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_6_SHIFT (22U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_6_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_6_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_6_SHIFT)

/*
 * WR_PORT_BUSY_5 (R)
 *
 * Description: Indicates if there are outstanding writes for port 5.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_5==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_5_MASK (0x200000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_5_SHIFT (21U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_5_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_5_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_5_SHIFT)

/*
 * WR_PORT_BUSY_4 (R)
 *
 * Description: Indicates if there are outstanding writes for port 4.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_4==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_4_MASK (0x100000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_4_SHIFT (20U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_4_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_4_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_4_SHIFT)

/*
 * WR_PORT_BUSY_3 (R)
 *
 * Description: Indicates if there are outstanding writes for port 3.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_3==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_3_MASK (0x80000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_3_SHIFT (19U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_3_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_3_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_3_SHIFT)

/*
 * WR_PORT_BUSY_2 (R)
 *
 * Description: Indicates if there are outstanding writes for port 2.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_2==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_2_MASK (0x40000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_2_SHIFT (18U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_2_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_2_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_2_SHIFT)

/*
 * WR_PORT_BUSY_1 (R)
 *
 * Description: Indicates if there are outstanding writes for port 1.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_1==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_1_MASK (0x20000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_1_SHIFT (17U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_1_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_1_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_1_SHIFT)

/*
 * WR_PORT_BUSY_0 (R)
 *
 * Description: Indicates if there are outstanding writes for port 0.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_0==1
 */
#define DDRCTL_PSTAT_WR_PORT_BUSY_0_MASK (0x10000UL)
#define DDRCTL_PSTAT_WR_PORT_BUSY_0_SHIFT (16U)
#define DDRCTL_PSTAT_WR_PORT_BUSY_0_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_WR_PORT_BUSY_0_MASK) >> DDRCTL_PSTAT_WR_PORT_BUSY_0_SHIFT)

/*
 * RD_PORT_BUSY_15 (R)
 *
 * Description: Indicates if there are outstanding reads for port 15.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_15==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_15_MASK (0x8000U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_15_SHIFT (15U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_15_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_15_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_15_SHIFT)

/*
 * RD_PORT_BUSY_14 (R)
 *
 * Description: Indicates if there are outstanding reads for port 14.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_14==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_14_MASK (0x4000U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_14_SHIFT (14U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_14_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_14_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_14_SHIFT)

/*
 * RD_PORT_BUSY_13 (R)
 *
 * Description: Indicates if there are outstanding reads for port 13.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_13==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_13_MASK (0x2000U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_13_SHIFT (13U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_13_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_13_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_13_SHIFT)

/*
 * RD_PORT_BUSY_12 (R)
 *
 * Description: Indicates if there are outstanding reads for port 12.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_12==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_12_MASK (0x1000U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_12_SHIFT (12U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_12_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_12_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_12_SHIFT)

/*
 * RD_PORT_BUSY_11 (R)
 *
 * Description: Indicates if there are outstanding reads for port 11.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_11==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_11_MASK (0x800U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_11_SHIFT (11U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_11_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_11_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_11_SHIFT)

/*
 * RD_PORT_BUSY_10 (R)
 *
 * Description: Indicates if there are outstanding reads for port 10.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_10==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_10_MASK (0x400U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_10_SHIFT (10U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_10_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_10_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_10_SHIFT)

/*
 * RD_PORT_BUSY_9 (R)
 *
 * Description: Indicates if there are outstanding reads for port 9.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_9==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_9_MASK (0x200U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_9_SHIFT (9U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_9_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_9_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_9_SHIFT)

/*
 * RD_PORT_BUSY_8 (R)
 *
 * Description: Indicates if there are outstanding reads for port 8.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_8==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_8_MASK (0x100U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_8_SHIFT (8U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_8_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_8_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_8_SHIFT)

/*
 * RD_PORT_BUSY_7 (R)
 *
 * Description: Indicates if there are outstanding reads for port 7.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_7==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_7_MASK (0x80U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_7_SHIFT (7U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_7_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_7_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_7_SHIFT)

/*
 * RD_PORT_BUSY_6 (R)
 *
 * Description: Indicates if there are outstanding reads for port 6.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_6==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_6_MASK (0x40U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_6_SHIFT (6U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_6_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_6_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_6_SHIFT)

/*
 * RD_PORT_BUSY_5 (R)
 *
 * Description: Indicates if there are outstanding reads for port 5.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_5==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_5_MASK (0x20U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_5_SHIFT (5U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_5_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_5_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_5_SHIFT)

/*
 * RD_PORT_BUSY_4 (R)
 *
 * Description: Indicates if there are outstanding reads for port 4.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_4==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_4_MASK (0x10U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_4_SHIFT (4U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_4_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_4_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_4_SHIFT)

/*
 * RD_PORT_BUSY_3 (R)
 *
 * Description: Indicates if there are outstanding reads for port 3.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_3==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_3_MASK (0x8U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_3_SHIFT (3U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_3_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_3_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_3_SHIFT)

/*
 * RD_PORT_BUSY_2 (R)
 *
 * Description: Indicates if there are outstanding reads for port 2.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_2==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_2_MASK (0x4U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_2_SHIFT (2U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_2_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_2_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_2_SHIFT)

/*
 * RD_PORT_BUSY_1 (R)
 *
 * Description: Indicates if there are outstanding reads for port 1.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_1==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_1_MASK (0x2U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_1_SHIFT (1U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_1_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_1_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_1_SHIFT)

/*
 * RD_PORT_BUSY_0 (R)
 *
 * Description: Indicates if there are outstanding reads for port 0.
 * Value After Reset: 0x0
 * Exists: UMCTL2_PORT_0==1
 */
#define DDRCTL_PSTAT_RD_PORT_BUSY_0_MASK (0x1U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_0_SHIFT (0U)
#define DDRCTL_PSTAT_RD_PORT_BUSY_0_GET(x) (((uint32_t)(x) & DDRCTL_PSTAT_RD_PORT_BUSY_0_MASK) >> DDRCTL_PSTAT_RD_PORT_BUSY_0_SHIFT)

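/*
 * Usage sketch (illustration only): wait until port 0 has no outstanding
 * reads or writes, for example before quiescing the controller. Assumes
 * "ptr" is a DDRCTL_Type * with a PSTAT member.
 *
 *   uint32_t pstat;
 *   do {
 *       pstat = ptr->PSTAT;
 *   } while (DDRCTL_PSTAT_RD_PORT_BUSY_0_GET(pstat) ||
 *            DDRCTL_PSTAT_WR_PORT_BUSY_0_GET(pstat));
 */
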
/* Bitfield definition for register: PCCFG */
/*
 * PAGEMATCH_LIMIT (R/W)
 *
 * Description: Page match four limit. If set to 1, limits the number of consecutive same page DDRC transactions that can be granted by the Port Arbiter to four when the Page Match feature is enabled. If set to 0, there is no limit imposed on the number of consecutive same page DDRC transactions.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCCFG_PAGEMATCH_LIMIT_MASK (0x10U)
#define DDRCTL_PCCFG_PAGEMATCH_LIMIT_SHIFT (4U)
#define DDRCTL_PCCFG_PAGEMATCH_LIMIT_SET(x) (((uint32_t)(x) << DDRCTL_PCCFG_PAGEMATCH_LIMIT_SHIFT) & DDRCTL_PCCFG_PAGEMATCH_LIMIT_MASK)
#define DDRCTL_PCCFG_PAGEMATCH_LIMIT_GET(x) (((uint32_t)(x) & DDRCTL_PCCFG_PAGEMATCH_LIMIT_MASK) >> DDRCTL_PCCFG_PAGEMATCH_LIMIT_SHIFT)

/*
 * GO2CRITICAL_EN (R/W)
 *
 * Description: If set to 1 (enabled), sets the co_gs_go2critical_wr and co_gs_go2critical_rd signals going to the DDRC based on the urgent inputs (awurgent, arurgent) coming from the AXI master. If set to 0 (disabled), the co_gs_go2critical_wr and co_gs_go2critical_rd signals at the DDRC are driven to 1'b0.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCCFG_GO2CRITICAL_EN_MASK (0x1U)
#define DDRCTL_PCCFG_GO2CRITICAL_EN_SHIFT (0U)
#define DDRCTL_PCCFG_GO2CRITICAL_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCCFG_GO2CRITICAL_EN_SHIFT) & DDRCTL_PCCFG_GO2CRITICAL_EN_MASK)
#define DDRCTL_PCCFG_GO2CRITICAL_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCCFG_GO2CRITICAL_EN_MASK) >> DDRCTL_PCCFG_GO2CRITICAL_EN_SHIFT)

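/*
 * Usage sketch (illustration only): enable the go2critical path and cap
 * page-match grants at four consecutive transactions. Assumes "ptr" is a
 * DDRCTL_Type * with a PCCFG member.
 *
 *   ptr->PCCFG = DDRCTL_PCCFG_GO2CRITICAL_EN_SET(1)
 *              | DDRCTL_PCCFG_PAGEMATCH_LIMIT_SET(1);
 */
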
/* Bitfield definition for register of struct array PCFG: R */
/*
 * RD_PORT_PAGEMATCH_EN (R/W)
 *
 * Description: If set to 1, enables the Page Match feature. If enabled, once a requesting port is granted, the port continues to be granted if the following immediate commands are to the same memory page (i.e. same bank and same row). See also the related PCCFG.pagematch_limit register.
 * Value After Reset: "(MEMC_DDR4_EN==1) ? 0x0 : 0x1"
 * Exists: Always
 */
#define DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_MASK (0x4000U)
#define DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_SHIFT (14U)
#define DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_SHIFT) & DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_MASK)
#define DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_MASK) >> DDRCTL_PCFG_R_RD_PORT_PAGEMATCH_EN_SHIFT)

/*
 * RD_PORT_URGENT_EN (R/W)
 *
 * Description: If set to 1, enables the AXI urgent sideband signal (arurgent). When enabled and arurgent is asserted by the master, that port becomes the highest priority and the co_gs_go2critical_rd signal to the DDRC is asserted if enabled in the PCCFG.go2critical_en register. Note that the arurgent signal can be asserted anytime and for as long as required, independent of address handshaking (it is not associated with any particular command).
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_R_RD_PORT_URGENT_EN_MASK (0x2000U)
#define DDRCTL_PCFG_R_RD_PORT_URGENT_EN_SHIFT (13U)
#define DDRCTL_PCFG_R_RD_PORT_URGENT_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_R_RD_PORT_URGENT_EN_SHIFT) & DDRCTL_PCFG_R_RD_PORT_URGENT_EN_MASK)
#define DDRCTL_PCFG_R_RD_PORT_URGENT_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_R_RD_PORT_URGENT_EN_MASK) >> DDRCTL_PCFG_R_RD_PORT_URGENT_EN_SHIFT)

/*
 * RD_PORT_AGING_EN (R/W)
 *
 * Description: If set to 1, enables the aging function for the read channel of the port.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_R_RD_PORT_AGING_EN_MASK (0x1000U)
#define DDRCTL_PCFG_R_RD_PORT_AGING_EN_SHIFT (12U)
#define DDRCTL_PCFG_R_RD_PORT_AGING_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_R_RD_PORT_AGING_EN_SHIFT) & DDRCTL_PCFG_R_RD_PORT_AGING_EN_MASK)
#define DDRCTL_PCFG_R_RD_PORT_AGING_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_R_RD_PORT_AGING_EN_MASK) >> DDRCTL_PCFG_R_RD_PORT_AGING_EN_SHIFT)

/*
 * RD_PORT_PRIORITY (R/W)
 *
 * Description: Determines the initial load value of the read aging counters. These counters are parallel-loaded after reset, or after each grant to the corresponding port. The aging counters down-count every clock cycle in which the port is requesting but not granted. The most significant 5 bits of the read aging counter set the priority of the read channel of a given port; the port's priority increases as these 5 bits decrease.
 * When the aging counter reaches 0, the corresponding port channel has the highest priority level (timeout condition
 * - Priority0). For multi-port configurations, the aging counters cannot be used to set port priorities when external dynamic priority inputs (arqos) are enabled (timeout is still applicable). For single-port configurations, the aging counters are only used when they time out (reach 0) to force read-write direction switching. In this case, the external dynamic priority input arqos (for reads only) can still be used to set the DDRC read priority (2 priority levels: low priority read - LPR, high priority read - HPR) on a command-by-command basis. Note: The two LSBs of this register field are tied internally to 2'b00.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_R_RD_PORT_PRIORITY_MASK (0x3FFU)
#define DDRCTL_PCFG_R_RD_PORT_PRIORITY_SHIFT (0U)
#define DDRCTL_PCFG_R_RD_PORT_PRIORITY_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_R_RD_PORT_PRIORITY_SHIFT) & DDRCTL_PCFG_R_RD_PORT_PRIORITY_MASK)
#define DDRCTL_PCFG_R_RD_PORT_PRIORITY_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_R_RD_PORT_PRIORITY_MASK) >> DDRCTL_PCFG_R_RD_PORT_PRIORITY_SHIFT)

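/*
 * Usage sketch (illustration only): program the read channel of one port
 * with aging enabled and a mid-range initial aging count. The member path
 * shown here ("PCFG[n].R") is an assumption about the struct-array layout
 * declared elsewhere in this header.
 *
 *   ptr->PCFG[DDRCTL_PCFG_0].R = DDRCTL_PCFG_R_RD_PORT_AGING_EN_SET(1)
 *                              | DDRCTL_PCFG_R_RD_PORT_PRIORITY_SET(0x100);
 */
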
/* Bitfield definition for register of struct array PCFG: W */
/*
 * WR_PORT_PAGEMATCH_EN (R/W)
 *
 * Description: If set to 1, enables the Page Match feature. If enabled, once a requesting port is granted, the port continues to be granted if the following immediate commands are to the same memory page (i.e. same bank and same row). See also the related PCCFG.pagematch_limit register.
 * Value After Reset: 0x1
 * Exists: Always
 */
#define DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_MASK (0x4000U)
#define DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_SHIFT (14U)
#define DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_SHIFT) & DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_MASK)
#define DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_MASK) >> DDRCTL_PCFG_W_WR_PORT_PAGEMATCH_EN_SHIFT)

/*
 * WR_PORT_URGENT_EN (R/W)
 *
 * Description: If set to 1, enables the AXI urgent sideband signal (awurgent). When enabled and awurgent is asserted by the master, that port becomes the highest priority and the co_gs_go2critical_wr signal to the DDRC is asserted if enabled in the PCCFG.go2critical_en register. Note that the awurgent signal can be asserted anytime and for as long as required, independent of address handshaking (it is not associated with any particular command).
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_W_WR_PORT_URGENT_EN_MASK (0x2000U)
#define DDRCTL_PCFG_W_WR_PORT_URGENT_EN_SHIFT (13U)
#define DDRCTL_PCFG_W_WR_PORT_URGENT_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_W_WR_PORT_URGENT_EN_SHIFT) & DDRCTL_PCFG_W_WR_PORT_URGENT_EN_MASK)
#define DDRCTL_PCFG_W_WR_PORT_URGENT_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_W_WR_PORT_URGENT_EN_MASK) >> DDRCTL_PCFG_W_WR_PORT_URGENT_EN_SHIFT)

/*
 * WR_PORT_AGING_EN (R/W)
 *
 * Description: If set to 1, enables the aging function for the write channel of the port.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_W_WR_PORT_AGING_EN_MASK (0x1000U)
#define DDRCTL_PCFG_W_WR_PORT_AGING_EN_SHIFT (12U)
#define DDRCTL_PCFG_W_WR_PORT_AGING_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_W_WR_PORT_AGING_EN_SHIFT) & DDRCTL_PCFG_W_WR_PORT_AGING_EN_MASK)
#define DDRCTL_PCFG_W_WR_PORT_AGING_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_W_WR_PORT_AGING_EN_MASK) >> DDRCTL_PCFG_W_WR_PORT_AGING_EN_SHIFT)

/*
 * WR_PORT_PRIORITY (R/W)
 *
 * Description: Determines the initial load value of the write aging counters. These counters are parallel-loaded after reset, or after each grant to the corresponding port. The aging counters down-count every clock cycle in which the port is requesting but not granted. The most significant 5 bits of the write aging counter set the initial priority of the write channel of a given port; the port's priority increases as these 5 bits decrease. When the aging counter reaches 0, the corresponding port channel has the highest priority level. For multi-port configurations, the aging counters cannot be used to set port priorities when external dynamic priority inputs (awqos) are enabled (timeout is still applicable). For single-port configurations, the aging counters are only used when they time out (reach 0) to force read-write direction switching. Note: The two LSBs of this register field are tied internally to 2'b00.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_W_WR_PORT_PRIORITY_MASK (0x3FFU)
#define DDRCTL_PCFG_W_WR_PORT_PRIORITY_SHIFT (0U)
#define DDRCTL_PCFG_W_WR_PORT_PRIORITY_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_W_WR_PORT_PRIORITY_SHIFT) & DDRCTL_PCFG_W_WR_PORT_PRIORITY_MASK)
#define DDRCTL_PCFG_W_WR_PORT_PRIORITY_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_W_WR_PORT_PRIORITY_MASK) >> DDRCTL_PCFG_W_WR_PORT_PRIORITY_SHIFT)

/* Bitfield definition for register of struct array PCFG: C */
/*
 * AHB_ENDIANNESS (R/W)
 *
 * Description: If set to 0, enables support for little endian on the AHB port. If set to 1, enables support for big endian (BE-32) on the AHB port. If set to 2, enables support for big endian (BE-A) on the AHB port.
 * Value After Reset: 0x0
 * Exists: UMCTL2_A_AHB_n==1
 */
#define DDRCTL_PCFG_C_AHB_ENDIANNESS_MASK (0x3U)
#define DDRCTL_PCFG_C_AHB_ENDIANNESS_SHIFT (0U)
#define DDRCTL_PCFG_C_AHB_ENDIANNESS_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_C_AHB_ENDIANNESS_SHIFT) & DDRCTL_PCFG_C_AHB_ENDIANNESS_MASK)
#define DDRCTL_PCFG_C_AHB_ENDIANNESS_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_C_AHB_ENDIANNESS_MASK) >> DDRCTL_PCFG_C_AHB_ENDIANNESS_SHIFT)

/* Bitfield definition for register of struct array PCFG: MASKCH */
/*
 * ID_MASK (R/W)
 *
 * Description: Determines the mask used in the ID mapping function for virtual channel m.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_ID_MASKCH_ID_MASK_MASK (0xFFFFFFFFUL)
#define DDRCTL_PCFG_ID_MASKCH_ID_MASK_SHIFT (0U)
#define DDRCTL_PCFG_ID_MASKCH_ID_MASK_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_ID_MASKCH_ID_MASK_SHIFT) & DDRCTL_PCFG_ID_MASKCH_ID_MASK_MASK)
#define DDRCTL_PCFG_ID_MASKCH_ID_MASK_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_ID_MASKCH_ID_MASK_MASK) >> DDRCTL_PCFG_ID_MASKCH_ID_MASK_SHIFT)

/* Bitfield definition for register of struct array PCFG: VALUECH */
/*
 * ID_VALUE (R/W)
 *
 * Description: Determines the value used in the ID mapping function for virtual channel m.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_ID_VALUECH_ID_VALUE_MASK (0xFFFFFFFFUL)
#define DDRCTL_PCFG_ID_VALUECH_ID_VALUE_SHIFT (0U)
#define DDRCTL_PCFG_ID_VALUECH_ID_VALUE_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_ID_VALUECH_ID_VALUE_SHIFT) & DDRCTL_PCFG_ID_VALUECH_ID_VALUE_MASK)
#define DDRCTL_PCFG_ID_VALUECH_ID_VALUE_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_ID_VALUECH_ID_VALUE_MASK) >> DDRCTL_PCFG_ID_VALUECH_ID_VALUE_SHIFT)

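/*
 * Conceptual sketch (illustration only): the mask/value pair is programmed
 * per virtual channel m; conceptually a transaction ID maps to channel m
 * when (axid & id_mask) == id_value. The member path below ("ID[m].MASKCH"
 * etc.) is an assumption about the struct-array layout declared elsewhere
 * in this header.
 *
 *   ptr->PCFG[DDRCTL_PCFG_0].ID[DDRCTL_ID_0].MASKCH =
 *       DDRCTL_PCFG_ID_MASKCH_ID_MASK_SET(0xF0);
 *   ptr->PCFG[DDRCTL_PCFG_0].ID[DDRCTL_ID_0].VALUECH =
 *       DDRCTL_PCFG_ID_VALUECH_ID_VALUE_SET(0x10);
 */
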
/* Bitfield definition for register of struct array PCFG: CTRL */
/*
 * PORT_EN (R/W)
 *
 * Description: Enables port n.
 * Value After Reset: "UMCTL2_PORT_EN_RESET_VALUE"
 * Exists: Always
 */
#define DDRCTL_PCFG_CTRL_PORT_EN_MASK (0x1U)
#define DDRCTL_PCFG_CTRL_PORT_EN_SHIFT (0U)
#define DDRCTL_PCFG_CTRL_PORT_EN_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_CTRL_PORT_EN_SHIFT) & DDRCTL_PCFG_CTRL_PORT_EN_MASK)
#define DDRCTL_PCFG_CTRL_PORT_EN_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_CTRL_PORT_EN_MASK) >> DDRCTL_PCFG_CTRL_PORT_EN_SHIFT)

/* Bitfield definition for register of struct array PCFG: QOS0 */
/*
 * RQOS_MAP_REGION1 (R/W)
 *
 * Description: This bitfield indicates the traffic class of region 1. Valid values are: 0: LPR, 1: VPR, 2: HPR. For dual address queue configurations, region 1 maps to the blue address queue. In this case, valid values are 0: LPR and 1: VPR only. When VPR support is disabled (UMCTL2_VPR_EN = 0) and the traffic class of region 1 is set to 1 (VPR), then VPR traffic is aliased to LPR traffic.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_MASK (0x300000UL)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_SHIFT (20U)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_SHIFT) & DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_MASK)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_MASK) >> DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_SHIFT)

/*
 * RQOS_MAP_REGION0 (R/W)
 *
 * Description: This bitfield indicates the traffic class of region 0. Valid values are: 0: LPR, 1: VPR, 2: HPR. For dual address queue configurations, region 0 maps to the blue address queue. In this case, valid values are 0: LPR and 1: VPR only. When VPR support is disabled (UMCTL2_VPR_EN = 0) and the traffic class of region 0 is set to 1 (VPR), then VPR traffic is aliased to LPR traffic.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_MASK (0x30000UL)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_SHIFT (16U)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_SHIFT) & DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_MASK)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_MASK) >> DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_SHIFT)

/*
 * RQOS_MAP_LEVEL1 (R/W)
 *
 * Description: Separation level1 indicating the end of region0 mapping; the start of region0 is 0. Possible values for level1 are 0 to 13 (for dual RAQ) or 0 to 14 (for single RAQ), which correspond to arqos. Note that for the PA, arqos values are used directly as port priorities, where a higher value corresponds to a higher port priority. All of the map_level* registers must be set to distinct values.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_MASK (0xFU)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_SHIFT (0U)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_SHIFT) & DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_MASK)
#define DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_MASK) >> DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_SHIFT)

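/*
 * Usage sketch (illustration only): map arqos 0..7 to region0 (LPR) and
 * arqos 8..15 to region1 (HPR) by setting level1 to 7, for a single-queue
 * configuration. The "QOS0" member path is an assumption about the
 * struct-array layout.
 *
 *   ptr->PCFG[DDRCTL_PCFG_0].QOS0 = DDRCTL_PCFG_QOS0_RQOS_MAP_LEVEL1_SET(7)
 *                                 | DDRCTL_PCFG_QOS0_RQOS_MAP_REGION0_SET(0)
 *                                 | DDRCTL_PCFG_QOS0_RQOS_MAP_REGION1_SET(2);
 */
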
/* Bitfield definition for register of struct array PCFG: QOS1 */
/*
 * RQOS_MAP_TIMEOUTR (R/W)
 *
 * Description: Specifies the timeout value for transactions mapped to the red address queue.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_MASK (0x7FF0000UL)
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_SHIFT (16U)
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_SHIFT) & DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_MASK)
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_MASK) >> DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTR_SHIFT)

/*
 * RQOS_MAP_TIMEOUTB (R/W)
 *
 * Description: Specifies the timeout value for transactions mapped to the blue address queue.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_MASK (0x7FFU)
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_SHIFT (0U)
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_SHIFT) & DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_MASK)
#define DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_MASK) >> DDRCTL_PCFG_QOS1_RQOS_MAP_TIMEOUTB_SHIFT)

/* Bitfield definition for register of struct array PCFG: WQOS0 */
/*
 * WQOS_MAP_REGION1 (R/W)
 *
 * Description: This bitfield indicates the traffic class of region 1. Valid values are:
 * 0: NPW
 * 1: VPW
 * When VPW support is disabled (UMCTL2_VPW_EN = 0) and the traffic class of region 1 is set to 1 (VPW), then VPW traffic is aliased to NPW traffic.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_MASK (0x300000UL)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_SHIFT (20U)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_SHIFT) & DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_MASK)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_MASK) >> DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_SHIFT)

/*
 * WQOS_MAP_REGION0 (R/W)
 *
 * Description: This bitfield indicates the traffic class of region 0. Valid values are:
 * 0: NPW
 * 1: VPW
 * When VPW support is disabled (UMCTL2_VPW_EN = 0) and the traffic class of region 0 is set to 1 (VPW), then VPW traffic is aliased to NPW traffic.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_MASK (0x30000UL)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_SHIFT (16U)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_SHIFT) & DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_MASK)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_MASK) >> DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_SHIFT)

/*
 * WQOS_MAP_LEVEL (R/W)
 *
 * Description: Separation level indicating the end of region0 mapping; the start of region0 is 0. Possible values for the level are 0 to 14, which correspond to awqos. Note that for the PA, awqos values are used directly as port priorities, where a higher value corresponds to a higher port priority.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_MASK (0xFU)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_SHIFT (0U)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_SHIFT) & DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_MASK)
#define DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_MASK) >> DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_SHIFT)

/* Bitfield definition for register of struct array PCFG: WQOS1 */
/*
 * WQOS_MAP_TIMEOUT (R/W)
 *
 * Description: Specifies the timeout value for write transactions.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_MASK (0x7FFU)
#define DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_SHIFT (0U)
#define DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_SET(x) (((uint32_t)(x) << DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_SHIFT) & DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_MASK)
#define DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_GET(x) (((uint32_t)(x) & DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_MASK) >> DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_SHIFT)

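/*
 * Usage sketch (illustration only): treat awqos 8..15 as VPW with a 64-cycle
 * timeout, leaving awqos 0..7 as NPW. The "WQOS0"/"WQOS1" member paths are
 * assumptions about the struct-array layout.
 *
 *   ptr->PCFG[DDRCTL_PCFG_0].WQOS0 = DDRCTL_PCFG_WQOS0_WQOS_MAP_LEVEL_SET(7)
 *                                  | DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION0_SET(0)
 *                                  | DDRCTL_PCFG_WQOS0_WQOS_MAP_REGION1_SET(1);
 *   ptr->PCFG[DDRCTL_PCFG_0].WQOS1 = DDRCTL_PCFG_WQOS1_WQOS_MAP_TIMEOUT_SET(64);
 */
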
/* Bitfield definition for register of struct array SAR: BASE */
/*
 * BASE_ADDR (R/W)
 *
 * Description: Base address for address region n specified as awaddr[UMCTL2_A_ADDRW-1:x] and araddr[UMCTL2_A_ADDRW-1:x] where x is determined by the minimum block size parameter UMCTL2_SARMINSIZE: (x=log2(block size)).
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_SAR_BASE_BASE_ADDR_MASK (0xFFFFFFFFUL)
#define DDRCTL_SAR_BASE_BASE_ADDR_SHIFT (0U)
#define DDRCTL_SAR_BASE_BASE_ADDR_SET(x) (((uint32_t)(x) << DDRCTL_SAR_BASE_BASE_ADDR_SHIFT) & DDRCTL_SAR_BASE_BASE_ADDR_MASK)
#define DDRCTL_SAR_BASE_BASE_ADDR_GET(x) (((uint32_t)(x) & DDRCTL_SAR_BASE_BASE_ADDR_MASK) >> DDRCTL_SAR_BASE_BASE_ADDR_SHIFT)

/* Bitfield definition for register of struct array SAR: SIZE */
/*
 * NBLOCKS (R/W)
 *
 * Description: Number of blocks for address region n. This register determines the total size of the region in multiples of the minimum block size as specified by the hardware parameter UMCTL2_SARMINSIZE. The register value is encoded as number of blocks = nblocks + 1.
 * Value After Reset: 0x0
 * Exists: Always
 */
#define DDRCTL_SAR_SIZE_NBLOCKS_MASK (0xFFU)
#define DDRCTL_SAR_SIZE_NBLOCKS_SHIFT (0U)
#define DDRCTL_SAR_SIZE_NBLOCKS_SET(x) (((uint32_t)(x) << DDRCTL_SAR_SIZE_NBLOCKS_SHIFT) & DDRCTL_SAR_SIZE_NBLOCKS_MASK)
#define DDRCTL_SAR_SIZE_NBLOCKS_GET(x) (((uint32_t)(x) & DDRCTL_SAR_SIZE_NBLOCKS_MASK) >> DDRCTL_SAR_SIZE_NBLOCKS_SHIFT)

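/*
 * Usage sketch (illustration only): each SAR pair describes one address
 * region whose total size is (nblocks + 1) minimum-size blocks starting at
 * base_addr. The "SAR[n].BASE"/"SAR[n].SIZE" member paths are assumptions
 * about the struct-array layout; nblocks = 0xF encodes 16 blocks.
 *
 *   ptr->SAR[DDRCTL_SAR_0].BASE = DDRCTL_SAR_BASE_BASE_ADDR_SET(0x4);
 *   ptr->SAR[DDRCTL_SAR_0].SIZE = DDRCTL_SAR_SIZE_NBLOCKS_SET(0xF);
 */
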
/* Bitfield definition for register: SBRCTL */
/*
 * SCRUB_INTERVAL (R/W)
 *
 * Description: Scrub interval. (512 x scrub_interval) number of clock cycles between two scrub read commands. If set to 0, scrub commands are issued back-to-back. This mode of operation (scrub_interval=0) can typically be used for scrubbing the full range of memory at once before or after SW controlled low power operations. After completing the full range of scrub while scrub_interval=0, the scrub_done register is set and the sbr_done_intr interrupt signal is asserted.
 * Value After Reset: 0xff
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRCTL_SCRUB_INTERVAL_MASK (0x1FFF00UL)
#define DDRCTL_SBRCTL_SCRUB_INTERVAL_SHIFT (8U)
#define DDRCTL_SBRCTL_SCRUB_INTERVAL_SET(x) (((uint32_t)(x) << DDRCTL_SBRCTL_SCRUB_INTERVAL_SHIFT) & DDRCTL_SBRCTL_SCRUB_INTERVAL_MASK)
#define DDRCTL_SBRCTL_SCRUB_INTERVAL_GET(x) (((uint32_t)(x) & DDRCTL_SBRCTL_SCRUB_INTERVAL_MASK) >> DDRCTL_SBRCTL_SCRUB_INTERVAL_SHIFT)

/*
 * SCRUB_BURST (R/W)
 *
 * Description: Scrub burst count. Determines the number of back-to-back scrub read commands that can be issued together when the controller is in one of the HW controlled low power modes. During low power, the period of the scrub burst becomes "scrub_burst*scrub_interval" cycles.
 * During normal operation mode of the controller (i.e. not in power-down or self refresh), scrub_burst is ignored and only one scrub command is generated. Valid values are: 1: 1 read, 2: 4 reads, 3: 16 reads, 4: 64 reads, 5: 256 reads, 6: 1024 reads.
 * Value After Reset: 0x1
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRCTL_SCRUB_BURST_MASK (0x70U)
#define DDRCTL_SBRCTL_SCRUB_BURST_SHIFT (4U)
#define DDRCTL_SBRCTL_SCRUB_BURST_SET(x) (((uint32_t)(x) << DDRCTL_SBRCTL_SCRUB_BURST_SHIFT) & DDRCTL_SBRCTL_SCRUB_BURST_MASK)
#define DDRCTL_SBRCTL_SCRUB_BURST_GET(x) (((uint32_t)(x) & DDRCTL_SBRCTL_SCRUB_BURST_MASK) >> DDRCTL_SBRCTL_SCRUB_BURST_SHIFT)

/*
 * SCRUB_MODE (R/W)
 *
 * Description: scrub_mode=0: the ECC scrubber performs reads. scrub_mode=1: the ECC scrubber performs writes.
 * Value After Reset: 0x0
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRCTL_SCRUB_MODE_MASK (0x4U)
#define DDRCTL_SBRCTL_SCRUB_MODE_SHIFT (2U)
#define DDRCTL_SBRCTL_SCRUB_MODE_SET(x) (((uint32_t)(x) << DDRCTL_SBRCTL_SCRUB_MODE_SHIFT) & DDRCTL_SBRCTL_SCRUB_MODE_MASK)
#define DDRCTL_SBRCTL_SCRUB_MODE_GET(x) (((uint32_t)(x) & DDRCTL_SBRCTL_SCRUB_MODE_MASK) >> DDRCTL_SBRCTL_SCRUB_MODE_SHIFT)

/*
 * SCRUB_DURING_LOWPOWER (R/W)
 *
 * Description: Continue scrubbing during low power. If set to 1, bursts of scrubs are issued in HW controlled low power modes. There are two such modes: automatically initiated by idleness, or initiated by the HW low-power (LP) interface. If set to 0, the scrubber does not attempt to send commands while the DDRC is in HW controlled low power modes. In this case, the scrubber remembers the last address issued and automatically continues from there when the DDRC exits the LP mode.
 * Value After Reset: 0x0
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_MASK (0x2U)
#define DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_SHIFT (1U)
#define DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_SET(x) (((uint32_t)(x) << DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_SHIFT) & DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_MASK)
#define DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_GET(x) (((uint32_t)(x) & DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_MASK) >> DDRCTL_SBRCTL_SCRUB_DURING_LOWPOWER_SHIFT)

/*
 * SCRUB_EN (R/W)
 *
 * Description: Enable ECC scrubber. If set to 1, enables the scrubber to generate background read commands after the memories are initialized. If set to 0, disables the scrubber, resets the address generator to 0 and clears the scrubber status. This bitfield must be accessed separately from the other bitfields in this register.
 * Value After Reset: 0x0
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRCTL_SCRUB_EN_MASK (0x1U)
#define DDRCTL_SBRCTL_SCRUB_EN_SHIFT (0U)
#define DDRCTL_SBRCTL_SCRUB_EN_SET(x) (((uint32_t)(x) << DDRCTL_SBRCTL_SCRUB_EN_SHIFT) & DDRCTL_SBRCTL_SCRUB_EN_MASK)
#define DDRCTL_SBRCTL_SCRUB_EN_GET(x) (((uint32_t)(x) & DDRCTL_SBRCTL_SCRUB_EN_MASK) >> DDRCTL_SBRCTL_SCRUB_EN_SHIFT)

/* Bitfield definition for register: SBRSTAT */
/*
 * SCRUB_DONE (R)
 *
 * Description: Scrubber done. The controller sets this bit to 1 after the full range of addresses has been scrubbed once while scrub_interval is set to 0. Cleared if scrub_en is set to 0 (scrubber disabled) or scrub_interval is set to a non-zero value for normal scrub operation. The interrupt signal, sbr_done_intr, is equivalent to this status bitfield.
 * Value After Reset: 0x0
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRSTAT_SCRUB_DONE_MASK (0x2U)
#define DDRCTL_SBRSTAT_SCRUB_DONE_SHIFT (1U)
#define DDRCTL_SBRSTAT_SCRUB_DONE_GET(x) (((uint32_t)(x) & DDRCTL_SBRSTAT_SCRUB_DONE_MASK) >> DDRCTL_SBRSTAT_SCRUB_DONE_SHIFT)

/*
 * SCRUB_BUSY (R)
 *
 * Description: Scrubber busy. The controller sets this bit to 1 when the scrubber logic has outstanding read commands being executed. Cleared when there are no active outstanding scrub reads in the system.
 * Value After Reset: 0x0
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRSTAT_SCRUB_BUSY_MASK (0x1U)
#define DDRCTL_SBRSTAT_SCRUB_BUSY_SHIFT (0U)
#define DDRCTL_SBRSTAT_SCRUB_BUSY_GET(x) (((uint32_t)(x) & DDRCTL_SBRSTAT_SCRUB_BUSY_MASK) >> DDRCTL_SBRSTAT_SCRUB_BUSY_SHIFT)

/* Bitfield definition for register: SBRWDATA0 */
/*
 * SCRUB_PATTERN0 (R/W)
 *
 * Description: ECC Scrubber write data pattern for data bus[31:0]
 * Value After Reset: 0x0
 * Exists: UMCTL2_SBR_EN_1==1
 */
#define DDRCTL_SBRWDATA0_SCRUB_PATTERN0_MASK (0xFFFFFFFFUL)
#define DDRCTL_SBRWDATA0_SCRUB_PATTERN0_SHIFT (0U)
#define DDRCTL_SBRWDATA0_SCRUB_PATTERN0_SET(x) (((uint32_t)(x) << DDRCTL_SBRWDATA0_SCRUB_PATTERN0_SHIFT) & DDRCTL_SBRWDATA0_SCRUB_PATTERN0_MASK)
#define DDRCTL_SBRWDATA0_SCRUB_PATTERN0_GET(x) (((uint32_t)(x) & DDRCTL_SBRWDATA0_SCRUB_PATTERN0_MASK) >> DDRCTL_SBRWDATA0_SCRUB_PATTERN0_SHIFT)

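/*
 * Usage sketch (illustration only): run one back-to-back scrub-write pass
 * over the full address range (e.g. to initialize ECC memory with a known
 * pattern), then disable the scrubber. Assumes "ptr" is a DDRCTL_Type * with
 * SBRCTL/SBRSTAT/SBRWDATA0 members; per the SCRUB_EN note above, scrub_en is
 * accessed separately from the other bitfields.
 *
 *   ptr->SBRWDATA0 = DDRCTL_SBRWDATA0_SCRUB_PATTERN0_SET(0x0);
 *   ptr->SBRCTL = DDRCTL_SBRCTL_SCRUB_INTERVAL_SET(0)
 *               | DDRCTL_SBRCTL_SCRUB_MODE_SET(1);
 *   ptr->SBRCTL |= DDRCTL_SBRCTL_SCRUB_EN_SET(1);
 *   while (!DDRCTL_SBRSTAT_SCRUB_DONE_GET(ptr->SBRSTAT)) {
 *   }
 *   ptr->SBRCTL &= ~DDRCTL_SBRCTL_SCRUB_EN_MASK;
 */
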

/* ID register group index macro definition */
#define DDRCTL_ID_0 (0UL)
#define DDRCTL_ID_1 (1UL)
#define DDRCTL_ID_2 (2UL)
#define DDRCTL_ID_3 (3UL)
#define DDRCTL_ID_4 (4UL)
#define DDRCTL_ID_5 (5UL)
#define DDRCTL_ID_6 (6UL)
#define DDRCTL_ID_7 (7UL)
#define DDRCTL_ID_8 (8UL)
#define DDRCTL_ID_9 (9UL)
#define DDRCTL_ID_10 (10UL)
#define DDRCTL_ID_11 (11UL)
#define DDRCTL_ID_12 (12UL)
#define DDRCTL_ID_13 (13UL)
#define DDRCTL_ID_14 (14UL)
#define DDRCTL_ID_15 (15UL)

/* PCFG register group index macro definition */
#define DDRCTL_PCFG_0 (0UL)
#define DDRCTL_PCFG_1 (1UL)
#define DDRCTL_PCFG_2 (2UL)
#define DDRCTL_PCFG_3 (3UL)
#define DDRCTL_PCFG_4 (4UL)
#define DDRCTL_PCFG_5 (5UL)
#define DDRCTL_PCFG_6 (6UL)
#define DDRCTL_PCFG_7 (7UL)
#define DDRCTL_PCFG_8 (8UL)
#define DDRCTL_PCFG_9 (9UL)
#define DDRCTL_PCFG_10 (10UL)
#define DDRCTL_PCFG_11 (11UL)
#define DDRCTL_PCFG_12 (12UL)
#define DDRCTL_PCFG_13 (13UL)
#define DDRCTL_PCFG_14 (14UL)
#define DDRCTL_PCFG_15 (15UL)

/* SAR register group index macro definition */
#define DDRCTL_SAR_0 (0UL)
#define DDRCTL_SAR_1 (1UL)
#define DDRCTL_SAR_2 (2UL)
#define DDRCTL_SAR_3 (3UL)


#endif /* HPM_DDRCTL_H */