/*
 * Copyright (c) 2021-2022 HPMicro
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */


#ifndef HPM_DRAM_H
#define HPM_DRAM_H

typedef struct {
    __RW uint32_t CTRL;                        /* 0x0: Control Register */
    __R  uint8_t  RESERVED0[4];                /* 0x4 - 0x7: Reserved */
    __RW uint32_t BMW0;                        /* 0x8: Bus (AXI) Weight Control Register 0 */
    __RW uint32_t BMW1;                        /* 0xC: Bus (AXI) Weight Control Register 1 */
    __RW uint32_t BR[2];                       /* 0x10 - 0x17: Base Register 0/1 (for SDRAM CS0/CS1 devices) */
    __R  uint8_t  RESERVED1[32];               /* 0x18 - 0x37: Reserved */
    __RW uint32_t INTEN;                       /* 0x38: Interrupt Enable Register */
    __W  uint32_t INTR;                        /* 0x3C: Interrupt Status Register */
    __RW uint32_t SDRCTRL0;                    /* 0x40: SDRAM Control Register 0 */
    __RW uint32_t SDRCTRL1;                    /* 0x44: SDRAM Control Register 1 */
    __RW uint32_t SDRCTRL2;                    /* 0x48: SDRAM Control Register 2 */
    __RW uint32_t SDRCTRL3;                    /* 0x4C: SDRAM Control Register 3 */
    __R  uint8_t  RESERVED2[64];               /* 0x50 - 0x8F: Reserved */
    __RW uint32_t SADDR;                       /* 0x90: IP Command Control Register 0 */
    __RW uint32_t DATSZ;                       /* 0x94: IP Command Control Register 1 */
    __RW uint32_t BYTEMSK;                     /* 0x98: IP Command Control Register 2 */
    __RW uint32_t IPCMD;                       /* 0x9C: IP Command Register */
    __RW uint32_t IPTX;                        /* 0xA0: TX DATA Register */
    __R  uint8_t  RESERVED3[12];               /* 0xA4 - 0xAF: Reserved */
    __RW uint32_t IPRX;                        /* 0xB0: RX DATA Register */
    __R  uint8_t  RESERVED4[12];               /* 0xB4 - 0xBF: Reserved */
    __R  uint32_t STAT0;                       /* 0xC0: Status Register 0 */
    __R  uint8_t  RESERVED5[140];              /* 0xC4 - 0x14F: Reserved */
    __RW uint32_t DLYCFG;                      /* 0x150: Delay Line Config Register */
} DRAM_Type;


/* Bitfield definition for register: CTRL */
/*
 * BTO (RW)
 *
 * Bus timeout cycles
 * The AXI bus timeout cycle count is 255*(2^BTO):
 * 00000b - 255*1
 * 00001-11110b - 255*2 - 255*2^30
 * 11111b - 255*2^31
 */
#define DRAM_CTRL_BTO_MASK (0x1F000000UL)
#define DRAM_CTRL_BTO_SHIFT (24U)
#define DRAM_CTRL_BTO_SET(x) (((uint32_t)(x) << DRAM_CTRL_BTO_SHIFT) & DRAM_CTRL_BTO_MASK)
#define DRAM_CTRL_BTO_GET(x) (((uint32_t)(x) & DRAM_CTRL_BTO_MASK) >> DRAM_CTRL_BTO_SHIFT)

/*
 * CTO (RW)
 *
 * Command execution timeout cycles
 * When command execution time exceeds this timeout, an IPCMDERR or AXICMDERR interrupt is
 * generated. When CTO is set to zero, the timeout is 256*1024 cycles; otherwise the timeout is
 * CTO*1024 cycles.
 */
#define DRAM_CTRL_CTO_MASK (0xFF0000UL)
#define DRAM_CTRL_CTO_SHIFT (16U)
#define DRAM_CTRL_CTO_SET(x) (((uint32_t)(x) << DRAM_CTRL_CTO_SHIFT) & DRAM_CTRL_CTO_MASK)
#define DRAM_CTRL_CTO_GET(x) (((uint32_t)(x) & DRAM_CTRL_CTO_MASK) >> DRAM_CTRL_CTO_SHIFT)

/*
 * DQS (RW)
 *
 * DQS (read strobe) mode
 * 0b - Dummy read strobe looped back internally
 * 1b - Dummy read strobe looped back from the DQS pad
 */
#define DRAM_CTRL_DQS_MASK (0x4U)
#define DRAM_CTRL_DQS_SHIFT (2U)
#define DRAM_CTRL_DQS_SET(x) (((uint32_t)(x) << DRAM_CTRL_DQS_SHIFT) & DRAM_CTRL_DQS_MASK)
#define DRAM_CTRL_DQS_GET(x) (((uint32_t)(x) & DRAM_CTRL_DQS_MASK) >> DRAM_CTRL_DQS_SHIFT)

/*
 * DIS (RW)
 *
 * Module Disable
 * 0b - Module enabled
 * 1b - Module disabled
 */
#define DRAM_CTRL_DIS_MASK (0x2U)
#define DRAM_CTRL_DIS_SHIFT (1U)
#define DRAM_CTRL_DIS_SET(x) (((uint32_t)(x) << DRAM_CTRL_DIS_SHIFT) & DRAM_CTRL_DIS_MASK)
#define DRAM_CTRL_DIS_GET(x) (((uint32_t)(x) & DRAM_CTRL_DIS_MASK) >> DRAM_CTRL_DIS_SHIFT)

/*
 * RST (RW)
 *
 * Software Reset
 * Resets all internal logic in the SEMC except the configuration registers.
 */
#define DRAM_CTRL_RST_MASK (0x1U)
#define DRAM_CTRL_RST_SHIFT (0U)
#define DRAM_CTRL_RST_SET(x) (((uint32_t)(x) << DRAM_CTRL_RST_SHIFT) & DRAM_CTRL_RST_MASK)
#define DRAM_CTRL_RST_GET(x) (((uint32_t)(x) & DRAM_CTRL_RST_MASK) >> DRAM_CTRL_RST_SHIFT)

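/*
 * Usage sketch (not part of the original register map): how the _SET/_GET macros above
 * are meant to be combined. A field is cleared with its _MASK and then OR'ed back in
 * through its _SET macro, which shifts and masks the value. The `dram` pointer is a
 * placeholder for the controller instance; the actual base address is SoC-specific.
 */
static inline void dram_ctrl_set_timeouts(DRAM_Type *dram, uint8_t bto, uint8_t cto)
{
    uint32_t ctrl = dram->CTRL;
    ctrl &= ~(DRAM_CTRL_BTO_MASK | DRAM_CTRL_CTO_MASK);      /* clear both timeout fields */
    ctrl |= DRAM_CTRL_BTO_SET(bto) | DRAM_CTRL_CTO_SET(cto); /* bus timeout = 255*2^BTO cycles, command timeout = CTO*1024 cycles */
    dram->CTRL = ctrl;
}
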
/* Bitfield definition for register: BMW0 */
/*
 * RWS (RW)
 *
 * Weight of slave hit with read/write switch. This weight score applies when the queued command
 * targets the same slave as the currently executing command but switches between read and write.
 */
#define DRAM_BMW0_RWS_MASK (0xFF0000UL)
#define DRAM_BMW0_RWS_SHIFT (16U)
#define DRAM_BMW0_RWS_SET(x) (((uint32_t)(x) << DRAM_BMW0_RWS_SHIFT) & DRAM_BMW0_RWS_MASK)
#define DRAM_BMW0_RWS_GET(x) (((uint32_t)(x) & DRAM_BMW0_RWS_MASK) >> DRAM_BMW0_RWS_SHIFT)

/*
 * SH (RW)
 *
 * Weight of slave hit without read/write switch. This weight score applies when the queued command
 * targets the same slave as the currently executing command without switching between read and write.
 */
#define DRAM_BMW0_SH_MASK (0xFF00U)
#define DRAM_BMW0_SH_SHIFT (8U)
#define DRAM_BMW0_SH_SET(x) (((uint32_t)(x) << DRAM_BMW0_SH_SHIFT) & DRAM_BMW0_SH_MASK)
#define DRAM_BMW0_SH_GET(x) (((uint32_t)(x) & DRAM_BMW0_SH_MASK) >> DRAM_BMW0_SH_SHIFT)

/*
 * AGE (RW)
 *
 * Weight of the AGE calculation. Each command in the queue has an age counter indicating how long it
 * has waited; the age is multiplied by this weight to produce its weight score.
 */
#define DRAM_BMW0_AGE_MASK (0xF0U)
#define DRAM_BMW0_AGE_SHIFT (4U)
#define DRAM_BMW0_AGE_SET(x) (((uint32_t)(x) << DRAM_BMW0_AGE_SHIFT) & DRAM_BMW0_AGE_MASK)
#define DRAM_BMW0_AGE_GET(x) (((uint32_t)(x) & DRAM_BMW0_AGE_MASK) >> DRAM_BMW0_AGE_SHIFT)

/*
 * QOS (RW)
 *
 * Weight of the QoS calculation. Each AXI access carries an AxQOS value, used as a priority indicator
 * for the associated write or read transaction; a higher value indicates a higher priority transaction.
 * AxQOS is multiplied by this weight to produce its weight score.
 */
#define DRAM_BMW0_QOS_MASK (0xFU)
#define DRAM_BMW0_QOS_SHIFT (0U)
#define DRAM_BMW0_QOS_SET(x) (((uint32_t)(x) << DRAM_BMW0_QOS_SHIFT) & DRAM_BMW0_QOS_MASK)
#define DRAM_BMW0_QOS_GET(x) (((uint32_t)(x) & DRAM_BMW0_QOS_MASK) >> DRAM_BMW0_QOS_SHIFT)

/* Bitfield definition for register: BMW1 */
/*
 * BR (RW)
 *
 * Weight of bank rotation. This weight score applies when the queued command targets a different bank
 * than the currently executing command.
 */
#define DRAM_BMW1_BR_MASK (0xFF000000UL)
#define DRAM_BMW1_BR_SHIFT (24U)
#define DRAM_BMW1_BR_SET(x) (((uint32_t)(x) << DRAM_BMW1_BR_SHIFT) & DRAM_BMW1_BR_MASK)
#define DRAM_BMW1_BR_GET(x) (((uint32_t)(x) & DRAM_BMW1_BR_MASK) >> DRAM_BMW1_BR_SHIFT)

/*
 * RWS (RW)
 *
 * Weight of slave hit with read/write switch. This weight score applies when the queued command
 * targets the same slave as the currently executing command but switches between read and write.
 */
#define DRAM_BMW1_RWS_MASK (0xFF0000UL)
#define DRAM_BMW1_RWS_SHIFT (16U)
#define DRAM_BMW1_RWS_SET(x) (((uint32_t)(x) << DRAM_BMW1_RWS_SHIFT) & DRAM_BMW1_RWS_MASK)
#define DRAM_BMW1_RWS_GET(x) (((uint32_t)(x) & DRAM_BMW1_RWS_MASK) >> DRAM_BMW1_RWS_SHIFT)

/*
 * PH (RW)
 *
 * Weight of slave hit without read/write switch. This weight score applies when the queued command
 * targets the same slave as the currently executing command without switching between read and write.
 */
#define DRAM_BMW1_PH_MASK (0xFF00U)
#define DRAM_BMW1_PH_SHIFT (8U)
#define DRAM_BMW1_PH_SET(x) (((uint32_t)(x) << DRAM_BMW1_PH_SHIFT) & DRAM_BMW1_PH_MASK)
#define DRAM_BMW1_PH_GET(x) (((uint32_t)(x) & DRAM_BMW1_PH_MASK) >> DRAM_BMW1_PH_SHIFT)

/*
 * AGE (RW)
 *
 * Weight of the AGE calculation. Each command in the queue has an age counter indicating how long it
 * has waited; the age is multiplied by this weight to produce its weight score.
 */
#define DRAM_BMW1_AGE_MASK (0xF0U)
#define DRAM_BMW1_AGE_SHIFT (4U)
#define DRAM_BMW1_AGE_SET(x) (((uint32_t)(x) << DRAM_BMW1_AGE_SHIFT) & DRAM_BMW1_AGE_MASK)
#define DRAM_BMW1_AGE_GET(x) (((uint32_t)(x) & DRAM_BMW1_AGE_MASK) >> DRAM_BMW1_AGE_SHIFT)

/*
 * QOS (RW)
 *
 * Weight of the QoS calculation. Each AXI access carries an AxQOS value, used as a priority indicator
 * for the associated write or read transaction; a higher value indicates a higher priority transaction.
 * AxQOS is multiplied by this weight to produce its weight score.
 */
#define DRAM_BMW1_QOS_MASK (0xFU)
#define DRAM_BMW1_QOS_SHIFT (0U)
#define DRAM_BMW1_QOS_SET(x) (((uint32_t)(x) << DRAM_BMW1_QOS_SHIFT) & DRAM_BMW1_QOS_MASK)
#define DRAM_BMW1_QOS_GET(x) (((uint32_t)(x) & DRAM_BMW1_QOS_MASK) >> DRAM_BMW1_QOS_SHIFT)

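/*
 * Usage sketch (illustrative, not from the original header): programming the AXI
 * arbitration weights described above in one shot. The individual values are made-up
 * examples and should be tuned per application; `dram` is a placeholder instance pointer.
 */
static inline void dram_set_arbitration_weights(DRAM_Type *dram)
{
    dram->BMW0 = DRAM_BMW0_RWS_SET(0x10)  /* weight for same-slave hits that switch read/write */
               | DRAM_BMW0_SH_SET(0x20)   /* favor back-to-back hits on the same slave */
               | DRAM_BMW0_AGE_SET(0x4)   /* age multiplier keeps old commands from starving */
               | DRAM_BMW0_QOS_SET(0x2);  /* AxQOS multiplier */
    dram->BMW1 = DRAM_BMW1_BR_SET(0x08)   /* weight for commands that rotate to another bank */
               | DRAM_BMW1_RWS_SET(0x10)
               | DRAM_BMW1_PH_SET(0x20)
               | DRAM_BMW1_AGE_SET(0x4)
               | DRAM_BMW1_QOS_SET(0x2);
}
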
/* Bitfield definition for register array: BR */
/*
 * BASE (RW)
 *
 * Base Address
 * This field holds the upper 20 bits of the SoC-level base address; the lower 12 bits of the
 * base address are always zero.
 */
#define DRAM_BR_BASE_MASK (0xFFFFF000UL)
#define DRAM_BR_BASE_SHIFT (12U)
#define DRAM_BR_BASE_SET(x) (((uint32_t)(x) << DRAM_BR_BASE_SHIFT) & DRAM_BR_BASE_MASK)
#define DRAM_BR_BASE_GET(x) (((uint32_t)(x) & DRAM_BR_BASE_MASK) >> DRAM_BR_BASE_SHIFT)

/*
 * SIZE (RW)
 *
 * Memory size
 * 00000b - 4KB
 * 00001b - 8KB
 * 00010b - 16KB
 * 00011b - 32KB
 * 00100b - 64KB
 * 00101b - 128KB
 * 00110b - 256KB
 * 00111b - 512KB
 * 01000b - 1MB
 * 01001b - 2MB
 * 01010b - 4MB
 * 01011b - 8MB
 * 01100b - 16MB
 * 01101b - 32MB
 * 01110b - 64MB
 * 01111b - 128MB
 * 10000b - 256MB
 * 10001b - 512MB
 * 10010b - 1GB
 * 10011b - 2GB
 * 10100-11111b - 4GB
 */
#define DRAM_BR_SIZE_MASK (0x3EU)
#define DRAM_BR_SIZE_SHIFT (1U)
#define DRAM_BR_SIZE_SET(x) (((uint32_t)(x) << DRAM_BR_SIZE_SHIFT) & DRAM_BR_SIZE_MASK)
#define DRAM_BR_SIZE_GET(x) (((uint32_t)(x) & DRAM_BR_SIZE_MASK) >> DRAM_BR_SIZE_SHIFT)

/*
 * VLD (RW)
 *
 * Valid
 */
#define DRAM_BR_VLD_MASK (0x1U)
#define DRAM_BR_VLD_SHIFT (0U)
#define DRAM_BR_VLD_SET(x) (((uint32_t)(x) << DRAM_BR_VLD_SHIFT) & DRAM_BR_VLD_MASK)
#define DRAM_BR_VLD_GET(x) (((uint32_t)(x) & DRAM_BR_VLD_MASK) >> DRAM_BR_VLD_SHIFT)

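/*
 * Usage sketch (illustrative): marking BR[0] as a valid 32MB SDRAM window. The base
 * address value and the instance pointer are assumptions for the example only; BASE
 * takes the upper 20 address bits, so the full address is shifted down before _SET.
 */
static inline void dram_map_cs0_32mb(DRAM_Type *dram, uint32_t base_addr)
{
    dram->BR[0] = DRAM_BR_BASE_SET(base_addr >> DRAM_BR_BASE_SHIFT) /* upper 20 bits of the window base */
                | DRAM_BR_SIZE_SET(0x0D)                            /* 01101b - 32MB */
                | DRAM_BR_VLD_SET(1);                               /* enable the window */
}
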
/* Bitfield definition for register: INTEN */
/*
 * AXIBUSERR (RW)
 *
 * AXI BUS error interrupt enable
 * 0b - Interrupt is disabled
 * 1b - Interrupt is enabled
 */
#define DRAM_INTEN_AXIBUSERR_MASK (0x8U)
#define DRAM_INTEN_AXIBUSERR_SHIFT (3U)
#define DRAM_INTEN_AXIBUSERR_SET(x) (((uint32_t)(x) << DRAM_INTEN_AXIBUSERR_SHIFT) & DRAM_INTEN_AXIBUSERR_MASK)
#define DRAM_INTEN_AXIBUSERR_GET(x) (((uint32_t)(x) & DRAM_INTEN_AXIBUSERR_MASK) >> DRAM_INTEN_AXIBUSERR_SHIFT)

/*
 * AXICMDERR (RW)
 *
 * AXI command error interrupt enable
 * 0b - Interrupt is disabled
 * 1b - Interrupt is enabled
 */
#define DRAM_INTEN_AXICMDERR_MASK (0x4U)
#define DRAM_INTEN_AXICMDERR_SHIFT (2U)
#define DRAM_INTEN_AXICMDERR_SET(x) (((uint32_t)(x) << DRAM_INTEN_AXICMDERR_SHIFT) & DRAM_INTEN_AXICMDERR_MASK)
#define DRAM_INTEN_AXICMDERR_GET(x) (((uint32_t)(x) & DRAM_INTEN_AXICMDERR_MASK) >> DRAM_INTEN_AXICMDERR_SHIFT)

/*
 * IPCMDERR (RW)
 *
 * IP command error interrupt enable
 * 0b - Interrupt is disabled
 * 1b - Interrupt is enabled
 */
#define DRAM_INTEN_IPCMDERR_MASK (0x2U)
#define DRAM_INTEN_IPCMDERR_SHIFT (1U)
#define DRAM_INTEN_IPCMDERR_SET(x) (((uint32_t)(x) << DRAM_INTEN_IPCMDERR_SHIFT) & DRAM_INTEN_IPCMDERR_MASK)
#define DRAM_INTEN_IPCMDERR_GET(x) (((uint32_t)(x) & DRAM_INTEN_IPCMDERR_MASK) >> DRAM_INTEN_IPCMDERR_SHIFT)

/*
 * IPCMDDONE (RW)
 *
 * IP command done interrupt enable
 * 0b - Interrupt is disabled
 * 1b - Interrupt is enabled
 */
#define DRAM_INTEN_IPCMDDONE_MASK (0x1U)
#define DRAM_INTEN_IPCMDDONE_SHIFT (0U)
#define DRAM_INTEN_IPCMDDONE_SET(x) (((uint32_t)(x) << DRAM_INTEN_IPCMDDONE_SHIFT) & DRAM_INTEN_IPCMDDONE_MASK)
#define DRAM_INTEN_IPCMDDONE_GET(x) (((uint32_t)(x) & DRAM_INTEN_IPCMDDONE_MASK) >> DRAM_INTEN_IPCMDDONE_SHIFT)

/* Bitfield definition for register: INTR */
/*
 * AXIBUSERR (W1C)
 *
 * AXI bus error interrupt
 * The AXI bus error interrupt is generated in the following cases:
 * • AXI address is invalid
 * • AXI 8-bit or 16-bit WRAP write/read
 */
#define DRAM_INTR_AXIBUSERR_MASK (0x8U)
#define DRAM_INTR_AXIBUSERR_SHIFT (3U)
#define DRAM_INTR_AXIBUSERR_SET(x) (((uint32_t)(x) << DRAM_INTR_AXIBUSERR_SHIFT) & DRAM_INTR_AXIBUSERR_MASK)
#define DRAM_INTR_AXIBUSERR_GET(x) (((uint32_t)(x) & DRAM_INTR_AXIBUSERR_MASK) >> DRAM_INTR_AXIBUSERR_SHIFT)

/*
 * AXICMDERR (W1C)
 *
 * AXI command error interrupt
 * The AXI command error interrupt is generated when an AXI command execution times out.
 */
#define DRAM_INTR_AXICMDERR_MASK (0x4U)
#define DRAM_INTR_AXICMDERR_SHIFT (2U)
#define DRAM_INTR_AXICMDERR_SET(x) (((uint32_t)(x) << DRAM_INTR_AXICMDERR_SHIFT) & DRAM_INTR_AXICMDERR_MASK)
#define DRAM_INTR_AXICMDERR_GET(x) (((uint32_t)(x) & DRAM_INTR_AXICMDERR_MASK) >> DRAM_INTR_AXICMDERR_SHIFT)

/*
 * IPCMDERR (W1C)
 *
 * IP command error interrupt
 * The IP command error interrupt is generated in the following cases:
 * • IP command address targets an invalid device space
 * • IP command code is unsupported
 * • IP command is triggered while the previous command is still executing
 */
#define DRAM_INTR_IPCMDERR_MASK (0x2U)
#define DRAM_INTR_IPCMDERR_SHIFT (1U)
#define DRAM_INTR_IPCMDERR_SET(x) (((uint32_t)(x) << DRAM_INTR_IPCMDERR_SHIFT) & DRAM_INTR_IPCMDERR_MASK)
#define DRAM_INTR_IPCMDERR_GET(x) (((uint32_t)(x) & DRAM_INTR_IPCMDERR_MASK) >> DRAM_INTR_IPCMDERR_SHIFT)

/*
 * IPCMDDONE (W1C)
 *
 * IP command normal done interrupt
 */
#define DRAM_INTR_IPCMDDONE_MASK (0x1U)
#define DRAM_INTR_IPCMDDONE_SHIFT (0U)
#define DRAM_INTR_IPCMDDONE_SET(x) (((uint32_t)(x) << DRAM_INTR_IPCMDDONE_SHIFT) & DRAM_INTR_IPCMDDONE_MASK)
#define DRAM_INTR_IPCMDDONE_GET(x) (((uint32_t)(x) & DRAM_INTR_IPCMDDONE_MASK) >> DRAM_INTR_IPCMDDONE_SHIFT)

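/*
 * Usage sketch (illustrative): enabling the error interrupts and acknowledging a W1C
 * status flag. INTR bits are write-1-to-clear, so a pending flag is cleared by writing
 * its mask back. `dram` is a placeholder instance pointer.
 */
static inline void dram_enable_error_irqs(DRAM_Type *dram)
{
    dram->INTEN |= DRAM_INTEN_AXIBUSERR_SET(1)
                |  DRAM_INTEN_AXICMDERR_SET(1)
                |  DRAM_INTEN_IPCMDERR_SET(1);
}

static inline void dram_clear_ipcmd_done(DRAM_Type *dram)
{
    dram->INTR = DRAM_INTR_IPCMDDONE_MASK; /* write 1 to clear the IPCMDDONE flag */
}
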
/* Bitfield definition for register: SDRCTRL0 */
/*
 * BANK2 (RW)
 *
 * 2-bank selection bit
 * 0b - SDRAM device has 4 banks.
 * 1b - SDRAM device has 2 banks.
 */
#define DRAM_SDRCTRL0_BANK2_MASK (0x4000U)
#define DRAM_SDRCTRL0_BANK2_SHIFT (14U)
#define DRAM_SDRCTRL0_BANK2_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL0_BANK2_SHIFT) & DRAM_SDRCTRL0_BANK2_MASK)
#define DRAM_SDRCTRL0_BANK2_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL0_BANK2_MASK) >> DRAM_SDRCTRL0_BANK2_SHIFT)

/*
 * CAS (RW)
 *
 * CAS Latency
 * 00b - 1
 * 01b - 1
 * 10b - 2
 * 11b - 3
 */
#define DRAM_SDRCTRL0_CAS_MASK (0xC00U)
#define DRAM_SDRCTRL0_CAS_SHIFT (10U)
#define DRAM_SDRCTRL0_CAS_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL0_CAS_SHIFT) & DRAM_SDRCTRL0_CAS_MASK)
#define DRAM_SDRCTRL0_CAS_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL0_CAS_MASK) >> DRAM_SDRCTRL0_CAS_SHIFT)

/*
 * COL (RW)
 *
 * Column address bit number
 * 00b - 12 bit
 * 01b - 11 bit
 * 10b - 10 bit
 * 11b - 9 bit
 */
#define DRAM_SDRCTRL0_COL_MASK (0x300U)
#define DRAM_SDRCTRL0_COL_SHIFT (8U)
#define DRAM_SDRCTRL0_COL_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL0_COL_SHIFT) & DRAM_SDRCTRL0_COL_MASK)
#define DRAM_SDRCTRL0_COL_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL0_COL_MASK) >> DRAM_SDRCTRL0_COL_SHIFT)

/*
 * COL8 (RW)
 *
 * Column 8 selection bit
 * 0b - Column address bit number is decided by the COL field.
 * 1b - Column address bit number is 8. The COL field is ignored.
 */
#define DRAM_SDRCTRL0_COL8_MASK (0x80U)
#define DRAM_SDRCTRL0_COL8_SHIFT (7U)
#define DRAM_SDRCTRL0_COL8_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL0_COL8_SHIFT) & DRAM_SDRCTRL0_COL8_MASK)
#define DRAM_SDRCTRL0_COL8_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL0_COL8_MASK) >> DRAM_SDRCTRL0_COL8_SHIFT)

/*
 * BURSTLEN (RW)
 *
 * Burst Length
 * 000b - 1
 * 001b - 2
 * 010b - 4
 * 011b - 8
 * 100b - 8
 * 101b - 8
 * 110b - 8
 * 111b - 8
 */
#define DRAM_SDRCTRL0_BURSTLEN_MASK (0x70U)
#define DRAM_SDRCTRL0_BURSTLEN_SHIFT (4U)
#define DRAM_SDRCTRL0_BURSTLEN_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL0_BURSTLEN_SHIFT) & DRAM_SDRCTRL0_BURSTLEN_MASK)
#define DRAM_SDRCTRL0_BURSTLEN_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL0_BURSTLEN_MASK) >> DRAM_SDRCTRL0_BURSTLEN_SHIFT)

/*
 * HIGHBAND (RW)
 *
 * High band select
 * 0: use DATA[15:0] for 16-bit SDRAM
 * 1: use DATA[31:16] for 16-bit SDRAM
 * Only used when the port size is 16-bit (PORTSZ=01b).
 */
#define DRAM_SDRCTRL0_HIGHBAND_MASK (0x8U)
#define DRAM_SDRCTRL0_HIGHBAND_SHIFT (3U)
#define DRAM_SDRCTRL0_HIGHBAND_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL0_HIGHBAND_SHIFT) & DRAM_SDRCTRL0_HIGHBAND_MASK)
#define DRAM_SDRCTRL0_HIGHBAND_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL0_HIGHBAND_MASK) >> DRAM_SDRCTRL0_HIGHBAND_SHIFT)

/*
 * PORTSZ (RW)
 *
 * Port Size
 * 00b - 8-bit
 * 01b - 16-bit
 * 10b - 32-bit
 */
#define DRAM_SDRCTRL0_PORTSZ_MASK (0x3U)
#define DRAM_SDRCTRL0_PORTSZ_SHIFT (0U)
#define DRAM_SDRCTRL0_PORTSZ_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL0_PORTSZ_SHIFT) & DRAM_SDRCTRL0_PORTSZ_MASK)
#define DRAM_SDRCTRL0_PORTSZ_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL0_PORTSZ_MASK) >> DRAM_SDRCTRL0_PORTSZ_SHIFT)

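/*
 * Usage sketch (illustrative): describing a typical 16-bit, 4-bank SDRAM with 9 column
 * address bits, CAS latency 3 and burst length 8. The field values are examples only and
 * must match the attached device; `dram` is a placeholder instance pointer.
 */
static inline void dram_describe_sdram_geometry(DRAM_Type *dram)
{
    dram->SDRCTRL0 = DRAM_SDRCTRL0_BANK2_SET(0)    /* 0b - 4 banks */
                   | DRAM_SDRCTRL0_CAS_SET(3)      /* 11b - CAS latency 3 */
                   | DRAM_SDRCTRL0_COL_SET(3)      /* 11b - 9 column address bits */
                   | DRAM_SDRCTRL0_BURSTLEN_SET(3) /* 011b - burst length 8 */
                   | DRAM_SDRCTRL0_PORTSZ_SET(1);  /* 01b - 16-bit port */
}
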
/* Bitfield definition for register: SDRCTRL1 */
/*
 * ACT2PRE (RW)
 *
 * ACT to Precharge minimum time
 * A delay of ACT2PRE+1 clock cycles is guaranteed between an ACTIVE command and a PRECHARGE/PRECHARGE_ALL command.
 */
#define DRAM_SDRCTRL1_ACT2PRE_MASK (0xF00000UL)
#define DRAM_SDRCTRL1_ACT2PRE_SHIFT (20U)
#define DRAM_SDRCTRL1_ACT2PRE_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL1_ACT2PRE_SHIFT) & DRAM_SDRCTRL1_ACT2PRE_MASK)
#define DRAM_SDRCTRL1_ACT2PRE_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL1_ACT2PRE_MASK) >> DRAM_SDRCTRL1_ACT2PRE_SHIFT)

/*
 * CKEOFF (RW)
 *
 * CKE OFF minimum time
 * A clock suspend is guaranteed to last at least CKEOFF+1 clock cycles.
 */
#define DRAM_SDRCTRL1_CKEOFF_MASK (0xF0000UL)
#define DRAM_SDRCTRL1_CKEOFF_SHIFT (16U)
#define DRAM_SDRCTRL1_CKEOFF_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL1_CKEOFF_SHIFT) & DRAM_SDRCTRL1_CKEOFF_MASK)
#define DRAM_SDRCTRL1_CKEOFF_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL1_CKEOFF_MASK) >> DRAM_SDRCTRL1_CKEOFF_SHIFT)

/*
 * WRC (RW)
 *
 * Write recovery time
 * A delay of WRC+1 clock cycles is guaranteed between a WRITE command and a PRECHARGE/PRECHARGE_ALL command. This helps meet the tWR timing requirement of the SDRAM device.
 */
#define DRAM_SDRCTRL1_WRC_MASK (0xE000U)
#define DRAM_SDRCTRL1_WRC_SHIFT (13U)
#define DRAM_SDRCTRL1_WRC_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL1_WRC_SHIFT) & DRAM_SDRCTRL1_WRC_MASK)
#define DRAM_SDRCTRL1_WRC_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL1_WRC_MASK) >> DRAM_SDRCTRL1_WRC_SHIFT)

/*
 * RFRC (RW)
 *
 * Refresh recovery time
 * A delay of RFRC+1 clock cycles is guaranteed between a REFRESH command and an ACTIVE command. This helps meet the tRFC timing requirement of the SDRAM device.
 */
#define DRAM_SDRCTRL1_RFRC_MASK (0x1F00U)
#define DRAM_SDRCTRL1_RFRC_SHIFT (8U)
#define DRAM_SDRCTRL1_RFRC_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL1_RFRC_SHIFT) & DRAM_SDRCTRL1_RFRC_MASK)
#define DRAM_SDRCTRL1_RFRC_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL1_RFRC_MASK) >> DRAM_SDRCTRL1_RFRC_SHIFT)

/*
 * ACT2RW (RW)
 *
 * ACT to Read/Write wait time
 * A delay of ACT2RW+1 clock cycles is guaranteed between an ACTIVE command and a READ/WRITE command. This helps meet the tRCD timing requirement of the SDRAM device.
 */
#define DRAM_SDRCTRL1_ACT2RW_MASK (0xF0U)
#define DRAM_SDRCTRL1_ACT2RW_SHIFT (4U)
#define DRAM_SDRCTRL1_ACT2RW_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL1_ACT2RW_SHIFT) & DRAM_SDRCTRL1_ACT2RW_MASK)
#define DRAM_SDRCTRL1_ACT2RW_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL1_ACT2RW_MASK) >> DRAM_SDRCTRL1_ACT2RW_SHIFT)

/*
 * PRE2ACT (RW)
 *
 * PRECHARGE to ACT/Refresh wait time
 * A delay of PRE2ACT+1 clock cycles is guaranteed between a PRECHARGE/PRECHARGE_ALL command and an ACTIVE/REFRESH command. This helps meet the tRP timing requirement of the SDRAM device.
 */
#define DRAM_SDRCTRL1_PRE2ACT_MASK (0xFU)
#define DRAM_SDRCTRL1_PRE2ACT_SHIFT (0U)
#define DRAM_SDRCTRL1_PRE2ACT_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL1_PRE2ACT_SHIFT) & DRAM_SDRCTRL1_PRE2ACT_MASK)
#define DRAM_SDRCTRL1_PRE2ACT_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL1_PRE2ACT_MASK) >> DRAM_SDRCTRL1_PRE2ACT_SHIFT)

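/*
 * Usage sketch (illustrative): converting datasheet timings in nanoseconds into the
 * "+1 clock cycles" encoding used by the SDRCTRL1/SDRCTRL2 fields. The helper and the
 * example numbers (tRP=18ns, tRCD=18ns, tWR=12ns, tRFC=66ns, tRAS=42ns) are assumptions
 * for the example, not values taken from this header.
 */
static inline uint32_t dram_ns_to_cycles_minus_one(uint32_t ns, uint32_t freq_in_hz)
{
    /* round up: cycles = ceil(ns * f / 1e9), then subtract the implicit +1 */
    uint32_t cycles = (uint32_t)(((uint64_t)ns * freq_in_hz + 999999999UL) / 1000000000UL);
    return (cycles > 0U) ? (cycles - 1U) : 0U;
}

static inline void dram_set_basic_timings(DRAM_Type *dram, uint32_t freq_in_hz)
{
    dram->SDRCTRL1 = DRAM_SDRCTRL1_PRE2ACT_SET(dram_ns_to_cycles_minus_one(18U, freq_in_hz)) /* tRP  */
                   | DRAM_SDRCTRL1_ACT2RW_SET(dram_ns_to_cycles_minus_one(18U, freq_in_hz))  /* tRCD */
                   | DRAM_SDRCTRL1_WRC_SET(dram_ns_to_cycles_minus_one(12U, freq_in_hz))     /* tWR  */
                   | DRAM_SDRCTRL1_RFRC_SET(dram_ns_to_cycles_minus_one(66U, freq_in_hz))    /* tRFC */
                   | DRAM_SDRCTRL1_ACT2PRE_SET(dram_ns_to_cycles_minus_one(42U, freq_in_hz)) /* tRAS */
                   | DRAM_SDRCTRL1_CKEOFF_SET(0U);
}
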
/* Bitfield definition for register: SDRCTRL2 */
/*
 * ITO (RW)
 *
 * SDRAM Idle timeout
 * All open pages are closed when the SDRAM idle time exceeds the idle timeout period. The SDRAM is
 * considered idle when there is no AXI bus transfer and no SDRAM command pending.
 * 00000000b - IDLE timeout period is 256*Prescale period.
 * 00000001-11111111b - IDLE timeout period is ITO*Prescale period.
 */
#define DRAM_SDRCTRL2_ITO_MASK (0xFF000000UL)
#define DRAM_SDRCTRL2_ITO_SHIFT (24U)
#define DRAM_SDRCTRL2_ITO_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL2_ITO_SHIFT) & DRAM_SDRCTRL2_ITO_MASK)
#define DRAM_SDRCTRL2_ITO_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL2_ITO_MASK) >> DRAM_SDRCTRL2_ITO_SHIFT)

/*
 * ACT2ACT (RW)
 *
 * ACT to ACT wait time
 * A delay of ACT2ACT+1 clock cycles is guaranteed between one ACTIVE command and the next. This
 * helps meet the tRRD timing requirement of the SDRAM device.
 */
#define DRAM_SDRCTRL2_ACT2ACT_MASK (0xFF0000UL)
#define DRAM_SDRCTRL2_ACT2ACT_SHIFT (16U)
#define DRAM_SDRCTRL2_ACT2ACT_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL2_ACT2ACT_SHIFT) & DRAM_SDRCTRL2_ACT2ACT_MASK)
#define DRAM_SDRCTRL2_ACT2ACT_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL2_ACT2ACT_MASK) >> DRAM_SDRCTRL2_ACT2ACT_SHIFT)

/*
 * REF2REF (RW)
 *
 * Refresh to Refresh wait time
 * A delay of REF2REF+1 clock cycles is guaranteed between one REFRESH command and the next.
 * This helps meet the tRFC timing requirement of the SDRAM device.
 */
#define DRAM_SDRCTRL2_REF2REF_MASK (0xFF00U)
#define DRAM_SDRCTRL2_REF2REF_SHIFT (8U)
#define DRAM_SDRCTRL2_REF2REF_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL2_REF2REF_SHIFT) & DRAM_SDRCTRL2_REF2REF_MASK)
#define DRAM_SDRCTRL2_REF2REF_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL2_REF2REF_MASK) >> DRAM_SDRCTRL2_REF2REF_SHIFT)

/*
 * SRRC (RW)
 *
 * Self Refresh Recovery time
 * A delay of SRRC+1 clock cycles is guaranteed between a SELF REFRESH command and any following command.
 */
#define DRAM_SDRCTRL2_SRRC_MASK (0xFFU)
#define DRAM_SDRCTRL2_SRRC_SHIFT (0U)
#define DRAM_SDRCTRL2_SRRC_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL2_SRRC_SHIFT) & DRAM_SDRCTRL2_SRRC_MASK)
#define DRAM_SDRCTRL2_SRRC_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL2_SRRC_MASK) >> DRAM_SDRCTRL2_SRRC_SHIFT)

/* Bitfield definition for register: SDRCTRL3 */
/*
 * UT (RW)
 *
 * Refresh urgent threshold
 * An internal refresh request is generated every refresh period. Before the internal request timer counts
 * up to the urgent request threshold, the refresh request is treated as a normal refresh request. A normal
 * refresh request is handled at lower priority than any pending AXI command or IP command to the SDRAM device.
 * Once the internal request timer counts up to this urgent threshold, the refresh request is treated as an
 * urgent refresh request. An urgent refresh request is handled at higher priority than any pending AXI command
 * or IP command to the SDRAM device.
 * NOTE: When the urgent threshold is no less than the refresh period, refresh requests are always treated as
 * urgent refresh requests.
 * The refresh urgent threshold is as follows:
 * 00000000b - 256*Prescaler period
 * 00000001-11111111b - UT*Prescaler period
 */
#define DRAM_SDRCTRL3_UT_MASK (0xFF000000UL)
#define DRAM_SDRCTRL3_UT_SHIFT (24U)
#define DRAM_SDRCTRL3_UT_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL3_UT_SHIFT) & DRAM_SDRCTRL3_UT_MASK)
#define DRAM_SDRCTRL3_UT_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL3_UT_MASK) >> DRAM_SDRCTRL3_UT_SHIFT)

/*
 * RT (RW)
 *
 * Refresh timer period
 * The refresh timer period is as follows:
 * 00000000b - 256*Prescaler period
 * 00000001-11111111b - RT*Prescaler period
 */
#define DRAM_SDRCTRL3_RT_MASK (0xFF0000UL)
#define DRAM_SDRCTRL3_RT_SHIFT (16U)
#define DRAM_SDRCTRL3_RT_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL3_RT_SHIFT) & DRAM_SDRCTRL3_RT_MASK)
#define DRAM_SDRCTRL3_RT_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL3_RT_MASK) >> DRAM_SDRCTRL3_RT_SHIFT)

/*
 * PRESCALE (RW)
 *
 * Prescaler timer period
 * The prescaler timer period is as follows:
 * 00000000b - 256*16 clock cycles
 * 00000001-11111111b - PRESCALE*16 clock cycles
 */
#define DRAM_SDRCTRL3_PRESCALE_MASK (0xFF00U)
#define DRAM_SDRCTRL3_PRESCALE_SHIFT (8U)
#define DRAM_SDRCTRL3_PRESCALE_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL3_PRESCALE_SHIFT) & DRAM_SDRCTRL3_PRESCALE_MASK)
#define DRAM_SDRCTRL3_PRESCALE_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL3_PRESCALE_MASK) >> DRAM_SDRCTRL3_PRESCALE_SHIFT)

/*
 * REBL (RW)
 *
 * Refresh burst length
 * Multiple AUTO REFRESH commands can be sent in one burst when REBL is set to a non-zero value. The
 * number of AUTO REFRESH command cycles sent to all SDRAM devices in one refresh period is as follows:
 * 000b - 1
 * 001b - 2
 * 010b - 3
 * 011b - 4
 * 100b - 5
 * 101b - 6
 * 110b - 7
 * 111b - 8
 */
#define DRAM_SDRCTRL3_REBL_MASK (0xEU)
#define DRAM_SDRCTRL3_REBL_SHIFT (1U)
#define DRAM_SDRCTRL3_REBL_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL3_REBL_SHIFT) & DRAM_SDRCTRL3_REBL_MASK)
#define DRAM_SDRCTRL3_REBL_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL3_REBL_MASK) >> DRAM_SDRCTRL3_REBL_SHIFT)

/*
 * REN (RW)
 *
 * Refresh enable
 */
#define DRAM_SDRCTRL3_REN_MASK (0x1U)
#define DRAM_SDRCTRL3_REN_SHIFT (0U)
#define DRAM_SDRCTRL3_REN_SET(x) (((uint32_t)(x) << DRAM_SDRCTRL3_REN_SHIFT) & DRAM_SDRCTRL3_REN_MASK)
#define DRAM_SDRCTRL3_REN_GET(x) (((uint32_t)(x) & DRAM_SDRCTRL3_REN_MASK) >> DRAM_SDRCTRL3_REN_SHIFT)

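/*
 * Worked example (illustrative): deriving PRESCALE and RT for auto refresh. Assume a
 * 166 MHz controller clock and a device needing 8192 refreshes every 64 ms, i.e. one
 * refresh per 7.8125 us. With PRESCALE = 11, one prescaler tick is 11*16/166MHz ~= 1.06 us,
 * so RT = 7 gives a refresh period of ~7.4 us, which satisfies the requirement. The device
 * parameters and chosen field values are assumptions for the example only.
 */
static inline void dram_enable_auto_refresh_example(DRAM_Type *dram)
{
    dram->SDRCTRL3 = DRAM_SDRCTRL3_PRESCALE_SET(11U) /* prescaler tick = 11*16 clock cycles */
                   | DRAM_SDRCTRL3_RT_SET(7U)        /* refresh period = 7 prescaler ticks */
                   | DRAM_SDRCTRL3_UT_SET(7U)        /* treat every refresh request as urgent (UT >= refresh period) */
                   | DRAM_SDRCTRL3_REBL_SET(0U)      /* one AUTO REFRESH per burst */
                   | DRAM_SDRCTRL3_REN_SET(1U);      /* enable refresh */
}
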
/* Bitfield definition for register: SADDR */
/*
 * SA (RW)
 *
 * Slave address
 */
#define DRAM_SADDR_SA_MASK (0xFFFFFFFFUL)
#define DRAM_SADDR_SA_SHIFT (0U)
#define DRAM_SADDR_SA_SET(x) (((uint32_t)(x) << DRAM_SADDR_SA_SHIFT) & DRAM_SADDR_SA_MASK)
#define DRAM_SADDR_SA_GET(x) (((uint32_t)(x) & DRAM_SADDR_SA_MASK) >> DRAM_SADDR_SA_SHIFT)

/* Bitfield definition for register: DATSZ */
/*
 * DATSZ (RW)
 *
 * Data Size in Bytes
 * When the IP command is not a write/read operation, the DATSZ field is ignored.
 * 000b - 4
 * 001b - 1
 * 010b - 2
 * 011b - 3
 * 100b - 4
 * 101b - 4
 * 110b - 4
 * 111b - 4
 */
#define DRAM_DATSZ_DATSZ_MASK (0x7U)
#define DRAM_DATSZ_DATSZ_SHIFT (0U)
#define DRAM_DATSZ_DATSZ_SET(x) (((uint32_t)(x) << DRAM_DATSZ_DATSZ_SHIFT) & DRAM_DATSZ_DATSZ_MASK)
#define DRAM_DATSZ_DATSZ_GET(x) (((uint32_t)(x) & DRAM_DATSZ_DATSZ_MASK) >> DRAM_DATSZ_DATSZ_SHIFT)

/* Bitfield definition for register: BYTEMSK */
/*
 * BM3 (RW)
 *
 * Byte Mask for Byte 3 (IPTXD bit 31:24)
 * 0b - Byte Unmasked
 * 1b - Byte Masked
 */
#define DRAM_BYTEMSK_BM3_MASK (0x8U)
#define DRAM_BYTEMSK_BM3_SHIFT (3U)
#define DRAM_BYTEMSK_BM3_SET(x) (((uint32_t)(x) << DRAM_BYTEMSK_BM3_SHIFT) & DRAM_BYTEMSK_BM3_MASK)
#define DRAM_BYTEMSK_BM3_GET(x) (((uint32_t)(x) & DRAM_BYTEMSK_BM3_MASK) >> DRAM_BYTEMSK_BM3_SHIFT)

/*
 * BM2 (RW)
 *
 * Byte Mask for Byte 2 (IPTXD bit 23:16)
 * 0b - Byte Unmasked
 * 1b - Byte Masked
 */
#define DRAM_BYTEMSK_BM2_MASK (0x4U)
#define DRAM_BYTEMSK_BM2_SHIFT (2U)
#define DRAM_BYTEMSK_BM2_SET(x) (((uint32_t)(x) << DRAM_BYTEMSK_BM2_SHIFT) & DRAM_BYTEMSK_BM2_MASK)
#define DRAM_BYTEMSK_BM2_GET(x) (((uint32_t)(x) & DRAM_BYTEMSK_BM2_MASK) >> DRAM_BYTEMSK_BM2_SHIFT)

/*
 * BM1 (RW)
 *
 * Byte Mask for Byte 1 (IPTXD bit 15:8)
 * 0b - Byte Unmasked
 * 1b - Byte Masked
 */
#define DRAM_BYTEMSK_BM1_MASK (0x2U)
#define DRAM_BYTEMSK_BM1_SHIFT (1U)
#define DRAM_BYTEMSK_BM1_SET(x) (((uint32_t)(x) << DRAM_BYTEMSK_BM1_SHIFT) & DRAM_BYTEMSK_BM1_MASK)
#define DRAM_BYTEMSK_BM1_GET(x) (((uint32_t)(x) & DRAM_BYTEMSK_BM1_MASK) >> DRAM_BYTEMSK_BM1_SHIFT)

/*
 * BM0 (RW)
 *
 * Byte Mask for Byte 0 (IPTXD bit 7:0)
 * 0b - Byte Unmasked
 * 1b - Byte Masked
 */
#define DRAM_BYTEMSK_BM0_MASK (0x1U)
#define DRAM_BYTEMSK_BM0_SHIFT (0U)
#define DRAM_BYTEMSK_BM0_SET(x) (((uint32_t)(x) << DRAM_BYTEMSK_BM0_SHIFT) & DRAM_BYTEMSK_BM0_MASK)
#define DRAM_BYTEMSK_BM0_GET(x) (((uint32_t)(x) & DRAM_BYTEMSK_BM0_MASK) >> DRAM_BYTEMSK_BM0_SHIFT)

/* Bitfield definition for register: IPCMD */
/*
 * KEY (WO)
 *
 * This field should be written with 0x5AA5 when triggering an IP command, for all device types. The memory
 * device is selected by the BRx settings and the IPCR0 registers.
 */
#define DRAM_IPCMD_KEY_MASK (0xFFFF0000UL)
#define DRAM_IPCMD_KEY_SHIFT (16U)
#define DRAM_IPCMD_KEY_SET(x) (((uint32_t)(x) << DRAM_IPCMD_KEY_SHIFT) & DRAM_IPCMD_KEY_MASK)
#define DRAM_IPCMD_KEY_GET(x) (((uint32_t)(x) & DRAM_IPCMD_KEY_MASK) >> DRAM_IPCMD_KEY_SHIFT)

/*
 * CMD (RW)
 *
 * SDRAM Commands:
 * • 0x8: READ
 * • 0x9: WRITE
 * • 0xA: MODESET
 * • 0xB: ACTIVE
 * • 0xC: AUTO REFRESH
 * • 0xD: SELF REFRESH
 * • 0xE: PRECHARGE
 * • 0xF: PRECHARGE ALL
 * • Others: RSVD
 * NOTE: SELF REFRESH is sent to all SDRAM devices because they share the same CLK pin.
 */
#define DRAM_IPCMD_CMD_MASK (0xFFFFU)
#define DRAM_IPCMD_CMD_SHIFT (0U)
#define DRAM_IPCMD_CMD_SET(x) (((uint32_t)(x) << DRAM_IPCMD_CMD_SHIFT) & DRAM_IPCMD_CMD_MASK)
#define DRAM_IPCMD_CMD_GET(x) (((uint32_t)(x) & DRAM_IPCMD_CMD_MASK) >> DRAM_IPCMD_CMD_SHIFT)

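/*
 * Usage sketch (illustrative): the IP command handshake implied by the registers above.
 * SADDR selects the target device address, IPTX carries write data, and writing IPCMD
 * with the 0x5AA5 key plus a command code starts execution; completion is signalled by
 * INTR.IPCMDDONE (write-1-to-clear). The MODESET example and the assumption that INTR
 * can be read back for polling are illustrative, not taken from this header.
 */
static inline void dram_send_ip_command(DRAM_Type *dram, uint32_t addr, uint16_t cmd, uint32_t data)
{
    dram->SADDR = DRAM_SADDR_SA_SET(addr);                /* target (slave) address */
    dram->IPTX  = DRAM_IPTX_DAT_SET(data);                /* payload for WRITE/MODESET commands */
    dram->IPCMD = DRAM_IPCMD_KEY_SET(0x5AA5U)             /* mandatory key */
                | DRAM_IPCMD_CMD_SET(cmd);                /* e.g. 0xA: MODESET */
    while ((dram->INTR & DRAM_INTR_IPCMDDONE_MASK) == 0U) {
        /* busy-wait for command completion (add a timeout in real code) */
    }
    dram->INTR = DRAM_INTR_IPCMDDONE_MASK;                /* acknowledge: write 1 to clear */
}
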
/* Bitfield definition for register: IPTX */
/*
 * DAT (RW)
 *
 * Data
 */
#define DRAM_IPTX_DAT_MASK (0xFFFFFFFFUL)
#define DRAM_IPTX_DAT_SHIFT (0U)
#define DRAM_IPTX_DAT_SET(x) (((uint32_t)(x) << DRAM_IPTX_DAT_SHIFT) & DRAM_IPTX_DAT_MASK)
#define DRAM_IPTX_DAT_GET(x) (((uint32_t)(x) & DRAM_IPTX_DAT_MASK) >> DRAM_IPTX_DAT_SHIFT)

/* Bitfield definition for register: IPRX */
/*
 * DAT (RW)
 *
 * Data
 */
#define DRAM_IPRX_DAT_MASK (0xFFFFFFFFUL)
#define DRAM_IPRX_DAT_SHIFT (0U)
#define DRAM_IPRX_DAT_SET(x) (((uint32_t)(x) << DRAM_IPRX_DAT_SHIFT) & DRAM_IPRX_DAT_MASK)
#define DRAM_IPRX_DAT_GET(x) (((uint32_t)(x) & DRAM_IPRX_DAT_MASK) >> DRAM_IPRX_DAT_SHIFT)

/* Bitfield definition for register: STAT0 */
/*
 * IDLE (RO)
 *
 * Indicates whether the controller is in the IDLE state.
 * When IDLE=1, the controller is idle: there is no pending AXI command in the internal queue and no
 * pending device access.
 */
#define DRAM_STAT0_IDLE_MASK (0x1U)
#define DRAM_STAT0_IDLE_SHIFT (0U)
#define DRAM_STAT0_IDLE_GET(x) (((uint32_t)(x) & DRAM_STAT0_IDLE_MASK) >> DRAM_STAT0_IDLE_SHIFT)

/* Bitfield definition for register: DLYCFG */
/*
 * OE (RW)
 *
 * Delay clock output enable; should be set after DLYEN and DLYSEL have been configured.
 */
#define DRAM_DLYCFG_OE_MASK (0x2000U)
#define DRAM_DLYCFG_OE_SHIFT (13U)
#define DRAM_DLYCFG_OE_SET(x) (((uint32_t)(x) << DRAM_DLYCFG_OE_SHIFT) & DRAM_DLYCFG_OE_MASK)
#define DRAM_DLYCFG_OE_GET(x) (((uint32_t)(x) & DRAM_DLYCFG_OE_MASK) >> DRAM_DLYCFG_OE_SHIFT)

/*
 * DLYSEL (RW)
 *
 * Delay line select: 0 selects 1 cell, 31 selects all 32 cells.
 */
#define DRAM_DLYCFG_DLYSEL_MASK (0x3EU)
#define DRAM_DLYCFG_DLYSEL_SHIFT (1U)
#define DRAM_DLYCFG_DLYSEL_SET(x) (((uint32_t)(x) << DRAM_DLYCFG_DLYSEL_SHIFT) & DRAM_DLYCFG_DLYSEL_MASK)
#define DRAM_DLYCFG_DLYSEL_GET(x) (((uint32_t)(x) & DRAM_DLYCFG_DLYSEL_MASK) >> DRAM_DLYCFG_DLYSEL_SHIFT)

/*
 * DLYEN (RW)
 *
 * Delay line enable
 */
#define DRAM_DLYCFG_DLYEN_MASK (0x1U)
#define DRAM_DLYCFG_DLYEN_SHIFT (0U)
#define DRAM_DLYCFG_DLYEN_SET(x) (((uint32_t)(x) << DRAM_DLYCFG_DLYEN_SHIFT) & DRAM_DLYCFG_DLYEN_MASK)
#define DRAM_DLYCFG_DLYEN_GET(x) (((uint32_t)(x) & DRAM_DLYCFG_DLYEN_MASK) >> DRAM_DLYCFG_DLYEN_SHIFT)

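/*
 * Usage sketch (illustrative): programming the delay line in the order suggested by the
 * OE description above: enable the line and select the number of cells first, then turn
 * the delayed clock output on. The cell count is an example value to be tuned per board.
 */
static inline void dram_config_delay_line(DRAM_Type *dram, uint8_t cells)
{
    dram->DLYCFG = DRAM_DLYCFG_DLYEN_SET(1)        /* enable the delay line */
                 | DRAM_DLYCFG_DLYSEL_SET(cells);  /* 0 = 1 cell ... 31 = all 32 cells */
    dram->DLYCFG |= DRAM_DLYCFG_OE_SET(1);         /* finally enable the delayed clock output */
}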


/* BR register group index macro definition */
#define DRAM_BR_BASE0 (0UL)
#define DRAM_BR_BASE1 (1UL)


#endif /* HPM_DRAM_H */