/*
 * Copyright 2004-2008 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#include <linux/linkage.h>
#include <asm/blackfin.h>
#include <mach/irq.h>
#include <asm/dpmc.h>

.section .l1.text

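/*
 * Enter the "sleep" operating mode.  The three arguments in R0-R2 are
 * handed straight to _set_sic_iwr as the wakeup masks; we stop CCLK,
 * idle until a wakeup event, then bring the clocks back before returning.
 */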
ENTRY(_sleep_mode)
	[--SP] = ( R7:0, P5:0 );
	[--SP] = RETS;

	call _set_sic_iwr;

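	/* Set STOPCK (bit 3) in PLL_CTL so the IDLE below stops CCLK */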
	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R1 = W[P0](z);
	BITSET (R1, 3);
	W[P0] = R1.L;

	CLI R2;
	SSYNC;
	IDLE;
	STI R2;

	call _test_pll_locked;

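	/* Awake again: limit wakeups to the PLL event while the clocks are restored */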
	R0 = IWR_ENABLE(0);
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;

	call _set_sic_iwr;

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R7 = W[P0](z);
	BITCLR (R7, 3);
	BITCLR (R7, 5);
	W[P0] = R7.L;
	IDLE;
	call _test_pll_locked;

	RETS = [SP++];
	( R7:0, P5:0 ) = [SP++];
	RTS;
ENDPROC(_sleep_mode)

/*
 * This func never returns as it puts the part into hibernate, and
 * is only called from do_hibernate, so we don't bother saving or
 * restoring any of the normal C runtime state.  When we wake up,
 * the entry point will be in do_hibernate and not here.
 *
 * We accept just one argument -- the value to write to VR_CTL.
 */
ENTRY(_hibernate_mode)
	/* Save/setup the regs we need early for minor pipeline optimization */
	R4 = R0;
	P3.H = hi(VR_CTL);
	P3.L = lo(VR_CTL);

	/* Disable all wakeup sources */
	R0 = IWR_DISABLE_ALL;
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;
	call _set_sic_iwr;
	call _set_dram_srfs;
	SSYNC;

	/* Finally, we climb into our cave to hibernate */
	W[P3] = R4.L;
	CLI R2;
	IDLE;
.Lforever:
	jump .Lforever;
ENDPROC(_hibernate_mode)

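/*
 * Enter "deep sleep": R0-R2 are the wakeup masks for _set_sic_iwr.  We
 * save the current PLL_DIV/PLL_CTL/VR_CTL, drop the SCLK divider, the
 * VCO multiplier and the core voltage to their minimums, stop CCLK and
 * idle; on wakeup all of the saved settings are restored before returning.
 */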
ENTRY(_sleep_deeper)
	[--SP] = ( R7:0, P5:0 );
	[--SP] = RETS;

	CLI R4;

	P3 = R0;
	P4 = R1;
	P5 = R2;

	R0 = IWR_ENABLE(0);
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;

	call _set_sic_iwr;
	call _set_dram_srfs;	/* Set SDRAM Self Refresh */

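	/* Save the current PLL_DIV and PLL_CTL in R6/R5 so they can be restored on wakeup */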
	P0.H = hi(PLL_DIV);
	P0.L = lo(PLL_DIV);
	R6 = W[P0](z);
	R0.L = 0xF;
	W[P0] = R0.L;	/* Set Max VCO to SCLK divider */

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R5 = W[P0](z);
	R0.L = (CONFIG_MIN_VCO_HZ/CONFIG_CLKIN_HZ) << 9;
	W[P0] = R0.L;	/* Set Min CLKIN to VCO multiplier */

	SSYNC;
	IDLE;

	call _test_pll_locked;

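	/* Save VR_CTL in R7, then program the minimum core voltage */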
	P0.H = hi(VR_CTL);
	P0.L = lo(VR_CTL);
	R7 = W[P0](z);
	R1 = 0x6;
	R1 <<= 16;
	R2 = 0x0404(Z);
	R1 = R1 | R2;

	R2 = DEPOSIT(R7, R1);
	W[P0] = R2;	/* Set Min Core Voltage */

	SSYNC;
	IDLE;

	call _test_pll_locked;

	R0 = P3;
	R1 = P4;
	R2 = P5;
	call _set_sic_iwr;	/* Set Awake from IDLE */

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R0 = W[P0](z);
	BITSET (R0, 3);
	W[P0] = R0.L;	/* Turn CCLK OFF */
	SSYNC;
	IDLE;

	call _test_pll_locked;

	R0 = IWR_ENABLE(0);
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;

	call _set_sic_iwr;	/* Set Awake from IDLE PLL */

	P0.H = hi(VR_CTL);
	P0.L = lo(VR_CTL);
	W[P0] = R7;

	SSYNC;
	IDLE;

	call _test_pll_locked;

	P0.H = hi(PLL_DIV);
	P0.L = lo(PLL_DIV);
	W[P0] = R6;	/* Restore CCLK and SCLK divider */

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	W[P0] = R5;	/* Restore VCO multiplier */
	IDLE;
	call _test_pll_locked;

	call _unset_dram_srfs;	/* SDRAM Self Refresh Off */

	STI R4;

	RETS = [SP++];
	( R7:0, P5:0 ) = [SP++];
	RTS;
ENDPROC(_sleep_deeper)

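/*
 * Put external memory into self-refresh.  DDR parts (those with
 * EBIU_RSTCTL) assert SRREQ and poll for the acknowledge; SDRAM parts
 * set SRFS in EBIU_SDGCTL, poll SDSRA in EBIU_SDSTAT, then gate off
 * CLKOUT.  Clobbers P0 and R2.
 */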
ENTRY(_set_dram_srfs)
	/* set the dram to self refresh mode */
	SSYNC;
#if defined(EBIU_RSTCTL)	/* DDR */
	P0.H = hi(EBIU_RSTCTL);
	P0.L = lo(EBIU_RSTCTL);
	R2 = [P0];
	BITSET(R2, 3);	/* SRREQ enter self-refresh mode */
	[P0] = R2;
	SSYNC;
1:
	R2 = [P0];
	CC = BITTST(R2, 4);
	if !CC JUMP 1b;
#else				/* SDRAM */
	P0.L = lo(EBIU_SDGCTL);
	P0.H = hi(EBIU_SDGCTL);
	R2 = [P0];
	BITSET(R2, 24);	/* SRFS enter self-refresh mode */
	[P0] = R2;
	SSYNC;

	P0.L = lo(EBIU_SDSTAT);
	P0.H = hi(EBIU_SDSTAT);
1:
	R2 = W[P0];
	SSYNC;
	CC = BITTST(R2, 1);	/* SDSRA poll self-refresh status */
	if !CC JUMP 1b;

	P0.L = lo(EBIU_SDGCTL);
	P0.H = hi(EBIU_SDGCTL);
	R2 = [P0];
	BITCLR(R2, 0);	/* SCTLE disable CLKOUT */
	[P0] = R2;
#endif
	RTS;
ENDPROC(_set_dram_srfs)

ENTRY(_unset_dram_srfs)
	/* take the dram out of self-refresh mode */
#if defined(EBIU_RSTCTL)	/* DDR */
	P0.H = hi(EBIU_RSTCTL);
	P0.L = lo(EBIU_RSTCTL);
	R2 = [P0];
	BITCLR(R2, 3);	/* clear SRREQ bit */
	[P0] = R2;
#elif defined(EBIU_SDGCTL)	/* SDRAM */

	P0.L = lo(EBIU_SDGCTL);	/* release CLKOUT from self-refresh */
	P0.H = hi(EBIU_SDGCTL);
	R2 = [P0];
	BITSET(R2, 0);	/* SCTLE enable CLKOUT */
	[P0] = R2;
	SSYNC;

	P0.L = lo(EBIU_SDGCTL);	/* release SDRAM from self-refresh */
	P0.H = hi(EBIU_SDGCTL);
	R2 = [P0];
	BITCLR(R2, 24);	/* clear SRFS bit */
	[P0] = R2;
#endif
	SSYNC;
	RTS;
ENDPROC(_unset_dram_srfs)

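/*
 * Program the SIC wakeup registers: R0/R1/R2 are written to
 * SIC_IWR0/1/2 on parts with split registers, otherwise R0 alone
 * goes to SIC_IWR.  Clobbers P0.
 */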
ENTRY(_set_sic_iwr)
#ifdef SIC_IWR0
	P0.H = hi(SYSMMR_BASE);
	P0.L = lo(SYSMMR_BASE);
	[P0 + (SIC_IWR0 - SYSMMR_BASE)] = R0;
	[P0 + (SIC_IWR1 - SYSMMR_BASE)] = R1;
# ifdef SIC_IWR2
	[P0 + (SIC_IWR2 - SYSMMR_BASE)] = R2;
# endif
#else
	P0.H = hi(SIC_IWR);
	P0.L = lo(SIC_IWR);
	[P0] = R0;
#endif

	SSYNC;
	RTS;
ENDPROC(_set_sic_iwr)

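/* Busy-wait until the PLL lock bit in PLL_STAT is set.  Clobbers P0 and R0. */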
ENTRY(_test_pll_locked)
	P0.H = hi(PLL_STAT);
	P0.L = lo(PLL_STAT);
1:
	R0 = W[P0] (Z);
	CC = BITTST(R0, 5);
	IF !CC JUMP 1b;
	RTS;
ENDPROC(_test_pll_locked)

.section .text

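/*
 * Register scheme for the MMR save/restore below: the 14 regs R7-R0 and
 * P5-P0 are numbered 0-13 so that PM_PUSH_SYNC(n)/PM_POP_SYNC(n) can
 * spill or reload regs 0 through n with a single multi-register stack
 * operation.
 */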
#define PM_REG0  R7
#define PM_REG1  R6
#define PM_REG2  R5
#define PM_REG3  R4
#define PM_REG4  R3
#define PM_REG5  R2
#define PM_REG6  R1
#define PM_REG7  R0
#define PM_REG8  P5
#define PM_REG9  P4
#define PM_REG10 P3
#define PM_REG11 P2
#define PM_REG12 P1
#define PM_REG13 P0

#define PM_REGSET0  R7:7
#define PM_REGSET1  R7:6
#define PM_REGSET2  R7:5
#define PM_REGSET3  R7:4
#define PM_REGSET4  R7:3
#define PM_REGSET5  R7:2
#define PM_REGSET6  R7:1
#define PM_REGSET7  R7:0
#define PM_REGSET8  R7:0, P5:5
#define PM_REGSET9  R7:0, P5:4
#define PM_REGSET10 R7:0, P5:3
#define PM_REGSET11 R7:0, P5:2
#define PM_REGSET12 R7:0, P5:1
#define PM_REGSET13 R7:0, P5:0

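/*
 * Note the naming: _PM_PUSH/PM_PUSH *load* an MMR (relative to the block
 * base held in FP) into PM_REGn, and _PM_POP/PM_POP write it back.  The
 * actual stack traffic happens in PM_PUSH_SYNC/PM_POP_SYNC.
 */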
#define _PM_PUSH(n, x, w, base)	PM_REG##n = w[FP + ((x) - (base))];
#define _PM_POP(n, x, w, base)	w[FP + ((x) - (base))] = PM_REG##n;
#define PM_PUSH_SYNC(n)		[--sp] = (PM_REGSET##n);
#define PM_POP_SYNC(n)		(PM_REGSET##n) = [sp++];
#define PM_PUSH(n, x)		PM_REG##n = [FP++];
#define PM_POP(n, x)		[FP--] = PM_REG##n;
#define PM_CORE_PUSH(n, x)	_PM_PUSH(n, x, , COREMMR_BASE)
#define PM_CORE_POP(n, x)	_PM_POP(n, x, , COREMMR_BASE)
#define PM_SYS_PUSH(n, x)	_PM_PUSH(n, x, , SYSMMR_BASE)
#define PM_SYS_POP(n, x)	_PM_POP(n, x, , SYSMMR_BASE)
#define PM_SYS_PUSH16(n, x)	_PM_PUSH(n, x, w, SYSMMR_BASE)
#define PM_SYS_POP16(n, x)	_PM_POP(n, x, w, SYSMMR_BASE)

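/*
 * Save all core registers and every system/core MMR we care about on the
 * stack, leave a magic word, the resume address and the stack pointer at
 * address 0, then call _hibernate_mode (which never returns).  On wakeup
 * execution resumes at .Lpm_resume_here, the address stored at 0, where
 * everything is popped back in reverse order.
 */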
ENTRY(_do_hibernate)
	/*
	 * Save the core regs early so we can blow them away when
	 * saving/restoring MMR states
	 */
	[--sp] = (R7:0, P5:0);
	[--sp] = fp;
	[--sp] = usp;

	[--sp] = i0;
	[--sp] = i1;
	[--sp] = i2;
	[--sp] = i3;

	[--sp] = m0;
	[--sp] = m1;
	[--sp] = m2;
	[--sp] = m3;

	[--sp] = l0;
	[--sp] = l1;
	[--sp] = l2;
	[--sp] = l3;

	[--sp] = b0;
	[--sp] = b1;
	[--sp] = b2;
	[--sp] = b3;
	[--sp] = a0.x;
	[--sp] = a0.w;
	[--sp] = a1.x;
	[--sp] = a1.w;

	[--sp] = LC0;
	[--sp] = LC1;
	[--sp] = LT0;
	[--sp] = LT1;
	[--sp] = LB0;
	[--sp] = LB1;

	/* We can't push RETI directly as that'll change IPEND[4] */
	r7 = RETI;
	[--sp] = RETS;
	[--sp] = ASTAT;
	[--sp] = CYCLES;
	[--sp] = CYCLES2;
	[--sp] = SYSCFG;
	[--sp] = RETX;
	[--sp] = SEQSTAT;
	[--sp] = r7;

	/* Save first func arg in M3 */
	M3 = R0;

	/* Save system MMRs */
	FP.H = hi(SYSMMR_BASE);
	FP.L = lo(SYSMMR_BASE);

#ifdef SIC_IMASK0
	PM_SYS_PUSH(0, SIC_IMASK0)
	PM_SYS_PUSH(1, SIC_IMASK1)
# ifdef SIC_IMASK2
	PM_SYS_PUSH(2, SIC_IMASK2)
# endif
#else
	PM_SYS_PUSH(0, SIC_IMASK)
#endif
#ifdef SIC_IAR0
	PM_SYS_PUSH(3, SIC_IAR0)
	PM_SYS_PUSH(4, SIC_IAR1)
	PM_SYS_PUSH(5, SIC_IAR2)
#endif
#ifdef SIC_IAR3
	PM_SYS_PUSH(6, SIC_IAR3)
#endif
#ifdef SIC_IAR4
	PM_SYS_PUSH(7, SIC_IAR4)
	PM_SYS_PUSH(8, SIC_IAR5)
	PM_SYS_PUSH(9, SIC_IAR6)
#endif
#ifdef SIC_IAR7
	PM_SYS_PUSH(10, SIC_IAR7)
#endif
#ifdef SIC_IAR8
	PM_SYS_PUSH(11, SIC_IAR8)
	PM_SYS_PUSH(12, SIC_IAR9)
	PM_SYS_PUSH(13, SIC_IAR10)
#endif
	PM_PUSH_SYNC(13)
#ifdef SIC_IAR11
	PM_SYS_PUSH(0, SIC_IAR11)
#endif

#ifdef SIC_IWR
	PM_SYS_PUSH(1, SIC_IWR)
#endif
#ifdef SIC_IWR0
	PM_SYS_PUSH(1, SIC_IWR0)
#endif
#ifdef SIC_IWR1
	PM_SYS_PUSH(2, SIC_IWR1)
#endif
#ifdef SIC_IWR2
	PM_SYS_PUSH(3, SIC_IWR2)
#endif

#ifdef PINT0_ASSIGN
	PM_SYS_PUSH(4, PINT0_MASK_SET)
	PM_SYS_PUSH(5, PINT1_MASK_SET)
	PM_SYS_PUSH(6, PINT2_MASK_SET)
	PM_SYS_PUSH(7, PINT3_MASK_SET)
	PM_SYS_PUSH(8, PINT0_ASSIGN)
	PM_SYS_PUSH(9, PINT1_ASSIGN)
	PM_SYS_PUSH(10, PINT2_ASSIGN)
	PM_SYS_PUSH(11, PINT3_ASSIGN)
	PM_SYS_PUSH(12, PINT0_INVERT_SET)
	PM_SYS_PUSH(13, PINT1_INVERT_SET)
	PM_PUSH_SYNC(13)
	PM_SYS_PUSH(0, PINT2_INVERT_SET)
	PM_SYS_PUSH(1, PINT3_INVERT_SET)
	PM_SYS_PUSH(2, PINT0_EDGE_SET)
	PM_SYS_PUSH(3, PINT1_EDGE_SET)
	PM_SYS_PUSH(4, PINT2_EDGE_SET)
	PM_SYS_PUSH(5, PINT3_EDGE_SET)
#endif

	PM_SYS_PUSH16(6, SYSCR)

	PM_SYS_PUSH16(7, EBIU_AMGCTL)
	PM_SYS_PUSH(8, EBIU_AMBCTL0)
	PM_SYS_PUSH(9, EBIU_AMBCTL1)
#ifdef EBIU_FCTL
	PM_SYS_PUSH(10, EBIU_MBSCTL)
	PM_SYS_PUSH(11, EBIU_MODE)
	PM_SYS_PUSH(12, EBIU_FCTL)
	PM_PUSH_SYNC(12)
#else
	PM_PUSH_SYNC(9)
#endif

	/* Save Core MMRs */
	I0.H = hi(COREMMR_BASE);
	I0.L = lo(COREMMR_BASE);
	I1 = I0;
	I2 = I0;
	I3 = I0;
	B0 = I0;
	B1 = I0;
	B2 = I0;
	B3 = I0;
	I1.L = lo(DCPLB_ADDR0);
	I2.L = lo(DCPLB_DATA0);
	I3.L = lo(ICPLB_ADDR0);
	B0.L = lo(ICPLB_DATA0);
	B1.L = lo(EVT2);
	B2.L = lo(IMASK);
	B3.L = lo(TCNTL);
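	/* I1-I3/B0-B3 now hold the base of each core-MMR block; FP is
	 * pointed at each in turn so the auto-incrementing PM_PUSH macros
	 * can walk the block.
	 */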

	/* DCPLB Addr */
	FP = I1;
	PM_PUSH(0, DCPLB_ADDR0)
	PM_PUSH(1, DCPLB_ADDR1)
	PM_PUSH(2, DCPLB_ADDR2)
	PM_PUSH(3, DCPLB_ADDR3)
	PM_PUSH(4, DCPLB_ADDR4)
	PM_PUSH(5, DCPLB_ADDR5)
	PM_PUSH(6, DCPLB_ADDR6)
	PM_PUSH(7, DCPLB_ADDR7)
	PM_PUSH(8, DCPLB_ADDR8)
	PM_PUSH(9, DCPLB_ADDR9)
	PM_PUSH(10, DCPLB_ADDR10)
	PM_PUSH(11, DCPLB_ADDR11)
	PM_PUSH(12, DCPLB_ADDR12)
	PM_PUSH(13, DCPLB_ADDR13)
	PM_PUSH_SYNC(13)
	PM_PUSH(0, DCPLB_ADDR14)
	PM_PUSH(1, DCPLB_ADDR15)

	/* DCPLB Data */
	FP = I2;
	PM_PUSH(2, DCPLB_DATA0)
	PM_PUSH(3, DCPLB_DATA1)
	PM_PUSH(4, DCPLB_DATA2)
	PM_PUSH(5, DCPLB_DATA3)
	PM_PUSH(6, DCPLB_DATA4)
	PM_PUSH(7, DCPLB_DATA5)
	PM_PUSH(8, DCPLB_DATA6)
	PM_PUSH(9, DCPLB_DATA7)
	PM_PUSH(10, DCPLB_DATA8)
	PM_PUSH(11, DCPLB_DATA9)
	PM_PUSH(12, DCPLB_DATA10)
	PM_PUSH(13, DCPLB_DATA11)
	PM_PUSH_SYNC(13)
	PM_PUSH(0, DCPLB_DATA12)
	PM_PUSH(1, DCPLB_DATA13)
	PM_PUSH(2, DCPLB_DATA14)
	PM_PUSH(3, DCPLB_DATA15)

	/* ICPLB Addr */
	FP = I3;
	PM_PUSH(4, ICPLB_ADDR0)
	PM_PUSH(5, ICPLB_ADDR1)
	PM_PUSH(6, ICPLB_ADDR2)
	PM_PUSH(7, ICPLB_ADDR3)
	PM_PUSH(8, ICPLB_ADDR4)
	PM_PUSH(9, ICPLB_ADDR5)
	PM_PUSH(10, ICPLB_ADDR6)
	PM_PUSH(11, ICPLB_ADDR7)
	PM_PUSH(12, ICPLB_ADDR8)
	PM_PUSH(13, ICPLB_ADDR9)
	PM_PUSH_SYNC(13)
	PM_PUSH(0, ICPLB_ADDR10)
	PM_PUSH(1, ICPLB_ADDR11)
	PM_PUSH(2, ICPLB_ADDR12)
	PM_PUSH(3, ICPLB_ADDR13)
	PM_PUSH(4, ICPLB_ADDR14)
	PM_PUSH(5, ICPLB_ADDR15)

	/* ICPLB Data */
	FP = B0;
	PM_PUSH(6, ICPLB_DATA0)
	PM_PUSH(7, ICPLB_DATA1)
	PM_PUSH(8, ICPLB_DATA2)
	PM_PUSH(9, ICPLB_DATA3)
	PM_PUSH(10, ICPLB_DATA4)
	PM_PUSH(11, ICPLB_DATA5)
	PM_PUSH(12, ICPLB_DATA6)
	PM_PUSH(13, ICPLB_DATA7)
	PM_PUSH_SYNC(13)
	PM_PUSH(0, ICPLB_DATA8)
	PM_PUSH(1, ICPLB_DATA9)
	PM_PUSH(2, ICPLB_DATA10)
	PM_PUSH(3, ICPLB_DATA11)
	PM_PUSH(4, ICPLB_DATA12)
	PM_PUSH(5, ICPLB_DATA13)
	PM_PUSH(6, ICPLB_DATA14)
	PM_PUSH(7, ICPLB_DATA15)

	/* Event Vectors */
	FP = B1;
	PM_PUSH(8, EVT2)
	PM_PUSH(9, EVT3)
	FP += 4;	/* EVT4 */
	PM_PUSH(10, EVT5)
	PM_PUSH(11, EVT6)
	PM_PUSH(12, EVT7)
	PM_PUSH(13, EVT8)
	PM_PUSH_SYNC(13)
	PM_PUSH(0, EVT9)
	PM_PUSH(1, EVT10)
	PM_PUSH(2, EVT11)
	PM_PUSH(3, EVT12)
	PM_PUSH(4, EVT13)
	PM_PUSH(5, EVT14)
	PM_PUSH(6, EVT15)

	/* CEC */
	FP = B2;
	PM_PUSH(7, IMASK)
	FP += 4;	/* IPEND */
	PM_PUSH(8, ILAT)
	PM_PUSH(9, IPRIO)

	/* Core Timer */
	FP = B3;
	PM_PUSH(10, TCNTL)
	PM_PUSH(11, TPERIOD)
	PM_PUSH(12, TSCALE)
	PM_PUSH(13, TCOUNT)
	PM_PUSH_SYNC(13)

	/* Misc non-contiguous registers */
	FP = I0;
	PM_CORE_PUSH(0, DMEM_CONTROL)
	PM_CORE_PUSH(1, IMEM_CONTROL)
	PM_CORE_PUSH(2, TBUFCTL)
	PM_PUSH_SYNC(2)

	/* Setup args to hibernate mode early for pipeline optimization */
	R0 = M3;
	P1.H = _hibernate_mode;
	P1.L = _hibernate_mode;

	/* Save Magic, return address and Stack Pointer */
	P0 = 0;
	R1.H = 0xDEAD;	/* Hibernate Magic */
	R1.L = 0xBEEF;
	R2.H = .Lpm_resume_here;
	R2.L = .Lpm_resume_here;
	[P0++] = R1;	/* Store Hibernate Magic */
	[P0++] = R2;	/* Save Return Address */
	[P0++] = SP;	/* Save Stack Pointer */

	/* Must use an indirect call as we need to jump to L1 */
	call (P1);	/* Goodbye */

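/*
 * Wakeup lands here (this is the address stored at 0 above), with SP
 * expected to have been restored by the wakeup path from the value
 * saved alongside the magic word.
 */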
.Lpm_resume_here:

	/* Restore Core MMRs */
	I0.H = hi(COREMMR_BASE);
	I0.L = lo(COREMMR_BASE);
	I1 = I0;
	I2 = I0;
	I3 = I0;
	B0 = I0;
	B1 = I0;
	B2 = I0;
	B3 = I0;
	I1.L = lo(DCPLB_ADDR15);
	I2.L = lo(DCPLB_DATA15);
	I3.L = lo(ICPLB_ADDR15);
	B0.L = lo(ICPLB_DATA15);
	B1.L = lo(EVT15);
	B2.L = lo(IPRIO);
	B3.L = lo(TCOUNT);

	/* Misc non-contiguous registers */
	FP = I0;
	PM_POP_SYNC(2)
	PM_CORE_POP(2, TBUFCTL)
	PM_CORE_POP(1, IMEM_CONTROL)
	PM_CORE_POP(0, DMEM_CONTROL)

	/* Core Timer */
	PM_POP_SYNC(13)
	FP = B3;
	PM_POP(13, TCOUNT)
	PM_POP(12, TSCALE)
	PM_POP(11, TPERIOD)
	PM_POP(10, TCNTL)

	/* CEC */
	FP = B2;
	PM_POP(9, IPRIO)
	PM_POP(8, ILAT)
	FP += -4;	/* IPEND */
	PM_POP(7, IMASK)

	/* Event Vectors */
	FP = B1;
	PM_POP(6, EVT15)
	PM_POP(5, EVT14)
	PM_POP(4, EVT13)
	PM_POP(3, EVT12)
	PM_POP(2, EVT11)
	PM_POP(1, EVT10)
	PM_POP(0, EVT9)
	PM_POP_SYNC(13)
	PM_POP(13, EVT8)
	PM_POP(12, EVT7)
	PM_POP(11, EVT6)
	PM_POP(10, EVT5)
	FP += -4;	/* EVT4 */
	PM_POP(9, EVT3)
	PM_POP(8, EVT2)

	/* ICPLB Data */
	FP = B0;
	PM_POP(7, ICPLB_DATA15)
	PM_POP(6, ICPLB_DATA14)
	PM_POP(5, ICPLB_DATA13)
	PM_POP(4, ICPLB_DATA12)
	PM_POP(3, ICPLB_DATA11)
	PM_POP(2, ICPLB_DATA10)
	PM_POP(1, ICPLB_DATA9)
	PM_POP(0, ICPLB_DATA8)
	PM_POP_SYNC(13)
	PM_POP(13, ICPLB_DATA7)
	PM_POP(12, ICPLB_DATA6)
	PM_POP(11, ICPLB_DATA5)
	PM_POP(10, ICPLB_DATA4)
	PM_POP(9, ICPLB_DATA3)
	PM_POP(8, ICPLB_DATA2)
	PM_POP(7, ICPLB_DATA1)
	PM_POP(6, ICPLB_DATA0)

	/* ICPLB Addr */
	FP = I3;
	PM_POP(5, ICPLB_ADDR15)
	PM_POP(4, ICPLB_ADDR14)
	PM_POP(3, ICPLB_ADDR13)
	PM_POP(2, ICPLB_ADDR12)
	PM_POP(1, ICPLB_ADDR11)
	PM_POP(0, ICPLB_ADDR10)
	PM_POP_SYNC(13)
	PM_POP(13, ICPLB_ADDR9)
	PM_POP(12, ICPLB_ADDR8)
	PM_POP(11, ICPLB_ADDR7)
	PM_POP(10, ICPLB_ADDR6)
	PM_POP(9, ICPLB_ADDR5)
	PM_POP(8, ICPLB_ADDR4)
	PM_POP(7, ICPLB_ADDR3)
	PM_POP(6, ICPLB_ADDR2)
	PM_POP(5, ICPLB_ADDR1)
	PM_POP(4, ICPLB_ADDR0)

	/* DCPLB Data */
	FP = I2;
	PM_POP(3, DCPLB_DATA15)
	PM_POP(2, DCPLB_DATA14)
	PM_POP(1, DCPLB_DATA13)
	PM_POP(0, DCPLB_DATA12)
	PM_POP_SYNC(13)
	PM_POP(13, DCPLB_DATA11)
	PM_POP(12, DCPLB_DATA10)
	PM_POP(11, DCPLB_DATA9)
	PM_POP(10, DCPLB_DATA8)
	PM_POP(9, DCPLB_DATA7)
	PM_POP(8, DCPLB_DATA6)
	PM_POP(7, DCPLB_DATA5)
	PM_POP(6, DCPLB_DATA4)
	PM_POP(5, DCPLB_DATA3)
	PM_POP(4, DCPLB_DATA2)
	PM_POP(3, DCPLB_DATA1)
	PM_POP(2, DCPLB_DATA0)

	/* DCPLB Addr */
	FP = I1;
	PM_POP(1, DCPLB_ADDR15)
	PM_POP(0, DCPLB_ADDR14)
	PM_POP_SYNC(13)
	PM_POP(13, DCPLB_ADDR13)
	PM_POP(12, DCPLB_ADDR12)
	PM_POP(11, DCPLB_ADDR11)
	PM_POP(10, DCPLB_ADDR10)
	PM_POP(9, DCPLB_ADDR9)
	PM_POP(8, DCPLB_ADDR8)
	PM_POP(7, DCPLB_ADDR7)
	PM_POP(6, DCPLB_ADDR6)
	PM_POP(5, DCPLB_ADDR5)
	PM_POP(4, DCPLB_ADDR4)
	PM_POP(3, DCPLB_ADDR3)
	PM_POP(2, DCPLB_ADDR2)
	PM_POP(1, DCPLB_ADDR1)
	PM_POP(0, DCPLB_ADDR0)

	/* Restore System MMRs */
	FP.H = hi(SYSMMR_BASE);
	FP.L = lo(SYSMMR_BASE);

#ifdef EBIU_FCTL
	PM_POP_SYNC(12)
	PM_SYS_POP(12, EBIU_FCTL)
	PM_SYS_POP(11, EBIU_MODE)
	PM_SYS_POP(10, EBIU_MBSCTL)
#else
	PM_POP_SYNC(9)
#endif
	PM_SYS_POP(9, EBIU_AMBCTL1)
	PM_SYS_POP(8, EBIU_AMBCTL0)
	PM_SYS_POP16(7, EBIU_AMGCTL)

	PM_SYS_POP16(6, SYSCR)

#ifdef PINT0_ASSIGN
	PM_SYS_POP(5, PINT3_EDGE_SET)
	PM_SYS_POP(4, PINT2_EDGE_SET)
	PM_SYS_POP(3, PINT1_EDGE_SET)
	PM_SYS_POP(2, PINT0_EDGE_SET)
	PM_SYS_POP(1, PINT3_INVERT_SET)
	PM_SYS_POP(0, PINT2_INVERT_SET)
	PM_POP_SYNC(13)
	PM_SYS_POP(13, PINT1_INVERT_SET)
	PM_SYS_POP(12, PINT0_INVERT_SET)
	PM_SYS_POP(11, PINT3_ASSIGN)
	PM_SYS_POP(10, PINT2_ASSIGN)
	PM_SYS_POP(9, PINT1_ASSIGN)
	PM_SYS_POP(8, PINT0_ASSIGN)
	PM_SYS_POP(7, PINT3_MASK_SET)
	PM_SYS_POP(6, PINT2_MASK_SET)
	PM_SYS_POP(5, PINT1_MASK_SET)
	PM_SYS_POP(4, PINT0_MASK_SET)
#endif

#ifdef SIC_IWR2
	PM_SYS_POP(3, SIC_IWR2)
#endif
#ifdef SIC_IWR1
	PM_SYS_POP(2, SIC_IWR1)
#endif
#ifdef SIC_IWR0
	PM_SYS_POP(1, SIC_IWR0)
#endif
#ifdef SIC_IWR
	PM_SYS_POP(1, SIC_IWR)
#endif

#ifdef SIC_IAR11
	PM_SYS_POP(0, SIC_IAR11)
#endif
	PM_POP_SYNC(13)
#ifdef SIC_IAR8
	PM_SYS_POP(13, SIC_IAR10)
	PM_SYS_POP(12, SIC_IAR9)
	PM_SYS_POP(11, SIC_IAR8)
#endif
#ifdef SIC_IAR7
	PM_SYS_POP(10, SIC_IAR7)
#endif
#ifdef SIC_IAR6
	PM_SYS_POP(9, SIC_IAR6)
	PM_SYS_POP(8, SIC_IAR5)
	PM_SYS_POP(7, SIC_IAR4)
#endif
#ifdef SIC_IAR3
	PM_SYS_POP(6, SIC_IAR3)
#endif
#ifdef SIC_IAR0
	PM_SYS_POP(5, SIC_IAR2)
	PM_SYS_POP(4, SIC_IAR1)
	PM_SYS_POP(3, SIC_IAR0)
#endif
#ifdef SIC_IMASK0
# ifdef SIC_IMASK2
	PM_SYS_POP(2, SIC_IMASK2)
# endif
	PM_SYS_POP(1, SIC_IMASK1)
	PM_SYS_POP(0, SIC_IMASK0)
#else
	PM_SYS_POP(0, SIC_IMASK)
#endif

	/* Restore Core Registers */
	RETI = [sp++];
	SEQSTAT = [sp++];
	RETX = [sp++];
	SYSCFG = [sp++];
	CYCLES2 = [sp++];
	CYCLES = [sp++];
	ASTAT = [sp++];
	RETS = [sp++];

	LB1 = [sp++];
	LB0 = [sp++];
	LT1 = [sp++];
	LT0 = [sp++];
	LC1 = [sp++];
	LC0 = [sp++];

	a1.w = [sp++];
	a1.x = [sp++];
	a0.w = [sp++];
	a0.x = [sp++];
	b3 = [sp++];
	b2 = [sp++];
	b1 = [sp++];
	b0 = [sp++];

	l3 = [sp++];
	l2 = [sp++];
	l1 = [sp++];
	l0 = [sp++];

	m3 = [sp++];
	m2 = [sp++];
	m1 = [sp++];
	m0 = [sp++];

	i3 = [sp++];
	i2 = [sp++];
	i1 = [sp++];
	i0 = [sp++];

	usp = [sp++];
	fp = [sp++];
	(R7:0, P5:0) = [sp++];

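	/* Pushing RETI re-enables interrupts (it clears the global disable
	 * in IPEND[4]); the SP adjustment just discards the pushed value.
	 */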
	[--sp] = RETI;	/* Clear Global Interrupt Disable */
	SP += 4;

	RTS;
ENDPROC(_do_hibernate)