/*
 * Hibernation support specific for ARM
 *
 * Copyright (C) 2010 Nokia Corporation
 * Copyright (C) 2010 Texas Instruments, Inc.
 * Copyright (C) 2006 Rafael J. Wysocki <rjw@sisk.pl>
 *
 * Contact: Hiroshi DOYU <Hiroshi.DOYU@nokia.com>
 *
 * License terms: GNU General Public License (GPL) version 2
 */
13 #include <linux/module.h>
/*
 * Image of the saved processor state
 *
 * coprocessor 15 registers (RW)
 */
21 struct saved_context_cortex_a8 {
23 u32 cssr; /* Cache Size Selection */
26 u32 cacr; /* Coprocessor Access Control */
28 u32 ttb_0r; /* Translation Table Base 0 */
29 u32 ttb_1r; /* Translation Table Base 1 */
30 u32 ttbcr; /* Translation Talbe Base Control */
32 u32 dacr; /* Domain Access Control */
34 u32 d_fsr; /* Data Fault Status */
35 u32 i_fsr; /* Instruction Fault Status */
36 u32 d_afsr; /* Data Auxilirary Fault Status */ ;
37 u32 i_afsr; /* Instruction Auxilirary Fault Status */;
39 u32 d_far; /* Data Fault Address */
40 u32 i_far; /* Instruction Fault Address */
42 u32 par; /* Physical Address */
43 /* CR9 */ /* FIXME: Are they necessary? */
44 u32 pmcontrolr; /* Performance Monitor Control */
45 u32 cesr; /* Count Enable Set */
46 u32 cecr; /* Count Enable Clear */
47 u32 ofsr; /* Overflow Flag Status */
48 u32 sir; /* Software Increment */
49 u32 pcsr; /* Performance Counter Selection */
50 u32 ccr; /* Cycle Count */
51 u32 esr; /* Event Selection */
52 u32 pmcountr; /* Performance Monitor Count */
53 u32 uer; /* User Enable */
54 u32 iesr; /* Interrupt Enable Set */
55 u32 iecr; /* Interrupt Enable Clear */
56 u32 l2clr; /* L2 Cache Lockdown */
58 u32 d_tlblr; /* Data TLB Lockdown Register */
59 u32 i_tlblr; /* Instruction TLB Lockdown Register */
60 u32 prrr; /* Primary Region Remap Register */
61 u32 nrrr; /* Normal Memory Remap Register */
63 u32 pleuar; /* PLE User Accessibility */
64 u32 plecnr; /* PLE Channel Number */
65 u32 plecr; /* PLE Control */
66 u32 pleisar; /* PLE Internal Start Address */
67 u32 pleiear; /* PLE Internal End Address */
68 u32 plecidr; /* PLE Context ID */
70 u32 snsvbar; /* Secure or Nonsecure Vector Base Address */
72 u32 fcse; /* FCSE PID */
73 u32 cid; /* Context ID */
74 u32 urwtpid; /* User read/write Thread and Process ID */
75 u32 urotpid; /* User read-only Thread and Process ID */
76 u32 potpid; /* Privileged only Thread and Process ID */
77 } __attribute__((packed));
79 struct saved_context_cortex_a9 {
81 u32 cssr; /* Cache Size Selection */
89 u32 ttb_0r; /* Translation Table Base 0 */
90 u32 ttb_1r; /* Translation Table Base 1 */
91 u32 ttbcr; /* Translation Talbe Base Control */
93 u32 dacr; /* Domain Access Control */
95 u32 d_fsr; /* Data Fault Status */
96 u32 i_fsr; /* Instruction Fault Status */
97 u32 d_afsr; /* Data Auxilirary Fault Status */ ;
98 u32 i_afsr; /* Instruction Auxilirary Fault Status */;
100 u32 d_far; /* Data Fault Address */
101 u32 i_far; /* Instruction Fault Address */
103 u32 par; /* Physical Address */
104 /* CR9 */ /* FIXME: Are they necessary? */
105 u32 pmcontrolr; /* Performance Monitor Control */
106 u32 cesr; /* Count Enable Set */
107 u32 cecr; /* Count Enable Clear */
108 u32 ofsr; /* Overflow Flag Status */
109 u32 pcsr; /* Performance Counter Selection */
110 u32 ccr; /* Cycle Count */
111 u32 esr; /* Event Selection */
112 u32 pmcountr; /* Performance Monitor Count */
113 u32 uer; /* User Enable */
114 u32 iesr; /* Interrupt Enable Set */
115 u32 iecr; /* Interrupt Enable Clear */
117 u32 d_tlblr; /* Data TLB Lockdown Register */
118 u32 prrr; /* Primary Region Remap Register */
119 u32 nrrr; /* Normal Memory Remap Register */
126 u32 fcse; /* FCSE PID */
127 u32 cid; /* Context ID */
128 u32 urwtpid; /* User read/write Thread and Process ID */
129 u32 urotpid; /* User read-only Thread and Process ID */
130 u32 potpid; /* Privileged only Thread and Process ID */
133 } __attribute__((packed));
135 union saved_context {
136 struct saved_context_cortex_a8 cortex_a8;
137 struct saved_context_cortex_a9 cortex_a9;
/* Used in hibernate_asm.S */
#define USER_CONTEXT_SIZE (15 * sizeof(u32))
/*
 * CPU register file saved around the image copy.  These objects are
 * referenced by name from hibernate_asm.S: do not rename or reorder.
 *
 * NOTE(review): USER_CONTEXT_SIZE is a byte count (15 * sizeof(u32))
 * yet is used below as an element count of unsigned long, making the
 * array larger than 15 registers need -- confirm against
 * hibernate_asm.S before changing.
 */
unsigned long saved_context_r0[USER_CONTEXT_SIZE];	/* presumably r0-r14 */
unsigned long saved_cpsr;		/* current program status register */
unsigned long saved_context_r13_svc;	/* SVC-mode stack pointer */
unsigned long saved_context_r14_svc;	/* SVC-mode link register */
unsigned long saved_spsr_svc;		/* SVC-mode saved program status */

/* CP15 image captured at suspend and replayed on resume. */
static union saved_context saved_context;
/* References to section boundaries */
/* Linker-provided bounds of the nosave region; only their addresses are
 * meaningful, never their contents. */
extern const void __nosave_begin, __nosave_end;
154 * pfn_is_nosave - check if given pfn is in the 'nosave' section
156 int pfn_is_nosave(unsigned long pfn)
158 unsigned long nosave_begin_pfn = __pa_symbol(&__nosave_begin)
160 unsigned long nosave_end_pfn = PAGE_ALIGN(__pa_symbol(&__nosave_end))
163 return (pfn >= nosave_begin_pfn) && (pfn < nosave_end_pfn);
166 #define PART_NUM_CORTEX_A8 (0xC08)
167 #define PART_NUM_CORTEX_A9 (0xC09)
169 static inline u32 arm_primary_part_number(void)
173 asm volatile ("mrc p15, 0, %0, c0, c0, 0" : "=r"(id));
176 if ((id & 0xff000000) != 0x41000000)
184 static inline void __save_processor_state_a8(
185 struct saved_context_cortex_a8 *ctxt)
188 asm volatile ("mrc p15, 2, %0, c0, c0, 0" : "=r"(ctxt->cssr));
190 asm volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r"(ctxt->cr));
191 asm volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r"(ctxt->cacr));
193 asm volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r"(ctxt->ttb_0r));
194 asm volatile ("mrc p15, 0, %0, c2, c0, 1" : "=r"(ctxt->ttb_1r));
195 asm volatile ("mrc p15, 0, %0, c2, c0, 2" : "=r"(ctxt->ttbcr));
197 asm volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r"(ctxt->dacr));
199 asm volatile ("mrc p15, 0, %0, c5, c0, 0" : "=r"(ctxt->d_fsr));
200 asm volatile ("mrc p15, 0, %0, c5, c0, 1" : "=r"(ctxt->i_fsr));
201 asm volatile ("mrc p15, 0, %0, c5, c1, 0" : "=r"(ctxt->d_afsr));
202 asm volatile ("mrc p15, 0, %0, c5, c1, 1" : "=r"(ctxt->i_afsr));
204 asm volatile ("mrc p15, 0, %0, c6, c0, 0" : "=r"(ctxt->d_far));
205 asm volatile ("mrc p15, 0, %0, c6, c0, 2" : "=r"(ctxt->i_far));
207 asm volatile ("mrc p15, 0, %0, c7, c4, 0" : "=r"(ctxt->par));
209 asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(ctxt->pmcontrolr));
210 asm volatile ("mrc p15, 0, %0, c9, c12, 1" : "=r"(ctxt->cesr));
211 asm volatile ("mrc p15, 0, %0, c9, c12, 2" : "=r"(ctxt->cecr));
212 asm volatile ("mrc p15, 0, %0, c9, c12, 3" : "=r"(ctxt->ofsr));
213 asm volatile ("mrc p15, 0, %0, c9, c12, 4" : "=r"(ctxt->sir));
214 asm volatile ("mrc p15, 0, %0, c9, c12, 5" : "=r"(ctxt->pcsr));
215 asm volatile ("mrc p15, 0, %0, c9, c13, 0" : "=r"(ctxt->ccr));
216 asm volatile ("mrc p15, 0, %0, c9, c13, 1" : "=r"(ctxt->esr));
217 asm volatile ("mrc p15, 0, %0, c9, c13, 2" : "=r"(ctxt->pmcountr));
218 asm volatile ("mrc p15, 0, %0, c9, c14, 0" : "=r"(ctxt->uer));
219 asm volatile ("mrc p15, 0, %0, c9, c14, 1" : "=r"(ctxt->iesr));
220 asm volatile ("mrc p15, 0, %0, c9, c14, 2" : "=r"(ctxt->iecr));
221 asm volatile ("mrc p15, 1, %0, c9, c0, 0" : "=r"(ctxt->l2clr));
223 asm volatile ("mrc p15, 0, %0, c10, c0, 0" : "=r"(ctxt->d_tlblr));
224 asm volatile ("mrc p15, 0, %0, c10, c0, 1" : "=r"(ctxt->i_tlblr));
225 asm volatile ("mrc p15, 0, %0, c10, c2, 0" : "=r"(ctxt->prrr));
226 asm volatile ("mrc p15, 0, %0, c10, c2, 1" : "=r"(ctxt->nrrr));
228 asm volatile ("mrc p15, 0, %0, c11, c1, 0" : "=r"(ctxt->pleuar));
229 asm volatile ("mrc p15, 0, %0, c11, c2, 0" : "=r"(ctxt->plecnr));
230 asm volatile ("mrc p15, 0, %0, c11, c4, 0" : "=r"(ctxt->plecr));
231 asm volatile ("mrc p15, 0, %0, c11, c5, 0" : "=r"(ctxt->pleisar));
232 asm volatile ("mrc p15, 0, %0, c11, c7, 0" : "=r"(ctxt->pleiear));
233 asm volatile ("mrc p15, 0, %0, c11, c15, 0" : "=r"(ctxt->plecidr));
235 asm volatile ("mrc p15, 0, %0, c12, c0, 0" : "=r"(ctxt->snsvbar));
237 asm volatile ("mrc p15, 0, %0, c13, c0, 0" : "=r"(ctxt->fcse));
238 asm volatile ("mrc p15, 0, %0, c13, c0, 1" : "=r"(ctxt->cid));
239 asm volatile ("mrc p15, 0, %0, c13, c0, 2" : "=r"(ctxt->urwtpid));
240 asm volatile ("mrc p15, 0, %0, c13, c0, 3" : "=r"(ctxt->urotpid));
241 asm volatile ("mrc p15, 0, %0, c13, c0, 4" : "=r"(ctxt->potpid));
244 static inline void __save_processor_state_a9(
245 struct saved_context_cortex_a9 *ctxt)
248 asm volatile ("mrc p15, 2, %0, c0, c0, 0" : "=r"(ctxt->cssr));
250 asm volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r"(ctxt->cr));
251 asm volatile ("mrc p15, 0, %0, c1, c0, 1" : "=r"(ctxt->actlr));
252 asm volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r"(ctxt->cacr));
253 asm volatile ("mrc p15, 0, %0, c1, c1, 1" : "=r"(ctxt->sder));
254 asm volatile ("mrc p15, 0, %0, c1, c1, 3" : "=r"(ctxt->vcr));
256 asm volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r"(ctxt->ttb_0r));
257 asm volatile ("mrc p15, 0, %0, c2, c0, 1" : "=r"(ctxt->ttb_1r));
258 asm volatile ("mrc p15, 0, %0, c2, c0, 2" : "=r"(ctxt->ttbcr));
260 asm volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r"(ctxt->dacr));
262 asm volatile ("mrc p15, 0, %0, c5, c0, 0" : "=r"(ctxt->d_fsr));
263 asm volatile ("mrc p15, 0, %0, c5, c0, 1" : "=r"(ctxt->i_fsr));
264 asm volatile ("mrc p15, 0, %0, c5, c1, 0" : "=r"(ctxt->d_afsr));
265 asm volatile ("mrc p15, 0, %0, c5, c1, 1" : "=r"(ctxt->i_afsr));
267 asm volatile ("mrc p15, 0, %0, c6, c0, 0" : "=r"(ctxt->d_far));
268 asm volatile ("mrc p15, 0, %0, c6, c0, 2" : "=r"(ctxt->i_far));
270 asm volatile ("mrc p15, 0, %0, c7, c4, 0" : "=r"(ctxt->par));
272 asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(ctxt->pmcontrolr));
273 asm volatile ("mrc p15, 0, %0, c9, c12, 1" : "=r"(ctxt->cesr));
274 asm volatile ("mrc p15, 0, %0, c9, c12, 2" : "=r"(ctxt->cecr));
275 asm volatile ("mrc p15, 0, %0, c9, c12, 3" : "=r"(ctxt->ofsr));
276 asm volatile ("mrc p15, 0, %0, c9, c12, 5" : "=r"(ctxt->pcsr));
277 asm volatile ("mrc p15, 0, %0, c9, c13, 0" : "=r"(ctxt->ccr));
278 asm volatile ("mrc p15, 0, %0, c9, c13, 1" : "=r"(ctxt->esr));
279 asm volatile ("mrc p15, 0, %0, c9, c13, 2" : "=r"(ctxt->pmcountr));
280 asm volatile ("mrc p15, 0, %0, c9, c14, 0" : "=r"(ctxt->uer));
281 asm volatile ("mrc p15, 0, %0, c9, c14, 1" : "=r"(ctxt->iesr));
282 asm volatile ("mrc p15, 0, %0, c9, c14, 2" : "=r"(ctxt->iecr));
284 asm volatile ("mrc p15, 0, %0, c10, c0, 0" : "=r"(ctxt->d_tlblr));
285 asm volatile ("mrc p15, 0, %0, c10, c2, 0" : "=r"(ctxt->prrr));
286 asm volatile ("mrc p15, 0, %0, c10, c2, 1" : "=r"(ctxt->nrrr));
289 asm volatile ("mrc p15, 0, %0, c12, c0, 0" : "=r"(ctxt->vbar));
290 asm volatile ("mrc p15, 0, %0, c12, c0, 1" : "=r"(ctxt->mvbar));
291 asm volatile ("mrc p15, 0, %0, c12, c1, 1" : "=r"(ctxt->vir));
293 asm volatile ("mrc p15, 0, %0, c13, c0, 0" : "=r"(ctxt->fcse));
294 asm volatile ("mrc p15, 0, %0, c13, c0, 1" : "=r"(ctxt->cid));
295 asm volatile ("mrc p15, 0, %0, c13, c0, 2" : "=r"(ctxt->urwtpid));
296 asm volatile ("mrc p15, 0, %0, c13, c0, 3" : "=r"(ctxt->urotpid));
297 asm volatile ("mrc p15, 0, %0, c13, c0, 4" : "=r"(ctxt->potpid));
299 asm volatile ("mrc p15, 5, %0, c15, c7, 2" : "=r"(ctxt->mtlbar));
302 static inline void __save_processor_state(union saved_context *ctxt)
304 switch (arm_primary_part_number()) {
305 case PART_NUM_CORTEX_A8:
306 __save_processor_state_a8(&ctxt->cortex_a8);
308 case PART_NUM_CORTEX_A9:
309 __save_processor_state_a9(&ctxt->cortex_a9);
312 WARN(true, "Hibernation is not supported for this processor.(%d)",
313 arm_primary_part_number());
317 static inline void __restore_processor_state_a8(
318 struct saved_context_cortex_a8 *ctxt)
321 asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r"(ctxt->cssr));
323 asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r"(ctxt->cr));
324 asm volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r"(ctxt->cacr));
326 asm volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r"(ctxt->ttb_0r));
327 asm volatile ("mcr p15, 0, %0, c2, c0, 1" : : "r"(ctxt->ttb_1r));
328 asm volatile ("mcr p15, 0, %0, c2, c0, 2" : : "r"(ctxt->ttbcr));
330 asm volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r"(ctxt->dacr));
332 asm volatile ("mcr p15, 0, %0, c5, c0, 0" : : "r"(ctxt->d_fsr));
333 asm volatile ("mcr p15, 0, %0, c5, c0, 1" : : "r"(ctxt->i_fsr));
334 asm volatile ("mcr p15, 0, %0, c5, c1, 0" : : "r"(ctxt->d_afsr));
335 asm volatile ("mcr p15, 0, %0, c5, c1, 1" : : "r"(ctxt->i_afsr));
337 asm volatile ("mcr p15, 0, %0, c6, c0, 0" : : "r"(ctxt->d_far));
338 asm volatile ("mcr p15, 0, %0, c6, c0, 2" : : "r"(ctxt->i_far));
340 asm volatile ("mcr p15, 0, %0, c7, c4, 0" : : "r"(ctxt->par));
342 asm volatile ("mcr p15, 0, %0, c9, c12, 0" : : "r"(ctxt->pmcontrolr));
343 asm volatile ("mcr p15, 0, %0, c9, c12, 1" : : "r"(ctxt->cesr));
344 asm volatile ("mcr p15, 0, %0, c9, c12, 2" : : "r"(ctxt->cecr));
345 asm volatile ("mcr p15, 0, %0, c9, c12, 3" : : "r"(ctxt->ofsr));
346 asm volatile ("mcr p15, 0, %0, c9, c12, 4" : : "r"(ctxt->sir));
347 asm volatile ("mcr p15, 0, %0, c9, c12, 5" : : "r"(ctxt->pcsr));
348 asm volatile ("mcr p15, 0, %0, c9, c13, 0" : : "r"(ctxt->ccr));
349 asm volatile ("mcr p15, 0, %0, c9, c13, 1" : : "r"(ctxt->esr));
350 asm volatile ("mcr p15, 0, %0, c9, c13, 2" : : "r"(ctxt->pmcountr));
351 asm volatile ("mcr p15, 0, %0, c9, c14, 0" : : "r"(ctxt->uer));
352 asm volatile ("mcr p15, 0, %0, c9, c14, 1" : : "r"(ctxt->iesr));
353 asm volatile ("mcr p15, 0, %0, c9, c14, 2" : : "r"(ctxt->iecr));
354 asm volatile ("mcr p15, 1, %0, c9, c0, 0" : : "r"(ctxt->l2clr));
356 asm volatile ("mcr p15, 0, %0, c10, c0, 0" : : "r"(ctxt->d_tlblr));
357 asm volatile ("mcr p15, 0, %0, c10, c0, 1" : : "r"(ctxt->i_tlblr));
358 asm volatile ("mcr p15, 0, %0, c10, c2, 0" : : "r"(ctxt->prrr));
359 asm volatile ("mcr p15, 0, %0, c10, c2, 1" : : "r"(ctxt->nrrr));
361 asm volatile ("mcr p15, 0, %0, c11, c1, 0" : : "r"(ctxt->pleuar));
362 asm volatile ("mcr p15, 0, %0, c11, c2, 0" : : "r"(ctxt->plecnr));
363 asm volatile ("mcr p15, 0, %0, c11, c4, 0" : : "r"(ctxt->plecr));
364 asm volatile ("mcr p15, 0, %0, c11, c5, 0" : : "r"(ctxt->pleisar));
365 asm volatile ("mcr p15, 0, %0, c11, c7, 0" : : "r"(ctxt->pleiear));
366 asm volatile ("mcr p15, 0, %0, c11, c15, 0" : : "r"(ctxt->plecidr));
368 asm volatile ("mcr p15, 0, %0, c12, c0, 0" : : "r"(ctxt->snsvbar));
370 asm volatile ("mcr p15, 0, %0, c13, c0, 0" : : "r"(ctxt->fcse));
371 asm volatile ("mcr p15, 0, %0, c13, c0, 1" : : "r"(ctxt->cid));
372 asm volatile ("mcr p15, 0, %0, c13, c0, 2" : : "r"(ctxt->urwtpid));
373 asm volatile ("mcr p15, 0, %0, c13, c0, 3" : : "r"(ctxt->urotpid));
374 asm volatile ("mcr p15, 0, %0, c13, c0, 4" : : "r"(ctxt->potpid));
377 static inline void __restore_processor_state_a9(
378 struct saved_context_cortex_a9 *ctxt)
381 asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r"(ctxt->cssr));
383 asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r"(ctxt->cr));
384 asm volatile ("mcr p15, 0, %0, c1, c0, 1" : : "r"(ctxt->actlr));
385 asm volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r"(ctxt->cacr));
386 asm volatile ("mcr p15, 0, %0, c1, c1, 1" : : "r"(ctxt->sder));
387 asm volatile ("mcr p15, 0, %0, c1, c1, 3" : : "r"(ctxt->vcr));
389 asm volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r"(ctxt->ttb_0r));
390 asm volatile ("mcr p15, 0, %0, c2, c0, 1" : : "r"(ctxt->ttb_1r));
391 asm volatile ("mcr p15, 0, %0, c2, c0, 2" : : "r"(ctxt->ttbcr));
393 asm volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r"(ctxt->dacr));
395 asm volatile ("mcr p15, 0, %0, c5, c0, 0" : : "r"(ctxt->d_fsr));
396 asm volatile ("mcr p15, 0, %0, c5, c0, 1" : : "r"(ctxt->i_fsr));
397 asm volatile ("mcr p15, 0, %0, c5, c1, 0" : : "r"(ctxt->d_afsr));
398 asm volatile ("mcr p15, 0, %0, c5, c1, 1" : : "r"(ctxt->i_afsr));
400 asm volatile ("mcr p15, 0, %0, c6, c0, 0" : : "r"(ctxt->d_far));
401 asm volatile ("mcr p15, 0, %0, c6, c0, 2" : : "r"(ctxt->i_far));
403 asm volatile ("mcr p15, 0, %0, c7, c4, 0" : : "r"(ctxt->par));
405 asm volatile ("mcr p15, 0, %0, c9, c12, 0" : : "r"(ctxt->pmcontrolr));
406 asm volatile ("mcr p15, 0, %0, c9, c12, 1" : : "r"(ctxt->cesr));
407 asm volatile ("mcr p15, 0, %0, c9, c12, 2" : : "r"(ctxt->cecr));
408 asm volatile ("mcr p15, 0, %0, c9, c12, 3" : : "r"(ctxt->ofsr));
409 asm volatile ("mcr p15, 0, %0, c9, c12, 5" : : "r"(ctxt->pcsr));
410 asm volatile ("mcr p15, 0, %0, c9, c13, 0" : : "r"(ctxt->ccr));
411 asm volatile ("mcr p15, 0, %0, c9, c13, 1" : : "r"(ctxt->esr));
412 asm volatile ("mcr p15, 0, %0, c9, c13, 2" : : "r"(ctxt->pmcountr));
413 asm volatile ("mcr p15, 0, %0, c9, c14, 0" : : "r"(ctxt->uer));
414 asm volatile ("mcr p15, 0, %0, c9, c14, 1" : : "r"(ctxt->iesr));
415 asm volatile ("mcr p15, 0, %0, c9, c14, 2" : : "r"(ctxt->iecr));
417 asm volatile ("mcr p15, 0, %0, c10, c0, 0" : : "r"(ctxt->d_tlblr));
418 asm volatile ("mcr p15, 0, %0, c10, c2, 0" : : "r"(ctxt->prrr));
419 asm volatile ("mcr p15, 0, %0, c10, c2, 1" : : "r"(ctxt->nrrr));
422 asm volatile ("mcr p15, 0, %0, c12, c0, 0" : : "r"(ctxt->vbar));
423 asm volatile ("mcr p15, 0, %0, c12, c0, 1" : : "r"(ctxt->mvbar));
424 asm volatile ("mcr p15, 0, %0, c12, c1, 1" : : "r"(ctxt->vir));
426 asm volatile ("mcr p15, 0, %0, c13, c0, 0" : : "r"(ctxt->fcse));
427 asm volatile ("mcr p15, 0, %0, c13, c0, 1" : : "r"(ctxt->cid));
428 asm volatile ("mcr p15, 0, %0, c13, c0, 2" : : "r"(ctxt->urwtpid));
429 asm volatile ("mcr p15, 0, %0, c13, c0, 3" : : "r"(ctxt->urotpid));
430 asm volatile ("mcr p15, 0, %0, c13, c0, 4" : : "r"(ctxt->potpid));
432 asm volatile ("mcr p15, 5, %0, c15, c7, 2" : : "r"(ctxt->mtlbar));
435 static inline void __restore_processor_state(union saved_context *ctxt)
437 switch (arm_primary_part_number()) {
438 case PART_NUM_CORTEX_A8:
439 __restore_processor_state_a8(&ctxt->cortex_a8);
441 case PART_NUM_CORTEX_A9:
442 __restore_processor_state_a9(&ctxt->cortex_a9);
445 WARN(true, "Hibernation is not supported for this processor.(%d)",
446 arm_primary_part_number());
450 void save_processor_state(void)
453 __save_processor_state(&saved_context);
456 void restore_processor_state(void)
458 __restore_processor_state(&saved_context);