/* mt_cache_v7.S — ARMv7 cache maintenance routines (CP15 set/way operations) */
        .text

@ Exported entry points: ARMv7 cache-maintenance helpers.
        .global __inner_flush_dcache_all
        .global __inner_flush_dcache_L1
        .global __inner_flush_dcache_L2
        .global __inner_clean_dcache_all
        .global __inner_clean_dcache_L1
        .global __inner_clean_dcache_L2
        .global __inner_inv_dcache_all
        .global __inner_inv_dcache_L1
        .global __inner_inv_dcache_L2
        .global __enable_dcache
        .global __enable_icache
        .global __enable_cache
        .global __disable_dcache
        .global __disable_icache
        .global __disable_cache
        .global __disable_dcache__inner_flush_dcache_L1
        .global __disable_dcache__inner_flush_dcache_L1__inner_flush_dcache_L2
        .global __disable_dcache__inner_flush_dcache_L1__inner_clean_dcache_L2
        .global d_i_dis_flush_all
@ SCTLR (CP15 c1, System Control Register) bits.
        .equ C1_IBIT , 0x00001000       @ SCTLR.I: instruction-cache enable
        .equ C1_CBIT , 0x00000004       @ SCTLR.C: data/unified-cache enable
@ CPSR interrupt-mask bits.
        .equ PSR_F_BIT, 0x00000040      @ F: FIQs masked when set
        .equ PSR_I_BIT, 0x00000080      @ I: IRQs masked when set
@ void __enable_icache(void)
@ Turn the instruction cache on by setting SCTLR.I. Clobbers r0 only.
__enable_icache:
        MRC p15,0,r0,c1,c0,0            @ r0 = SCTLR
        ORR r0,r0,#C1_IBIT              @ set the I-cache enable bit
        MCR p15,0,r0,c1,c0,0            @ write SCTLR back
        BX lr
@ void __disable_icache(void)
@ Turn the instruction cache off by clearing SCTLR.I. Clobbers r0 only.
__disable_icache:
        MRC p15,0,r0,c1,c0,0            @ r0 = SCTLR
        BIC r0,r0,#C1_IBIT              @ clear the I-cache enable bit
        MCR p15,0,r0,c1,c0,0            @ write SCTLR back
        BX lr
@ void __enable_dcache(void)
@ Turn the data/unified caches on by setting SCTLR.C. The dsb/isb pair
@ makes the SCTLR write take effect before any following instruction.
@ Clobbers r0 only.
__enable_dcache:
        MRC p15,0,r0,c1,c0,0            @ r0 = SCTLR
        ORR r0,r0,#C1_CBIT              @ set the D-cache enable bit
        dsb                             @ drain outstanding memory accesses first
        MCR p15,0,r0,c1,c0,0            @ write SCTLR back
        dsb
        isb                             @ ensure the enable is visible to subsequent code
        BX lr
@ void __disable_dcache(void)
@ Turn the data/unified caches off by clearing SCTLR.C, then apply the
@ erratum-794322 workaround (TLBIMVA + DSB) so no instruction fetch can be
@ allocated into L2 between the disable and a later clean/invalidate.
@ Clobbers r0 only.
__disable_dcache:
        MRC p15,0,r0,c1,c0,0            @ r0 = SCTLR
        BIC r0,r0,#C1_CBIT              @ clear the D-cache enable bit
        dsb                             @ drain outstanding memory accesses first
        MCR p15,0,r0,c1,c0,0            @ write SCTLR back
        dsb
        isb
/*
Erratum:794322,An instruction fetch can be allocated into the L2 cache after the cache is disabled Status
This erratum can be avoided by inserting both of the following after the SCTLR.C bit is cleared to 0,
and before the caches are cleaned or invalidated:
1) A TLBIMVA operation to any address.
2) A DSB instruction.
*/
        MCR p15,0,r0,c8,c7,1            @ TLBIMVA (any address) — erratum workaround step 1
        dsb                             @ erratum workaround step 2
        isb
        BX lr
@ void __enable_cache(void)
@ Turn both the instruction and data caches on (SCTLR.I | SCTLR.C).
@ Clobbers r0 only. NOTE(review): unlike __enable_dcache this variant has no
@ dsb/isb after the SCTLR write — confirm callers do not rely on immediate effect.
__enable_cache:
        MRC p15,0,r0,c1,c0,0            @ r0 = SCTLR
        ORR r0,r0,#C1_IBIT              @ set I-cache enable
        ORR r0,r0,#C1_CBIT              @ set D-cache enable
        MCR p15,0,r0,c1,c0,0            @ write SCTLR back
        BX lr
@ void __disable_cache(void)
@ Turn both the instruction and data caches off (clear SCTLR.I and SCTLR.C),
@ then apply the erratum-794322 workaround (TLBIMVA + DSB). Clobbers r0 only.
__disable_cache:
        MRC p15,0,r0,c1,c0,0            @ r0 = SCTLR
        BIC r0,r0,#C1_IBIT              @ clear I-cache enable
        BIC r0,r0,#C1_CBIT              @ clear D-cache enable
        MCR p15,0,r0,c1,c0,0            @ write SCTLR back
/*
Erratum:794322,An instruction fetch can be allocated into the L2 cache after the cache is disabled Status
This erratum can be avoided by inserting both of the following after the SCTLR.C bit is cleared to 0,
and before the caches are cleaned or invalidated:
1) A TLBIMVA operation to any address.
2) A DSB instruction.
*/
        MCR p15,0,r0,c8,c7,1            @ TLBIMVA (any address) — erratum workaround step 1
        dsb                             @ erratum workaround step 2
        BX lr
  82. __inner_flush_dcache_all:
  83. push {r0,r1,r2,r3,r4,r5,r7,r8,r9,r10,r11,r14}
  84. dmb @ ensure ordering with previous memory accesses
  85. mrc p15, 1, r0, c0, c0, 1 @ read clidr
  86. ands r3, r0, #0x7000000 @ extract loc from clidr
  87. mov r3, r3, lsr #23 @ left align loc bit field
  88. beq all_finished @ if loc is 0, then no need to clean
  89. mov r10, #0 @ start clean at cache level 0
  90. all_loop1:
  91. add r2, r10, r10, lsr #1 @ work out 3x current cache level
  92. mov r1, r0, lsr r2 @ extract cache type bits from clidr
  93. and r1, r1, #7 @ mask of the bits for current cache only
  94. cmp r1, #2 @ see what cache we have at this level
  95. blt all_skip @ skip if no cache, or just i-cache
  96. #ifdef CONFIG_ARM_ERRATA_814220
  97. dsb
  98. #endif
  99. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  100. isb @ isb to sych the new cssr&csidr
  101. mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
  102. and r2, r1, #7 @ extract the length of the cache lines
  103. add r2, r2, #4 @ add 4 (line length offset)
  104. ldr r4, =0x3ff
  105. ands r4, r4, r1, lsr #3 @ find maximum number on the way size
  106. clz r5, r4 @ find bit position of way size increment
  107. ldr r7, =0x7fff
  108. ands r7, r7, r1, lsr #13 @ extract max number of the index size
  109. all_loop2:
  110. mov r9, r4 @ create working copy of max way size
  111. all_loop3:
  112. orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
  113. orr r11, r11, r7, lsl r2 @ factor index number into r11
  114. #ifdef CONFIG_L1C_OPT
  115. #replace DCCISW by DCISW+DCCSW
  116. cmp r10, #2
  117. mrsne r1, cpsr @disable IRQ and save flag to make clean and invalidate atomic
  118. orrne r8, r1, #PSR_I_BIT | PSR_F_BIT
  119. msrne cpsr_c, r8
  120. mcrne p15, 0, r11, c7, c10, 2 @ clean by set/way
  121. mcrne p15, 0, r11, c7, c6, 2 @ invalidate by set/way
  122. msrne cpsr_c, r1
  123. mcreq p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
  124. #else
  125. mcr p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
  126. #endif
  127. subs r9, r9, #1 @ decrement the way
  128. bge all_loop3
  129. subs r7, r7, #1 @ decrement the index
  130. bge all_loop2
  131. all_skip:
  132. add r10, r10, #2 @ increment cache number
  133. cmp r3, r10
  134. bgt all_loop1
  135. all_finished:
  136. mov r10, #0 @ swith back to cache level 0
  137. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  138. dsb
  139. isb
  140. pop {r0,r1,r2,r3,r4,r5,r7,r8,r9,r10,r11,r14}
  141. bx lr
  142. __inner_flush_dcache_L1:
  143. push {r0,r1,r2,r3,r4,r5,r7,r8,r9,r10,r11,r14}
  144. dmb @ ensure ordering with previous memory accesses
  145. mrc p15, 1, r0, c0, c0, 1 @ read clidr
  146. ands r3, r0, #0x7000000 @ extract loc from clidr
  147. mov r3, r3, lsr #23 @ left align loc bit field
  148. beq L1_finished @ if loc is 0, then no need to clean
  149. mov r10, #0 @ start clean at cache level 1
  150. L1_loop1:
  151. add r2, r10, r10, lsr #1 @ work out 3x current cache level
  152. mov r1, r0, lsr r2 @ extract cache type bits from clidr
  153. and r1, r1, #7 @ mask of the bits for current cache only
  154. cmp r1, #2 @ see what cache we have at this level
  155. blt L1_skip @ skip if no cache, or just i-cache
  156. #ifdef CONFIG_ARM_ERRATA_814220
  157. dsb
  158. #endif
  159. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  160. isb @ isb to sych the new cssr&csidr
  161. mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
  162. and r2, r1, #7 @ extract the length of the cache lines
  163. add r2, r2, #4 @ add 4 (line length offset)
  164. ldr r4, =0x3ff
  165. ands r4, r4, r1, lsr #3 @ find maximum number on the way size
  166. clz r5, r4 @ find bit position of way size increment
  167. ldr r7, =0x7fff
  168. ands r7, r7, r1, lsr #13 @ extract max number of the index size
  169. L1_loop2:
  170. mov r9, r4 @ create working copy of max way size
  171. L1_loop3:
  172. orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
  173. orr r11, r11, r7, lsl r2 @ factor index number into r11
  174. #ifdef CONFIG_L1C_OPT
  175. #replace DCCISW by DCISW+DCCSW
  176. mrs r1, cpsr @disable IRQ and save flag to make clean and invalidate atomic
  177. orr r8, r1, #PSR_I_BIT | PSR_F_BIT
  178. msr cpsr_c, r8
  179. mcr p15, 0, r11, c7, c10, 2 @ clean by set/way
  180. mcr p15, 0, r11, c7, c6, 2 @ invalidate by set/way
  181. msr cpsr_c, r1
  182. #else
  183. mcr p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
  184. #endif
  185. subs r9, r9, #1 @ decrement the way
  186. bge L1_loop3
  187. subs r7, r7, #1 @ decrement the index
  188. bge L1_loop2
  189. L1_skip:
  190. @add r10, r10, #2 @ increment cache number
  191. @cmp r3, r10
  192. @bgt L1_loop1
  193. L1_finished:
  194. mov r10, #0 @ swith back to cache level 0
  195. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  196. dsb
  197. isb
  198. pop {r0,r1,r2,r3,r4,r5,r7,r8,r9,r10,r11,r14}
  199. bx lr
@ void __inner_flush_dcache_L2(void)
@ Clean and invalidate the level-2 cache only, by set/way. The CLIDR/CCSIDR
@ walk starts with level value 2 (L2 in CSSELR encoding) and the level loop
@ is deliberately disabled. All working registers are saved/restored.
__inner_flush_dcache_L2:
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        @push {r4,r5,r7,r9,r10,r11}
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2
        beq L2_finished                 @ if loc is 0, then no need to clean
        mov r10, #2                     @ level value 2 selects L2 in CSSELR
L2_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt L2_skip                     @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
L2_loop2:
        mov r9, r4                      @ create working copy of max way size
L2_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
        mcr p15, 0, r11, c7, c14, 2     @ clean & invalidate by set/way
        subs r9, r9, #1                 @ decrement the way
        bge L2_loop3
        subs r7, r7, #1                 @ decrement the index
        bge L2_loop2
L2_skip:
        @add r10, r10, #2               @ level loop disabled on purpose: L2 only
        @cmp r3, r10
        @bgt L2_loop1
L2_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
        @pop {r4,r5,r7,r9,r10,r11}
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
@ void __inner_clean_dcache_all(void)
@ Clean (write back dirty lines, without invalidating) every data/unified
@ cache level up to the Level of Coherency, by set/way.
@ All working registers are saved/restored.
__inner_clean_dcache_all:
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        @push {r4,r5,r7,r9,r10,r11}
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2 (level counter below steps by 2)
        beq all_cl_finished             @ if loc is 0, then no need to clean
        mov r10, #0                     @ start clean at cache level 0 (i.e. L1)
all_cl_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt all_cl_skip                 @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
all_cl_loop2:
        mov r9, r4                      @ create working copy of max way size
all_cl_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
        mcr p15, 0, r11, c7, c10, 2     @ clean by set/way
        subs r9, r9, #1                 @ decrement the way
        bge all_cl_loop3
        subs r7, r7, #1                 @ decrement the index
        bge all_cl_loop2
all_cl_skip:
        add r10, r10, #2                @ increment cache level counter
        cmp r3, r10
        bgt all_cl_loop1
all_cl_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
        @pop {r4,r5,r7,r9,r10,r11}
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
@ void __inner_clean_dcache_L1(void)
@ Clean (write back, without invalidating) the level-1 data cache only, by
@ set/way; the level loop is deliberately disabled so only level value 0
@ (L1) is processed. All working registers are saved/restored.
__inner_clean_dcache_L1:
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        @push {r4,r5,r7,r9,r10,r11}
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2
        beq L1_cl_finished              @ if loc is 0, then no need to clean
        mov r10, #0                     @ level value 0 selects L1 in CSSELR
L1_cl_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt L1_cl_skip                  @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
L1_cl_loop2:
        mov r9, r4                      @ create working copy of max way size
L1_cl_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
        mcr p15, 0, r11, c7, c10, 2     @ clean by set/way
        subs r9, r9, #1                 @ decrement the way
        bge L1_cl_loop3
        subs r7, r7, #1                 @ decrement the index
        bge L1_cl_loop2
L1_cl_skip:
        @add r10, r10, #2               @ level loop disabled on purpose: L1 only
        @cmp r3, r10
        @bgt L1_cl_loop1
L1_cl_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
        @pop {r4,r5,r7,r9,r10,r11}
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
@ void __inner_clean_dcache_L2(void)
@ Clean (write back, without invalidating) the level-2 cache only, by
@ set/way; starts at level value 2 (L2 in CSSELR encoding), level loop
@ deliberately disabled. All working registers are saved/restored.
@ The "#if 0" blocks are disabled experiments (clean+invalidate of two
@ stack lines by MVA) kept for reference.
__inner_clean_dcache_L2:
#if 0
        mov r0, sp
        mcr p15, 0, r0, c7, c14, 1      @ clean and invalidate D entry
        dsb
        sub r0, r0, #64
        mcr p15, 0, r0, c7, c14, 1      @ clean and invalidate D entry
        dsb
#endif
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        @push {r4,r5,r7,r9,r10,r11}
#if 0
        mov r0, sp
        mcr p15, 0, r0, c7, c14, 1      @ clean and invalidate D entry
        dsb
        sub r0, r0, #64
        mcr p15, 0, r0, c7, c14, 1      @ clean and invalidate D entry
        dsb
#endif
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2
        beq L2_cl_finished              @ if loc is 0, then no need to clean
        mov r10, #2                     @ level value 2 selects L2 in CSSELR
L2_cl_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt L2_cl_skip                  @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
L2_cl_loop2:
        mov r9, r4                      @ create working copy of max way size
L2_cl_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
        mcr p15, 0, r11, c7, c10, 2     @ clean by set/way
        subs r9, r9, #1                 @ decrement the way
        bge L2_cl_loop3
        subs r7, r7, #1                 @ decrement the index
        bge L2_cl_loop2
L2_cl_skip:
        @add r10, r10, #2               @ level loop disabled on purpose: L2 only
        @cmp r3, r10
        @bgt L2_cl_loop1
L2_cl_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
        @pop {r4,r5,r7,r9,r10,r11}
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
@ void __inner_inv_dcache_all(void)
@ Invalidate (WITHOUT cleaning — dirty data is discarded) every data/unified
@ cache level up to the Level of Coherency, by set/way. Intended for use when
@ cache contents are known stale (e.g. early boot / after power-up).
@ All working registers are saved/restored.
__inner_inv_dcache_all:
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        @push {r4,r5,r7,r9,r10,r11}
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2 (level counter below steps by 2)
        beq all_inv_finished            @ if loc is 0, then nothing to invalidate
        mov r10, #0                     @ start at cache level 0 (i.e. L1)
all_inv_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt all_inv_skip                @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
all_inv_loop2:
        mov r9, r4                      @ create working copy of max way size
all_inv_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
        mcr p15, 0, r11, c7, c6, 2      @ invalidate by set/way
        subs r9, r9, #1                 @ decrement the way
        bge all_inv_loop3
        subs r7, r7, #1                 @ decrement the index
        bge all_inv_loop2
all_inv_skip:
        add r10, r10, #2                @ increment cache level counter
        cmp r3, r10
        bgt all_inv_loop1
all_inv_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
        @pop {r4,r5,r7,r9,r10,r11}
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
@ void __inner_inv_dcache_L1(void)
@ Invalidate (WITHOUT cleaning — dirty data is discarded) the level-1 data
@ cache only, by set/way; the level loop is deliberately disabled so only
@ level value 0 (L1) is processed. All working registers are saved/restored.
__inner_inv_dcache_L1:
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        @push {r4,r5,r7,r9,r10,r11}
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2
        beq L1_inv_finished             @ if loc is 0, then nothing to invalidate
        mov r10, #0                     @ level value 0 selects L1 in CSSELR
L1_inv_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt L1_inv_skip                 @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
L1_inv_loop2:
        mov r9, r4                      @ create working copy of max way size
L1_inv_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
        mcr p15, 0, r11, c7, c6, 2      @ invalidate by set/way
        subs r9, r9, #1                 @ decrement the way
        bge L1_inv_loop3
        subs r7, r7, #1                 @ decrement the index
        bge L1_inv_loop2
L1_inv_skip:
        @add r10, r10, #2               @ level loop disabled on purpose: L1 only
        @cmp r3, r10
        @bgt L1_inv_loop1
L1_inv_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
        @pop {r4,r5,r7,r9,r10,r11}
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
@ void __inner_inv_dcache_L2(void)
@ Invalidate (WITHOUT cleaning — dirty data is discarded) the level-2 cache
@ only, by set/way; starts at level value 2 (L2 in CSSELR encoding), level
@ loop deliberately disabled. All working registers are saved/restored.
__inner_inv_dcache_L2:
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        @push {r4,r5,r7,r9,r10,r11}
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2
        beq L2_inv_finished             @ if loc is 0, then nothing to invalidate
        mov r10, #2                     @ level value 2 selects L2 in CSSELR
L2_inv_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt L2_inv_skip                 @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
L2_inv_loop2:
        mov r9, r4                      @ create working copy of max way size
L2_inv_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
        mcr p15, 0, r11, c7, c6, 2      @ invalidate by set/way
        subs r9, r9, #1                 @ decrement the way
        bge L2_inv_loop3
        subs r7, r7, #1                 @ decrement the index
        bge L2_inv_loop2
L2_inv_skip:
        @add r10, r10, #2               @ level loop disabled on purpose: L2 only
        @cmp r3, r10
        @bgt L2_inv_loop1
L2_inv_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
        @pop {r4,r5,r7,r9,r10,r11}
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
@ void __disable_dcache__inner_flush_dcache_L1(void)
@ Fused sequence: disable the data cache (SCTLR.C = 0, with the erratum-794322
@ TLBIMVA+DSB workaround) and then clean+invalidate the L1 data cache by
@ set/way — done in one routine with no intervening stack traffic between the
@ disable and the flush. Uses separate DCCSW+DCISW (the "#if 1" arm) rather
@ than DCCISW. All working registers are saved/restored.
__disable_dcache__inner_flush_dcache_L1:
/*******************************************************************************
*                                  push stack                                  *
******************************************************************************/
        push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
/*******************************************************************************
*                               __disable_dcache                               *
******************************************************************************/
        MRC p15,0,r0,c1,c0,0            @ r0 = SCTLR
        BIC r0,r0,#C1_CBIT              @ clear the D-cache enable bit
        dsb
        MCR p15,0,r0,c1,c0,0            @ write SCTLR back
        dsb
        isb
/*
Erratum:794322,An instruction fetch can be allocated into the L2 cache after the cache is disabled Status
This erratum can be avoided by inserting both of the following after the SCTLR.C bit is cleared to 0,
and before the caches are cleaned or invalidated:
1) A TLBIMVA operation to any address.
2) A DSB instruction.
*/
        MCR p15,0,r0,c8,c7,1            @ TLBIMVA (any address) — erratum workaround step 1
        dsb                             @ erratum workaround step 2
        isb
/*******************************************************************************
*                           __inner_flush_dcache_L1                            *
******************************************************************************/
        dmb                             @ ensure ordering with previous memory accesses
        mrc p15, 1, r0, c0, c0, 1       @ read clidr
        ands r3, r0, #0x7000000         @ extract loc from clidr
        mov r3, r3, lsr #23             @ r3 = LoC * 2
        beq DF1_L1_finished             @ if loc is 0, then no need to clean
        mov r10, #0                     @ level value 0 selects L1 in CSSELR
DF1_L1_loop1:
        add r2, r10, r10, lsr #1        @ work out 3x current cache level
        mov r1, r0, lsr r2              @ extract cache type bits from clidr
        and r1, r1, #7                  @ mask off the bits for current cache only
        cmp r1, #2                      @ see what cache we have at this level
        blt DF1_L1_skip                 @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
        dsb                             @ erratum 814220: dsb before changing cache level
#endif
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        isb                             @ isb to sync the new cssr&csidr
        mrc p15, 1, r1, c0, c0, 0       @ read the new csidr
        and r2, r1, #7                  @ extract the length of the cache lines
        add r2, r2, #4                  @ add 4 (line length offset)
        ldr r4, =0x3ff
        ands r4, r4, r1, lsr #3         @ find maximum number of the way size
        clz r5, r4                      @ find bit position of way size increment
        ldr r7, =0x7fff
        ands r7, r7, r1, lsr #13        @ extract max number of the index size
DF1_L1_loop2:
        mov r9, r4                      @ create working copy of max way size
DF1_L1_loop3:
        orr r11, r10, r9, lsl r5        @ factor way and cache number into r11
        orr r11, r11, r7, lsl r2        @ factor index number into r11
#if 1
        mcr p15, 0, r11, c7, c10, 2     @ clean by set/way
        mcr p15, 0, r11, c7, c6, 2      @ invalidate by set/way
#endif
#if 0
        mcr p15, 0, r11, c7, c14, 2     @ clean & invalidate by set/way
#endif
        subs r9, r9, #1                 @ decrement the way
        bge DF1_L1_loop3
        subs r7, r7, #1                 @ decrement the index
        bge DF1_L1_loop2
DF1_L1_skip:
        @add r10, r10, #2               @ level loop disabled on purpose: L1 only
        @cmp r3, r10
        @bgt DF1_L1_loop1
DF1_L1_finished:
        mov r10, #0                     @ switch back to cache level 0
        mcr p15, 2, r10, c0, c0, 0      @ select current cache level in cssr
        dsb
        isb
/*******************************************************************************
*                                  pop stack                                   *
******************************************************************************/
        pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
        bx lr
  648. __disable_dcache__inner_flush_dcache_L1__inner_flush_dcache_L2:
  649. /*******************************************************************************
  650. * push stack *
  651. ******************************************************************************/
  652. push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
  653. /*******************************************************************************
  654. * __disable_dcache *
  655. ******************************************************************************/
  656. MRC p15,0,r0,c1,c0,0
  657. BIC r0,r0,#C1_CBIT
  658. dsb
  659. MCR p15,0,r0,c1,c0,0
  660. dsb
  661. isb
  662. /*
  663. Erratum:794322,An instruction fetch can be allocated into the L2 cache after the cache is disabled Status
  664. This erratum can be avoided by inserting both of the following after the SCTLR.C bit is cleared to 0,
  665. and before the caches are cleaned or invalidated:
  666. 1) A TLBIMVA operation to any address.
  667. 2) A DSB instruction.
  668. */
  669. MCR p15,0,r0,c8,c7,1
  670. dsb
  671. isb
  672. /*******************************************************************************
  673. * __inner_flush_dcache_L1 *
  674. ******************************************************************************/
  675. dmb @ ensure ordering with previous memory accesses
  676. mrc p15, 1, r0, c0, c0, 1 @ read clidr
  677. ands r3, r0, #0x7000000 @ extract loc from clidr
  678. mov r3, r3, lsr #23 @ left align loc bit field
  679. beq DF1F2_L1_finished @ if loc is 0, then no need to clean
  680. mov r10, #0 @ start clean at cache level 1
  681. DF1F2_L1_loop1:
  682. add r2, r10, r10, lsr #1 @ work out 3x current cache level
  683. mov r1, r0, lsr r2 @ extract cache type bits from clidr
  684. and r1, r1, #7 @ mask of the bits for current cache only
  685. cmp r1, #2 @ see what cache we have at this level
  686. blt DF1F2_L1_skip @ skip if no cache, or just i-cache
  687. #ifdef CONFIG_ARM_ERRATA_814220
  688. dsb
  689. #endif
  690. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  691. isb @ isb to sych the new cssr&csidr
  692. mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
  693. and r2, r1, #7 @ extract the length of the cache lines
  694. add r2, r2, #4 @ add 4 (line length offset)
  695. ldr r4, =0x3ff
  696. ands r4, r4, r1, lsr #3 @ find maximum number on the way size
  697. clz r5, r4 @ find bit position of way size increment
  698. ldr r7, =0x7fff
  699. ands r7, r7, r1, lsr #13 @ extract max number of the index size
  700. DF1F2_L1_loop2:
  701. mov r9, r4 @ create working copy of max way size
  702. DF1F2_L1_loop3:
  703. orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
  704. orr r11, r11, r7, lsl r2 @ factor index number into r11
  705. #if 1
  706. mcr p15, 0, r11, c7, c10, 2 @ clean by set/way
  707. mcr p15, 0, r11, c7, c6, 2 @ invalidate by set/way
  708. #endif
  709. #if 0
  710. mcr p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
  711. #endif
  712. subs r9, r9, #1 @ decrement the way
  713. bge DF1F2_L1_loop3
  714. subs r7, r7, #1 @ decrement the index
  715. bge DF1F2_L1_loop2
  716. DF1F2_L1_skip:
  717. @add r10, r10, #2 @ increment cache number
  718. @cmp r3, r10
  719. @bgt DF1F2_L1_loop1
  720. DF1F2_L1_finished:
  721. mov r10, #0 @ swith back to cache level 0
  722. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  723. dsb
  724. isb
  725. /*******************************************************************************
  726. * clrex *
  727. ******************************************************************************/
  728. clrex
  729. /*******************************************************************************
  730. * __inner_flush_dcache_L2 *
  731. ******************************************************************************/
  732. dmb @ ensure ordering with previous memory accesses
  733. mrc p15, 1, r0, c0, c0, 1 @ read clidr
  734. ands r3, r0, #0x7000000 @ extract loc from clidr
  735. mov r3, r3, lsr #23 @ left align loc bit field
  736. beq DF1F2_L2_finished @ if loc is 0, then no need to clean
  737. mov r10, #2 @ start clean at cache level 2
  738. DF1F2_L2_loop1:
  739. add r2, r10, r10, lsr #1 @ work out 3x current cache level
  740. mov r1, r0, lsr r2 @ extract cache type bits from clidr
  741. and r1, r1, #7 @ mask of the bits for current cache only
  742. cmp r1, #2 @ see what cache we have at this level
  743. blt DF1F2_L2_skip @ skip if no cache, or just i-cache
  744. #ifdef CONFIG_ARM_ERRATA_814220
  745. dsb
  746. #endif
  747. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  748. isb @ isb to sych the new cssr&csidr
  749. mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
  750. and r2, r1, #7 @ extract the length of the cache lines
  751. add r2, r2, #4 @ add 4 (line length offset)
  752. ldr r4, =0x3ff
  753. ands r4, r4, r1, lsr #3 @ find maximum number on the way size
  754. clz r5, r4 @ find bit position of way size increment
  755. ldr r7, =0x7fff
  756. ands r7, r7, r1, lsr #13 @ extract max number of the index size
  757. DF1F2_L2_loop2:
  758. mov r9, r4 @ create working copy of max way size
  759. DF1F2_L2_loop3:
  760. orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
  761. orr r11, r11, r7, lsl r2 @ factor index number into r11
  762. mcr p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
  763. subs r9, r9, #1 @ decrement the way
  764. bge DF1F2_L2_loop3
  765. subs r7, r7, #1 @ decrement the index
  766. bge DF1F2_L2_loop2
  767. DF1F2_L2_skip:
  768. @add r10, r10, #2 @ increment cache number
  769. @cmp r3, r10
  770. @bgt DF1F2_L2_loop1
  771. DF1F2_L2_finished:
  772. mov r10, #0 @ swith back to cache level 0
  773. mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
  774. dsb
  775. isb
  776. /*******************************************************************************
  777. * pop stack *
  778. ******************************************************************************/
  779. pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
  780. bx lr
/*******************************************************************************
* __disable_dcache__inner_flush_dcache_L1__inner_clean_dcache_L2               *
*                                                                              *
* Disable the data cache (SCTLR.C = 0), then clean+invalidate the L1 D-cache  *
* by set/way, then clean (WITHOUT invalidating) the L2 cache by set/way, so   *
* L2 contents remain valid after the call.                                     *
* In/Out:  none.  All working registers (r0-r5,r7,r9-r11,lr) are pushed and    *
*          popped, so the caller sees no register clobbers.                    *
* NOTE(review): set/way maintenance is only safe while other masters/cores    *
* are not writing these caches -- confirm the call context guarantees this.   *
*******************************************************************************/
__disable_dcache__inner_flush_dcache_L1__inner_clean_dcache_L2:
/*******************************************************************************
* push stack *
******************************************************************************/
push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
/*******************************************************************************
* __disable_dcache *
******************************************************************************/
MRC p15,0,r0,c1,c0,0 @ read SCTLR
BIC r0,r0,#C1_CBIT @ clear C bit (disable D-cache)
dsb
MCR p15,0,r0,c1,c0,0 @ write SCTLR back with C bit cleared
dsb
isb
/*
Erratum:794322,An instruction fetch can be allocated into the L2 cache after the cache is disabled Status
This erratum can be avoided by inserting both of the following after the SCTLR.C bit is cleared to 0,
and before the caches are cleaned or invalidated:
1) A TLBIMVA operation to any address.
2) A DSB instruction.
*/
MCR p15,0,r0,c8,c7,1 @ TLBIMVA (invalidate unified TLB by MVA): erratum 794322 workaround
dsb
isb
/*******************************************************************************
* __inner_flush_dcache_L1 *
******************************************************************************/
dmb @ ensure ordering with previous memory accesses
mrc p15, 1, r0, c0, c0, 1 @ read clidr
ands r3, r0, #0x7000000 @ extract loc from clidr
mov r3, r3, lsr #23 @ left align loc bit field
beq DF1C2_L1_finished @ if loc is 0, then no need to clean
mov r10, #0 @ start clean at cache level 1 (CSSELR level field = 0)
DF1C2_L1_loop1:
add r2, r10, r10, lsr #1 @ work out 3x current cache level
mov r1, r0, lsr r2 @ extract cache type bits from clidr
and r1, r1, #7 @ mask off the bits for current cache only
cmp r1, #2 @ see what cache we have at this level
blt DF1C2_L1_skip @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
dsb
#endif
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
isb @ isb to sync the new cssr&csidr
mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
and r2, r1, #7 @ extract the length of the cache lines
add r2, r2, #4 @ add 4 (line length offset)
ldr r4, =0x3ff
ands r4, r4, r1, lsr #3 @ find maximum number on the way size
clz r5, r4 @ find bit position of way size increment
ldr r7, =0x7fff
ands r7, r7, r1, lsr #13 @ extract max number of the index size
DF1C2_L1_loop2:
mov r9, r4 @ create working copy of max way size
DF1C2_L1_loop3:
orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
orr r11, r11, r7, lsl r2 @ factor index number into r11
#if 1
@ clean first, then invalidate, as two separate ops (instead of a single
@ clean&invalidate below) -- presumably an erratum/bring-up choice; confirm
mcr p15, 0, r11, c7, c10, 2 @ clean by set/way
mcr p15, 0, r11, c7, c6, 2 @ invalidate by set/way
#endif
#if 0
mcr p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
#endif
subs r9, r9, #1 @ decrement the way
bge DF1C2_L1_loop3
subs r7, r7, #1 @ decrement the index
bge DF1C2_L1_loop2
DF1C2_L1_skip:
@ NOTE: the level loop is intentionally disabled -- only L1 is processed here;
@ L2 is handled separately below
@add r10, r10, #2 @ increment cache number
@cmp r3, r10
@bgt DF1C2_L1_loop1
DF1C2_L1_finished:
mov r10, #0 @ switch back to cache level 0
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
dsb
isb
/*******************************************************************************
* clrex *
******************************************************************************/
clrex @ clear the local exclusive monitor
/*******************************************************************************
* __inner_clean_dcache_L2 *
******************************************************************************/
dmb @ ensure ordering with previous memory accesses
mrc p15, 1, r0, c0, c0, 1 @ read clidr
ands r3, r0, #0x7000000 @ extract loc from clidr
mov r3, r3, lsr #23 @ left align loc bit field
beq DF1C2_L2_cl_finished @ if loc is 0, then no need to clean
mov r10, #2 @ start clean at cache level 2 (CSSELR level field = 1)
DF1C2_L2_cl_loop1:
add r2, r10, r10, lsr #1 @ work out 3x current cache level
mov r1, r0, lsr r2 @ extract cache type bits from clidr
and r1, r1, #7 @ mask off the bits for current cache only
cmp r1, #2 @ see what cache we have at this level
blt DF1C2_L2_cl_skip @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
dsb
#endif
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
isb @ isb to sync the new cssr&csidr
mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
and r2, r1, #7 @ extract the length of the cache lines
add r2, r2, #4 @ add 4 (line length offset)
ldr r4, =0x3ff
ands r4, r4, r1, lsr #3 @ find maximum number on the way size
clz r5, r4 @ find bit position of way size increment
ldr r7, =0x7fff
ands r7, r7, r1, lsr #13 @ extract max number of the index size
DF1C2_L2_cl_loop2:
mov r9, r4 @ create working copy of max way size
DF1C2_L2_cl_loop3:
orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
orr r11, r11, r7, lsl r2 @ factor index number into r11
mcr p15, 0, r11, c7, c10, 2 @ clean by set/way (no invalidate: L2 stays valid)
subs r9, r9, #1 @ decrement the way
bge DF1C2_L2_cl_loop3
subs r7, r7, #1 @ decrement the index
bge DF1C2_L2_cl_loop2
DF1C2_L2_cl_skip:
@ NOTE: the level loop is intentionally disabled -- only L2 is processed here
@add r10, r10, #2 @ increment cache number
@cmp r3, r10
@bgt DF1C2_L2_cl_loop1
DF1C2_L2_cl_finished:
mov r10, #0 @ switch back to cache level 0
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
dsb
isb
/*******************************************************************************
* pop stack *
******************************************************************************/
pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
bx lr
/*******************************************************************************
* d_i_dis_flush_all                                                            *
*                                                                              *
* Disable BOTH caches (SCTLR.C = 0 and SCTLR.I = 0), then clean+invalidate    *
* the L1 D-cache by set/way, then clean+invalidate the L2 cache by set/way.   *
* Unlike the clean-only variant above, L2 contents are discarded here.        *
* In/Out:  none.  All working registers (r0-r5,r7,r9-r11,lr) are pushed and    *
*          popped, so the caller sees no register clobbers.                    *
* NOTE(review): set/way maintenance is only safe while other masters/cores    *
* are not writing these caches -- confirm the call context guarantees this.   *
*******************************************************************************/
d_i_dis_flush_all:
/*******************************************************************************
* push stack *
******************************************************************************/
push {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
/*******************************************************************************
* __disable_dcache *
******************************************************************************/
MRC p15,0,r0,c1,c0,0 @ read SCTLR
BIC r0,r0,#C1_CBIT @ clear C bit (disable D-cache)
BIC r0,r0,#C1_IBIT @ clear I bit (disable I-cache)
dsb
MCR p15,0,r0,c1,c0,0 @ write SCTLR back with C and I bits cleared
dsb
isb
/*
Erratum:794322,An instruction fetch can be allocated into the L2 cache after the cache is disabled Status
This erratum can be avoided by inserting both of the following after the SCTLR.C bit is cleared to 0,
and before the caches are cleaned or invalidated:
1) A TLBIMVA operation to any address.
2) A DSB instruction.
*/
MCR p15,0,r0,c8,c7,1 @ TLBIMVA (invalidate unified TLB by MVA): erratum 794322 workaround
dsb
isb
/*******************************************************************************
* __inner_flush_dcache_L1 *
******************************************************************************/
dmb @ ensure ordering with previous memory accesses
mrc p15, 1, r0, c0, c0, 1 @ read clidr
ands r3, r0, #0x7000000 @ extract loc from clidr
mov r3, r3, lsr #23 @ left align loc bit field
beq DIF1F2_L1_finished @ if loc is 0, then no need to clean
mov r10, #0 @ start clean at cache level 1 (CSSELR level field = 0)
DIF1F2_L1_loop1:
add r2, r10, r10, lsr #1 @ work out 3x current cache level
mov r1, r0, lsr r2 @ extract cache type bits from clidr
and r1, r1, #7 @ mask off the bits for current cache only
cmp r1, #2 @ see what cache we have at this level
blt DIF1F2_L1_skip @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
dsb
#endif
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
isb @ isb to sync the new cssr&csidr
mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
and r2, r1, #7 @ extract the length of the cache lines
add r2, r2, #4 @ add 4 (line length offset)
ldr r4, =0x3ff
ands r4, r4, r1, lsr #3 @ find maximum number on the way size
clz r5, r4 @ find bit position of way size increment
ldr r7, =0x7fff
ands r7, r7, r1, lsr #13 @ extract max number of the index size
DIF1F2_L1_loop2:
mov r9, r4 @ create working copy of max way size
DIF1F2_L1_loop3:
orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
orr r11, r11, r7, lsl r2 @ factor index number into r11
#if 1
@ clean first, then invalidate, as two separate ops (instead of a single
@ clean&invalidate below) -- presumably an erratum/bring-up choice; confirm
mcr p15, 0, r11, c7, c10, 2 @ clean by set/way
mcr p15, 0, r11, c7, c6, 2 @ invalidate by set/way
#endif
#if 0
mcr p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
#endif
subs r9, r9, #1 @ decrement the way
bge DIF1F2_L1_loop3
subs r7, r7, #1 @ decrement the index
bge DIF1F2_L1_loop2
DIF1F2_L1_skip:
@ NOTE: the level loop is intentionally disabled -- only L1 is processed here;
@ L2 is handled separately below
@add r10, r10, #2 @ increment cache number
@cmp r3, r10
@bgt DIF1F2_L1_loop1
DIF1F2_L1_finished:
mov r10, #0 @ switch back to cache level 0
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
dsb
isb
/*******************************************************************************
* clrex *
******************************************************************************/
clrex @ clear the local exclusive monitor
/*******************************************************************************
* __inner_flush_dcache_L2 *
******************************************************************************/
dmb @ ensure ordering with previous memory accesses
mrc p15, 1, r0, c0, c0, 1 @ read clidr
ands r3, r0, #0x7000000 @ extract loc from clidr
mov r3, r3, lsr #23 @ left align loc bit field
beq DIF1F2_L2_finished @ if loc is 0, then no need to clean
mov r10, #2 @ start clean at cache level 2 (CSSELR level field = 1)
DIF1F2_L2_loop1:
add r2, r10, r10, lsr #1 @ work out 3x current cache level
mov r1, r0, lsr r2 @ extract cache type bits from clidr
and r1, r1, #7 @ mask off the bits for current cache only
cmp r1, #2 @ see what cache we have at this level
blt DIF1F2_L2_skip @ skip if no cache, or just i-cache
#ifdef CONFIG_ARM_ERRATA_814220
dsb
#endif
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
isb @ isb to sync the new cssr&csidr
mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
and r2, r1, #7 @ extract the length of the cache lines
add r2, r2, #4 @ add 4 (line length offset)
ldr r4, =0x3ff
ands r4, r4, r1, lsr #3 @ find maximum number on the way size
clz r5, r4 @ find bit position of way size increment
ldr r7, =0x7fff
ands r7, r7, r1, lsr #13 @ extract max number of the index size
DIF1F2_L2_loop2:
mov r9, r4 @ create working copy of max way size
DIF1F2_L2_loop3:
orr r11, r10, r9, lsl r5 @ factor way and cache number into r11
orr r11, r11, r7, lsl r2 @ factor index number into r11
mcr p15, 0, r11, c7, c14, 2 @ clean & invalidate by set/way
subs r9, r9, #1 @ decrement the way
bge DIF1F2_L2_loop3
subs r7, r7, #1 @ decrement the index
bge DIF1F2_L2_loop2
DIF1F2_L2_skip:
@ NOTE: the level loop is intentionally disabled -- only L2 is processed here
@add r10, r10, #2 @ increment cache number
@cmp r3, r10
@bgt DIF1F2_L2_loop1
DIF1F2_L2_finished:
mov r10, #0 @ switch back to cache level 0
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
dsb
isb
/*******************************************************************************
* pop stack *
******************************************************************************/
pop {r0,r1,r2,r3,r4,r5,r7,r9,r10,r11,r14}
bx lr
  1048. .end