1 /*
   2  * This file and its contents are supplied under the terms of the
   3  * Common Development and Distribution License ("CDDL"), version 1.0.
   4  * You may only use this file in accordance with the terms of version
   5  * 1.0 of the CDDL.
   6  *
   7  * A full copy of the text of the CDDL should have accompanied this
   8  * source.  A copy of the CDDL is also available via the Internet at
   9  * http://www.illumos.org/license/CDDL.
  10  */
  11 
  12 /*
  13  * Copyright 2013 Joyent, Inc.  All rights reserved.
  14  */
  15 
  16         .file   "cache.s"
  17 
  18 /*
  19  * Cache and memory barrier operations
  20  */
  21 
  22 #include <sys/asm_linkage.h>
  23 #include <sys/atomic_impl.h>
  24 
  25 #if defined(lint) || defined(__lint)
  26 
  27 void
  28 membar_sync(void)
  29 {}
  30 
  31 void
  32 membar_enter(void)
  33 {}
  34 
  35 void
  36 membar_exit(void)
  37 {}
  38 
  39 void
  40 membar_producer(void)
  41 {}
  42 
  43 void
  44 membar_consumer(void)
  45 {}
  46 
  47 void
  48 instr_sbarrier(void)
  49 {}
  50 
  51 void
  52 data_sbarrier(void)
  53 {}
  54 
  55 #else   /* __lint */
  56 
  57         /*
  58          * NOTE: membar_enter, membar_exit, membar_producer, and
  59          * membar_consumer are identical routines.  We define them
  60          * separately, instead of using ALTENTRY definitions to alias
  61          * them together, so that DTrace and debuggers will see a unique
  62          * address for them, allowing more accurate tracing.
  63          */
  64         ENTRY(membar_enter)
  65         ALTENTRY(membar_sync)
  66         ARM_DMB_INSTR(r0)
  67         bx lr
  68         SET_SIZE(membar_sync)
  69         SET_SIZE(membar_enter)
  70 
  71         ENTRY(membar_exit)
  72         ARM_DMB_INSTR(r0)
  73         bx lr
  74         SET_SIZE(membar_exit)
  75 
  76         ENTRY(membar_producer)
  77         ARM_DMB_INSTR(r0)
  78         bx lr
  79         SET_SIZE(membar_producer)
  80 
  81         ENTRY(membar_consumer)
  82         ARM_DMB_INSTR(r0)
  83         bx lr
  84         SET_SIZE(membar_consumer)
  85 
  86         ENTRY(instr_sbarrier)
  87         ARM_ISB_INSTR(r0)
  88         bx lr
  89         SET_SIZE(membar_consumer)
  90 
  91         ENTRY(data_sbarrier)
  92         ARM_ISB_INSTR(r0)
  93         bx lr
  94         SET_SIZE(data_sbarrier)
  95 
  96 #endif  /* __lint */
  97 
  98 #if defined(lint) || defined(__lint)
  99 
 100 /* The ARM architecture uses a modified Harvard Architecture which means that we
 101  * get the joys of fixing up this mess. Primarily this means that when we update
 * data, it gets written to the data cache. That needs to be flushed to main
 103  * memory and then the instruction cache needs to be invalidated. This is
 104  * particularly important for things like krtld and DTrace. While the data cache
 105  * does write itself out over time, we cannot rely on it having written itself
 106  * out to the state that we care about by the time that we'd like it to. As
 107  * such, we need to ensure that it's been flushed out ourselves. This also means
 108  * that we could accidentally flush a region of the icache that's already
 109  * updated itself, but that's just what we have to do to keep Von Neumann's
 * spirit and great gift alive.
 111  *
 112  * The controllers for the caches have a few different options for invalidation.
 113  * One may:
 114  *
 115  *   o Invalidate or flush the entire cache
 116  *   o Invalidate or flush a cache line
 117  *   o Invalidate or flush a cache range
 118  *
 119  * We opt to take the third option here for the general case of making sure that
 120  * text has been synchronized. While the data cache allows us to both invalidate
 121  * and flush the cache line, we don't currently have a need to do the
 122  * invalidation.
 123  *
 124  * Note that all of these operations should be aligned on an 8-byte boundary.
 125  * The instructions actually only end up using bits [31:5] of an address.
 126  * Callers are required to ensure that this is the case.
 127  */
 128 
 129 void
 130 armv6_icache_disable(void)
 131 {}
 132 
 133 void
 134 armv6_icache_enable(void)
 135 {}
 136 
 137 void
 138 armv6_dcache_disable(void)
 139 {}
 140 
 141 void
 142 armv6_dcache_enable(void)
 143 {}
 144 
 145 void
 146 armv6_icache_inval(void)
 147 {}
 148 
 149 void
 150 armv6_dcache_inval(void)
 151 {}
 152 
 153 void
 154 armv6_dcache_flush(void)
 155 {}
 156 
 157 void
 158 armv6_text_flush_range(caddr_t start, size_t len)
 159 {}
 160 
 161 void
 162 armv6_text_flush(void)
 163 {}
 164 
 165 #else   /* __lint */
 166 
 167         ENTRY(armv6_icache_enable)
 168         mrc     p15, 0, r0, c1, c0, 0
 169         orr     r0, #0x1000
 170         mcr     p15, 0, r0, c1, c0, 0
 171         SET_SIZE(armv6_icache_enable)
 172 
 173         ENTRY(armv6_dcache_enable)
 174         mrc     p15, 0, r0, c1, c0, 0
 175         orr     r0, #0x4
 176         mcr     p15, 0, r0, c1, c0, 0
 177         SET_SIZE(armv6_dcache_enable)
 178 
 179         ENTRY(armv6_icache_disable)
 180         mrc     p15, 0, r0, c1, c0, 0
 181         bic     r0, #0x1000
 182         mcr     p15, 0, r0, c1, c0, 0
 183         SET_SIZE(armv6_icache_disable)
 184 
 185         ENTRY(armv6_dcache_disable)
 186         mrc     p15, 0, r0, c1, c0, 0
 187         bic     r0, #0x4
 188         mcr     p15, 0, r0, c1, c0, 0
 189         SET_SIZE(armv6_dcache_disable)
 190 
 191         ENTRY(armv6_icache_inval)
 192         mcr     p15, 0, r0, c7, c5, 0           @ Invalidate i-cache
 193         bx      lr
 194         SET_SIZE(armv6_icache_inval)
 195 
 196         ENTRY(armv6_dcache_inval)
 197         mcr     p15, 0, r0, c7, c6, 0           @ Invalidate d-cache
 198         ARM_DSB_INSTR(r2)
 199         bx      lr
 200         SET_SIZE(armv6_dcache_inval)
 201 
 202         ENTRY(armv6_dcache_flush)
 203         mcr     p15, 0, r0, c7, c10, 4          @ Flush d-cache
 204         ARM_DSB_INSTR(r2)
 205         bx      lr
 206         SET_SIZE(armv6_dcache_flush)
 207         
 208         ENTRY(armv6_text_flush_range)
 209         add     r1, r1, r0
 210         sub     r1, r1, r0
 211         mcrr    p15, 0, r1, r0, c5              @ Invalidate i-cache range
 212         mcrr    p15, 0, r1, r0, c12             @ Flush d-cache range
 213         ARM_DSB_INSTR(r2)
 214         ARM_ISB_INSTR(r2)
 215         bx      lr
 216         SET_SIZE(armv6_text_flush_range)
 217 
 218         ENTRY(armv6_text_flush)
 219         mcr     p15, 0, r0, c7, c5, 0           @ Invalidate i-cache
 220         mcr     p15, 0, r0, c7, c10, 4          @ Flush d-cache
 221         ARM_DSB_INSTR(r2)
 222         ARM_ISB_INSTR(r2)
 223         bx      lr
 224         SET_SIZE(armv6_text_flush)
 225 
 226 #endif
 227 
 228 #ifdef __lint
 229 
 230 /*
 231  * Perform all of the operations necessary for tlb maintenance after an update
 232  * to the page tables.
 233  */
 234 void
 235 armv6_tlb_sync(void)
 236 {}
 237 
 238 #else   /* __lint */
 239 
 240         ENTRY(armv6_tlb_sync)
 241         mov     r0, #0
 242         mcr     p15, 0, r0, c7, c10, 4          @ Flush d-cache
 243         ARM_DSB_INSTR(r0)
 244         mcr     p15, 0, r0, c8, c7, 0           @ invalidate tlb
 245         mcr     p15, 0, r0, c8, c5, 0           @ Invalidate I-cache + btc
 246         ARM_DSB_INSTR(r0)
 247         ARM_ISB_INSTR(r0)
 248         bx      lr
 249         SET_SIZE(armv6_tlb_sync)
 250 
 251 #endif  /* __lint */