1 /*
   2  * This file and its contents are supplied under the terms of the
   3  * Common Development and Distribution License ("CDDL"), version 1.0.
   4  * You may only use this file in accordance with the terms of version
   5  * 1.0 of the CDDL.
   6  *
   7  * A full copy of the text of the CDDL should have accompanied this
   8  * source.  A copy of the CDDL is also available via the Internet at
   9  * http://www.illumos.org/license/CDDL.
  10  */
  11 
  12 /*
  13  * Copyright (c) 2013 Joyent, Inc.  All rights reserved.
  14  */
  15 
  16         .file   "atomic.s"
  17 
  18 /*
 * Atomic Operations for 32-bit ARM. Note that these require at least ARMv6K so
  20  * as to have access to the non-word size LDREX and STREX.
  21  */
  22 
  23 #include <sys/asm_linkage.h>
  24 #include <sys/atomic_impl.h>
  25 
  26 /*
  27  * XXX We probably want some kind of backoff built in to these routines at some
  28  * point.
  29  */
  30 
  31         /*
  32          * NOTE: If atomic_inc_8 and atomic_inc_8_nv are ever
  33          * separated, you need to also edit the libc arm platform
  34          * specific mapfile and remove the NODYNSORT attribute
  35          * from atomic_inc_8_nv.
  36          */
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar)
	ALTENTRY(atomic_inc_uchar_nv)
	mov	r1, #1			@ delta = 1
	b	atomic_add_8		@ tail-call the common 8-bit add path
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8_nv)
	SET_SIZE(atomic_inc_8)
  47 
  48         /*
  49          * NOTE: If atomic_dec_8 and atomic_dec_8_nv are ever
  50          * separated, you need to also edit the libc arm platform
  51          * specific mapfile and remove the NODYNSORT attribute
  52          * from atomic_dec_8_nv.
  53          */
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar)
	ALTENTRY(atomic_dec_uchar_nv)
	mov	r1, #-1			@ delta = -1 (assembler emits mvn r1, #0)
	b	atomic_add_8		@ tail-call the common 8-bit add path
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8_nv)
	SET_SIZE(atomic_dec_8)
  64 
  65         /*
  66          * NOTE: If atomic_add_8 and atomic_add_8_nv are ever
  67          * separated, you need to also edit the libc arm platform
  68          * specific mapfile and remove the NODYNSORT attribute
  69          * from atomic_add_8_nv.
  70          */
  71         ENTRY(atomic_add_8)
  72         ALTENTRY(atomic_add_8_nv)
  73         ALTENTRY(atomic_add_char)
  74         ALTENTRY(atomic_add_char_nv)
  75 1:
  76         ldrexb  r2, [r0]
  77         add     r2, r1, r2
  78         strexb  r3, r2, [r0]
  79         cmp     r3, #0
  80         bne     1b
  81         mov     r0, r2
  82         bx      lr
  83         SET_SIZE(atomic_add_char_nv)
  84         SET_SIZE(atomic_add_char)
  85         SET_SIZE(atomic_add_8_nv)
  86         SET_SIZE(atomic_add_8)
  87 
  88         /*
  89          * NOTE: If atomic_inc_16 and atomic_inc_16_nv are ever
  90          * separated, you need to also edit the libc arm platform
  91          * specific mapfile and remove the NODYNSORT attribute
  92          * from atomic_inc_16_nv.
  93          */
	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort)
	ALTENTRY(atomic_inc_ushort_nv)
	mov	r1, #1			@ delta = 1
	b	atomic_add_16		@ tail-call the common 16-bit add path
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16_nv)
	SET_SIZE(atomic_inc_16)
 104 
 105         /*
 106          * NOTE: If atomic_dec_16 and atomic_dec_16_nv are ever
 107          * separated, you need to also edit the libc arm platform
 108          * specific mapfile and remove the NODYNSORT attribute
 109          * from atomic_dec_16_nv.
 110          */
	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort)
	ALTENTRY(atomic_dec_ushort_nv)
	mov	r1, #-1			@ delta = -1 (assembler emits mvn r1, #0)
	b	atomic_add_16		@ tail-call the common 16-bit add path
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16_nv)
	SET_SIZE(atomic_dec_16)
 121 
 122         /*
 123          * NOTE: If atomic_add_16 and atomic_add_16_nv are ever
 124          * separated, you need to also edit the libc arm platform
 125          * specific mapfile and remove the NODYNSORT attribute
 126          * from atomic_add_16_nv.
 127          */
	/*
	 * uint16_t atomic_add_16_nv(volatile uint16_t *target, int16_t delta)
	 * In:  r0 = target, r1 = delta (caller-widened per AAPCS)
	 * Out: r0 = new value of *target
	 * Clobbers: r1-r3, flags
	 */
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short)
	ALTENTRY(atomic_add_short_nv)
1:
	ldrexh	r2, [r0]		@ r2 = *target (zero-extended)
	add	r2, r1, r2		@ r2 = old + delta
	strexh	r3, r2, [r0]		@ store low halfword; r3 = 0 on success
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	uxth	r0, r2			@ zero-extend the new value: the add may
					@ carry into bits 16-31 (e.g. 0xffff + 1)
					@ and the AAPCS requires narrow return
					@ types widened by the callee
	bx	lr
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16_nv)
	SET_SIZE(atomic_add_16)
 144 
 145         /*
 146          * NOTE: If atomic_inc_32 and atomic_inc_32_nv are ever
 147          * separated, you need to also edit the libc arm platform
 148          * specific mapfile and remove the NODYNSORT attribute
 149          * from atomic_inc_32_nv.
 150          */
	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong)
	ALTENTRY(atomic_inc_ulong_nv)
	mov	r1, #1			@ delta = 1
	b	atomic_add_32		@ tail-call the common 32-bit add path
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32_nv)
	SET_SIZE(atomic_inc_32)
 165 
 166         /*
 167          * NOTE: If atomic_dec_32 and atomic_dec_32_nv are ever
 168          * separated, you need to also edit the libc arm platform
 169          * specific mapfile and remove the NODYNSORT attribute
 170          * from atomic_dec_32_nv.
 171          */
	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong)
	ALTENTRY(atomic_dec_ulong_nv)
	mov	r1, #-1			@ delta = -1 (assembler emits mvn r1, #0)
	b	atomic_add_32		@ tail-call the common 32-bit add path
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32_nv)
	SET_SIZE(atomic_dec_32)
 186 
 187         /*
 188          * NOTE: If atomic_add_32 and atomic_add_32_nv are ever
 189          * separated, you need to also edit the libc arm platform
 190          * specific mapfile and remove the NODYNSORT attribute
 191          * from atomic_add_32_nv.
 192          */
	/*
	 * uint32_t atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
	 * Also serves the int/long/ptr aliases (all 32-bit on this target).
	 * In:  r0 = target, r1 = delta.  Out: r0 = new value.
	 */
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long)
	ALTENTRY(atomic_add_long_nv)
1:
	ldrex	r2, [r0]		@ r2 = *target
	add	r2, r1, r2		@ r2 = old + delta
	strex	r3, r2, [r0]		@ r3 = 0 iff the store succeeded
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the new value
	bx	lr
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32_nv)
	SET_SIZE(atomic_add_32)
 217 
 218         /*
 219          * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
 220          * separated, you need to also edit the libc arm platform
 221          * specific mapfile and remove the NODYNSORT attribute
 222          * from atomic_inc_64_nv.
 223          */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	mov	r2, #1			@ delta low word = 1
	mov	r3, #0			@ delta high word = 0 (r2:r3 = 64-bit 1)
	b	atomic_add_64		@ tail-call the common 64-bit add path
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)
 231 
 232         /*
 233          * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
 234          * separated, you need to also edit the libc arm platform
 235          * specific mapfile and remove the NODYNSORT attribute
 236          * from atomic_dec_64_nv.
 237          */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	mov	r2, #-1			@ delta low word = 0xffffffff
	mvn	r3, #0			@ delta high word = 0xffffffff (r2:r3 = -1)
	b	atomic_add_64		@ tail-call the common 64-bit add path
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)
 245 
 246         /*
 247          * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
 248          * separated, you need to also edit the libc arm platform
 249          * specific mapfile and remove the NODYNSORT attribute
 250          * from atomic_add_64_nv.
 251          */
	/*
	 * uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
	 * In:  r0 = target, r2:r3 = delta (64-bit arg in an even/odd pair).
	 * Out: r0:r1 = new value (low:high).
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	push	{ r4, r5 }		@ ldrexd needs a free even/odd pair
1:
	ldrexd	r4, r5, [r0]		@ r4:r5 = low:high words of *target
	adds	r4, r4, r2		@ add low words, set carry
	adc	r5, r5, r3		@ add high words with carry in
	strexd	r1, r4, r5, [r0]	@ r1 = 0 iff the store succeeded
	cmp	r1, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r4			@ return new value: low word
	mov	r1, r5			@ high word
	pop	{ r4, r5 }
	bx	lr
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)
 268 
 269         /*
 270          * NOTE: If atomic_or_8 and atomic_or_8_nv are ever
 271          * separated, you need to also edit the libc arm platform
 272          * specific mapfile and remove the NODYNSORT attribute
 273          * from atomic_or_8_nv.
 274          */
	/*
	 * uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t bits)
	 * In:  r0 = target, r1 = bits.  Out: r0 = new value.
	 */
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar)
	ALTENTRY(atomic_or_uchar_nv)
1:
	ldrexb	r2, [r0]		@ r2 = *target (zero-extended)
	orr	r2, r1, r2		@ r2 = old | bits
	strexb	r3, r2, [r0]		@ r3 = 0 iff the store succeeded
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the new value
	bx	lr
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8_nv)
	SET_SIZE(atomic_or_8)
 291 
 292         /*
 293          * NOTE: If atomic_or_16 and atomic_or_16_nv are ever
 294          * separated, you need to also edit the libc arm platform
 295          * specific mapfile and remove the NODYNSORT attribute
 296          * from atomic_or_16_nv.
 297          */
	/*
	 * uint16_t atomic_or_16_nv(volatile uint16_t *target, uint16_t bits)
	 * In:  r0 = target, r1 = bits.  Out: r0 = new value.
	 */
	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort)
	ALTENTRY(atomic_or_ushort_nv)
1:
	ldrexh	r2, [r0]		@ r2 = *target (zero-extended)
	orr	r2, r1, r2		@ r2 = old | bits
	strexh	r3, r2, [r0]		@ r3 = 0 iff the store succeeded
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the new value
	bx	lr
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16_nv)
	SET_SIZE(atomic_or_16)
 314 
 315         /*
 316          * NOTE: If atomic_or_32 and atomic_or_32_nv are ever
 317          * separated, you need to also edit the libc arm platform
 318          * specific mapfile and remove the NODYNSORT attribute
 319          * from atomic_or_32_nv.
 320          */
	/*
	 * uint32_t atomic_or_32_nv(volatile uint32_t *target, uint32_t bits)
	 * In:  r0 = target, r1 = bits.  Out: r0 = new value.
	 */
	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong)
	ALTENTRY(atomic_or_ulong_nv)
1:
	ldrex	r2, [r0]		@ r2 = *target
	orr	r2, r1, r2		@ r2 = old | bits (was "add", which made
					@ atomic_or_32 perform an addition)
	strex	r3, r2, [r0]		@ r3 = 0 iff the store succeeded
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the new value
	bx	lr
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32_nv)
	SET_SIZE(atomic_or_32)
 341 
 342         /*
 343          * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
 344          * separated, you need to also edit the libc arm platform
 345          * specific mapfile and remove the NODYNSORT attribute
 346          * from atomic_or_64_nv.
 347          */
	/*
	 * uint64_t atomic_or_64_nv(volatile uint64_t *target, uint64_t bits)
	 * In:  r0 = target, r2:r3 = bits.  Out: r0:r1 = new value (low:high).
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	push	{ r4, r5 }		@ ldrexd needs a free even/odd pair
1:
	ldrexd	r4, r5, [r0]		@ r4:r5 = low:high words of *target
	orr	r4, r4, r2		@ OR low words
	orr	r5, r5, r3		@ OR high words
	strexd	r1, r4, r5, [r0]	@ r1 = 0 iff the store succeeded
	cmp	r1, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r4			@ return new value: low word
	mov	r1, r5			@ high word
	pop	{ r4, r5 }
	bx	lr
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)
 364 
 365         /*
 366          * NOTE: If atomic_and_8 and atomic_and_8_nv are ever
 367          * separated, you need to also edit the libc arm platform
 368          * specific mapfile and remove the NODYNSORT attribute
 369          * from atomic_and_8_nv.
 370          */
	/*
	 * uint8_t atomic_and_8_nv(volatile uint8_t *target, uint8_t bits)
	 * In:  r0 = target, r1 = bits.  Out: r0 = new value.
	 */
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar)
	ALTENTRY(atomic_and_uchar_nv)
1:
	ldrexb	r2, [r0]		@ r2 = *target (zero-extended)
	and	r2, r1, r2		@ r2 = old & bits
	strexb	r3, r2, [r0]		@ r3 = 0 iff the store succeeded
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the new value
	bx	lr
	SET_SIZE(atomic_and_uchar_nv)	@ was missing despite the ALTENTRY above
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8_nv)
	SET_SIZE(atomic_and_8)
 386 
 387         /*
 388          * NOTE: If atomic_and_16 and atomic_and_16_nv are ever
 389          * separated, you need to also edit the libc arm platform
 390          * specific mapfile and remove the NODYNSORT attribute
 391          * from atomic_and_16_nv.
 392          */
	/*
	 * uint16_t atomic_and_16_nv(volatile uint16_t *target, uint16_t bits)
	 * In:  r0 = target, r1 = bits.  Out: r0 = new value.
	 */
	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort)
	ALTENTRY(atomic_and_ushort_nv)
1:
	ldrexh	r2, [r0]		@ r2 = *target (zero-extended)
	and	r2, r1, r2		@ r2 = old & bits
	strexh	r3, r2, [r0]		@ r3 = 0 iff the store succeeded
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the new value
	bx	lr
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16_nv)
	SET_SIZE(atomic_and_16)
 409 
 410         /*
 411          * NOTE: If atomic_and_32 and atomic_and_32_nv are ever
 412          * separated, you need to also edit the libc arm platform
 413          * specific mapfile and remove the NODYNSORT attribute
 414          * from atomic_and_32_nv.
 415          */
	/*
	 * uint32_t atomic_and_32_nv(volatile uint32_t *target, uint32_t bits)
	 * In:  r0 = target, r1 = bits.  Out: r0 = new value.
	 */
	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong)
	ALTENTRY(atomic_and_ulong_nv)
1:
	ldrex	r2, [r0]		@ r2 = *target
	and	r2, r1, r2		@ r2 = old & bits
	strex	r3, r2, [r0]		@ r3 = 0 iff the store succeeded
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the new value
	bx	lr
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32_nv)
	SET_SIZE(atomic_and_32)
 436 
 437         /*
 438          * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
 439          * separated, you need to also edit the libc arm platform
 440          * specific mapfile and remove the NODYNSORT attribute
 441          * from atomic_and_64_nv.
 442          */
	/*
	 * uint64_t atomic_and_64_nv(volatile uint64_t *target, uint64_t bits)
	 * In:  r0 = target, r2:r3 = bits.  Out: r0:r1 = new value (low:high).
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	push	{ r4, r5 }		@ ldrexd needs a free even/odd pair
1:
	ldrexd	r4, r5, [r0]		@ r4:r5 = low:high words of *target
	and	r4, r4, r2		@ AND low words
	and	r5, r5, r3		@ AND high words
	strexd	r1, r4, r5, [r0]	@ r1 = 0 iff the store succeeded
	cmp	r1, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r4			@ return new value: low word
	mov	r1, r5			@ high word
	pop	{ r4, r5 }
	bx	lr
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)
 459 
	/*
	 * uint8_t atomic_cas_8(volatile uint8_t *target, uint8_t cmp,
	 *     uint8_t newval)
	 * In:  r0 = target, r1 = cmp, r2 = newval.
	 * Out: r0 = old value of *target (equal to cmp iff the swap happened).
	 */
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	push	{ r4 }
1:
	ldrexb	r3, [r0]		@ r3 = current *target
	cmp	r1, r3
	bne	2f			@ Compare failed, bail
	strexb	r4, r2, [r0]		@ try to store newval
	cmp	r4, #0			@ strexb failed, take another lap
	bne	1b
2:
	mov	r0, r3			@ return the observed old value
	pop	{ r4 }
	bx	lr
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)
 476 
	/*
	 * uint16_t atomic_cas_16(volatile uint16_t *target, uint16_t cmp,
	 *     uint16_t newval)
	 * In:  r0 = target, r1 = cmp, r2 = newval.
	 * Out: r0 = old value of *target (equal to cmp iff the swap happened).
	 */
	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	push	{ r4 }
1:
	ldrexh	r3, [r0]		@ r3 = current *target
	cmp	r1, r3
	bne	2f			@ Compare failed, bail
	strexh	r4, r2, [r0]		@ try to store newval
	cmp	r4, #0			@ strexh failed, take another lap
	bne	1b
2:
	mov	r0, r3			@ return the observed old value
	pop	{ r4 }
	bx	lr
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)
 493 
	/*
	 * uint32_t atomic_cas_32(volatile uint32_t *target, uint32_t cmp,
	 *     uint32_t newval)
	 * Also serves the ptr/ulong aliases (32-bit on this target).
	 * In:  r0 = target, r1 = cmp, r2 = newval.
	 * Out: r0 = old value of *target (equal to cmp iff the swap happened).
	 */
	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ptr)
	ALTENTRY(atomic_cas_ulong)
	push	{ r4 }
1:
	ldrex	r3, [r0]		@ r3 = current *target
	cmp	r1, r3
	bne	2f			@ Compare failed, bail
	strex	r4, r2, [r0]		@ try to store newval
	cmp	r4, #0			@ strex failed, take another lap
	bne	1b
2:
	mov	r0, r3			@ return the observed old value
	pop	{ r4 }
	bx	lr
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)
 514 
 515         /*
 516          * atomic_cas_64(uint64_t *target, uint64_t cmp, uint64_t newval);
 517          *
 518          * target is in r0
 519          * cmp is in r2,r3
 520          * newval is on the stack 
 521          *
 522          * Our register allocation:
 523          * r0 - Always contains target
 524          * r1 - Always used for the result of strexd
 525          * r2, r3 - Always used for cmp
 526          * r4, r5 - Always used for newval
 527          * r6, r7 - Always used as the ldrexd target
 528          *
 529          * Note that sp points to newval when we enter. We push four values, so
 530          * we need to add 16 when we load newval.
 531          */
	ENTRY(atomic_cas_64)
	push	{ r4, r5, r6, r7 }
	ldrd	r4, r5, [sp, #16]	@ load newval into r4 (low), r5 (high)
1:
	ldrexd	r6, r7, [r0]		@ r6:r7 = low:high words of *target
	cmp	r6, r2
	bne	2f			@ bail if low word not equal
	cmp	r7, r3			@ compare the loaded high word against
					@ cmp's high word (was "cmp r5, r3",
					@ which tested newval's high word and
					@ made the comparison depend on newval)
	bne	2f			@ bail if high word not equal
	strexd	r1, r4, r5, [r0]	@ try to store newval into *target
	cmp	r1, #0
	bne	1b			@ try again if store aborted
2:
	mov	r0, r6			@ ret low word of old *target
	mov	r1, r7			@ ret high word of old *target
	pop	{ r4, r5, r6, r7 }
	bx	lr
	SET_SIZE(atomic_cas_64)
 550 
	/*
	 * uint8_t atomic_swap_8(volatile uint8_t *target, uint8_t newval)
	 * In:  r0 = target, r1 = newval.  Out: r0 = old value.
	 */
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
1:
	ldrexb	r2, [r0]		@ r2 = old *target (zero-extended)
	strexb	r3, r1, [r0]		@ try to store newval
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the old value
	bx	lr
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)
 562 
	/*
	 * uint16_t atomic_swap_16(volatile uint16_t *target, uint16_t newval)
	 * In:  r0 = target, r1 = newval.  Out: r0 = old value.
	 */
	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
1:
	ldrexh	r2, [r0]		@ r2 = old *target (zero-extended)
	strexh	r3, r1, [r0]		@ try to store newval
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the old value
	bx	lr
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)
 574 
	/*
	 * uint32_t atomic_swap_32(volatile uint32_t *target, uint32_t newval)
	 * Also serves the ptr/ulong aliases (32-bit on this target).
	 * In:  r0 = target, r1 = newval.  Out: r0 = old value.
	 */
	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
1:
	ldrex	r2, [r0]		@ r2 = old *target
	strex	r3, r1, [r0]		@ try to store newval
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r2			@ return the old value
	bx	lr
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)
 590 
	/*
	 * uint64_t atomic_swap_64(volatile uint64_t *target, uint64_t newval)
	 * In:  r0 = target, r2:r3 = newval (64-bit arg in an even/odd pair).
	 * Out: r0:r1 = old value (low:high).
	 */
	ENTRY(atomic_swap_64)
	push	{ r4, r5 }		@ ldrexd needs a free even/odd pair
1:
	ldrexd	r4, r5, [r0]		@ r4:r5 = old low:high words of *target
	strexd	r1, r2, r3, [r0]	@ try to store newval
	cmp	r1, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, r4			@ return old value: low word
	mov	r1, r5			@ high word
	pop	{ r4, r5 }
	bx	lr
	SET_SIZE(atomic_swap_64)
 603 
	/*
	 * int atomic_set_long_excl(volatile ulong_t *target, uint_t bit)
	 * Atomically set bit `bit' of *target.
	 * Out: r0 = 0 on success, -1 if the bit was already set.
	 */
	ENTRY(atomic_set_long_excl)
	mov	r3, #1
	lsl	r1, r3, r1		@ r1 = mask = 1 << bit
1:
	ldrex	r2, [r0]
	and	r3, r1, r2
	cmp	r3, r1			@ Check if the bit is set
	beq	2f
	orr	r2, r1, r2		@ Set the bit
	strex	r3, r2, [r0]		@ store the updated word (was "strex
					@ r3, r1, ...", which overwrote *target
					@ with just the mask, destroying every
					@ other bit)
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, #0			@ success
	bx	lr
2:
	mov	r0, #-1			@ bit already set
	bx	lr
	SET_SIZE(atomic_set_long_excl)
 622 
	/*
	 * int atomic_clear_long_excl(volatile ulong_t *target, uint_t bit)
	 * Atomically clear bit `bit' of *target.
	 * Out: r0 = 0 on success, -1 if the bit was already clear.
	 */
	ENTRY(atomic_clear_long_excl)
	mov	r3, #1
	lsl	r1, r3, r1		@ r1 = mask = 1 << bit
1:
	ldrex	r2, [r0]
	and	r3, r1, r2
	cmp	r3, r1			@ bail unless the bit is currently set
	bne	2f
	bic	r2, r2, r1		@ r2 = r2 & ~r1
	strex	r3, r2, [r0]		@ store the cleared word (was "strex
					@ r3, r1, ...", which overwrote *target
					@ with the mask instead of the result)
	cmp	r3, #0
	bne	1b			@ lost the reservation, take another lap
	mov	r0, #0			@ success
	bx	lr
2:
	mov	r0, #-1			@ bit already clear
	bx	lr
	SET_SIZE(atomic_clear_long_excl)
 641 
 642 #if !defined(_KERNEL)
 643 
 644         /*
 645          * NOTE: membar_enter, membar_exit, membar_producer, and
 646          * membar_consumer are identical routines.  We define them
 647          * separately, instead of using ALTENTRY definitions to alias
 648          * them together, so that DTrace and debuggers will see a unique
 649          * address for them, allowing more accurate tracing.
 650          */
	ENTRY(membar_enter)
	ARM_DMB_INSTR(r0)		@ full barrier; r0 is scratch for the
					@ encoding — see sys/atomic_impl.h
	bx lr
	SET_SIZE(membar_enter)
 655 
	ENTRY(membar_exit)
	ARM_DMB_INSTR(r0)		@ full barrier; r0 is scratch for the
					@ encoding — see sys/atomic_impl.h
	bx lr
	SET_SIZE(membar_exit)
 660 
	ENTRY(membar_producer)
	ARM_DMB_INSTR(r0)		@ full barrier; r0 is scratch for the
					@ encoding — see sys/atomic_impl.h
	bx lr
	SET_SIZE(membar_producer)
 665 
	ENTRY(membar_consumer)
	ARM_DMB_INSTR(r0)		@ full barrier; r0 is scratch for the
					@ encoding — see sys/atomic_impl.h
	bx lr
	SET_SIZE(membar_consumer)
 670 
 671 #endif  /* !_KERNEL */