ret i64 %val
}
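+; An acquire atomic load of a ptr: native-width load (ld.w on LA32, ld.d on LA64) followed by an acquire barrier (dbar 20).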
+define ptr @load_acquire_ptr(ptr %ptr) {
+; LA32-LABEL: load_acquire_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: ld.w $a0, $a0, 0
+; LA32-NEXT: dbar 20
+; LA32-NEXT: ret
+;
+; LA64-LABEL: load_acquire_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: ld.d $a0, $a0, 0
+; LA64-NEXT: dbar 20
+; LA64-NEXT: ret
+ %val = load atomic ptr, ptr %ptr acquire, align 8
+ ret ptr %val
+}
+
define i8 @load_unordered_i8(ptr %ptr) {
; LA32-LABEL: load_unordered_i8:
; LA32: # %bb.0:
ret i64 %val
}
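+; An unordered atomic load of a ptr needs no barrier: a plain ld.w/ld.d is enough.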
+define ptr @load_unordered_ptr(ptr %ptr) {
+; LA32-LABEL: load_unordered_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: ld.w $a0, $a0, 0
+; LA32-NEXT: ret
+;
+; LA64-LABEL: load_unordered_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: ld.d $a0, $a0, 0
+; LA64-NEXT: ret
+ %val = load atomic ptr, ptr %ptr unordered, align 8
+ ret ptr %val
+}
+
define i8 @load_monotonic_i8(ptr %ptr) {
; LA32-LABEL: load_monotonic_i8:
; LA32: # %bb.0:
ret i64 %val
}
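+; A monotonic atomic load of a ptr likewise needs no barrier.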
+define ptr @load_monotonic_ptr(ptr %ptr) {
+; LA32-LABEL: load_monotonic_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: ld.w $a0, $a0, 0
+; LA32-NEXT: ret
+;
+; LA64-LABEL: load_monotonic_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: ld.d $a0, $a0, 0
+; LA64-NEXT: ret
+ %val = load atomic ptr, ptr %ptr monotonic, align 8
+ ret ptr %val
+}
+
define i8 @load_seq_cst_i8(ptr %ptr) {
; LA32-LABEL: load_seq_cst_i8:
; LA32: # %bb.0:
ret i64 %val
}
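+; A seq_cst atomic load of a ptr: native-width load followed by a full barrier (dbar 16).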
+define ptr @load_seq_cst_ptr(ptr %ptr) {
+; LA32-LABEL: load_seq_cst_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: ld.w $a0, $a0, 0
+; LA32-NEXT: dbar 16
+; LA32-NEXT: ret
+;
+; LA64-LABEL: load_seq_cst_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: ld.d $a0, $a0, 0
+; LA64-NEXT: dbar 16
+; LA64-NEXT: ret
+ %val = load atomic ptr, ptr %ptr seq_cst, align 8
+ ret ptr %val
+}
+
define void @store_release_i8(ptr %ptr, i8 signext %v) {
; LA32-LABEL: store_release_i8:
; LA32: # %bb.0:
ret void
}
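+; A release atomic store of a ptr: LA32 emits a release barrier (dbar 18) before the st.w;
+; LA64 uses amswap_db.d with $zero as the destination, i.e. a barriered swap whose old value is discarded.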
+define void @store_release_ptr(ptr %ptr, ptr %v) {
+; LA32-LABEL: store_release_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: dbar 18
+; LA32-NEXT: st.w $a1, $a0, 0
+; LA32-NEXT: ret
+;
+; LA64-LABEL: store_release_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: amswap_db.d $zero, $a1, $a0
+; LA64-NEXT: ret
+ store atomic ptr %v, ptr %ptr release, align 8
+ ret void
+}
+
define void @store_unordered_i8(ptr %ptr, i8 signext %v) {
; LA32-LABEL: store_unordered_i8:
; LA32: # %bb.0:
ret void
}
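+; An unordered atomic store of a ptr needs no barrier: a plain st.w/st.d is enough.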
+define void @store_unordered_ptr(ptr %ptr, ptr %v) {
+; LA32-LABEL: store_unordered_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: st.w $a1, $a0, 0
+; LA32-NEXT: ret
+;
+; LA64-LABEL: store_unordered_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: st.d $a1, $a0, 0
+; LA64-NEXT: ret
+ store atomic ptr %v, ptr %ptr unordered, align 8
+ ret void
+}
+
define void @store_monotonic_i8(ptr %ptr, i8 signext %v) {
; LA32-LABEL: store_monotonic_i8:
; LA32: # %bb.0:
ret void
}
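+; A monotonic atomic store of a ptr likewise needs no barrier.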
+define void @store_monotonic_ptr(ptr %ptr, ptr %v) {
+; LA32-LABEL: store_monotonic_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: st.w $a1, $a0, 0
+; LA32-NEXT: ret
+;
+; LA64-LABEL: store_monotonic_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: st.d $a1, $a0, 0
+; LA64-NEXT: ret
+ store atomic ptr %v, ptr %ptr monotonic, align 8
+ ret void
+}
+
define void @store_seq_cst_i8(ptr %ptr, i8 signext %v) {
; LA32-LABEL: store_seq_cst_i8:
; LA32: # %bb.0:
store atomic i64 %v, ptr %ptr seq_cst, align 8
ret void
}
+
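+; A seq_cst atomic store of a ptr: LA32 brackets the st.w with full barriers (dbar 16);
+; LA64 again uses a barriered amswap_db.d to $zero.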
+define void @store_seq_cst_ptr(ptr %ptr, ptr %v) {
+; LA32-LABEL: store_seq_cst_ptr:
+; LA32: # %bb.0:
+; LA32-NEXT: dbar 16
+; LA32-NEXT: st.w $a1, $a0, 0
+; LA32-NEXT: dbar 16
+; LA32-NEXT: ret
+;
+; LA64-LABEL: store_seq_cst_ptr:
+; LA64: # %bb.0:
+; LA64-NEXT: amswap_db.d $zero, $a1, $a0
+; LA64-NEXT: ret
+ store atomic ptr %v, ptr %ptr seq_cst, align 8
+ ret void
+}