C LB+unlocklockonceonce+poacquireonce

(*
 * Result: Never
 *
 * If two locked critical sections execute on the same CPU, all accesses
 * in the first must execute before any accesses in the second, even if the
 * critical sections are protected by different locks. Note: Even when a
 * write executes before a read, their memory effects can be reordered from
 * the viewpoint of another CPU (the kind of reordering allowed by TSO).
 *)
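
(*
 * Informal sketch of why the "exists" outcome below is forbidden (an
 * editorial summary, not part of the upstream comment): 0:r1=1 requires
 * P1's WRITE_ONCE(*x, 1) to reach P0 before P0's READ_ONCE(*x), and
 * 1:r2=1 requires P0's WRITE_ONCE(*y, 1) to reach P1 before P1's
 * smp_load_acquire(y).  But P0's unlock(s)+lock(t) sequence orders its
 * read of x before its write to y, and P1's acquire load orders its
 * read of y before its write to x, so the four accesses would have to
 * form a cycle, which the model rules out.
 *)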

{}

P0(spinlock_t *s, spinlock_t *t, int *x, int *y)
{
	int r1;

	spin_lock(s);
	r1 = READ_ONCE(*x);
	spin_unlock(s);
	spin_lock(t);
	WRITE_ONCE(*y, 1);
	spin_unlock(t);
}

P1(int *x, int *y)
{
	int r2;

	r2 = smp_load_acquire(y);
	WRITE_ONCE(*x, 1);
}

exists (0:r1=1 /\ 1:r2=1)
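
(*
 * Usage sketch (editorial addition): from the kernel source tree's
 * tools/memory-model directory, this test can be checked with
 *
 *   herd7 -conf linux-kernel.cfg litmus-tests/LB+unlocklockonceonce+poacquireonce.litmus
 *
 * which should report that the "exists" clause above is never
 * satisfied, matching the "Result: Never" annotation in the header.
 *)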