/* SPDX-License-Identifier: GPL-2.0-only */
/*
* ARCv2 supports 64-bit exclusive load (LLOCKD) / store (SCONDD)
* - The address HAS to be 64-bit aligned
*/
#ifndef _ASM_ARC_ATOMIC64_ARCV2_H
#define _ASM_ARC_ATOMIC64_ARCV2_H
/*
 * 64-bit atomic counter. The explicit 8-byte alignment satisfies the
 * hardware requirement stated above: LLOCKD/SCONDD (and LDD/STD) need a
 * 64-bit aligned address.
 */
typedef struct {
s64 __aligned(8) counter;
} atomic64_t;
/* Static initializer for an atomic64_t. */
#define ATOMIC64_INIT(a) { (a) }
/*
 * Atomically read the counter.
 * LDD is a single 64-bit load, so the value cannot be torn given the
 * __aligned(8) on atomic64_t. Plain relaxed read: no barrier and no
 * "memory" clobber — the asm only loads.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
s64 val;
__asm__ __volatile__(
" ldd %0, [%1] \n"
: "=r"(val)
: "r"(&v->counter));
return val;
}
/* Atomically set the counter: STD is a single 64-bit store (no tearing). */
static inline void arch_atomic64_set(atomic64_t *v, s64 a)
{
/*
 * This could have been a simple assignment in "C" but would need
 * explicit volatile. Otherwise gcc optimizers could elide the store
 * which borked atomic64 self-test
 * In the inline asm version, memory clobber needed for exact same
 * reason, to tell gcc about the store.
 *
 * This however is not needed for sibling atomic64_add() etc since both
 * load/store are explicitly done in inline asm. As long as API is used
 * for each access, gcc has no way to optimize away any load/store
 */
__asm__ __volatile__(
" std %0, [%1] \n"
:
: "r"(a), "r"(&v->counter)
: "memory");
}
/*
 * Generate arch_atomic64_<op>(): atomic RMW with no return value.
 * The 64-bit math is done in two halves: op1 on the low word (".f"
 * variants set flags), op2 on the high word (consuming carry/borrow).
 * The LLOCKD/SCONDD loop retries until the store-conditional succeeds.
 *
 * NOTE(fix): the original closed with "} \" immediately followed by the
 * next #define; the backslash-newline splice would absorb that directive
 * into this macro's replacement list, leaving ATOMIC64_OP_RETURN
 * undefined. The stray trailing continuation is dropped here.
 */
#define ATOMIC64_OP(op, op1, op2)					\
static inline void arch_atomic64_##op(s64 a, atomic64_t *v)		\
{									\
	s64 val;							\
									\
	__asm__ __volatile__(						\
	"1:				\n"				\
	"	llockd  %0, [%1]	\n"				\
	"	" #op1 " %L0, %L0, %L2	\n"				\
	"	" #op2 " %H0, %H0, %H2	\n"				\
	"	scondd  %0, [%1]	\n"				\
	"	bnz     1b		\n"				\
	: "=&r"(val)							\
	: "r"(&v->counter), "ir"(a)					\
	: "cc", "memory");						\
}
/*
 * Generate arch_atomic64_<op>_return_relaxed(): atomic RMW returning
 * the NEW value. Same low/high split as ATOMIC64_OP (op1 sets flags on
 * the low word, op2 consumes carry/borrow on the high word). Relaxed
 * ordering: no barriers here; the generic layer adds them as needed.
 *
 * NOTE(fix): the output operand carried a named marker "[val]" that the
 * template never referenced (it uses positional %0/%L0/%H0), and which
 * no sibling macro uses — dropped for consistency.
 */
#define ATOMIC64_OP_RETURN(op, op1, op2)				\
static inline s64 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
{									\
	s64 val;							\
									\
	__asm__ __volatile__(						\
	"1:				\n"				\
	"	llockd  %0, [%1]	\n"				\
	"	" #op1 " %L0, %L0, %L2	\n"				\
	"	" #op2 " %H0, %H0, %H2	\n"				\
	"	scondd  %0, [%1]	\n"				\
	"	bnz     1b		\n"				\
	: "=&r"(val)							\
	: "r"(&v->counter), "ir"(a)					\
	: "cc", "memory");						\
									\
	return val;							\
}
/* Advertise the relaxed return variants to the generic atomic layer. */
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed
/*
 * Generate arch_atomic64_fetch_<op>_relaxed(): atomic RMW returning the
 * value the counter held BEFORE the operation. %0 (orig) holds the
 * loaded value untouched; %1 (val) holds the updated value that is
 * store-conditional'd back. Low/high split as in the sibling macros.
 */
#define ATOMIC64_FETCH_OP(op, op1, op2) \
static inline s64 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
{ \
s64 val, orig; \
\
__asm__ __volatile__( \
"1: \n" \
" llockd %0, [%2] \n" \
" " #op1 " %L1, %L0, %L3 \n" \
" " #op2 " %H1, %H0, %H3 \n" \
" scondd %1, [%2] \n" \
" bnz 1b \n" \
: "=&r"(orig), "=&r"(val) \
: "r"(&v->counter), "ir"(a) \
: "cc", "memory"); \
\
return orig; \
}
/* Advertise the relaxed fetch_* variants to the generic atomic layer. */
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed
/* Arithmetic ops get all three flavors: void, return, and fetch. */
#define ATOMIC64_OPS(op, op1, op2) \
ATOMIC64_OP(op, op1, op2) \
ATOMIC64_OP_RETURN(op, op1, op2) \
ATOMIC64_FETCH_OP(op, op1, op2)
/* add/sub chain carry/borrow from the low word into the high word. */
ATOMIC64_OPS(add, add.f, adc)
ATOMIC64_OPS(sub, sub.f, sbc)
#undef ATOMIC64_OPS
/* Bitwise ops have no *_return variant: only void and fetch flavors. */
#define ATOMIC64_OPS(op, op1, op2) \
ATOMIC64_OP(op, op1, op2) \
ATOMIC64_FETCH_OP(op, op1, op2)
/* Same instruction for both halves: bitwise ops carry nothing across. */
ATOMIC64_OPS(and, and, and)
ATOMIC64_OPS(andnot, bic, bic)
ATOMIC64_OPS(or, or, or)
ATOMIC64_OPS(xor, xor, xor)
/* arch provides andnot natively (BIC), tell the generic layer. */
#define arch_atomic64_andnot arch_atomic64_andnot
#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
/*
 * Atomic compare-and-exchange: if *ptr == expected, store new.
 * Returns the previous value either way (callers compare it against
 * expected to detect success). The two BRNEs bail to 2f on a low- or
 * high-word mismatch, skipping the SCONDD. Fully ordered via the
 * smp_mb() pair around the LLSC loop.
 */
static inline s64
arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)
{
s64 prev;
smp_mb();
__asm__ __volatile__(
"1: llockd %0, [%1] \n"
" brne %L0, %L2, 2f \n"
" brne %H0, %H2, 2f \n"
" scondd %3, [%1] \n"
" bnz 1b \n"
"2: \n"
: "=&r"(prev)
: "r"(ptr), "ir"(expected), "r"(new)
: "cc"); /* memory clobber comes from smp_mb() */
smp_mb();
return prev;
}
/*
 * Atomic exchange: unconditionally store new, return the old value.
 * Fully ordered via the smp_mb() pair. (The "2:" label is unreferenced
 * in this template; kept as-is since removing it is cosmetic only.)
 */
static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
{
s64 prev;
smp_mb();
__asm__ __volatile__(
"1: llockd %0, [%1] \n"
" scondd %2, [%1] \n"
" bnz 1b \n"
"2: \n"
: "=&r"(prev)
: "r"(ptr), "r"(new)
: "cc"); /* memory clobber comes from smp_mb() */
smp_mb();
return prev;
}
/*
 * Decrement the counter unless the result would be negative.
 * The 64-bit decrement is SUB.F on the low word (sets C on borrow) then
 * SUB.C on the high word (applied only if borrow). If the decremented
 * high word is negative (BRLT vs 0), the SCONDD is skipped — *v is left
 * unchanged. Returns the decremented value in either case, so a
 * negative return tells the caller the store did not happen.
 * Fully ordered via the smp_mb() pair.
 */
static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
s64 val;
smp_mb();
__asm__ __volatile__(
"1: llockd %0, [%1] \n"
" sub.f %L0, %L0, 1 # w0 - 1, set C on borrow\n"
" sub.c %H0, %H0, 1 # if C set, w1 - 1\n"
" brlt %H0, 0, 2f \n"
" scondd %0, [%1] \n"
" bnz 1b \n"
"2: \n"
: "=&r"(val)
: "r"(&v->counter)
: "cc"); /* memory clobber comes from smp_mb() */
smp_mb();
return val;
}
/*
 * Add @a to *v unless *v == @u; return the value *v held beforehand.
 * Comparison is per-word: BRNE on the low words jumps straight to the
 * add at 2f (already unequal), otherwise BREQ.d on the high words exits
 * to 3f when both halves match @u. (NOTE(review): the ".d" suffix marks
 * a delay-slot branch — the instruction at 2f appears to execute in its
 * shadow; behavior kept verbatim, confirm against the ARCv2 PRM before
 * touching.) Fully ordered via the smp_mb() pair.
 */
static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
s64 old, temp;
smp_mb();
__asm__ __volatile__(
"1: llockd %0, [%2] \n"
" brne %L0, %L4, 2f # continue to add since v != u \n"
" breq.d %H0, %H4, 3f # return since v == u \n"
"2: \n"
" add.f %L1, %L0, %L3 \n"
" adc %H1, %H0, %H3 \n"
" scondd %1, [%2] \n"
" bnz 1b \n"
"3: \n"
: "=&r"(old), "=&r" (temp)
: "r"(&v->counter), "r"(a), "r"(u)
: "cc"); /* memory clobber comes from smp_mb() */
smp_mb();
return old;
}
#endif