linux-stable/include/linux/atomic/atomic-long.h
Mark Rutland b33eb50a92 locking/atomic: scripts: fix ${atomic}_dec_if_positive() kerneldoc
The ${atomic}_dec_if_positive() ops are unlike all the other conditional
atomic ops. Rather than returning a boolean success value, these return
the value that the atomic variable would be updated to, even when no
update is performed.

We missed this when adding kerneldoc comments, and the documentation for
${atomic}_dec_if_positive() erroneously states:

| Return: @true if @v was updated, @false otherwise.

Ideally we'd clean this up by aligning ${atomic}_dec_if_positive() with
the usual atomic op conventions: with ${atomic}_fetch_dec_if_positive()
for those who care about the value of the variable, and
${atomic}_dec_if_positive() returning a boolean success value.

In the meantime, align the documentation with the current reality.

Fixes: ad8110706f ("locking/atomic: scripts: generate kerneldoc comments")
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Paul E. McKenney <paulmck@kernel.org>
Link: https://lore.kernel.org/r/20230615132734.1119765-1-mark.rutland@arm.com
2023-06-16 16:46:30 +02:00

1798 lines
45 KiB
C

// SPDX-License-Identifier: GPL-2.0
// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY
#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H
#include <linux/compiler.h>
#include <asm/types.h>
#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
#endif
/**
 * raw_atomic_long_read() - atomic load with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
raw_atomic_long_read(const atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_read(v);
#else
	return raw_atomic64_read(v);
#endif
}
/**
 * raw_atomic_long_read_acquire() - atomic load with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
raw_atomic_long_read_acquire(const atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_read_acquire(v);
#else
	return raw_atomic64_read_acquire(v);
#endif
}
/**
 * raw_atomic_long_set() - atomic set with relaxed ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_set(atomic_long_t *v, long i)
{
#ifndef CONFIG_64BIT
	raw_atomic_set(v, i);
#else
	raw_atomic64_set(v, i);
#endif
}
/**
 * raw_atomic_long_set_release() - atomic set with release ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_set_release(atomic_long_t *v, long i)
{
#ifndef CONFIG_64BIT
	raw_atomic_set_release(v, i);
#else
	raw_atomic64_set_release(v, i);
#endif
}
/**
 * raw_atomic_long_add() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_add(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_add(i, v);
#else
	raw_atomic64_add(i, v);
#endif
}
/**
 * raw_atomic_long_add_return() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_return(i, v);
#else
	return raw_atomic64_add_return(i, v);
#endif
}
/**
 * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_return_acquire(i, v);
#else
	return raw_atomic64_add_return_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_add_return_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_return_release(i, v);
#else
	return raw_atomic64_add_return_release(i, v);
#endif
}
/**
 * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_return_relaxed(i, v);
#else
	return raw_atomic64_add_return_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_add() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_add(i, v);
#else
	return raw_atomic64_fetch_add(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_add_acquire(i, v);
#else
	return raw_atomic64_fetch_add_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_add_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_add_release(i, v);
#else
	return raw_atomic64_fetch_add_release(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_add_relaxed(i, v);
#else
	return raw_atomic64_fetch_add_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_sub() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_sub(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_sub(i, v);
#else
	raw_atomic64_sub(i, v);
#endif
}
/**
 * raw_atomic_long_sub_return() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_sub_return(i, v);
#else
	return raw_atomic64_sub_return(i, v);
#endif
}
/**
 * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_sub_return_acquire(i, v);
#else
	return raw_atomic64_sub_return_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_sub_return_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_sub_return_release(i, v);
#else
	return raw_atomic64_sub_return_release(i, v);
#endif
}
/**
 * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_sub_return_relaxed(i, v);
#else
	return raw_atomic64_sub_return_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_sub() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_sub(i, v);
#else
	return raw_atomic64_fetch_sub(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_sub_acquire(i, v);
#else
	return raw_atomic64_fetch_sub_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_sub_release(i, v);
#else
	return raw_atomic64_fetch_sub_release(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_sub_relaxed(i, v);
#else
	return raw_atomic64_fetch_sub_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_inc() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_inc(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_inc(v);
#else
	raw_atomic64_inc(v);
#endif
}
/**
 * raw_atomic_long_inc_return() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_inc_return(v);
#else
	return raw_atomic64_inc_return(v);
#endif
}
/**
 * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_acquire(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_inc_return_acquire(v);
#else
	return raw_atomic64_inc_return_acquire(v);
#endif
}
/**
 * raw_atomic_long_inc_return_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_release(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_inc_return_release(v);
#else
	return raw_atomic64_inc_return_release(v);
#endif
}
/**
 * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_inc_return_relaxed(v);
#else
	return raw_atomic64_inc_return_relaxed(v);
#endif
}
/**
 * raw_atomic_long_fetch_inc() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_inc(v);
#else
	return raw_atomic64_fetch_inc(v);
#endif
}
/**
 * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_inc_acquire(v);
#else
	return raw_atomic64_fetch_inc_acquire(v);
#endif
}
/**
 * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_release(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_inc_release(v);
#else
	return raw_atomic64_fetch_inc_release(v);
#endif
}
/**
 * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_inc_relaxed(v);
#else
	return raw_atomic64_fetch_inc_relaxed(v);
#endif
}
/**
 * raw_atomic_long_dec() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_dec(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_dec(v);
#else
	raw_atomic64_dec(v);
#endif
}
/**
 * raw_atomic_long_dec_return() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_dec_return(v);
#else
	return raw_atomic64_dec_return(v);
#endif
}
/**
 * raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_acquire(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_dec_return_acquire(v);
#else
	return raw_atomic64_dec_return_acquire(v);
#endif
}
/**
 * raw_atomic_long_dec_return_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_release(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_dec_return_release(v);
#else
	return raw_atomic64_dec_return_release(v);
#endif
}
/**
 * raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_dec_return_relaxed(v);
#else
	return raw_atomic64_dec_return_relaxed(v);
#endif
}
/**
 * raw_atomic_long_fetch_dec() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_dec(v);
#else
	return raw_atomic64_fetch_dec(v);
#endif
}
/**
 * raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_dec_acquire(v);
#else
	return raw_atomic64_fetch_dec_acquire(v);
#endif
}
/**
 * raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_release(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_dec_release(v);
#else
	return raw_atomic64_fetch_dec_release(v);
#endif
}
/**
 * raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_dec_relaxed(v);
#else
	return raw_atomic64_fetch_dec_relaxed(v);
#endif
}
/**
 * raw_atomic_long_and() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_and() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_and(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_and(i, v);
#else
	raw_atomic64_and(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_and(i, v);
#else
	return raw_atomic64_fetch_and(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_and_acquire(i, v);
#else
	return raw_atomic64_fetch_and_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_and_release(i, v);
#else
	return raw_atomic64_fetch_and_release(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_and_relaxed(i, v);
#else
	return raw_atomic64_fetch_and_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_andnot(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_andnot(i, v);
#else
	raw_atomic64_andnot(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_andnot(i, v);
#else
	return raw_atomic64_fetch_andnot(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_andnot_acquire(i, v);
#else
	return raw_atomic64_fetch_andnot_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_andnot_release(i, v);
#else
	return raw_atomic64_fetch_andnot_release(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_andnot_relaxed(i, v);
#else
	return raw_atomic64_fetch_andnot_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_or(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_or(i, v);
#else
	raw_atomic64_or(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_or() - atomic bitwise OR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_or(i, v);
#else
	return raw_atomic64_fetch_or(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_or_acquire(i, v);
#else
	return raw_atomic64_fetch_or_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_or_release(i, v);
#else
	return raw_atomic64_fetch_or_release(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_or_relaxed(i, v);
#else
	return raw_atomic64_fetch_or_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_xor(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	raw_atomic_xor(i, v);
#else
	raw_atomic64_xor(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_xor(i, v);
#else
	return raw_atomic64_fetch_xor(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_xor_acquire(i, v);
#else
	return raw_atomic64_fetch_xor_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_xor_release(i, v);
#else
	return raw_atomic64_fetch_xor_release(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_xor_relaxed(i, v);
#else
	return raw_atomic64_fetch_xor_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically replaces the value of @v with @new, providing full ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_xchg().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_xchg(v, new);
#else
	return raw_atomic64_xchg(v, new);
#endif
}
/**
 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically replaces the value of @v with @new, providing acquire ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_xchg_acquire().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_xchg_acquire(v, new);
#else
	return raw_atomic64_xchg_acquire(v, new);
#endif
}
/**
 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically replaces the value of @v with @new, providing release ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_xchg_release().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_xchg_release(v, new);
#else
	return raw_atomic64_xchg_release(v, new);
#endif
}
/**
 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically replaces the value of @v with @new, providing relaxed ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_xchg_relaxed().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_xchg_relaxed(v, new);
#else
	return raw_atomic64_xchg_relaxed(v, new);
#endif
}
/**
 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * When @v equals @old, atomically replaces it with @new; full ordering is
 * provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_cmpxchg().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_cmpxchg(v, old, new);
#else
	return raw_atomic64_cmpxchg(v, old, new);
#endif
}
/**
 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * When @v equals @old, atomically replaces it with @new; acquire ordering is
 * provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_cmpxchg_acquire().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_cmpxchg_acquire(v, old, new);
#else
	return raw_atomic64_cmpxchg_acquire(v, old, new);
#endif
}
/**
 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * When @v equals @old, atomically replaces it with @new; release ordering is
 * provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_cmpxchg_release().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_cmpxchg_release(v, old, new);
#else
	return raw_atomic64_cmpxchg_release(v, old, new);
#endif
}
/**
 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * When @v equals @old, atomically replaces it with @new; relaxed ordering is
 * provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_cmpxchg_relaxed().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
#ifndef CONFIG_64BIT
	return raw_atomic_cmpxchg_relaxed(v, old, new);
#else
	return raw_atomic64_cmpxchg_relaxed(v, old, new);
#endif
}
/**
 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg(v, (int *)old, new);
#endif
}
/**
 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
#endif
}
/**
 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
#endif
}
/**
 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
#endif
}
/**
 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_and_test(i, v);
#else
	return raw_atomic_sub_and_test(i, v);
#endif
}
/**
 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically decrements @v by one, providing full ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_dec_and_test().
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_dec_and_test(v);
#else
	return raw_atomic64_dec_and_test(v);
#endif
}
/**
 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically increments @v by one, providing full ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_inc_and_test().
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_inc_and_test(v);
#else
	return raw_atomic64_inc_and_test(v);
#endif
}
/**
 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically adds @i to @v, providing full ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_add_negative().
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_negative(i, v);
#else
	return raw_atomic64_add_negative(i, v);
#endif
}
/**
 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically adds @i to @v, providing acquire ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_add_negative_acquire().
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_negative_acquire(i, v);
#else
	return raw_atomic64_add_negative_acquire(i, v);
#endif
}
/**
 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically adds @i to @v, providing release ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_add_negative_release().
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_negative_release(i, v);
#else
	return raw_atomic64_add_negative_release(i, v);
#endif
}
/**
 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically adds @i to @v, providing relaxed ordering.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_add_negative_relaxed().
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_negative_relaxed(i, v);
#else
	return raw_atomic64_add_negative_relaxed(i, v);
#endif
}
/**
 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * Unless @v equals @u, atomically adds @a to @v; full ordering is provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_fetch_add_unless().
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
#ifndef CONFIG_64BIT
	return raw_atomic_fetch_add_unless(v, a, u);
#else
	return raw_atomic64_fetch_add_unless(v, a, u);
#endif
}
/**
 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * Unless @v equals @u, atomically adds @a to @v; full ordering is provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_add_unless().
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
#ifndef CONFIG_64BIT
	return raw_atomic_add_unless(v, a, u);
#else
	return raw_atomic64_add_unless(v, a, u);
#endif
}
/**
 * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Unless @v is zero, atomically increments @v by one; full ordering is
 * provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_inc_not_zero().
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_not_zero(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_inc_not_zero(v);
#else
	return raw_atomic64_inc_not_zero(v);
#endif
}
/**
 * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic_long_t
 *
 * As long as @v is non-negative, atomically increments @v by one; full
 * ordering is provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_inc_unless_negative().
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_unless_negative(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_inc_unless_negative(v);
#else
	return raw_atomic64_inc_unless_negative(v);
#endif
}
/**
 * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * As long as @v is non-positive, atomically decrements @v by one; full
 * ordering is provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_dec_unless_positive().
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_unless_positive(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_dec_unless_positive(v);
#else
	return raw_atomic64_dec_unless_positive(v);
#endif
}
/**
 * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * Only when @v is strictly positive, atomically decrements @v by one; full
 * ordering is provided.
 *
 * This is safe for noinstr code; outside of noinstr code, prefer
 * atomic_long_dec_if_positive().
 *
 * Return: The value of (@v - 1) computed from the original @v, regardless of
 * whether @v was updated.
 */
static __always_inline long
raw_atomic_long_dec_if_positive(atomic_long_t *v)
{
#ifndef CONFIG_64BIT
	return raw_atomic_dec_if_positive(v);
#else
	return raw_atomic64_dec_if_positive(v);
#endif
}
#endif /* _LINUX_ATOMIC_LONG_H */
// 4ef23f98c73cff96d239896175fd26b10b88899e