core: removed trailing spaces
miconda committed Nov 13, 2023
1 parent bf90793 commit 82ae416
Showing 154 changed files with 496 additions and 496 deletions.
2 changes: 1 addition & 1 deletion src/core/action.c
@@ -1271,7 +1271,7 @@ int do_action(struct run_act_ctx *h, struct action *a, struct sip_msg *msg)
 if(sjt->rest.cond[i] == v) {
     if(likely(sjt->rest.jump[i])) {
         ret = run_actions(h, sjt->rest.jump[i], msg);
-        h->run_flags &= ~BREAK_R_F; /* catch breaks, but
+        h->run_flags &= ~BREAK_R_F; /* catch breaks, but
             let returns pass */
     }
     goto skip;
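The hunk above sits in the switch jump-table path of do_action(). A minimal sketch of the flag protocol its comment describes; BREAK_R_F comes from the hunk itself, while RETURN_R_F is an assumed companion flag that is deliberately left untouched:

    /* sketch: run_actions() reports control flow through bit flags in
     * h->run_flags; a break must terminate only the enclosing switch,
     * while a return must keep unwinding to the caller */
    ret = run_actions(h, sjt->rest.jump[i], msg);
    h->run_flags &= ~BREAK_R_F; /* consume the break at this level */
    /* RETURN_R_F (assumed name) is intentionally not cleared, so it
     * propagates upward: "catch breaks, but let returns pass" */
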
10 changes: 5 additions & 5 deletions src/core/atomic/atomic_alpha.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -14,16 +14,16 @@
  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  */
 
-/**
+/**
  * @file
  * @brief Atomic operations and memory barriers (alpha specific)
  *
  * Atomic operations and memory barriers (alpha specific)
  * \warning atomic ops do not include memory barriers, see atomic_ops.h
  * for more details.
- *
+ *
  * Config defines:
- * - NOSMP
+ * - NOSMP
  * - __CPU_alpha
  * @ingroup atomic
  */
@@ -88,7 +88,7 @@
 #endif /* NOSMP */
 
 
-/* main asm block
+/* main asm block
  * if store fails, jump _forward_ (optimization, because back jumps are
  * always predicted to happen on alpha )*/
 #define ATOMIC_ASM_OP00_int(op) \
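The "if store fails, jump _forward_" comment above describes a load-locked/store-conditional retry loop. A rough portable sketch of what an ATOMIC_ASM_OP00_int-based add amounts to, written with GCC __atomic builtins purely for illustration (the real header emits alpha ldl_l/stl_c assembler; the function name is hypothetical):

    #include <stdbool.h>

    static inline int atomic_add_int_sketch(volatile int *var, int v)
    {
        int old, new_v;
        do {
            old = *var;      /* ldl_l: load-locked                    */
            new_v = old + v; /* the "op" the macro parameterizes      */
            /* stl_c: store-conditional, fails if *var was written in
             * between; relaxed ordering because, per the warning
             * above, these atomic ops do not include memory barriers */
        } while(!__atomic_compare_exchange_n(var, &old, new_v, false,
                        __ATOMIC_RELAXED, __ATOMIC_RELAXED));
        return new_v;
    }
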
20 changes: 10 additions & 10 deletions src/core/atomic/atomic_arm.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -17,15 +17,15 @@
 /**
  * @file
  * @brief Atomic ops and memory barriers for ARM (>= v3)
- *
+ *
  * Atomic ops and memory barriers for ARM architecture (starting from version 3)
  * see atomic_ops.h for more info.
- *
+ *
  * Config defines:
  * - NOSMP
  * - __CPU_arm
  * - __CPU_arm6 - armv6 support (supports atomic ops via ldrex/strex)
- * - __CPU_arm7 - armv7 support
+ * - __CPU_arm7 - armv7 support
  * @ingroup atomic
  */

@@ -96,7 +96,7 @@
 #include "atomic_unknown.h"
 #endif
 
-/* main asm block
+/* main asm block
  * use %0 as input and write the output in %1*/
 #define ATOMIC_ASM_OP(op) \
 "1: ldrex %0, [%3] \n\t" \
@@ -105,7 +105,7 @@
 " cmp %0, #0 \n\t" \
 " bne 1b \n\t"
 
-/* same as above but writes %4 instead of %1, and %0 will contain
+/* same as above but writes %4 instead of %1, and %0 will contain
  * the prev. val*/
 #define ATOMIC_ASM_OP2(op) \
 "1: ldrex %0, [%3] \n\t" \
@@ -183,7 +183,7 @@
 
 /* cmpxchg: %5=old, %4=new_v, %3=var
  * if (*var==old) *var=new_v
- * returns the original *var (can be used to check if it succeeded:
+ * returns the original *var (can be used to check if it succeeded:
  * if old==cmpxchg(var, old, new_v) -> success
  */
 #define ATOMIC_CMPXCHG_DECL(NAME, P_TYPE) \
@@ -401,14 +401,14 @@ inline static long mb_atomic_add_long(volatile long *v, long i)
  * a spinlock, e.g:
  * mov %r0, #0x1
  * 1: swp %r1, %r0, [&atomic_val]
- * if (%r1 & 0x1) goto 1 # wait if first bit is 1
+ * if (%r1 & 0x1) goto 1 # wait if first bit is 1
  * %r1>>=1 # restore the value (only 31 bits can be used )
- * %r1=op (%r1, ...)
+ * %r1=op (%r1, ...)
  * %r1<<=1 # shift back the value, such that the first bit is 0
  * str %r1, [&atomic_val] # write the value
  *
  * However only 31 bits could be used (=> atomic_*_int and atomic_*_long
- * would still have to be lock based, since in these cases we guarantee all
+ * would still have to be lock based, since in these cases we guarantee all
  * the bits) and I'm not sure there would be a significant performance
  * benefit when compared with the fallback lock based version:
  * lock(atomic_lock);
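The cmpxchg contract documented above (return the original *var; "if old==cmpxchg(var, old, new_v) -> success") supports the usual optimistic-update loop. A hedged sketch: atomic_cmpxchg_int follows the naming pattern ATOMIC_CMPXCHG_DECL suggests, and shared/compute are illustrative placeholders:

    static volatile int shared;

    static int compute(int x) { return x * 2 + 1; } /* placeholder op */

    void update(void)
    {
        int old, new_v;
        do {
            old = shared;         /* snapshot the current value       */
            new_v = compute(old); /* derive the desired new value     */
            /* retry if another cpu changed the variable since the
             * snapshot: success <=> cmpxchg returned our old value   */
        } while(atomic_cmpxchg_int(&shared, old, new_v) != old);
    }
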
14 changes: 7 additions & 7 deletions src/core/atomic/atomic_common.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -17,7 +17,7 @@
 /**
  * @defgroup atomic Kamailio atomic operations
  * @brief Kamailio atomic operations and memory barriers support
- *
+ *
  * Kamailio atomic operations and memory barriers support for different CPU
  * architectures implemented in assembler. It also provides some generic
  * fallback code for architectures not currently supported.
@@ -26,7 +26,7 @@
 /**
  * @file
  * @brief Common part for all the atomic operations
- *
+ *
  * Common part for all the atomic operations (atomic_t and common operations)
  * see atomic_ops.h for more info.
  * @ingroup atomic
@@ -37,7 +37,7 @@
 
 /**
  * @brief atomic_t defined as a struct to easily catch non atomic operations on it.
- *
+ *
  * atomic_t defined as a struct to easily catch non atomic operations on it,
  * e.g. atomic_t foo; foo++ will generate a compile error.
  */
@@ -47,12 +47,12 @@ typedef struct
 } atomic_t;
 
 
-/**
+/**
  * @name Atomic load and store operations
  * Atomic store and load operations are atomic on all cpus, note however that they
- * don't include memory barriers so if you want to use atomic_{get,set}
+ * don't include memory barriers so if you want to use atomic_{get,set}
  * to implement mutexes you must use the mb_* versions or explicitely use
- * the barriers
+ * the barriers
  */
 
 /*@{ */
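The rationale above ("atomic_t foo; foo++ will generate a compile error") is easy to demonstrate. A standalone illustration of the struct trick; the val field is assumed from the typedef fragment shown in the hunk, and atomic_inc is the increment operation these headers provide:

    typedef struct {
        volatile int val;
    } atomic_t;

    atomic_t foo;

    void ok(void) { atomic_inc(&foo); } /* must go through the atomic API */
    /* void bad(void) { foo++; } */     /* rejected by the compiler: ++ is
                                         * undefined on a struct type, so a
                                         * stray non-atomic update cannot
                                         * slip through type checking      */
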
8 changes: 4 additions & 4 deletions src/core/atomic/atomic_mips2.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -14,10 +14,10 @@
  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  */
 
-/**
- * @file
+/**
+ * @file
  * @brief Atomic operations and memory barriers (MIPS isa 2 and MIPS64 specific)
- *
+ *
  * Atomic operations and memory barriers (MIPS isa 2 and MIPS64 specific)
  * \warning atomic ops do not include memory barriers, see atomic_ops.h for
  * more details.
6 changes: 3 additions & 3 deletions src/core/atomic/atomic_native.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -17,12 +17,12 @@
 /**
  * @file
  * @brief Native (asm) atomic operations and memory barriers
- *
+ *
  * Native (assembler) atomic operations and memory barriers.
  * \warning atomic ops do not include memory barriers, see atomic_ops.h for
  * more info. Expects atomic_t to be defined (#include "atomic_common.h")
  *
- * Config defines:
+ * Config defines:
  * - CC_GCC_LIKE_ASM - the compiler support gcc style inline asm
  * - NOSMP - the code will be a little faster, but not SMP safe
  * - __CPU_i386, __CPU_x86_64, X86_OOSTORE - see atomic_x86.h
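The config defines listed above drive a compile-time choice of the per-architecture header. A schematic reconstruction of that dispatch built only from the names in the comment; the real #ifdef chain in atomic_native.h may differ in detail:

    #ifdef CC_GCC_LIKE_ASM /* compiler supports gcc-style inline asm  */
    #if defined(__CPU_i386) || defined(__CPU_x86_64)
    #include "atomic_x86.h" /* X86_OOSTORE variations handled there   */
    #elif defined(__CPU_alpha)
    #include "atomic_alpha.h"
    #elif defined(__CPU_arm) || defined(__CPU_arm6) || defined(__CPU_arm7)
    #include "atomic_arm.h"
    #endif /* ...remaining CPUs elided... */
    #endif /* no CC_GCC_LIKE_ASM -> fall back to atomic_unknown.h     */
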
6 changes: 3 additions & 3 deletions src/core/atomic/atomic_ppc.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -17,10 +17,10 @@
 /**
  * @file
  * @brief Atomic operations and memory barriers (PowerPC and PowerPC64 versions)
- *
+ *
  * Atomic operations and memory barriers (PowerPC and PowerPC64 versions)
  * \warning atomic ops do not include memory barriers see atomic_ops.h for
- * more details.
+ * more details.
  * \warning not tested on ppc64
  *
  * Config defines:
6 changes: 3 additions & 3 deletions src/core/atomic/atomic_sparc.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -17,11 +17,11 @@
 /**
  * @file
  * @brief Memory barriers for SPARC32 ( version < v 9))
- *
+ *
  * Memory barriers for SPARC32 ( version < v 9)), see atomic_ops.h for more
  * details.
  *
- * Config defines:
+ * Config defines:
  * - NOSMP
  * @ingroup atomic
  */
8 changes: 4 additions & 4 deletions src/core/atomic/atomic_sparc64.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -17,7 +17,7 @@
 /**
  * @file
  * @brief Atomic operations and memory barriers (SPARC64 version, 32 and 64 bit modes)
- *
+ *
  * Atomic operations and memory barriers (SPARC64 version, 32 and 64 bit modes)
  * \warning atomic ops do not include memory barriers see atomic_ops.h for
  * more details.
@@ -62,7 +62,7 @@
  * (if the atomic ops on the specific architecture imply these barriers
  * => these macros will be empty)
  * Warning: these barriers don't force LoadLoad ordering between code
- * before the lock/membar_enter_lock() and code
+ * before the lock/membar_enter_lock() and code
  * after membar_leave_lock()/unlock()
  *
  * Usage: lock(); membar_enter_lock(); .... ; membar_leave_lock(); unlock()
@@ -164,7 +164,7 @@
 	return RET_EXPR; \
 }
 
-/* same as above, but uses a short 1 op sequence
+/* same as above, but uses a short 1 op sequence
  * %2 (or %1) is var, %0 is v and return (ret)*/
 #define ATOMIC_FUNC_DECL1_RAW(NAME, OP, P_TYPE, RET_TYPE, RET_EXPR) \
 inline static RET_TYPE atomic_##NAME##_##P_TYPE( \
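The usage line quoted above fixes where the lock barriers sit relative to the critical section. Spelled out as a code shape; lock()/unlock() and the protected counter are illustrative placeholders, and both macros may be empty on architectures whose lock ops already imply the barrier:

    lock(&s_lock);       /* acquire the lock                           */
    membar_enter_lock(); /* empty if the lock op implies the barrier   */
    shared_counter++;    /* critical section                           */
    membar_leave_lock(); /* order the section before the release       */
    unlock(&s_lock);
    /* per the warning above, LoadLoad ordering between code before the
     * lock and code after the unlock is still NOT forced              */
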
10 changes: 5 additions & 5 deletions src/core/atomic/atomic_unknown.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -22,7 +22,7 @@
  * (for architectures not yet supported via inline assembler).
  *
  * \warning atomic ops do not include memory barriers, see atomic_ops.h
- * for more details
+ * for more details
  *
  * Config defines:
  * - NOSMP (membars are null in this case)
@@ -60,8 +60,8 @@ extern gen_lock_t *__membar_lock; /* init in atomic_ops.c */
 #define _membar_lock lock_get(__membar_lock)
 #define _membar_unlock lock_release(__membar_lock)
 
-/* memory barriers
- * not a known cpu -> fall back unlock/lock: safe but costly (it should
+/* memory barriers
+ * not a known cpu -> fall back unlock/lock: safe but costly (it should
  * include a memory barrier effect)
  * lock/unlock does not imply a full memory barrier effect (it allows mixing
  * operations from before the lock with operations after the lock _inside_
@@ -246,7 +246,7 @@ ATOMIC_FUNC_DECL1_RET(add, *var += v; ret = *var, long, long, ret)
 
 
 /* memory barrier versions, the same as "normal" versions (since the
- * locks act as membars), * except fot * the set/get
+ * locks act as membars), * except fot * the set/get
  */
 
 /* mb_atomic_{set,get} use membar() : if we're lucky we have membars
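The fallback described above synthesizes a full barrier from the global __membar_lock. One plausible reading of that description as code; whether the real macro pairs the operations exactly this way is an assumption:

    extern gen_lock_t *__membar_lock; /* init in atomic_ops.c */
    #define _membar_lock lock_get(__membar_lock)
    #define _membar_unlock lock_release(__membar_lock)

    /* sketch: a lock/unlock round trip on a shared lock "should include
     * a memory barrier effect" (comment above) -- safe but costly      */
    #define membar() \
        do { \
            _membar_lock; \
            _membar_unlock; \
        } while(0)
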
8 changes: 4 additions & 4 deletions src/core/atomic/atomic_x86.h
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
@@ -17,7 +17,7 @@
 /**
  * @file
  * @brief Atomic operations and memory barriers (x86 and x86_64/amd64 specific)
- *
+ *
  * Atomic operations and memory barriers (x86 and x86_64/amd64 specific)
  * \warning atomic ops do not include memory barriers, see atomic_ops.h for more
  * details.
@@ -93,7 +93,7 @@
 	asm volatile( \
 		" lock; addq $0, 0(%%rsp) \n\t " \
 		: : : "memory" \
-		)
+		)
 */
 #define membar() asm volatile(" mfence \n\t " : : : "memory")
 #define membar_read() asm volatile(" lfence \n\t " : : : "memory")
@@ -122,7 +122,7 @@
 	do { \
 	} while(0) /* really empty, not even a cc bar. */
 /* lock barrriers: empty, not needed on x86 or x86_64 (atomic ops already
- * force the barriers if needed); the lock/unlock should already contain the
+ * force the barriers if needed); the lock/unlock should already contain the
  * gcc do_not_cache barriers*/
 #define membar_enter_lock() \
 	do { \
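The mfence/lfence definitions above are the full and read barriers; a matching membar_write() (sfence) is assumed by analogy with the read barrier. The classic producer/consumer pattern shows where they belong; shared_data and data_ready are illustrative names:

    static volatile int shared_data, data_ready;

    void producer(void)
    {
        shared_data = 42; /* payload store                            */
        membar_write();   /* order the payload before the flag; on x86
                           * this mainly matters with X86_OOSTORE or
                           * non-temporal stores, since ordinary stores
                           * already stay ordered                      */
        data_ready = 1;   /* publish                                  */
    }

    int consumer(void)
    {
        while(!data_ready) /* spin until published                    */
            ;
        membar_read();     /* lfence: flag read ordered before the
                            * payload read                            */
        return shared_data;
    }
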
2 changes: 1 addition & 1 deletion src/core/atomic_ops.c
@@ -1,4 +1,4 @@
-/*
+/*
  * Copyright (C) 2006 iptelorg GmbH
  *
  * Permission to use, copy, modify, and distribute this software for any
