/* Compiler-only barrier: stops compiler reordering, emits no hardware fence. */
#define COMPILER_BARRIER() asm volatile ("" : : : "memory")
/* Branch-prediction hints: use __builtin_expect() on GCC-compatible compilers. */
#if defined(__GNUC__)
#define UNLIKELY(cond) __builtin_expect(cond, 0)
#define LIKELY(cond)   __builtin_expect(cond, 1)
#else
#define UNLIKELY(cond) (cond)
#define LIKELY(cond)   (cond)
#endif
/* When UTILS_QUEUE_MPMC is not defined, the spinlock helpers are no-ops. */
#ifndef UTILS_QUEUE_MPMC
#define utils_spinlock_init(V)
#define utils_spinlock_lock(V)
#define utils_spinlock_unlock(V)
#endif
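
/*
 * Illustrative only: how the hint and barrier macros above are typically
 * used together; this function and its parameters are made up for the example.
 */
static int example_consume_ready(volatile int *ready, int *out, int value)
{
    if (UNLIKELY(!*ready))     /* tell the compiler the "not ready" path is rare */
        return 0;

    *out = value;
    COMPILER_BARRIER();        /* keep the compiler from reordering the two stores */
    *ready = 0;

    return 1;
}
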
    /* Capacity must be a power of two so indices can be masked with
     * (UTILS_QUEUE_CAPACITY - 1) instead of using a modulo. */
    arax_assert(!( UTILS_QUEUE_CAPACITY & (UTILS_QUEUE_CAPACITY - 1) ) );
 
    /* Zero the caller-provided buffer that backs the queue. */
    memset(buff, 0, sizeof(struct queue));
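
/*
 * A minimal sketch of how utils_queue_init() could combine the two lines
 * above. The signature comes from the member list at the end of this file;
 * returning the caller's buffer, cast to the handle type, is an assumption.
 */
utils_queue_s * utils_queue_init(void *buff)
{
    /* Capacity must be a power of two for the index masks used below. */
    arax_assert(!( UTILS_QUEUE_CAPACITY & (UTILS_QUEUE_CAPACITY - 1) ));

    /* Zeroing leaves top == bottom == 0, i.e. an empty queue. */
    memset(buff, 0, sizeof(struct queue));

    return (utils_queue_s *) buff;  /* assumed: the buffer doubles as the handle */
}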
 
 
    register int used_slots;

    /* Occupancy is the distance between the 16-bit bottom and top cursors. */
    used_slots = q->bottom - q->top;

    /* Add back the full 16-bit span when bottom has wrapped past top
     * (negative difference). */
    if (used_slots < 0)
        used_slots += UINT16_MAX + 1;

    return (unsigned int) used_slots;
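
/*
 * Worked example of the wrap-around correction, with illustrative values:
 * if top == 65534 and bottom has wrapped to 3, then 3 - 65534 = -65531,
 * and -65531 + (UINT16_MAX + 1) = -65531 + 65536 = 5 slots in use.
 */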
 
 
    register uint16_t t, b;  /* local snapshots of the queue's 16-bit cursors */
 
    i       = t & (UTILS_QUEUE_CAPACITY - 1);  /* slot index of the top entry */

    /* Atomically claim that slot by advancing top; the branch handles losing
     * the race to another consumer. */
    if (!__sync_bool_compare_and_swap(&q->top, t, t + 1) )
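
/*
 * A minimal sketch of a lock-free pop built around the fragments above,
 * assuming: equal cursors mean "empty", entries[] holds the stored pointers
 * (see the member list at the end), and NULL is returned both when the queue
 * is empty and when the compare-and-swap on top loses a race.
 */
void * utils_queue_pop(utils_queue_s *q)
{
    register uint16_t t, b;
    void *ret;
    int   i;

    t = q->top;
    b = q->bottom;

    if (t == b)           /* assumed emptiness check */
        return NULL;

    i   = t & (UTILS_QUEUE_CAPACITY - 1);
    ret = q->entries[i];

    /* Claim the slot by atomically advancing top; on a lost race, give up
     * and let the caller retry. */
    if (!__sync_bool_compare_and_swap(&q->top, t, t + 1) )
        return NULL;      /* assumed failure value */

    return ret;
}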
 
 
    int used_slots = b - t;

        /* 16-bit wrap-around compensation, as in utils_queue_used_slots(). */
        used_slots += UINT16_MAX + 1;

    /* The queue is full when every slot between top and bottom is occupied. */
    if (used_slots == UTILS_QUEUE_CAPACITY) {

    i = b & (UTILS_QUEUE_CAPACITY - 1);  /* slot index where the entry is written */

    /* Full memory barrier: all earlier writes become visible before any later ones. */
    __sync_synchronize();
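
/*
 * A minimal sketch of a push built around the fragments above. The fullness
 * test, the index mask and the __sync_synchronize() barrier come from the
 * source; the NULL-on-full / data-on-success return values, the slot store
 * and the final bottom update are assumptions.
 */
void * utils_queue_push(utils_queue_s *q, void *data)
{
    register uint16_t t, b;
    int i, used_slots;

    b = q->bottom;
    t = q->top;

    used_slots = b - t;
    if (used_slots < 0)                     /* assumed guard for 16-bit wrap-around */
        used_slots += UINT16_MAX + 1;

    if (used_slots == UTILS_QUEUE_CAPACITY) {
        return NULL;                        /* assumed: reject pushes while full */
    }

    i = b & (UTILS_QUEUE_CAPACITY - 1);
    q->entries[i] = data;                   /* assumed slot store */

    /* Make the stored entry globally visible before advertising it. */
    __sync_synchronize();

    q->bottom = b + 1;                      /* assumed publish of the new entry */

    return data;                            /* assumed success value */
}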
 
 
    register uint16_t t, b;
 
    i = t & (UTILS_QUEUE_CAPACITY - 1);  /* index of the entry at top */
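
/*
 * A minimal sketch of a non-destructive peek based on the two fragments
 * above: it reads the entry indexed by top without advancing the cursor.
 * The emptiness check and the NULL return are assumptions.
 */
void * utils_queue_peek(utils_queue_s *q)
{
    register uint16_t t, b;
    int i;

    t = q->top;
    b = q->bottom;

    if (t == b)             /* assumed emptiness check */
        return NULL;

    i = t & (UTILS_QUEUE_CAPACITY - 1);

    return q->entries[i];   /* top is left untouched */
}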
 
 
/* Member index, grouped by kind: */

#define arax_assert(EXPR)
#define utils_spinlock_init(V)
#define utils_spinlock_lock(V)
#define utils_spinlock_unlock(V)

struct queue utils_queue_s                  /* the queue handle type */
void * entries[UTILS_QUEUE_CAPACITY]        /* per-queue array of stored pointers */

utils_queue_s * utils_queue_init(void *buff)
unsigned int utils_queue_used_slots(utils_queue_s *q)
void * utils_queue_peek(utils_queue_s *q)
void * utils_queue_pop(utils_queue_s *q)
void * utils_queue_push(utils_queue_s *q, void *data)
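
/*
 * Illustrative usage of the interface listed above; the buffer size, the
 * stored value and the error handling are made up for this example, and the
 * NULL-on-failure convention for push/pop is an assumption.
 */
static char queue_storage[4096];    /* assumed large enough for struct queue */

int example(void)
{
    utils_queue_s *q = utils_queue_init(queue_storage);
    static int value = 42;

    if (!utils_queue_push(q, &value))      /* assumed: returns NULL when full */
        return -1;

    if (utils_queue_used_slots(q) != 1)    /* exactly one entry should be queued */
        return -1;

    int *out = utils_queue_pop(q);         /* assumed: returns NULL when empty */
    return (out && *out == 42) ? 0 : -1;
}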