/usr/include/p8-platform/util/atomic.h is in libp8-platform-dev 2.1.0.1+dfsg1-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
#pragma once
/*
* Copyright (C) 2005-2012 Team XBMC
* http://www.kodi.tv
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBMC; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#ifdef _MSC_VER
#include <windows.h>
#endif
///////////////////////////////////////////////////////////////////////////
// 32-bit atomic increment
// Returns new value of *pAddr
///////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////
// Atomically increments the 32-bit value at *pAddr by one.
// Returns the new (post-increment) value of *pAddr.
//
// The active implementation uses the GCC/Clang __sync_add_and_fetch
// builtin (a full memory barrier). The architecture-specific assembly
// variants below are preprocessed out by the `#if 1` and kept only for
// reference on toolchains without the builtin.
///////////////////////////////////////////////////////////////////////////
static inline long atomic_inc(volatile long* pAddr)
{
#if 1 // Always use the compiler builtin (full barrier, all GCC/Clang targets)
  return __sync_add_and_fetch(pAddr, 1);
#elif defined(__ppc__) || defined(__powerpc__) // PowerPC: lwarx/stwcx. loop
  long val;
  __asm__ __volatile__ (
    "sync             \n"
    "1: lwarx  %0, 0, %1 \n"
    "addic   %0, %0, 1 \n"
    "stwcx.  %0, 0, %1 \n"
    "bne-    1b        \n"
    "isync"
    : "=&r" (val)
    : "r" (pAddr)
    : "cc", "xer", "memory");
  return val;
#elif defined(__arm__) && !defined(__ARM_ARCH_5__) // ARMv6+: ldrex/strex loop
  register long val;
  asm volatile (
    "dmb     ish       \n" // Memory barrier before the update
    "1:                \n"
    "ldrex   %0, [%1]  \n" // val = *pAddr (exclusive load)
    "add     %0, #1    \n" // val += 1
    "strex   r1, %0, [%1] \n" // try exclusive store; r1 = 0 on success
    "cmp     r1, #0    \n"
    "bne     1b        \n" // retry if the exclusive store failed
    "dmb     ish       \n" // Memory barrier after the update
    : "=&r" (val)
    : "r"(pAddr)
    : "r1"
    );
  return val;
#elif defined(__mips__)
  // TODO: no MIPS implementation exists yet
  long val;
  #error AtomicIncrement undefined for mips
  return val;
#elif defined(WIN32) // MSVC 32-bit inline assembly
  long val;
  __asm
  {
    mov eax, pAddr ;
    lock inc dword ptr [eax] ;
    mov eax, [eax] ;
    mov val, eax ;
  }
  return val;
#elif defined(__x86_64__) // x86-64: lock xadd
  register long result;
  __asm__ __volatile__ (
    "lock/xaddq %q0, %1"
    : "=r" (result), "=m" (*pAddr)
    : "0" ((long) (1)), "m" (*pAddr));
  // NOTE(review): re-reading *pAddr here (instead of result+1) is racy
  // under contention; kept as-is since this branch is dead code.
  return *pAddr;
#else // 32-bit x86 Linux / OSX86 (GCC)
  register long reg __asm__ ("eax") = 1;
  __asm__ __volatile__ (
    "lock/xadd %0, %1 \n"
    "inc %%eax"
    : "+r" (reg)
    : "m" (*pAddr)
    : "memory" );
  return reg;
#endif
}
|