/***************************************************************************
* Copyright (C) 2013 by Terraneo Federico *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* As a special exception, if other files instantiate templates or use *
* macros or inline functions from this file, or you compile this file *
* and link it with other works to produce a work based on this file, *
* this file does not by itself cause the resulting work to be covered *
* by the GNU General Public License. However the source code for this *
* file must still be made available in accordance with the GNU General *
* Public License. This exception does not invalidate any other reasons *
* why a work based on this file might be covered by the GNU General *
* Public License. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, see <http://www.gnu.org/licenses/> *
***************************************************************************/
#ifndef INTRUSIVE_H
#define INTRUSIVE_H
#include <cassert>
#include <cstddef>
#include <ostream>
#include "interfaces/atomic_ops.h"
#if __cplusplus > 199711L
#include <type_traits>
#endif // c++11
namespace miosix {
// Forward decls
template<typename T>
class intrusive_ref_ptr;
/**
* Base class from which all intrusive objects derive, contains the
* data that intrusive objects must have
*/
class Intrusive
{
protected:
    // A union is used so that future intrusive facilities (such as a cleanup
    // list) can share this storage with the reference count without growing
    // every intrusive object.
    union
    {
        int referenceCount; ///< Used in IntrusiveRefCounted
        //Intrusive *next; ///< Used by the cleanup list, if it will be done
    } intrusive;
    //virtual ~Intrusive() {} ///< To allow deleting from the cleanup list
};
/**
* Derive from this class to support intrusive reference counting
*/
class IntrusiveRefCounted : public Intrusive
{
protected:
    /**
     * Constructor, initializes the reference count to zero
     */
    IntrusiveRefCounted() { intrusive.referenceCount=0; }

    /**
     * Copy constructor
     */
    IntrusiveRefCounted(const IntrusiveRefCounted&)
    {
        // The default copy constructor would have copied the reference count,
        // but that's wrong, a new object is being built, and its reference
        // count should be zero
        intrusive.referenceCount=0;
    }

    /**
     * Overload of operator=
     */
    IntrusiveRefCounted& operator=(const IntrusiveRefCounted&)
    {
        // The default operator= would have copied the reference count,
        // but that's wrong, as we have two distinct objects (i.e, chunks of
        // memory to be managed), and their reference counts need to stay
        // separate, so do nothing in operator=
        return *this;
    }

    // No destructor: this class is meant to be derived from, and objects are
    // always deleted through the derived type by intrusive_ref_ptr

private:
    template<typename T>
    friend class intrusive_ref_ptr; ///< To access the reference count
};
/**
* An implementation of 20.7.2.4 enable_shared_from_this for IntrusiveRefCounted
* \param T this class uses the CRTP, just like enable_shared_from_this, so if
* class Foo derives from IntrusiveRefCountedSharedFromThis, T has to be Foo.
*/
template
class IntrusiveRefCountedSharedFromThis
{
public:
/**
* Constructor
*/
IntrusiveRefCountedSharedFromThis()
{
static_assert(std::is_base_of::value,"");
}
/**
* \return an intrusive_ref_ptr to this. In this respect,
* IntrusiveRefCounted also behaves like enable_shared_from_this
*/
intrusive_ref_ptr shared_from_this()
{
// Simply making an intrusive_ref_ptr from this works as the reference
// count is intrusive
#ifndef __NO_EXCEPTIONS
T* result=dynamic_cast(this);
assert(result);
#else //__NO_EXCEPTIONS
T* result=static_cast(this);
#endif //__NO_EXCEPTIONS
return intrusive_ref_ptr(result);
}
/**
* \return an intrusive_ref_ptr to this In this respect,
* IntrusiveRefCounted also behaves like enable_shared_from_this
*/
intrusive_ref_ptr shared_from_this() const
{
// Simply making an intrusive_ref_ptr from this works as the reference
// count is intrusive
#ifndef __NO_EXCEPTIONS
const T* result=dynamic_cast(this);
assert(result);
#else //__NO_EXCEPTIONS
const T* result=static_cast(this);
#endif //__NO_EXCEPTIONS
return intrusive_ref_ptr(result);
}
/**
* Destructor
*/
virtual ~IntrusiveRefCountedSharedFromThis();
};
template
IntrusiveRefCountedSharedFromThis::~IntrusiveRefCountedSharedFromThis() {}
/**
* Reference counted pointer to intrusively reference counted objects.
* This class is made in a way to resemble the shared_ptr class, as specified
* in chapter 20.7.2.2 of the C++11 standard.
*
* Thread safety of this class is the same as shared_ptr. The reference
* count is updated using atomic operations, but this does not make
* all kind of concurrent access to the same intrusive_ref_ptr safe.
* A good reference that explains why is
* www.drdobbs.com/cpp/a-base-class-for-intrusively-reference-c/229218807?pgno=3
*
* \param T type to which the object points
*/
template<typename T>
class intrusive_ref_ptr
{
public:
    typedef T element_type; ///< As shared_ptr, expose the managed type

    /**
     * Default constructor, yields a null pointer
     */
    intrusive_ref_ptr() : object(0) {}

    /**
     * Constructor, with raw pointer
     * \param o object to manage
     */
    explicit intrusive_ref_ptr(T *o) : object(o)
    {
        incrementRefCount();
    }

    /**
     * Generalized constructor, with raw pointer
     * \param o object to manage
     */
    template<typename U>
    explicit intrusive_ref_ptr(U *o) : object(o)
    {
        incrementRefCount();
        //Disallow polimorphic upcasting of non polimorphic classes,
        //as this will lead to bugs (delete through a base class pointer
        //without a virtual destructor is undefined behaviour)
        static_assert(std::has_virtual_destructor<T>::value,"");
    }

    /**
     * Copy constructor, with same type of managed pointer
     * \param rhs object to manage
     */
    intrusive_ref_ptr(const intrusive_ref_ptr& rhs) : object(rhs.object)
    {
        incrementRefCount();
    }

    /**
     * Generalized copy constructor, to support upcast among refcounted
     * pointers
     * \param rhs object to manage
     */
    template<typename U>
    intrusive_ref_ptr(const intrusive_ref_ptr<U>& rhs) : object(rhs.get())
    {
        incrementRefCount();
        //Disallow polimorphic upcasting of non polimorphic classes,
        //as this will lead to bugs
        static_assert(std::has_virtual_destructor<T>::value,"");
    }

    /**
     * Operator=, with same type of managed pointer
     * \param rhs object to manage
     * \return a reference to *this
     */
    intrusive_ref_ptr& operator= (const intrusive_ref_ptr& rhs);

    /**
     * Generalized operator=, to support upcast among refcounted pointers
     * \param rhs object to manage
     * \return a reference to *this
     */
    template<typename U>
    intrusive_ref_ptr& operator= (const intrusive_ref_ptr<U>& rhs);

    /**
     * Operator=, with raw pointer
     * \param o object to manage
     * \return a reference to *this
     */
    intrusive_ref_ptr& operator= (T* o);

    /**
     * \return a pointer to the managed object
     */
    T *get() const { return object; }

    /**
     * \return a reference to the managed object
     */
    T& operator*() const { return *object; }

    /**
     * Allows this class to behave like the managed type
     * \return a pointer to the managed object
     */
    T *operator->() const { return object; }

    //Safe bool idiom
    struct SafeBoolStruct { void* b; };
    typedef void* SafeBoolStruct::* SafeBool;

    /**
     * \return true if the managed pointer is not null
     */
    operator SafeBool() const
    {
        return object==0 ? 0 : &SafeBoolStruct::b;
    }

    /**
     * Swap the managed object with another intrusive_ref_ptr
     * \param rhs the other smart pointer
     */
    void swap(intrusive_ref_ptr& rhs) { std::swap(object,rhs.object); }

    /**
     * After a call to this member function, this intrusive_ref_ptr
     * no longer points to the managed object. The managed object is
     * deleted if this was the only pointer at it
     */
    void reset()
    {
        if(decrementRefCount()) delete object;
        // Object needs to be set to 0 regardless
        // of whether the object is deleted
        object=0;
    }

    /**
     * \return the number of intrusive_ref_ptr that point to the managed
     * object. If it returns 0, then this points to nullptr
     */
    int use_count() const
    {
        if(!object) return 0;
        return object->intrusive.referenceCount;
    }

    /**
     * \internal
     * This is just an implementation detail.
     * Use the free function atomic_load instead.
     * \return a copy of *this
     */
    intrusive_ref_ptr atomic_load() const;

    /**
     * \internal
     * This is just an implementation detail.
     * Use the free function atomic_store instead.
     * Note that this member function is not equivalent to the free function
     * with the same name, as it takes its argument by reference. This may
     * cause problems in some multithreaded use cases, so don't use it directly
     * \param r object to store to *this
     * \return the previous value stored in *this
     */
    intrusive_ref_ptr atomic_exchange(intrusive_ref_ptr& r);

    /**
     * Destructor, releases this reference to the managed object
     */
    ~intrusive_ref_ptr() { reset(); }

private:
    /**
     * Increments the reference count
     */
    void incrementRefCount()
    {
        if(object) atomicAdd(&object->intrusive.referenceCount,1);
    }

    /**
     * Decrements the reference count
     * \return true if the object has to be deleted
     */
    bool decrementRefCount()
    {
        if(object==0) return false;
        // The thread that brings the count from 1 to 0 owns the deletion
        return atomicAddExchange(&object->intrusive.referenceCount,-1)==1;
    }

    T *object; ///< The managed object
};
template
intrusive_ref_ptr& intrusive_ref_ptr::operator=
(const intrusive_ref_ptr& rhs)
{
if(*this==rhs) return *this; //Handle assignment to self
if(decrementRefCount()) delete object;
object=rhs.object;
incrementRefCount();
return *this;
}
template template
intrusive_ref_ptr& intrusive_ref_ptr::operator=
(const intrusive_ref_ptr& rhs)
{
if(*this==rhs) return *this; //Handle assignment to self
if(decrementRefCount()) delete object;
object=rhs.get();
incrementRefCount();
//Disallow polimorphic upcasting of non polimorphic classes,
//as this will lead to bugs
static_assert(std::has_virtual_destructor::value,"");
return *this;
}
template
intrusive_ref_ptr& intrusive_ref_ptr::operator= (T* o)
{
if(decrementRefCount()) delete object;
object=o;
incrementRefCount();
return *this;
}
template
intrusive_ref_ptr intrusive_ref_ptr::atomic_load() const
{
intrusive_ref_ptr result; // This gets initialized with 0
// According to the C++ standard, this causes undefined behaviour if
// T has virtual functions, but GCC (and clang) have an implementation
// based on a compiler intrinsic that does the right thing: it produces
// the correct answer together with a compiler warning for classes with
// virtual destructors, and even for classes using multiple inheritance.
// It (obviously) fails for classes making use of virtual inheritance, but
// in this case the warning is promoted to an error. The last thing to
// fix is to remove the warning using the #pragma
// As a result, intrusive_ref_ptr can't be used with classes that have
// virtual base classes, but that's an acceptable limitation, especially
// considering that you get a meaningful compiler error if accidentally
// trying to use it in such a case.
#pragma GCC diagnostic ignored "-Winvalid-offsetof"
const int offsetBytes=offsetof(T,intrusive.referenceCount);
#pragma GCC diagnostic pop
// Check that referenceCount is properly aligned for the following code to work
static_assert((offsetBytes % sizeof(int))==0, "");
const int offsetInt=offsetBytes/sizeof(int);
void * const volatile * objectPtrAddr=
reinterpret_cast(&object);
void *loadedObject=atomicFetchAndIncrement(objectPtrAddr,offsetInt,1);
// This does not increment referenceCount, as it was done before
result.object=reinterpret_cast(loadedObject);
return result;
}
template
intrusive_ref_ptr intrusive_ref_ptr::atomic_exchange(
intrusive_ref_ptr& r)
{
//Note: this is safe with respect to assignment to self
T *temp=r.object;
if(temp) atomicAdd(&temp->intrusive.referenceCount,1);
// Check that the following reinterpret_casts will work as intended.
// This also means that this code won't work on 64bit machines but for
// Miosix this isn't a problem for now.
static_assert(sizeof(void*)==sizeof(int),"");
int tempInt=reinterpret_cast(temp);
volatile int *objectAddrInt=reinterpret_cast(&object);
temp=reinterpret_cast(atomicSwap(objectAddrInt,tempInt));
intrusive_ref_ptr result; // This gets initialized with 0
// This does not increment referenceCount, as the pointer was swapped
result.object=temp;
return result;
}
/**
* Operator==
* \param a first pointer
* \param b second pointer
* \return true if they point to the same object
*/
template
bool operator==(const intrusive_ref_ptr& a, const intrusive_ref_ptr& b)
{
return a.get()==b.get();
}
template
bool operator==(const T *a, const intrusive_ref_ptr& b)
{
return a==b.get();
}
template
bool operator==(const intrusive_ref_ptr& a, const T *b)
{
return a.get()==b;
}
/**
* Operator!=
* \param a first pointer
* \param b second pointer
* \return true if they point to different objects
*/
template
bool operator!=(const intrusive_ref_ptr& a, const intrusive_ref_ptr& b)
{
return a.get()!=b.get();
}
template
bool operator!=(const T *a, const intrusive_ref_ptr& b)
{
return a!=b.get();
}
template
bool operator!=(const intrusive_ref_ptr& a, const T *b)
{
return a.get()!=b;
}
/**
* Operator<, allows to create containers of objects
* \param a first pointer
* \param b second pointer
* \return true if a.get() < b.get()
*/
template
bool operator<(const intrusive_ref_ptr& a, const intrusive_ref_ptr& b)
{
return a.get()
bool operator<(const T *a, const intrusive_ref_ptr& b)
{
return a
bool operator<(const intrusive_ref_ptr& a, const T *b)
{
return a.get()
std::ostream& operator<<(std::ostream& os, const intrusive_ref_ptr& p)
{
os<
/**
 * Performs static_cast between intrusive_ref_ptr
 * \param r intrusive_ref_ptr of source type
 * \return intrusive_ref_ptr of destination type
 */
template<typename T, typename U>
intrusive_ref_ptr<T> static_pointer_cast(const intrusive_ref_ptr<U>& r)
{
    // Note: 20.7.2.2.9 says this expression done with shared_ptr causes
    // undefined behaviour, however this works with intrusive_ref_ptr
    // as the reference count is intrusive, so the counter isn't duplicated.
    // To ease porting of code to and from shared_ptr it is recomended
    // to use static_pointer_cast, anyway.
    return intrusive_ref_ptr<T>(static_cast<T*>(r.get()));
}
/**
* Performs dynamic_cast between intrusive_ref_ptr
* \param r intrusive_ref_ptr of source type
* \return intrusive_ref_ptr of destination type
*/
template
intrusive_ref_ptr dynamic_pointer_cast(const intrusive_ref_ptr& r)
{
// Note: 20.7.2.2.9 says this expression done with shared_ptr causes
// undefined behaviour, however this works with intrusive_ref_ptr
// as the reference count is intrusive, so the counter isn't duplicated.
// To ease porting of code to and from shared_ptr it is recomended
// to use static_pointer_cast, anyway.
return intrusive_ref_ptr(dynamic_cast(r.get()));
}
/**
* Performs const_cast between intrusive_ref_ptr
* \param r intrusive_ref_ptr of source type
* \return intrusive_ref_ptr of destination type
*/
template
intrusive_ref_ptr const_pointer_cast(const intrusive_ref_ptr& r)
{
// Note: 20.7.2.2.9 says this expression done with shared_ptr causes
// undefined behaviour, however this works with intrusive_ref_ptr
// as the reference count is intrusive, so the counter isn't duplicated.
// To ease porting of code to and from shared_ptr it is recomended
// to use static_pointer_cast, anyway.
return intrusive_ref_ptr(const_cast(r.get()));
}
/**
* Allows concurrent access to an instance of intrusive_ref_ptr.
* Multiple threads can cooncurrently perform atomic_load(), atomic_store()
* and atomic_exchange() on the same intrusive_ref_ptr. Any other concurent
* access not protected by explicit locking (such as threads calling reset(),
* or using the copy constructor, or deleting the intrusive_ref_ptr) yields
* undefined behaviour.
* \param p pointer to an intrusive_ref_ptr shared among threads
* \return *p, atomically fetching the pointer and incrementing the reference
* count
*/
template
intrusive_ref_ptr atomic_load(const intrusive_ref_ptr *p)
{
if(p==0) return intrusive_ref_ptr();
return p->atomic_load();
}
/**
* Allows concurrent access to an instance of intrusive_ref_ptr.
* Multiple threads can cooncurrently perform atomic_load(), atomic_store()
* and atomic_exchange() on the same intrusive_ref_ptr. Any other concurent
* access not protected by explicit locking (such as threads calling reset(),
* or using the copy constructor, or deleting the intrusive_ref_ptr) yields
* undefined behaviour.
* \param p pointer to an intrusive_ref_ptr shared among threads
* \param r intrusive_ref_ptr that will be stored in *p
*/
template
void atomic_store(intrusive_ref_ptr *p, intrusive_ref_ptr r)
{
if(p) p->atomic_exchange(r);
}
/**
* Allows concurrent access to an instance of intrusive_ref_ptr.
* Multiple threads can cooncurrently perform atomic_load(), atomic_store()
* and atomic_exchange() on the same intrusive_ref_ptr. Any other concurent
* access not protected by explicit locking (such as threads calling reset(),
* or using the copy constructor, or deleting the intrusive_ref_ptr) yields
* undefined behaviour.
* \param p pointer to an intrusive_ref_ptr shared among threads
* \param r value to be stored in *p
* \return the previous value of *p
*/
template
intrusive_ref_ptr atomic_exchange(intrusive_ref_ptr *p,
intrusive_ref_ptr r)
{
if(p==0) return intrusive_ref_ptr();
return p->atomic_exchange(r);
}
} //namespace miosix
#endif //INTRUSIVE_H