blob: bddf1978d801479552267ca829786ff7dfddc4ce [file] [log] [blame]
/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#ifndef GrTRecorder_DEFINED
#define GrTRecorder_DEFINED
#include "SkTemplates.h"
#include "SkTypes.h"
// Forward declarations: the recorder itself and the allocation-size wrapper
// consumed by the placement operator new defined at the bottom of this file.
template<typename TBase, typename TAlign> class GrTRecorder;
template<typename TItem> struct GrTRecorderAllocWrapper;
/**
 * Records a list of items with a common base type, optional associated data, and
 * permanent memory addresses.
 *
 * This class preallocates its own chunks of memory for hosting objects, so new items can
 * be created without excessive calls to malloc().
 *
 * To create a new item and append it to the back of the list, use the following macros:
 *
 *     GrNEW_APPEND_TO_RECORDER(recorder, SubclassName, (args))
 *     GrNEW_APPEND_WITH_DATA_TO_RECORDER(recorder, SubclassName, (args), sizeOfData)
 *
 * Upon reset or delete, the items are destructed in the same order they were received,
 * not reverse (stack) order.
 *
 * @param TBase   Common base type of items in the list. If TBase is not a class with a
 *                virtual destructor, the client is responsible for invoking any necessary
 *                destructors.
 *
 *                For now, any subclass used in the list must have the same start address
 *                as TBase (or in other words, the types must be convertible via
 *                reinterpret_cast<>). Classes with multiple inheritance (or any subclass
 *                on an obscure compiler) may not be compatible. This is runtime asserted
 *                in debug builds.
 *
 * @param TAlign  A type whose size is the desired memory alignment for object allocations.
 *                This should be the largest known alignment requirement for all objects
 *                that may be stored in the list.
 */
template<typename TBase, typename TAlign> class GrTRecorder : SkNoncopyable {
class Iter;
* Create a recorder.
* @param initialSizeInBytes The amount of memory reserved by the recorder initially,
and after calls to reset().
GrTRecorder(int initialSizeInBytes)
: fHeadBlock(MemBlock::Alloc(LengthOf(initialSizeInBytes), NULL)),
fLastItem(NULL) {}
~GrTRecorder() {
bool empty() { return !fLastItem; }
TBase& back() {
return *fLastItem;
* Removes and destroys the last block added to the recorder. It may not be called when the
* recorder is empty.
void pop_back();
* Destruct all items in the list and reset to empty.
void reset();
* Retrieve the extra data associated with an item that was allocated using
* @param item The item whose data to retrieve. The pointer must be of the same type
* that was allocated initally; it can't be a pointer to a base class.
* @return The item's associated data.
template<typename TItem> static const void* GetDataForItem(const TItem* item) {
const TAlign* ptr = reinterpret_cast<const TAlign*>(item);
return &ptr[length_of<TItem>::kValue];
template<typename TItem> static void* GetDataForItem(TItem* item) {
TAlign* ptr = reinterpret_cast<TAlign*>(item);
return &ptr[length_of<TItem>::kValue];
template<typename TItem> struct length_of {
enum { kValue = (sizeof(TItem) + sizeof(TAlign) - 1) / sizeof(TAlign) };
static int LengthOf(int bytes) { return (bytes + sizeof(TAlign) - 1) / sizeof(TAlign); }
struct Header {
int fTotalLength; // The length of an entry including header, item, and data in TAligns.
int fPrevLength; // Same but for the previous entry. Used for iterating backwards.
template<typename TItem> TItem* alloc_back(int dataLength);
struct MemBlock : SkNoncopyable {
/** Allocates a new block and appends it to prev if not NULL. The length param is in units
of TAlign. */
static MemBlock* Alloc(int length, MemBlock* prev) {
MemBlock* block = reinterpret_cast<MemBlock*>(
sk_malloc_throw(sizeof(TAlign) * (length_of<MemBlock>::kValue + length)));
block->fLength = length;
block->fBack = 0;
block->fNext = NULL;
block->fPrev = prev;
if (prev) {
SkASSERT(NULL == prev->fNext);
prev->fNext = block;
return block;
// Frees from this block forward. Also adjusts prev block's next ptr.
static void Free(MemBlock* block) {
if (block && block->fPrev) {
SkASSERT(block->fPrev->fNext == block);
block->fPrev->fNext = NULL;
while (block) {
MemBlock* next = block->fNext;
block = next;
TAlign& operator [](int i) {
return reinterpret_cast<TAlign*>(this)[length_of<MemBlock>::kValue + i];
int fLength; // Length in units of TAlign of the block.
int fBack; // Offset, in TAligns, to unused portion of the memory block.
MemBlock* fNext;
MemBlock* fPrev;
MemBlock* const fHeadBlock;
MemBlock* fTailBlock;
TBase* fLastItem;
template<typename TItem> friend struct GrTRecorderAllocWrapper;
template <typename UBase, typename UAlign, typename UItem>
friend void* operator new(size_t, GrTRecorder<UBase, UAlign>&,
const GrTRecorderAllocWrapper<UItem>&);
friend class Iter;
template<typename TBase, typename TAlign>
void GrTRecorder<TBase, TAlign>::pop_back() {
    SkASSERT(fLastItem); // May not be called when the recorder is empty.

    // The entry's Header sits immediately before the item within the tail block.
    Header* header = reinterpret_cast<Header*>(
        reinterpret_cast<TAlign*>(fLastItem) - length_of<Header>::kValue);
    fTailBlock->fBack -= header->fTotalLength;
    int lastItemLength = header->fPrevLength;

    if (!header->fPrevLength) {
        // We popped the first entry in the recorder.
        SkASSERT(0 == fTailBlock->fBack);
        fLastItem = NULL;
        // Early return: without it, control would fall through and compute a bogus
        // fLastItem from a negative offset into the head block.
        return;
    }
    if (!fTailBlock->fBack) {
        // We popped the last entry in a block that isn't the head block. Move back a block but
        // don't free it since we'll probably grow into it shortly.
        fTailBlock = fTailBlock->fPrev;
        SkASSERT(fTailBlock);
    }
    // Re-derive the new last item from the previous entry's length.
    fLastItem = reinterpret_cast<TBase*>(
        &(*fTailBlock)[fTailBlock->fBack - lastItemLength + length_of<Header>::kValue]);
}
template<typename TBase, typename TAlign>
template<typename TItem>
TItem* GrTRecorder<TBase, TAlign>::alloc_back(int dataLength) {
    // Find the header of the previous entry and get its length. We need to store that in the new
    // header for backwards iteration (pop_back()).
    int prevLength = 0;
    if (fLastItem) {
        Header* lastHeader = reinterpret_cast<Header*>(
            reinterpret_cast<TAlign*>(fLastItem) - length_of<Header>::kValue);
        prevLength = lastHeader->fTotalLength;
    }

    const int totalLength = length_of<Header>::kValue + length_of<TItem>::kValue + dataLength;

    // Check if there is room in the current block and if not walk to next (allocating if
    // necessary). Note that pop_back() and reset() can leave the recorder in a state where it
    // has preallocated blocks hanging off the tail that are currently unused.
    while (fTailBlock->fBack + totalLength > fTailBlock->fLength) {
        if (!fTailBlock->fNext) {
            // Grow geometrically, but never smaller than the entry we must fit.
            fTailBlock = MemBlock::Alloc(SkTMax(2 * fTailBlock->fLength, totalLength), fTailBlock);
        } else {
            fTailBlock = fTailBlock->fNext;
            SkASSERT(0 == fTailBlock->fBack);
        }
    }

    Header* header = reinterpret_cast<Header*>(&(*fTailBlock)[fTailBlock->fBack]);
    TItem* rawPtr = reinterpret_cast<TItem*>(
        &(*fTailBlock)[fTailBlock->fBack + length_of<Header>::kValue]);

    header->fTotalLength = totalLength;
    header->fPrevLength = prevLength;
    fLastItem = rawPtr;
    fTailBlock->fBack += totalLength;

    // FIXME: We currently require that the base and subclass share the same start address.
    // This is not required by the C++ spec, and is likely to not be true in the case of
    // multiple inheritance or a base class that doesn't have virtual methods (when the
    // subclass does). It would be ideal to find a more robust solution that comes at no
    // extra cost to performance or code generality.
    SkDEBUGCODE(void* baseAddr = fLastItem;
                void* subclassAddr = rawPtr;)
    SkASSERT(baseAddr == subclassAddr);

    return rawPtr;
}
/**
 * Forward iterator over the recorded items, in insertion order. Iteration walks
 * each block's used region entry by entry using the per-entry header lengths.
 */
template<typename TBase, typename TAlign>
class GrTRecorder<TBase, TAlign>::Iter {
public:
    Iter(GrTRecorder& recorder) : fBlock(recorder.fHeadBlock), fPosition(0), fItem(NULL) {}

    /** Advance to the next item. Returns false when the end of the list is reached. */
    bool next() {
        // Skip past exhausted blocks (fBack is the used length of a block).
        while (fPosition >= fBlock->fBack) {
            SkASSERT(fPosition == fBlock->fBack);
            if (!fBlock->fNext) {
                return false;
            }
            fBlock = fBlock->fNext;
            fPosition = 0;
        }

        Header* header = reinterpret_cast<Header*>(&(*fBlock)[fPosition]);
        fItem = reinterpret_cast<TBase*>(&(*fBlock)[fPosition + length_of<Header>::kValue]);
        fPosition += header->fTotalLength;
        return true;
    }

    /** Returns the current item. Only valid after a successful next(). */
    TBase* get() const {
        SkASSERT(fItem);
        return fItem;
    }

    TBase* operator->() const { return this->get(); }

private:
    MemBlock* fBlock;    // Block currently being walked.
    int       fPosition; // Offset (in TAligns) of the next entry's header in fBlock.
    TBase*    fItem;     // Current item, or NULL before the first next().
};
template<typename TBase, typename TAlign>
void GrTRecorder<TBase, TAlign>::reset() {
    // Destruct every recorded item, in insertion order. Without this loop the
    // items' destructors would never run.
    Iter iter(*this);
    while ( {
        iter->~TBase();
    }

    // Assume the next time this recorder fills up it will use approximately the same
    // amount of space as last time. Leave enough space for up to ~50% growth; free
    // everything else.
    if (fTailBlock->fBack <= fTailBlock->fLength / 2) {
        // Tail block is less than half full: everything after it is surplus.
        MemBlock::Free(fTailBlock->fNext);
    } else if (fTailBlock->fNext) {
        // Keep one extra block for growth; free the rest of the chain.
        MemBlock::Free(fTailBlock->fNext->fNext);
        fTailBlock->fNext->fNext = NULL;
    }

    // Mark all retained blocks empty and rewind to the head.
    for (MemBlock* block = fHeadBlock; block; block = block->fNext) {
        block->fBack = 0;
    }
    fTailBlock = fHeadBlock;
    fLastItem = NULL;
}
/**
 * Carries the (TAlign-rounded) length of an item's optional extra data into the
 * placement operator new below. The default-constructed wrapper means "no data".
 */
template<typename TItem> struct GrTRecorderAllocWrapper {
    GrTRecorderAllocWrapper() : fDataLength(0) {}

    template <typename TBase, typename TAlign>
    GrTRecorderAllocWrapper(const GrTRecorder<TBase, TAlign>&, int sizeOfData)
        : fDataLength(GrTRecorder<TBase, TAlign>::LengthOf(sizeOfData)) {}

    const int fDataLength; // Extra data length in units of TAlign.
};
/**
 * Placement operator new used by the GrNEW_APPEND_* macros: allocates the item
 * (plus wrapper.fDataLength TAligns of trailing data) at the back of the recorder.
 */
template <typename TBase, typename TAlign, typename TItem>
void* operator new(size_t size, GrTRecorder<TBase, TAlign>& recorder,
                   const GrTRecorderAllocWrapper<TItem>& wrapper) {
    SkASSERT(size == sizeof(TItem));
    return recorder.template alloc_back<TItem>(wrapper.fDataLength);
}
template <typename TBase, typename TAlign, typename TItem>
void operator delete(void*, GrTRecorder<TBase, TAlign>&, const GrTRecorderAllocWrapper<TItem>&) {
    // We only provide an operator delete to work around compiler warnings that can come
    // up for an unmatched operator new when compiling with exceptions. Intentionally a
    // no-op: recorder memory is reclaimed by reset()/the recorder's destructor.
}
// Constructs a new 'type_name' item (with constructor arguments 'args') in place at the
// back of 'recorder'. Evaluates to a pointer to the new item.
#define GrNEW_APPEND_TO_RECORDER(recorder, type_name, args) \
    (new (recorder, GrTRecorderAllocWrapper<type_name>()) type_name args)

// Same as above, but additionally reserves 'size_of_data' bytes of extra data directly
// after the item; retrieve it later with GrTRecorder::GetDataForItem().
#define GrNEW_APPEND_WITH_DATA_TO_RECORDER(recorder, type_name, args, size_of_data) \
    (new (recorder, GrTRecorderAllocWrapper<type_name>(recorder, size_of_data)) type_name args)