
static const char* CODE_DESCRIPTION = "Foo";

+extern VkCommandBuffer g_hTemporaryCommandBuffer;
+void BeginSingleTimeCommands();
+void EndSingleTimeCommands();
+
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
@@ -656,6 +660,299 @@ struct AllocInfo
    };
};

+class StagingBufferCollection
+{
+public:
+    StagingBufferCollection() { }
+    ~StagingBufferCollection();
+    // Returns false if maximum total size of buffers would be exceeded.
+    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
+    void ReleaseAllBuffers();
+
+private:
+    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
+    struct BufInfo
+    {
+        VmaAllocation Allocation = VK_NULL_HANDLE;
+        VkBuffer Buffer = VK_NULL_HANDLE;
+        VkDeviceSize Size = VK_WHOLE_SIZE;
+        void* MappedPtr = nullptr;
+        bool Used = false;
+    };
+    std::vector<BufInfo> m_Bufs;
+    // Including both used and unused.
+    VkDeviceSize m_TotalSize = 0;
+};
+
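// Note (editorial sketch, not part of this commit): the intended usage pattern,
// as exercised by UploadGpuData() below, is acquire -> fill -> record copy ->
// submit -> release, retrying the acquire after a flush recycles the buffers
// (a hypothetical VkDeviceSize `size` holds the byte count to upload):
//
//     StagingBufferCollection stagingBufs;
//     VkBuffer stagingBuf = VK_NULL_HANDLE;
//     void* stagingBufMappedPtr = nullptr;
//     if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
//     {
//         EndSingleTimeCommands();         // Submit copies recorded so far.
//         stagingBufs.ReleaseAllBuffers(); // Mark all staging buffers reusable.
//         stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr); // Must succeed now.
//     }
//     // ...fill stagingBufMappedPtr, record vkCmdCopyBuffer() into the single-time
//     // command buffer, then EndSingleTimeCommands() + ReleaseAllBuffers() at the end.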
+StagingBufferCollection::~StagingBufferCollection()
+{
+    for(size_t i = m_Bufs.size(); i--; )
+    {
+        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
+    }
+}
+
+bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
+{
+    assert(size <= MAX_TOTAL_SIZE);
+
+    // Try to find an existing unused buffer with the best (smallest sufficient) size.
+    size_t bestIndex = SIZE_MAX;
+    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
+    {
+        BufInfo& currBufInfo = m_Bufs[i];
+        if(!currBufInfo.Used && currBufInfo.Size >= size &&
+            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
+        {
+            bestIndex = i;
+        }
+    }
+
+    if(bestIndex != SIZE_MAX)
+    {
+        m_Bufs[bestIndex].Used = true;
+        outBuffer = m_Bufs[bestIndex].Buffer;
+        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
+        return true;
+    }
+
+    // Allocate a new buffer with the requested size.
+    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
+    {
+        BufInfo bufInfo;
+        bufInfo.Size = size;
+        bufInfo.Used = true;
+
+        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+        bufCreateInfo.size = size;
+        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+        VmaAllocationCreateInfo allocCreateInfo = {};
+        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
+        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+        VmaAllocationInfo allocInfo;
+        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
+        bufInfo.MappedPtr = allocInfo.pMappedData;
+        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
+
+        outBuffer = bufInfo.Buffer;
+        outMappedPtr = bufInfo.MappedPtr;
+
+        m_Bufs.push_back(std::move(bufInfo));
+
+        m_TotalSize += size;
+
+        return true;
+    }
+
+    // There may be some unused but too-small buffers: free them and try again.
+    bool hasUnused = false;
+    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
+    {
+        if(!m_Bufs[i].Used)
+        {
+            hasUnused = true;
+            break;
+        }
+    }
+    if(hasUnused)
+    {
+        for(size_t i = m_Bufs.size(); i--; )
+        {
+            if(!m_Bufs[i].Used)
+            {
+                m_TotalSize -= m_Bufs[i].Size;
+                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
+                m_Bufs.erase(m_Bufs.begin() + i);
+            }
+        }
+
+        return AcquireBuffer(size, outBuffer, outMappedPtr);
+    }
+
+    return false;
+}
+
+void StagingBufferCollection::ReleaseAllBuffers()
+{
+    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
+    {
+        m_Bufs[i].Used = false;
+    }
+}
+
+static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
+{
+    StagingBufferCollection stagingBufs;
+
+    bool cmdBufferStarted = false;
+    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
+    {
+        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
+        if(currAllocInfo.m_Buffer)
+        {
+            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
+
+            VkBuffer stagingBuf = VK_NULL_HANDLE;
+            void* stagingBufMappedPtr = nullptr;
+            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
+            {
+                TEST(cmdBufferStarted);
+                EndSingleTimeCommands();
+                stagingBufs.ReleaseAllBuffers();
+                cmdBufferStarted = false;
+
+                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
+                TEST(ok);
+            }
+
+            // Fill staging buffer.
+            {
+                assert(size % sizeof(uint32_t) == 0);
+                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
+                uint32_t val = currAllocInfo.m_StartValue;
+                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
+                {
+                    *stagingValPtr = val;
+                    ++stagingValPtr;
+                    ++val;
+                }
+            }
+
+            // Issue copy command from staging buffer to destination buffer.
+            if(!cmdBufferStarted)
+            {
+                cmdBufferStarted = true;
+                BeginSingleTimeCommands();
+            }
+
+            VkBufferCopy copy = {};
+            copy.srcOffset = 0;
+            copy.dstOffset = 0;
+            copy.size = size;
+            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
+        }
+        else
+        {
+            TEST(0 && "Images not currently supported.");
+        }
+    }
+
+    if(cmdBufferStarted)
+    {
+        EndSingleTimeCommands();
+        stagingBufs.ReleaseAllBuffers();
+    }
+}
+
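// Note (editorial sketch, not part of this commit): UploadGpuData() fills every
// destination buffer with consecutive uint32_t values starting at m_StartValue,
// so element i of an allocation is expected to hold:
//
//     const uint32_t expected = startValue + (uint32_t)i; // startValue == m_StartValue
//
// ValidateGpuData() below copies each buffer back into a staging buffer and, once
// EndSingleTimeCommands() has made those copies visible to the host, checks every
// element against this pattern.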
+static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
+{
+    StagingBufferCollection stagingBufs;
+
+    bool cmdBufferStarted = false;
+    size_t validateAllocIndexOffset = 0;
+    std::vector<void*> validateStagingBuffers;
+    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
+    {
+        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
+        if(currAllocInfo.m_Buffer)
+        {
+            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
+
+            VkBuffer stagingBuf = VK_NULL_HANDLE;
+            void* stagingBufMappedPtr = nullptr;
+            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
+            {
+                TEST(cmdBufferStarted);
+                EndSingleTimeCommands();
+                cmdBufferStarted = false;
+
+                for(size_t validateIndex = 0;
+                    validateIndex < validateStagingBuffers.size();
+                    ++validateIndex)
+                {
+                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
+                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
+                    TEST(validateSize % sizeof(uint32_t) == 0);
+                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
+                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
+                    bool valid = true;
+                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
+                    {
+                        if(*stagingValPtr != val)
+                        {
+                            valid = false;
+                            break;
+                        }
+                        ++stagingValPtr;
+                        ++val;
+                    }
+                    TEST(valid);
+                }
+
+                stagingBufs.ReleaseAllBuffers();
+
+                validateAllocIndexOffset = allocInfoIndex;
+                validateStagingBuffers.clear();
+
+                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
+                TEST(ok);
+            }
+
+            // Issue copy command from destination buffer to staging buffer.
+            if(!cmdBufferStarted)
+            {
+                cmdBufferStarted = true;
+                BeginSingleTimeCommands();
+            }
+
+            VkBufferCopy copy = {};
+            copy.srcOffset = 0;
+            copy.dstOffset = 0;
+            copy.size = size;
+            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
+
+            // Save mapped pointer for later validation.
+            validateStagingBuffers.push_back(stagingBufMappedPtr);
+        }
+        else
+        {
+            TEST(0 && "Images not currently supported.");
+        }
+    }
+
+    if(cmdBufferStarted)
+    {
+        EndSingleTimeCommands();
+
+        for(size_t validateIndex = 0;
+            validateIndex < validateStagingBuffers.size();
+            ++validateIndex)
+        {
+            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
+            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
+            TEST(validateSize % sizeof(uint32_t) == 0);
+            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
+            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
+            bool valid = true;
+            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
+            {
+                if(*stagingValPtr != val)
+                {
+                    valid = false;
+                    break;
+                }
+                ++stagingValPtr;
+                ++val;
+            }
+            TEST(valid);
+        }
+
+        stagingBufs.ReleaseAllBuffers();
+    }
+}
+
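// Note (editorial sketch, not part of this commit): the element-by-element check
// above appears twice in ValidateGpuData(), once in the capacity-overflow branch
// and once after the final submit. A hypothetical helper could factor it out:
//
//     static bool BufferDataMatchesPattern(const uint32_t* data, VkDeviceSize size, uint32_t startValue)
//     {
//         for(VkDeviceSize i = 0; i < size / sizeof(uint32_t); ++i)
//         {
//             if(data[i] != startValue + (uint32_t)i)
//                 return false;
//         }
//         return true;
//     }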
static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
@@ -4515,6 +4812,41 @@ static void BasicTestAllocatePages()
    vmaDestroyPool(g_hAllocator, pool);
}

+// Test the testing environment.
+static void TestGpuData()
+{
+    RandomNumberGenerator rand = { 53434 };
+
+    std::vector<AllocInfo> allocInfo;
+
+    for(size_t i = 0; i < 100; ++i)
+    {
+        AllocInfo info = {};
+
+        info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
+            VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
+            VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
+        info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
+
+        VmaAllocationCreateInfo allocCreateInfo = {};
+        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+        VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
+        TEST(res == VK_SUCCESS);
+
+        info.m_StartValue = rand.Generate();
+
+        allocInfo.push_back(std::move(info));
+    }
+
+    UploadGpuData(allocInfo.data(), allocInfo.size());
+
+    ValidateGpuData(allocInfo.data(), allocInfo.size());
+
+    DestroyAllAllocations(allocInfo);
+}
+
void Test()
{
    wprintf(L"TESTING:\n");
@@ -4532,6 +4864,7 @@ void Test()
    // # Simple tests

    TestBasics();
+    // TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    TestDebugMargin();
#else