MotionMatchingData.cpp

/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

#include <AzCore/Console/IConsole.h>
#include <AzCore/Debug/Timer.h>
#include <AzCore/Component/ComponentApplicationBus.h>
#include <AzCore/Jobs/JobFunction.h>
#include <AzCore/Jobs/JobCompletion.h>
#include <AzCore/Serialization/EditContext.h>
#include <AzCore/Serialization/SerializeContext.h>
#include <AzCore/Task/TaskGraph.h>
#include <EMotionFX/Source/ActorInstance.h>
#include <EMotionFX/Source/AnimGraphPose.h>
#include <EMotionFX/Source/Motion.h>
#include <Allocators.h>
#include <Feature.h>
#include <FeatureMatrixMinMaxScaler.h>
#include <FeatureMatrixStandardScaler.h>
#include <FeatureSchemaDefault.h>
#include <FeatureTrajectory.h>
#include <FrameDatabase.h>
#include <KdTree.h>
#include <MotionMatchingData.h>

namespace EMotionFX::MotionMatching
{
    AZ_CVAR_EXTERNED(bool, mm_multiThreadedInitialization);

    AZ_CLASS_ALLOCATOR_IMPL(MotionMatchingData, MotionMatchAllocator)

    MotionMatchingData::MotionMatchingData(const FeatureSchema& featureSchema)
        : m_featureSchema(featureSchema)
    {
        m_kdTree = AZStd::make_unique<KdTree>();
    }

    MotionMatchingData::~MotionMatchingData()
    {
        Clear();
    }
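
    // Fills one row of the feature matrix per frame in the frame database: every frame's pose is sampled
    // and each feature of the schema writes its values into its own column range of that row.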
    bool MotionMatchingData::ExtractFeatures(ActorInstance* actorInstance, FrameDatabase* frameDatabase)
    {
        AZ_PROFILE_SCOPE(Animation, "MotionMatchingData::ExtractFeatures");
        AZ::Debug::Timer timer;
        timer.Stamp();

        const size_t numFrames = frameDatabase->GetNumFrames();
        if (numFrames == 0)
        {
            return true;
        }

        // Initialize all features before we process each frame.
        FeatureMatrix::Index featureComponentCount = 0;
        for (Feature* feature : m_featureSchema.GetFeatures())
        {
            Feature::InitSettings frameSettings;
            frameSettings.m_actorInstance = actorInstance;
            if (!feature->Init(frameSettings))
            {
                return false;
            }

            feature->SetColumnOffset(featureComponentCount);
            featureComponentCount += feature->GetNumDimensions();
        }
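
        // Each feature owns a contiguous block of columns, [column offset, column offset + number of dimensions).
        // The offsets accumulated above define the column layout of the feature matrix.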
        // Allocate memory for the feature matrix
        m_featureMatrix.resize(/*rows=*/numFrames, /*columns=*/featureComponentCount);

        // Multi-threaded
        if (mm_multiThreadedInitialization)
        {
            const size_t numBatches = aznumeric_caster(ceilf(aznumeric_cast<float>(numFrames) / aznumeric_cast<float>(s_numFramesPerBatch)));

            AZ::TaskGraphActiveInterface* taskGraphActiveInterface = AZ::Interface<AZ::TaskGraphActiveInterface>::Get();
            const bool useTaskGraph = taskGraphActiveInterface && taskGraphActiveInterface->IsTaskGraphActive();
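
            // Both back-ends split the frames into batches of s_numFramesPerBatch and fill the feature
            // matrix in parallel; each batch only processes its own disjoint range of frames. The task
            // graph is preferred when it is active, otherwise the job system is used as a fallback.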
            if (useTaskGraph)
            {
                AZ::TaskGraph m_taskGraph{ "MotionMatching FeatureExtraction" };

                // Split-up the motion database into batches of frames and extract the feature values for each batch simultaneously.
                for (size_t batchIndex = 0; batchIndex < numBatches; ++batchIndex)
                {
                    const size_t startFrame = batchIndex * s_numFramesPerBatch;
                    const size_t endFrame = AZStd::min(startFrame + s_numFramesPerBatch, numFrames);

                    // Create a task for every batch and extract the features simultaneously.
                    AZ::TaskDescriptor taskDescriptor{ "ExtractFeatures", "MotionMatching" };
                    m_taskGraph.AddTask(
                        taskDescriptor,
                        [this, actorInstance, startFrame, endFrame]()
                        {
                            ExtractFeatureValuesRange(actorInstance, m_frameDatabase, m_featureSchema, m_featureMatrix, startFrame, endFrame);
                        });
                }

                AZ::TaskGraphEvent finishedEvent{ "MotionMatching FeatureExtraction Wait" };
                m_taskGraph.Submit(&finishedEvent);
                finishedEvent.Wait();
            }
            else // job system
            {
                AZ::JobCompletion jobCompletion;

                // Split-up the motion database into batches of frames and extract the feature values for each batch simultaneously.
                for (size_t batchIndex = 0; batchIndex < numBatches; ++batchIndex)
                {
                    const size_t startFrame = batchIndex * s_numFramesPerBatch;
                    const size_t endFrame = AZStd::min(startFrame + s_numFramesPerBatch, numFrames);

                    // Create a job for every batch and extract the features simultaneously.
                    AZ::JobContext* jobContext = nullptr;
                    AZ::Job* job = AZ::CreateJobFunction([this, actorInstance, startFrame, endFrame]()
                        {
                            ExtractFeatureValuesRange(actorInstance, m_frameDatabase, m_featureSchema, m_featureMatrix, startFrame, endFrame);
                        }, /*isAutoDelete=*/true, jobContext);

                    job->SetDependent(&jobCompletion);
                    job->Start();
                }

                jobCompletion.StartAndWaitForCompletion();
            }
        }
        else // Single-threaded
        {
            ExtractFeatureValuesRange(actorInstance, m_frameDatabase, m_featureSchema, m_featureMatrix, /*startFrame=*/0, numFrames);
        }

        const float extractFeaturesTime = timer.GetDeltaTimeInSeconds();
        AZ_Printf("Motion Matching", "Extracting features for %zu frames took %.2f ms.", m_featureMatrix.rows(), extractFeaturesTime * 1000.0f);
        return true;
    }
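
    // Extracts the feature values for the frames in the range [startFrame, endFrame) and writes them into
    // the given feature matrix. Safe to run from multiple threads at once: every call requests its own pose
    // from a locally created pose pool and only touches its own range of frames.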
    void MotionMatchingData::ExtractFeatureValuesRange(ActorInstance* actorInstance, FrameDatabase& frameDatabase, const FeatureSchema& featureSchema, FeatureMatrix& featureMatrix, size_t startFrame, size_t endFrame)
    {
        // Iterate over all frames and extract the data for this frame.
        AnimGraphPosePool posePool;
        AnimGraphPose* pose = posePool.RequestPose(actorInstance);

        Feature::ExtractFeatureContext context(featureMatrix, posePool);
        context.m_frameDatabase = &frameDatabase;
        context.m_framePose = &pose->GetPose();
        context.m_actorInstance = actorInstance;

        const auto& frames = frameDatabase.GetFrames();
        for (size_t frameIndex = startFrame; frameIndex < endFrame; ++frameIndex)
        {
            const Frame& frame = frames[frameIndex];
            context.m_frameIndex = frame.GetFrameIndex();

            // Pre-sample the frame pose as that will be needed by many of the feature extraction calculations.
            frame.SamplePose(const_cast<Pose*>(context.m_framePose));

            // Extract all features for the given frame.
            {
                for (Feature* feature : featureSchema.GetFeatures())
                {
                    feature->ExtractFeatureValues(context);
                }
            }
        }

        posePool.FreePose(pose);
    }
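
    // Initializes the motion matching runtime data in four steps: import the motion frames into the frame
    // database, extract the feature values into the feature matrix, optionally normalize/transform the
    // feature matrix, and finally build the kd-tree acceleration structure used for the broad-phase search.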
    bool MotionMatchingData::Init(const InitSettings& settings)
    {
        AZ_PROFILE_SCOPE(Animation, "MotionMatchingData::Init");
        AZ::Debug::Timer initTimer;
        initTimer.Stamp();

        ///////////////////////////////////////////////////////////////////////
        // 1. Import motion data

        // Import all motion frames.
        size_t totalNumFramesImported = 0;
        size_t totalNumFramesDiscarded = 0;
        for (Motion* motion : settings.m_motionList)
        {
            size_t numFrames = 0;
            size_t numDiscarded = 0;
            std::tie(numFrames, numDiscarded) = m_frameDatabase.ImportFrames(motion, settings.m_frameImportSettings, false);
            totalNumFramesImported += numFrames;
            totalNumFramesDiscarded += numDiscarded;

            if (settings.m_importMirrored)
            {
                std::tie(numFrames, numDiscarded) = m_frameDatabase.ImportFrames(motion, settings.m_frameImportSettings, true);
                totalNumFramesImported += numFrames;
                totalNumFramesDiscarded += numDiscarded;
            }
        }
        if (totalNumFramesImported > 0 || totalNumFramesDiscarded > 0)
        {
            AZ_TracePrintf("Motion Matching", "Imported a total of %zu frames (%zu frames discarded) across %zu motions. This is %.2f seconds (%.2f minutes) of motion data.",
                totalNumFramesImported,
                totalNumFramesDiscarded,
                settings.m_motionList.size(),
                totalNumFramesImported / (float)settings.m_frameImportSettings.m_sampleRate,
                (totalNumFramesImported / (float)settings.m_frameImportSettings.m_sampleRate) / 60.0f);
        }
        ///////////////////////////////////////////////////////////////////////
        // 2. Extract feature data and place the values into the feature matrix.
        if (!ExtractFeatures(settings.m_actorInstance, &m_frameDatabase))
        {
            AZ_Error("Motion Matching", false, "Failed to extract features from motion database.");
            return false;
        }

        ///////////////////////////////////////////////////////////////////////
        // 3. Transform feature data / -matrix
        // Note: Do this before initializing the KD-tree as the query vector will contain pre-transformed data as well.
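        // Scaling the features to comparable ranges prevents features with large value ranges from
        // dominating the distance calculations used when searching for the best matching frame.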
        if (settings.m_normalizeData)
        {
            AZ_PROFILE_SCOPE(Animation, "MotionMatchingData::TransformFeatures");
            AZ::Debug::Timer transformFeatureTimer;
            transformFeatureTimer.Stamp();

            switch (settings.m_featureScalerType)
            {
            case FeatureScalerType::StandardScalerType:
                {
                    m_featureTransformer.reset(aznew StandardScaler());
                    break;
                }
            case FeatureScalerType::MinMaxScalerType:
                {
                    m_featureTransformer.reset(aznew MinMaxScaler());
                    break;
                }
            default:
                {
                    m_featureTransformer.reset();
                    AZ_Error("Motion Matching", false, "Unknown feature scaler type.");
                }
            }

            // Only fit and apply the scaler in case a valid one was created above.
            if (m_featureTransformer)
            {
                m_featureTransformer->Fit(m_featureMatrix, settings.m_featureTansformerSettings);
                m_featureMatrix = m_featureTransformer->Transform(m_featureMatrix);
            }

            const float transformFeatureTime = transformFeatureTimer.GetDeltaTimeInSeconds();
            AZ_Printf("Motion Matching", "Transforming/normalizing features took %.2f ms.", transformFeatureTime * 1000.0f);
        }
        else
        {
            m_featureTransformer.reset();
        }

        ///////////////////////////////////////////////////////////////////////
        // 4. Initialize the kd-tree used to accelerate the searches
        {
            // Use all features other than the trajectory for the broad-phase search using the KD-Tree.
            for (Feature* feature : m_featureSchema.GetFeatures())
            {
                if (feature->RTTI_GetType() != azrtti_typeid<FeatureTrajectory>())
                {
                    m_featuresInKdTree.push_back(feature);
                }
            }
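
            // The kd-tree only indexes the feature matrix columns belonging to the features collected above;
            // the trajectory feature is left out of this broad-phase acceleration structure.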
            if (!m_kdTree->Init(m_frameDatabase, m_featureMatrix, m_featuresInKdTree, settings.m_maxKdTreeDepth, settings.m_minFramesPerKdTreeNode)) // Internally automatically clears any existing contents.
            {
                AZ_Error("EMotionFX", false, "Failed to initialize KdTree acceleration structure.");
                return false;
            }
        }

        const float initTime = initTimer.GetDeltaTimeInSeconds();
        AZ_Printf("Motion Matching", "Feature matrix (%zu, %zu) uses %.2f MB and took %.2f ms to initialize (including initialization of acceleration structures).",
            m_featureMatrix.rows(),
            m_featureMatrix.cols(),
            static_cast<float>(m_featureMatrix.CalcMemoryUsageInBytes()) / 1024.0f / 1024.0f,
            initTime * 1000.0f);

        return true;
    }
    void MotionMatchingData::Clear()
    {
        m_frameDatabase.Clear();
        m_featureMatrix.Clear();
        m_kdTree->Clear();
        m_featuresInKdTree.clear();
    }
} // namespace EMotionFX::MotionMatching
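
// Example usage (sketch only, based on the calls visible in this file; the InitSettings type and its fields
// are declared in MotionMatchingData.h, and featureSchema, actorInstance and motions are set up elsewhere):
//
//     MotionMatchingData data(featureSchema);
//     InitSettings initSettings;
//     initSettings.m_actorInstance = actorInstance;
//     initSettings.m_motionList = motions;
//     initSettings.m_importMirrored = true;   // also import mirrored versions of each motion
//     initSettings.m_normalizeData = true;    // fit a feature scaler and transform the feature matrix
//     if (!data.Init(initSettings))
//     {
//         // Initialization failed; details were reported via AZ_Error.
//     }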