/* tracking_stabilize.c */
  1. /*
  2. * ***** BEGIN GPL LICENSE BLOCK *****
  3. *
  4. * This program is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU General Public License
  6. * as published by the Free Software Foundation; either version 2
  7. * of the License, or (at your option) any later version.
  8. *
  9. * This program is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. * GNU General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU General Public License
  15. * along with this program; if not, write to the Free Software Foundation,
  16. * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  17. *
  18. * The Original Code is Copyright (C) 2011 Blender Foundation.
  19. * All rights reserved.
  20. *
  21. * Contributor(s): Blender Foundation,
  22. * Sergey Sharybin
  23. * Keir Mierle
  24. * Ichthyostega
  25. *
  26. * ***** END GPL LICENSE BLOCK *****
  27. */
  28. /** \file blender/blenkernel/intern/tracking_stabilize.c
  29. * \ingroup bke
  30. *
  31. * This file contains implementation of 2D image stabilization.
  32. */
  33. #include <limits.h>
  34. #include "DNA_movieclip_types.h"
  35. #include "DNA_scene_types.h"
  36. #include "DNA_anim_types.h"
  37. #include "RNA_access.h"
  38. #include "BLI_utildefines.h"
  39. #include "BLI_sort_utils.h"
  40. #include "BLI_math_vector.h"
  41. #include "BLI_math.h"
  42. #include "BKE_tracking.h"
  43. #include "BKE_movieclip.h"
  44. #include "BKE_fcurve.h"
  45. #include "BLI_ghash.h"
  46. #include "MEM_guardedalloc.h"
  47. #include "IMB_imbuf_types.h"
  48. #include "IMB_imbuf.h"
/* == Parameterization constants == */

/* When measuring the scale changes relative to the rotation pivot point, it
 * might happen accidentally that a probe point (tracking point), which doesn't
 * actually move on a circular path, gets very close to the pivot point, causing
 * the measured scale contribution to go toward infinity. We damp this undesired
 * effect by adding a bias (floor) to the measured distances, which will
 * dominate very small distances and thus cause the corresponding track's
 * contribution to diminish.
 * Measurements happen in normalized (0...1) coordinates within a frame.
 */
static float SCALE_ERROR_LIMIT_BIAS = 0.01f;

/* When to consider a track as completely faded out.
 * This is used in conjunction with the "disabled" flag of the track
 * to determine start positions, end positions and gaps.
 * Compared against the (possibly animated) per-track stabilization weight.
 */
static float EPSILON_WEIGHT = 0.005f;
  65. /* == private working data == */
  66. /* Per track baseline for stabilization, defined at reference frame.
  67. * A track's reference frame is chosen as close as possible to the (global)
  68. * anchor_frame. Baseline holds the constant part of each track's contribution
  69. * to the observed movement; it is calculated at initialization pass, using the
  70. * measurement value at reference frame plus the average contribution to fill
  71. * the gap between global anchor_frame and the reference frame for this track.
  72. * This struct with private working data is associated to the local call context
  73. * via `StabContext::private_track_data`
  74. */
typedef struct TrackStabilizationBase {
	/* Constant translation part of this track's contribution,
	 * established at initialization (see file-level comment above). */
	float stabilization_offset_base[2];
	/* measured relative to translated pivot */
	float stabilization_rotation_base[2][2];
	/* measured relative to translated pivot */
	float stabilization_scale_base;
	/* True once the baseline values above have been computed for this track. */
	bool is_init_for_stabilization;
	/* Optional F-curve animating this track's stabilization weight (may be NULL). */
	FCurve *track_weight_curve;
} TrackStabilizationBase;
  84. /* Tracks are reordered for initialization, starting as close as possible to
  85. * anchor_frame
  86. */
/* Ordering record used to sort tracks for initialization, starting as close
 * as possible to anchor_frame. */
typedef struct TrackInitOrder {
	int sort_value;            /* Sort key (distance-to-anchor based). */
	int reference_frame;       /* Frame chosen as this track's local reference. */
	MovieTrackingTrack *data;  /* The track this ordering entry refers to. */
} TrackInitOrder;
/* Private working data for one stabilization calculation call,
 * used for accessing possibly animated values. */
typedef struct StabContext {
	MovieClip *clip;                   /* Clip this context was built for. */
	MovieTracking *tracking;           /* Shortcut to clip->tracking. */
	MovieTrackingStabilization *stab;  /* Shortcut to the stabilization settings. */
	/* Maps MovieTrackingTrack* -> TrackStabilizationBase* (values owned here,
	 * freed on context discard). */
	GHash *private_track_data;
	/* F-curves for animated stabilization parameters; any may be NULL when
	 * the corresponding parameter is not animated. */
	FCurve *locinf;
	FCurve *rotinf;
	FCurve *scaleinf;
	FCurve *target_pos[2];
	FCurve *target_rot;
	FCurve *target_scale;
	/* When false, fetch_from_fcurve() falls back to the static values. */
	bool use_animation;
} StabContext;
  106. static TrackStabilizationBase *access_stabilization_baseline_data(
  107. StabContext *ctx,
  108. MovieTrackingTrack *track)
  109. {
  110. return BLI_ghash_lookup(ctx->private_track_data, track);
  111. }
  112. static void attach_stabilization_baseline_data(
  113. StabContext *ctx,
  114. MovieTrackingTrack *track,
  115. TrackStabilizationBase *private_data)
  116. {
  117. BLI_ghash_insert(ctx->private_track_data, track, private_data);
  118. }
  119. static void discard_stabilization_baseline_data(void *val)
  120. {
  121. if (val != NULL) {
  122. MEM_freeN(val);
  123. }
  124. }
  125. /* == access animated values for given frame == */
  126. static FCurve *retrieve_stab_animation(MovieClip *clip,
  127. const char *data_path,
  128. int idx)
  129. {
  130. return id_data_find_fcurve(&clip->id,
  131. &clip->tracking.stabilization,
  132. &RNA_MovieTrackingStabilization,
  133. data_path,
  134. idx,
  135. NULL);
  136. }
  137. static FCurve *retrieve_track_weight_animation(MovieClip *clip,
  138. MovieTrackingTrack *track)
  139. {
  140. return id_data_find_fcurve(&clip->id,
  141. track,
  142. &RNA_MovieTrackingTrack,
  143. "weight_stab",
  144. 0,
  145. NULL);
  146. }
  147. static float fetch_from_fcurve(FCurve *animationCurve,
  148. int framenr,
  149. StabContext *ctx,
  150. float default_value)
  151. {
  152. if (ctx && ctx->use_animation && animationCurve) {
  153. int scene_framenr = BKE_movieclip_remap_clip_to_scene_frame(ctx->clip,
  154. framenr);
  155. return evaluate_fcurve(animationCurve, scene_framenr);
  156. }
  157. return default_value;
  158. }
/* Possibly animated influence of translation compensation at given frame. */
static float get_animated_locinf(StabContext *ctx, int framenr)
{
	return fetch_from_fcurve(ctx->locinf, framenr, ctx, ctx->stab->locinf);
}

/* Possibly animated influence of rotation compensation at given frame. */
static float get_animated_rotinf(StabContext *ctx, int framenr)
{
	return fetch_from_fcurve(ctx->rotinf, framenr, ctx, ctx->stab->rotinf);
}

/* Possibly animated influence of scale compensation at given frame. */
static float get_animated_scaleinf(StabContext *ctx, int framenr)
{
	return fetch_from_fcurve(ctx->scaleinf, framenr, ctx, ctx->stab->scaleinf);
}

/* Possibly animated stabilization target position (both components). */
static void get_animated_target_pos(StabContext *ctx,
                                    int framenr,
                                    float target_pos[2])
{
	target_pos[0] = fetch_from_fcurve(ctx->target_pos[0],
	                                  framenr,
	                                  ctx,
	                                  ctx->stab->target_pos[0]);
	target_pos[1] = fetch_from_fcurve(ctx->target_pos[1],
	                                  framenr,
	                                  ctx,
	                                  ctx->stab->target_pos[1]);
}

/* Possibly animated stabilization target rotation. */
static float get_animated_target_rot(StabContext *ctx, int framenr)
{
	return fetch_from_fcurve(ctx->target_rot,
	                         framenr,
	                         ctx,
	                         ctx->stab->target_rot);
}

/* Possibly animated target zoom; note the static fallback is stab->scale. */
static float get_animated_target_scale(StabContext *ctx, int framenr)
{
	return fetch_from_fcurve(ctx->target_scale, framenr, ctx, ctx->stab->scale);
}
  195. static float get_animated_weight(StabContext *ctx,
  196. MovieTrackingTrack *track,
  197. int framenr)
  198. {
  199. TrackStabilizationBase *working_data =
  200. access_stabilization_baseline_data(ctx, track);
  201. if (working_data && working_data->track_weight_curve) {
  202. int scene_framenr = BKE_movieclip_remap_clip_to_scene_frame(ctx->clip,
  203. framenr);
  204. return evaluate_fcurve(working_data->track_weight_curve, scene_framenr);
  205. }
  206. /* Use weight at global 'current frame' as fallback default. */
  207. return track->weight_stab;
  208. }
  209. static void use_values_from_fcurves(StabContext *ctx, bool toggle)
  210. {
  211. if (ctx != NULL) {
  212. ctx->use_animation = toggle;
  213. }
  214. }
  215. /* Prepare per call private working area.
  216. * Used for access to possibly animated values: retrieve available F-curves.
  217. */
static StabContext *initialize_stabilization_working_context(MovieClip *clip)
{
	/* Zero-initialized context; caller owns it and must discard it via
	 * discard_stabilization_working_context(). */
	StabContext *ctx = MEM_callocN(sizeof(StabContext),
	                               "2D stabilization animation runtime data");
	ctx->clip = clip;
	ctx->tracking = &clip->tracking;
	ctx->stab = &clip->tracking.stabilization;
	ctx->private_track_data = BLI_ghash_ptr_new(
	        "2D stabilization per track private working data");
	/* Resolve all potentially animated stabilization parameters up front;
	 * each lookup yields NULL when the parameter is not animated. */
	ctx->locinf = retrieve_stab_animation(clip, "influence_location", 0);
	ctx->rotinf = retrieve_stab_animation(clip, "influence_rotation", 0);
	ctx->scaleinf = retrieve_stab_animation(clip, "influence_scale", 0);
	ctx->target_pos[0] = retrieve_stab_animation(clip, "target_pos", 0);
	ctx->target_pos[1] = retrieve_stab_animation(clip, "target_pos", 1);
	ctx->target_rot = retrieve_stab_animation(clip, "target_rot", 0);
	ctx->target_scale = retrieve_stab_animation(clip, "target_zoom", 0);
	/* Animated values are used by default; can be toggled per phase via
	 * use_values_from_fcurves(). */
	ctx->use_animation = true;
	return ctx;
}
  237. /* Discard all private working data attached to this call context.
  238. * NOTE: We allocate the record for the per track baseline contribution
  239. * locally for each call context (i.e. call to
  240. * BKE_tracking_stabilization_data_get()
  241. * Thus it is correct to discard all allocations found within the
  242. * corresponding _local_ GHash
  243. */
  244. static void discard_stabilization_working_context(StabContext *ctx)
  245. {
  246. if (ctx != NULL) {
  247. BLI_ghash_free(ctx->private_track_data,
  248. NULL,
  249. discard_stabilization_baseline_data);
  250. MEM_freeN(ctx);
  251. }
  252. }
  253. static bool is_init_for_stabilization(StabContext *ctx,
  254. MovieTrackingTrack *track)
  255. {
  256. TrackStabilizationBase *working_data =
  257. access_stabilization_baseline_data(ctx, track);
  258. return (working_data != NULL && working_data->is_init_for_stabilization);
  259. }
  260. static bool is_usable_for_stabilization(StabContext *ctx,
  261. MovieTrackingTrack *track)
  262. {
  263. return (track->flag & TRACK_USE_2D_STAB) &&
  264. is_init_for_stabilization(ctx, track);
  265. }
  266. static bool is_effectively_disabled(StabContext *ctx,
  267. MovieTrackingTrack *track,
  268. MovieTrackingMarker *marker)
  269. {
  270. return (marker->flag & MARKER_DISABLED) ||
  271. (EPSILON_WEIGHT > get_animated_weight(ctx, track, marker->framenr));
  272. }
  273. static int search_closest_marker_index(MovieTrackingTrack *track,
  274. int ref_frame)
  275. {
  276. MovieTrackingMarker *markers = track->markers;
  277. int end = track->markersnr;
  278. int i = track->last_marker;
  279. i = MAX2(0, i);
  280. i = MIN2(i, end - 1);
  281. for ( ; i < end - 1 && markers[i].framenr <= ref_frame; ++i);
  282. for ( ; 0 < i && markers[i].framenr > ref_frame; --i);
  283. track->last_marker = i;
  284. return i;
  285. }
  286. static void retrieve_next_higher_usable_frame(StabContext *ctx,
  287. MovieTrackingTrack *track,
  288. int i,
  289. int ref_frame,
  290. int *next_higher)
  291. {
  292. MovieTrackingMarker *markers = track->markers;
  293. int end = track->markersnr;
  294. BLI_assert(0 <= i && i < end);
  295. while (i < end &&
  296. (markers[i].framenr < ref_frame ||
  297. is_effectively_disabled(ctx, track, &markers[i])))
  298. {
  299. ++i;
  300. }
  301. if (i < end && markers[i].framenr < *next_higher) {
  302. BLI_assert(markers[i].framenr >= ref_frame);
  303. *next_higher = markers[i].framenr;
  304. }
  305. }
  306. static void retrieve_next_lower_usable_frame(StabContext *ctx,
  307. MovieTrackingTrack *track,
  308. int i,
  309. int ref_frame,
  310. int *next_lower)
  311. {
  312. MovieTrackingMarker *markers = track->markers;
  313. BLI_assert(0 <= i && i < track->markersnr);
  314. while (i >= 0 &&
  315. (markers[i].framenr > ref_frame ||
  316. is_effectively_disabled(ctx, track, &markers[i])))
  317. {
  318. --i;
  319. }
  320. if (0 <= i && markers[i].framenr > *next_lower) {
  321. BLI_assert(markers[i].framenr <= ref_frame);
  322. *next_lower = markers[i].framenr;
  323. }
  324. }
  325. /* Find closest frames with usable stabilization data.
  326. * A frame counts as _usable_ when there is at least one track marked for
  327. * translation stabilization, which has an enabled tracking marker at this very
  328. * frame. We search both for the next lower and next higher position, to allow
  329. * the caller to interpolate gaps and to extrapolate at the ends of the
  330. * definition range.
  331. *
  332. * NOTE: Regarding performance note that the individual tracks will cache the
  333. * last search position.
  334. */
  335. static void find_next_working_frames(StabContext *ctx,
  336. int framenr,
  337. int *next_lower,
  338. int *next_higher)
  339. {
  340. for (MovieTrackingTrack *track = ctx->tracking->tracks.first;
  341. track != NULL;
  342. track = track->next)
  343. {
  344. if (is_usable_for_stabilization(ctx, track)) {
  345. int startpoint = search_closest_marker_index(track, framenr);
  346. retrieve_next_higher_usable_frame(ctx,
  347. track,
  348. startpoint,
  349. framenr,
  350. next_higher);
  351. retrieve_next_lower_usable_frame(ctx,
  352. track,
  353. startpoint,
  354. framenr,
  355. next_lower);
  356. }
  357. }
  358. }
  359. /* Find active (enabled) marker closest to the reference frame. */
  360. static MovieTrackingMarker *get_closest_marker(StabContext *ctx,
  361. MovieTrackingTrack *track,
  362. int ref_frame)
  363. {
  364. int next_lower = MINAFRAME;
  365. int next_higher = MAXFRAME;
  366. int i = search_closest_marker_index(track, ref_frame);
  367. retrieve_next_higher_usable_frame(ctx, track, i, ref_frame, &next_higher);
  368. retrieve_next_lower_usable_frame(ctx, track, i, ref_frame, &next_lower);
  369. if ((next_higher - ref_frame) < (ref_frame - next_lower)) {
  370. return BKE_tracking_marker_get_exact(track, next_higher);
  371. }
  372. else {
  373. return BKE_tracking_marker_get_exact(track, next_lower);
  374. }
  375. }
  376. /* Retrieve tracking data, if available and applicable for this frame.
  377. * The returned weight value signals the validity; data recorded for this
  378. * tracking marker on the exact requested frame is output with the full weight
  379. * of this track, while gaps in the data sequence cause the weight to go to zero.
  380. */
  381. static MovieTrackingMarker *get_tracking_data_point(
  382. StabContext *ctx,
  383. MovieTrackingTrack *track,
  384. int framenr,
  385. float *r_weight)
  386. {
  387. MovieTrackingMarker *marker = BKE_tracking_marker_get_exact(track, framenr);
  388. if (marker != NULL && !(marker->flag & MARKER_DISABLED)) {
  389. *r_weight = get_animated_weight(ctx, track, framenr);
  390. return marker;
  391. }
  392. else {
  393. /* No marker at this frame (=gap) or marker disabled. */
  394. *r_weight = 0.0f;
  395. return NULL;
  396. }
  397. }
  398. /* Define the reference point for rotation/scale measurement and compensation.
  399. * The stabilizator works by assuming the image was distorted by a affine linear
  400. * transform, i.e. it was rotated and stretched around this reference point
  401. * (pivot point) and then shifted laterally. Any scale and orientation changes
  402. * will be picked up relative to this point. And later the image will be
  403. * stabilized by rotating around this point. The result can only be as
  404. * accurate as this pivot point actually matches the real rotation center
  405. * of the actual movements. Thus any scheme to define a pivot point is
  406. * always guesswork.
  407. *
  408. * As a simple default, we use the weighted average of the location markers
  409. * of the current frame as pivot point. TODO It is planned to add further
  410. * options, like e.g. anchoring the pivot point at the canvas. Moreover,
  411. * it is planned to allow for a user controllable offset.
  412. */
/* Initialize the pivot point for rotation/scale handling from the weighted
 * average position of the current frame's markers (see comment above). */
static void setup_pivot(const float ref_pos[2], float r_pivot[2])
{
	zero_v2(r_pivot); /* TODO: add an animated offset position here. */
	add_v2_v2(r_pivot, ref_pos);
}
  418. /* Calculate the contribution of a single track at the time position (frame) of
  419. * the given marker. Each track has a local reference frame, which is as close
  420. * as possible to the global anchor_frame. Thus the translation contribution is
  421. * comprised of the offset relative to the image position at that reference
  422. * frame, plus a guess of the contribution for the time span between the
  423. * anchor_frame and the local reference frame of this track. The constant part
  424. * of this contribution is precomputed initially. At the anchor_frame, by
  425. * definition the contribution of all tracks is zero, keeping the frame in place.
  426. *
  427. * track_ref is per track baseline contribution at reference frame; filled in at
  428. * initialization
  429. * marker is tracking data to use as contribution for current frame.
  430. * result_offset is a total cumulated contribution of this track,
  431. * relative to the stabilization anchor_frame,
  432. * in normalized (0...1) coordinates.
  433. */
static void translation_contribution(TrackStabilizationBase *track_ref,
                                     MovieTrackingMarker *marker,
                                     float result_offset[2])
{
	/* Total contribution = precomputed per-track baseline offset plus the
	 * measured marker position at the current frame (normalized coords). */
	add_v2_v2v2(result_offset,
	            track_ref->stabilization_offset_base,
	            marker->pos);
}
  442. /* Similar to the ::translation_contribution(), the rotation contribution is
  443. * comprised of the contribution by this individual track, and the averaged
  444. * contribution from anchor_frame to the ref point of this track.
  445. * - Contribution is in terms of angles, -pi < angle < +pi, and all averaging
  446. * happens in this domain.
  447. * - Yet the actual measurement happens as vector between pivot and the current
  448. * tracking point
  449. * - Currently we use the center of frame as approximation for the rotation pivot
  450. * point.
  451. * - Moreover, the pivot point has to be compensated for the already determined
  452. * shift offset, in order to get the pure rotation around the pivot.
  453. * To turn this into a _contribution_, the likewise corrected angle at the
  454. * reference frame has to be subtracted, to get only the pure angle difference
  455. * this tracking point has captured.
  456. * - To get from vectors to angles, we have to go through an arcus tangens,
  457. * which involves the issue of the definition range: the resulting angles will
  458. * flip by 360deg when the measured vector passes from the 2nd to the third
  459. * quadrant, thus messing up the average calculation. Since _any_ tracking
  460. * point might be used, these problems are quite common in practice.
  461. * - Thus we perform the subtraction of the reference and the addition of the
  462. * baseline contribution in polar coordinates as simple addition of angles;
  463. * since these parts are fixed, we can bake them into a rotation matrix.
  464. * With this approach, the border of the arcus tangens definition range will
  465. * be reached only, when the _whole_ contribution approaches +- 180deg,
  466. * meaning we've already tilted the frame upside down. This situation is way
  467. * less common and can be tolerated.
  468. * - As an additional feature, when activated, also changes in image scale
  469. * relative to the rotation center can be picked up. To handle those values
  470. * in the same framework, we average the scales as logarithms.
  471. *
  472. * aspect is a total aspect ratio of the undistorted image (includes fame and
  473. * pixel aspect). The function returns a quality factor, which can be used
  474. * to damp the contributions of points in close proximity to the pivot point,
  475. * since such contributions might be dominated by rounding errors and thus
  476. * poison the calculated average. When the quality factor goes towards zero,
  477. * the weight of this contribution should be reduced accordingly.
  478. */
  479. static float rotation_contribution(TrackStabilizationBase *track_ref,
  480. MovieTrackingMarker *marker,
  481. const float aspect,
  482. const float pivot[2],
  483. float *result_angle,
  484. float *result_scale)
  485. {
  486. float len, quality;
  487. float pos[2];
  488. sub_v2_v2v2(pos, marker->pos, pivot);
  489. pos[0] *= aspect;
  490. mul_m2v2(track_ref->stabilization_rotation_base, pos);
  491. *result_angle = atan2f(pos[1],pos[0]);
  492. len = len_v2(pos);
  493. /* prevent points very close to the pivot point from poisoning the result */
  494. quality = 1 - expf(-len*len / SCALE_ERROR_LIMIT_BIAS*SCALE_ERROR_LIMIT_BIAS);
  495. len += SCALE_ERROR_LIMIT_BIAS;
  496. *result_scale = len * track_ref->stabilization_scale_base;
  497. BLI_assert(0.0 < *result_scale);
  498. return quality;
  499. }
  500. /* Workaround to allow for rotation around an arbitrary pivot point.
  501. * Currently, the public API functions do not support this flexibility.
  502. * Rather, rotation will always be applied around a fixed origin.
  503. * As a workaround, we shift the image after rotation to match the
  504. * desired rotation centre. And since this offset needs to be applied
  505. * after the rotation and scaling, we can collapse it with the
  506. * translation compensation, which is also a lateral shift (offset).
  507. * The offset to apply is intended_pivot - rotated_pivot
  508. */
static void compensate_rotation_center(const int size, float aspect,
                                       const float angle,
                                       const float scale,
                                       const float pivot[2],
                                       float result_translation[2])
{
	/* Fixed origin the public transform API rotates/scales around:
	 * the frame center, with aspect applied on X. */
	const float origin[2] = {0.5f*aspect*size, 0.5f*size};
	float intended_pivot[2], rotated_pivot[2];
	float rotation_mat[2][2];
	copy_v2_v2(intended_pivot, pivot);
	copy_v2_v2(rotated_pivot, pivot);
	angle_to_mat2(rotation_mat, +angle);
	/* Transform the pivot the same way the API will transform the image:
	 * rotate and scale about the fixed origin. */
	sub_v2_v2(rotated_pivot, origin);
	mul_m2v2(rotation_mat, rotated_pivot);
	mul_v2_fl(rotated_pivot, scale);
	add_v2_v2(rotated_pivot, origin);
	/* Accumulate the correction offset: intended_pivot - rotated_pivot
	 * (collapsed into the translation compensation, see comment above). */
	add_v2_v2(result_translation, intended_pivot);
	sub_v2_v2(result_translation, rotated_pivot);
}
  528. /* Weighted average of the per track cumulated contributions at given frame.
  529. * Returns truth if all desired calculations could be done and all averages are
  530. * available.
  531. *
  532. * NOTE: Even if the result is not `true`, the returned translation and angle
  533. * are always sensible and as good as can be. Especially in the
  534. * initialization phase we might not be able to get any average (yet) or
  535. * get only a translation value. Since initialization visits tracks in a
  536. * specific order, starting from anchor_frame, the result is logically
  537. * correct non the less. But under normal operation conditions,
  538. * a result of `false` should disable the stabilization function
  539. */
static bool average_track_contributions(StabContext *ctx,
                                        int framenr,
                                        float aspect,
                                        float r_translation[2],
                                        float r_pivot[2],
                                        float *r_angle,
                                        float *r_scale_step)
{
	bool ok;
	float weight_sum;
	MovieTrackingTrack *track;
	MovieTracking *tracking = ctx->tracking;
	MovieTrackingStabilization *stab = &tracking->stabilization;
	float ref_pos[2];
	BLI_assert(stab->flag & TRACKING_2D_STABILIZATION);
	/* Results default to "no movement" so they are sensible even when we
	 * bail out early or find no rotation data (see function comment above). */
	zero_v2(r_translation);
	*r_scale_step = 0.0f; /* logarithm */
	*r_angle = 0.0f;
	zero_v2(ref_pos);
	ok = false;
	weight_sum = 0.0f;
	/* Pass 1: weighted average of translation contributions, and of the raw
	 * marker positions (the latter defines the pivot point). */
	for (track = tracking->tracks.first; track; track = track->next) {
		if (!is_init_for_stabilization(ctx, track)) {
			continue;
		}
		if (track->flag & TRACK_USE_2D_STAB) {
			float weight = 0.0f;
			MovieTrackingMarker *marker = get_tracking_data_point(ctx,
			                                                      track,
			                                                      framenr,
			                                                      &weight);
			if (marker) {
				TrackStabilizationBase *stabilization_base =
				        access_stabilization_baseline_data(ctx, track);
				BLI_assert(stabilization_base != NULL);
				float offset[2];
				weight_sum += weight;
				translation_contribution(stabilization_base, marker, offset);
				r_translation[0] += weight * offset[0];
				r_translation[1] += weight * offset[1];
				ref_pos[0] += weight * marker->pos[0];
				ref_pos[1] += weight * marker->pos[1];
				/* Valid once the cumulated weight is non-negligible. */
				ok |= (weight_sum > EPSILON_WEIGHT);
			}
		}
	}
	if (!ok) {
		/* No usable translation data at all on this frame. */
		return false;
	}
	/* Normalize the weighted sums into proper averages. */
	ref_pos[0] /= weight_sum;
	ref_pos[1] /= weight_sum;
	r_translation[0] /= weight_sum;
	r_translation[1] /= weight_sum;
	setup_pivot(ref_pos, r_pivot);
	if (!(stab->flag & TRACKING_STABILIZE_ROTATION)) {
		/* Translation-only stabilization: done. */
		return ok;
	}
	/* Pass 2: weighted average of rotation (and optionally scale)
	 * contributions, measured relative to the pivot from pass 1. */
	ok = false;
	weight_sum = 0.0f;
	for (track = tracking->tracks.first; track; track = track->next) {
		if (!is_init_for_stabilization(ctx, track)) {
			continue;
		}
		if (track->flag & TRACK_USE_2D_STAB_ROT) {
			float weight = 0.0f;
			MovieTrackingMarker *marker = get_tracking_data_point(ctx,
			                                                      track,
			                                                      framenr,
			                                                      &weight);
			if (marker) {
				TrackStabilizationBase *stabilization_base =
				        access_stabilization_baseline_data(ctx, track);
				BLI_assert(stabilization_base != NULL);
				float rotation, scale, quality;
				quality = rotation_contribution(stabilization_base,
				                                marker,
				                                aspect,
				                                r_pivot,
				                                &rotation,
				                                &scale);
				/* Damp near-pivot points, whose measurement is unreliable. */
				weight *= quality;
				weight_sum += weight;
				*r_angle += rotation * weight;
				if (stab->flag & TRACKING_STABILIZE_SCALE) {
					/* Scales are averaged as logarithms. */
					*r_scale_step += logf(scale) * weight;
				}
				else {
					*r_scale_step = 0;
				}
				ok |= (weight_sum > EPSILON_WEIGHT);
			}
		}
	}
	if (ok) {
		*r_scale_step /= weight_sum;
		*r_angle /= weight_sum;
	}
	else {
		/* We reach this point because translation could be calculated,
		 * but rotation/scale found no data to work on.
		 */
		*r_scale_step = 0.0f;
		*r_angle = 0.0f;
	}
	return true;
}
/* Calculate weight center of location tracks for given frame.
 * This function performs similar calculations as average_track_contributions(),
 * but does not require the tracks to be initialized for stabilisation. Moreover,
 * when there is no usable tracking data for the given frame number, data from
 * a neighbouring frame is used. Thus this function can be used to calculate
 * a starting point on initialization.
 *
 * r_ref_pos receives the weighted average position in normalized (0..1)
 * coordinates; it is left at zero when no location track provides any
 * usable data at all.
 */
static void average_marker_positions(StabContext *ctx, int framenr, float r_ref_pos[2])
{
  bool ok = false;
  float weight_sum;
  MovieTrackingTrack *track;
  MovieTracking *tracking = ctx->tracking;
  zero_v2(r_ref_pos);
  weight_sum = 0.0f;
  /* First pass: weighted average over all location tracks which provide
   * a marker (and thus a weight) at the requested frame. */
  for (track = tracking->tracks.first; track; track = track->next) {
    if (track->flag & TRACK_USE_2D_STAB) {
      float weight = 0.0f;
      MovieTrackingMarker *marker =
              get_tracking_data_point(ctx, track, framenr, &weight);
      if (marker) {
        weight_sum += weight;
        r_ref_pos[0] += weight * marker->pos[0];
        r_ref_pos[1] += weight * marker->pos[1];
        /* Only significant accumulated weight counts as usable data;
         * individual tracks may be faded out via (animated) weight. */
        ok |= (weight_sum > EPSILON_WEIGHT);
      }
    }
  }
  if (ok) {
    r_ref_pos[0] /= weight_sum;
    r_ref_pos[1] /= weight_sum;
  }
  else {
    /* No usable tracking data on any track on this frame.
     * Use data from neighbouring frames to extrapolate...
     */
    int next_lower = MINAFRAME;
    int next_higher = MAXFRAME;
    use_values_from_fcurves(ctx, true);
    for (track = tracking->tracks.first; track; track = track->next) {
      /* Note: we deliberately do not care if this track
       * is already initialized for stabilisation */
      if (track->flag & TRACK_USE_2D_STAB) {
        int startpoint = search_closest_marker_index(track, framenr);
        retrieve_next_higher_usable_frame(ctx,
                                          track,
                                          startpoint,
                                          framenr,
                                          &next_higher);
        retrieve_next_lower_usable_frame(ctx,
                                         track,
                                         startpoint,
                                         framenr,
                                         &next_lower);
      }
    }
    if (next_lower >= MINFRAME) {
      /* use next usable frame to the left.
       * Also default to this frame when we're in a gap */
      average_marker_positions(ctx, next_lower, r_ref_pos);
    }
    else if (next_higher < MAXFRAME) {
      /* Otherwise recurse onto the closest usable frame to the right. */
      average_marker_positions(ctx, next_higher, r_ref_pos);
    }
    use_values_from_fcurves(ctx, false);
  }
}
  713. /* Linear interpolation of data retrieved at two measurement points.
  714. * This function is used to fill gaps in the middle of the covered area,
  715. * at frames without any usable tracks for stabilization.
  716. *
  717. * framenr is a position to interpolate for.
  718. * frame_a is a valid measurement point below framenr
  719. * frame_b is a valid measurement point above framenr
  720. * Returns truth if both measurements could actually be retrieved.
  721. * Otherwise output parameters remain unaltered
  722. */
  723. static bool interpolate_averaged_track_contributions(StabContext *ctx,
  724. int framenr,
  725. int frame_a,
  726. int frame_b,
  727. const float aspect,
  728. float r_translation[2],
  729. float r_pivot[2],
  730. float *r_angle,
  731. float *r_scale_step)
  732. {
  733. float t, s;
  734. float trans_a[2], trans_b[2];
  735. float angle_a, angle_b;
  736. float scale_a, scale_b;
  737. float pivot_a[2], pivot_b[2];
  738. bool success = false;
  739. BLI_assert(frame_a <= frame_b);
  740. BLI_assert(frame_a <= framenr);
  741. BLI_assert(framenr <= frame_b);
  742. t = ((float)framenr - frame_a) / (frame_b - frame_a);
  743. s = 1.0f - t;
  744. success = average_track_contributions(ctx, frame_a, aspect, trans_a, pivot_a, &angle_a, &scale_a);
  745. if (!success) {
  746. return false;
  747. }
  748. success = average_track_contributions(ctx, frame_b, aspect, trans_b, pivot_b, &angle_b, &scale_b);
  749. if (!success) {
  750. return false;
  751. }
  752. interp_v2_v2v2(r_translation, trans_a, trans_b, t);
  753. interp_v2_v2v2(r_pivot, pivot_a, pivot_b, t);
  754. *r_scale_step = s * scale_a + t * scale_b;
  755. *r_angle = s * angle_a + t * angle_b;
  756. return true;
  757. }
  758. /* Reorder tracks starting with those providing a tracking data frame
  759. * closest to the global anchor_frame. Tracks with a gap at anchor_frame or
  760. * starting farer away from anchor_frame altogether will be visited later.
  761. * This allows to build up baseline contributions incrementally.
  762. *
  763. * order is an array for sorting the tracks. Must be of suitable size to hold
  764. * all tracks.
  765. * Returns number of actually usable tracks, can be less than the overall number
  766. * of tracks.
  767. *
  768. * NOTE: After returning, the order array holds entries up to the number of
  769. * usable tracks, appropriately sorted starting with the closest tracks.
  770. * Initialization includes disabled tracks, since they might be enabled
  771. * through automation later.
  772. */
  773. static int establish_track_initialization_order(StabContext *ctx,
  774. TrackInitOrder *order)
  775. {
  776. size_t tracknr = 0;
  777. MovieTrackingTrack *track;
  778. MovieTracking *tracking = ctx->tracking;
  779. int anchor_frame = tracking->stabilization.anchor_frame;
  780. for (track = tracking->tracks.first; track != NULL; track = track->next) {
  781. MovieTrackingMarker *marker;
  782. order[tracknr].data = track;
  783. marker = get_closest_marker(ctx, track, anchor_frame);
  784. if (marker != NULL &&
  785. (track->flag & (TRACK_USE_2D_STAB | TRACK_USE_2D_STAB_ROT)))
  786. {
  787. order[tracknr].sort_value = abs(marker->framenr - anchor_frame);
  788. order[tracknr].reference_frame = marker->framenr;
  789. ++tracknr;
  790. }
  791. }
  792. if (tracknr) {
  793. qsort(order, tracknr, sizeof(TrackInitOrder), BLI_sortutil_cmp_int);
  794. }
  795. return tracknr;
  796. }
  797. /* Setup the constant part of this track's contribution to the determined frame
  798. * movement. Tracks usually don't provide tracking data for every frame. Thus,
  799. * for determining data at a given frame, we split up the contribution into a
  800. * part covered by actual measurements on this track, and the initial gap
  801. * between this track's reference frame and the global anchor_frame.
  802. * The (missing) data for the gap can be substituted by the average offset
  803. * observed by the other tracks covering the gap. This approximation doesn't
  804. * introduce wrong data, but it records data with incorrect weight. A totally
  805. * correct solution would require us to average the contribution per frame, and
  806. * then integrate stepwise over all frames -- which of course would be way more
  807. * expensive, especially for longer clips. To the contrary, our solution
  808. * cumulates the total contribution per track and averages afterwards over all
  809. * tracks; it can thus be calculated just based on the data of a single frame,
  810. * plus the "baseline" for the reference frame, which is what we are computing
  811. * here.
  812. *
  813. * Since we're averaging _contributions_, we have to calculate the _difference_
  814. * of the measured position at current frame and the position at the reference
  815. * frame. But the "reference" part of this difference is constant and can thus
  816. * be packed together with the baseline contribution into a single precomputed
  817. * vector per track.
  818. *
  819. * In case of the rotation contribution, the principle is the same, but we have
  820. * to compensate for the already determined translation and measure the pure
  821. * rotation, simply because this is how we model the offset: shift plus rotation
  822. * around the shifted rotation center. To circumvent problems with the
  823. * definition range of the arcus tangens function, we perform this baseline
  824. * addition and reference angle subtraction in polar coordinates and bake this
  825. * operation into a precomputed rotation matrix.
  826. *
 * track is the track to be initialized.
  828. * reference_frame is a local frame for this track, the closest pick to the
  829. * global anchor_frame.
 * aspect is a total aspect ratio of the undistorted image (includes frame and
  831. * pixel aspect).
  832. * target_pos is a possibly animated target position as set by the user for
  833. * the reference_frame
  834. * average_translation is a value observed by the _other_ tracks for the gap
  835. * between reference_frame and anchor_frame. This
  836. * average must not contain contributions of frames
  837. * not yet initialized
  838. * average_angle in a similar way, the rotation value observed by the
  839. * _other_ tracks.
  840. * average_scale_step is an image scale factor observed on average by the other
  841. * tracks for this frame. This value is recorded and
  842. * averaged as logarithm. The recorded scale changes
  843. * are damped for very small contributions, to limit
  844. * the effect of probe points approaching the pivot
  845. * too closely.
  846. *
  847. * NOTE: when done, this track is marked as initialized
  848. */
/* Precompute this track's constant baseline contributions (translation,
 * rotation matrix and zoom factor) relative to the given reference frame;
 * see the extensive explanation in the comment block above.
 * When done, the track is marked as initialized for stabilization. */
static void initialize_track_for_stabilization(StabContext *ctx,
                                               MovieTrackingTrack *track,
                                               int reference_frame,
                                               float aspect,
                                               const float average_translation[2],
                                               const float pivot[2],
                                               const float average_angle,
                                               const float average_scale_step)
{
  float pos[2], angle, len;
  TrackStabilizationBase *local_data =
          access_stabilization_baseline_data(ctx, track);
  MovieTrackingMarker *marker =
          BKE_tracking_marker_get_exact(track, reference_frame);
  /* Logic for initialization order ensures there *is* a marker on that
   * very frame.
   */
  BLI_assert(marker != NULL);
  BLI_assert(local_data != NULL);
  /* Per track baseline value for translation:
   * average offset at reference frame minus the marker's own position. */
  sub_v2_v2v2(local_data->stabilization_offset_base,
              average_translation,
              marker->pos);
  /* Per track baseline value for rotation: measure the marker's angle
   * around the pivot (aspect-corrected x), subtract it from the average
   * angle and bake the difference into a 2x2 rotation matrix, thereby
   * avoiding atan2 range problems later on. */
  sub_v2_v2v2(pos, marker->pos, pivot);
  pos[0] *= aspect;
  angle = average_angle - atan2f(pos[1],pos[0]);
  angle_to_mat2(local_data->stabilization_rotation_base, angle);
  /* Per track baseline value for zoom; the bias term damps contributions
   * of probe points approaching the pivot too closely. */
  len = len_v2(pos) + SCALE_ERROR_LIMIT_BIAS;
  local_data->stabilization_scale_base = expf(average_scale_step) / len;
  /* From now on is_init_for_stabilization() reports this track usable. */
  local_data->is_init_for_stabilization = true;
}
/* (Re-)initialize the per-track baseline working data for every track,
 * then visit the tracks ordered by closeness to the anchor frame and
 * compute each track's baseline contribution incrementally: averages of
 * the already-initialized tracks fill the gap between a track's local
 * reference frame and the global anchor frame. */
static void initialize_all_tracks(StabContext *ctx, float aspect)
{
  size_t i, track_cnt = 0;
  MovieClip *clip = ctx->clip;
  MovieTracking *tracking = ctx->tracking;
  MovieTrackingTrack *track;
  TrackInitOrder *order;
  /* Attempt to start initialization at anchor_frame.
   * By definition, offset contribution is zero there.
   */
  int reference_frame = tracking->stabilization.anchor_frame;
  float average_angle=0, average_scale_step=0;
  float average_translation[2], average_pos[2], pivot[2];
  zero_v2(average_translation);
  zero_v2(pivot);
  /* Initialize private working data. */
  for (track = tracking->tracks.first; track != NULL; track = track->next) {
    TrackStabilizationBase *local_data =
            access_stabilization_baseline_data(ctx, track);
    if (!local_data) {
      /* First encounter of this track: attach fresh zeroed baseline data. */
      local_data = MEM_callocN(sizeof(TrackStabilizationBase),
                               "2D stabilization per track baseline data");
      attach_stabilization_baseline_data(ctx, track, local_data);
    }
    BLI_assert(local_data != NULL);
    local_data->track_weight_curve = retrieve_track_weight_animation(clip,
                                                                     track);
    local_data->is_init_for_stabilization = false;
    ++track_cnt;
  }
  if (!track_cnt) {
    return;
  }
  order = MEM_mallocN(track_cnt * sizeof(TrackInitOrder),
                      "stabilization track order");
  if (!order) {
    return;
  }
  /* track_cnt may shrink: only tracks usable for stabilization remain. */
  track_cnt = establish_track_initialization_order(ctx, order);
  if (track_cnt == 0) {
    goto cleanup;
  }
  /* starting point for pivot, before having initialized any track */
  average_marker_positions(ctx, reference_frame, average_pos);
  setup_pivot(average_pos, pivot);
  for (i = 0; i < track_cnt; ++i) {
    track = order[i].data;
    if (reference_frame != order[i].reference_frame) {
      /* Moving on to a new local reference frame: re-measure the average
       * contribution of the tracks initialized so far, to bridge the gap
       * between this frame and the global anchor frame. */
      reference_frame = order[i].reference_frame;
      average_track_contributions(ctx,
                                  reference_frame,
                                  aspect,
                                  average_translation,
                                  pivot,
                                  &average_angle,
                                  &average_scale_step);
    }
    initialize_track_for_stabilization(ctx,
                                       track,
                                       reference_frame,
                                       aspect,
                                       average_translation,
                                       pivot,
                                       average_angle,
                                       average_scale_step);
  }
cleanup:
  MEM_freeN(order);
}
  951. /* Retrieve the measurement of frame movement by averaging contributions of
  952. * active tracks.
  953. *
  954. * translation is a measurement in normalized 0..1 coordinates.
  955. * angle is a measurement in radians -pi..+pi counter clockwise relative to
  956. * translation compensated frame center
  957. * scale_step is a measurement of image scale changes, in logarithmic scale
  958. * (zero means scale == 1)
  959. * Returns calculation enabled and all data retrieved as expected for this frame.
  960. *
  961. * NOTE: when returning `false`, output parameters are reset to neutral values.
  962. */
  963. static bool stabilization_determine_offset_for_frame(StabContext *ctx,
  964. int framenr,
  965. float aspect,
  966. float r_translation[2],
  967. float r_pivot[2],
  968. float *r_angle,
  969. float *r_scale_step)
  970. {
  971. bool success = false;
  972. /* Early output if stabilization is disabled. */
  973. if ((ctx->stab->flag & TRACKING_2D_STABILIZATION) == 0) {
  974. zero_v2(r_translation);
  975. *r_scale_step = 0.0f;
  976. *r_angle = 0.0f;
  977. return false;
  978. }
  979. success = average_track_contributions(ctx,
  980. framenr,
  981. aspect,
  982. r_translation,
  983. r_pivot,
  984. r_angle,
  985. r_scale_step);
  986. if (!success) {
  987. /* Try to hold extrapolated settings beyond the definition range
  988. * and to interpolate in gaps without any usable tracking data
  989. * to prevent sudden jump to image zero position.
  990. */
  991. int next_lower = MINAFRAME;
  992. int next_higher = MAXFRAME;
  993. use_values_from_fcurves(ctx, true);
  994. find_next_working_frames(ctx, framenr, &next_lower, &next_higher);
  995. if (next_lower >= MINFRAME && next_higher < MAXFRAME) {
  996. success = interpolate_averaged_track_contributions(ctx,
  997. framenr,
  998. next_lower,
  999. next_higher,
  1000. aspect,
  1001. r_translation,
  1002. r_pivot,
  1003. r_angle,
  1004. r_scale_step);
  1005. }
  1006. else if (next_higher < MAXFRAME) {
  1007. /* Before start of stabilized range: extrapolate start point
  1008. * settings.
  1009. */
  1010. success = average_track_contributions(ctx,
  1011. next_higher,
  1012. aspect,
  1013. r_translation,
  1014. r_pivot,
  1015. r_angle,
  1016. r_scale_step);
  1017. }
  1018. else if (next_lower >= MINFRAME) {
  1019. /* After end of stabilized range: extrapolate end point settings. */
  1020. success = average_track_contributions(ctx,
  1021. next_lower,
  1022. aspect,
  1023. r_translation,
  1024. r_pivot,
  1025. r_angle,
  1026. r_scale_step);
  1027. }
  1028. use_values_from_fcurves(ctx, false);
  1029. }
  1030. return success;
  1031. }
  1032. /* Calculate stabilization data (translation, scale and rotation) from given raw
  1033. * measurements. Result is in absolute image dimensions (expanded image, square
  1034. * pixels), includes automatic or manual scaling and compensates for a target
  1035. * frame position, if given.
  1036. *
  1037. * size is a size of the expanded image, the width in pixels is size * aspect.
  1038. * aspect is a ratio (width / height) of the effective canvas (square pixels).
  1039. * do_compensate denotes whether to actually output values necessary to
  1040. * _compensate_ the determined frame movement.
  1041. * Otherwise, the effective target movement is returned.
  1042. */
  1043. static void stabilization_calculate_data(StabContext *ctx,
  1044. int framenr,
  1045. int size,
  1046. float aspect,
  1047. bool do_compensate,
  1048. float scale_step,
  1049. float r_translation[2],
  1050. float r_pivot[2],
  1051. float *r_scale,
  1052. float *r_angle)
  1053. {
  1054. float target_pos[2], target_scale;
  1055. float scaleinf = get_animated_scaleinf(ctx, framenr);
  1056. if (ctx->stab->flag & TRACKING_STABILIZE_SCALE) {
  1057. *r_scale = expf(scale_step * scaleinf); /* Averaged in log scale */
  1058. }
  1059. else {
  1060. *r_scale = 1.0f;
  1061. }
  1062. mul_v2_fl(r_translation, get_animated_locinf(ctx, framenr));
  1063. *r_angle *= get_animated_rotinf(ctx, framenr);
  1064. /* Compensate for a target frame position.
  1065. * This allows to follow tracking / panning shots in a semi manual fashion,
  1066. * when animating the settings for the target frame position.
  1067. */
  1068. get_animated_target_pos(ctx, framenr, target_pos);
  1069. sub_v2_v2(r_translation, target_pos);
  1070. *r_angle -= get_animated_target_rot(ctx, framenr);
  1071. target_scale = get_animated_target_scale(ctx, framenr);
  1072. if (target_scale != 0.0f) {
  1073. *r_scale /= target_scale;
  1074. /* target_scale is an expected/intended reference zoom value */
  1075. }
  1076. /* Convert from relative to absolute coordinates, square pixels. */
  1077. r_translation[0] *= (float)size * aspect;
  1078. r_translation[1] *= (float)size;
  1079. r_pivot[0] *= (float)size * aspect;
  1080. r_pivot[1] *= (float)size;
  1081. /* Output measured data, or inverse of the measured values for
  1082. * compensation?
  1083. */
  1084. if (do_compensate) {
  1085. mul_v2_fl(r_translation, -1.0f);
  1086. *r_angle *= -1.0f;
  1087. if (*r_scale != 0.0f) {
  1088. *r_scale = 1.0f / *r_scale;
  1089. }
  1090. }
  1091. }
/* Build a 4x4 homogeneous transformation matrix from the given 2D
 * stabilization data: translation plus scale and rotation around the given
 * pivot point, with compensation for the pixel aspect ratio wrapped around
 * the whole operation. */
static void stabilization_data_to_mat4(float pixel_aspect,
                                       const float pivot[2],
                                       const float translation[2],
                                       float scale,
                                       float angle,
                                       float r_mat[4][4])
{
  float translation_mat[4][4], rotation_mat[4][4], scale_mat[4][4],
        pivot_mat[4][4], inv_pivot_mat[4][4],
        aspect_mat[4][4], inv_aspect_mat[4][4];
  /* Uniform scale in x/y, leave z untouched. */
  float scale_vector[3] = {scale, scale, 1.0f};
  unit_m4(translation_mat);
  unit_m4(rotation_mat);
  unit_m4(scale_mat);
  unit_m4(aspect_mat);
  unit_m4(pivot_mat);
  unit_m4(inv_pivot_mat);
  /* aspect ratio correction matrix */
  aspect_mat[0][0] /= pixel_aspect;
  invert_m4_m4(inv_aspect_mat, aspect_mat);
  /* Pivot shift and its inverse, baked into the matrix translation rows. */
  add_v2_v2(pivot_mat[3], pivot);
  sub_v2_v2(inv_pivot_mat[3], pivot);
  size_to_mat4(scale_mat, scale_vector); /* scale matrix */
  add_v2_v2(translation_mat[3], translation); /* translation matrix */
  rotate_m4(rotation_mat, 'Z', angle); /* rotation matrix */
  /* Compose transformation matrix. The factor order is significant:
   * pivot shift brackets scale/rotation, aspect correction brackets
   * the whole transform. */
  mul_m4_series(r_mat, aspect_mat, translation_mat,
                pivot_mat, scale_mat, rotation_mat, inv_pivot_mat,
                inv_aspect_mat);
}
/* Calculate scale factor necessary to eliminate black image areas
 * caused by the compensating movements of the stabilizator.
 * This function visits every frame where stabilisation data is
 * available and determines the factor for this frame. The overall
 * largest factor found is returned as result.
 *
 * NOTE: all tracks need to be initialized before calling this function.
 */
static float calculate_autoscale_factor(StabContext *ctx,
                                        int size,
                                        float aspect)
{
  MovieTrackingStabilization *stab = ctx->stab;
  float pixel_aspect = ctx->tracking->camera.pixel_aspect;
  int height = size, width = aspect*size;
  /* Frame range covered by stabilization-relevant tracks. */
  int sfra = INT_MAX, efra = INT_MIN, cfra;
  float scale = 1.0f, scale_step = 0.0f;
  MovieTrackingTrack *track;
  /* Calculate maximal frame range of tracks where stabilization is active. */
  for (track = ctx->tracking->tracks.first; track; track = track->next) {
    if ((track->flag & TRACK_USE_2D_STAB) ||
        ((stab->flag & TRACKING_STABILIZE_ROTATION) &&
         (track->flag & TRACK_USE_2D_STAB_ROT)))
    {
      int first_frame = track->markers[0].framenr;
      int last_frame = track->markers[track->markersnr - 1].framenr;
      sfra = min_ii(sfra, first_frame);
      efra = max_ii(efra, last_frame);
    }
  }
  use_values_from_fcurves(ctx, true);
  /* NOTE: if no track matched above, sfra > efra and this loop is a no-op,
   * leaving the neutral scale of 1.0. */
  for (cfra = sfra; cfra <= efra; cfra++) {
    float translation[2], pivot[2], angle, tmp_scale;
    float mat[4][4];
    /* Corners of the original (unstabilized) frame, in pixels. */
    const float points[4][2] = {{0.0f, 0.0f},
                                {0.0f, height},
                                {width, height},
                                {width, 0.0f}};
    const bool do_compensate = true;
    /* Calculate stabilization parameters for the current frame. */
    stabilization_determine_offset_for_frame(ctx,
                                             cfra,
                                             aspect,
                                             translation,
                                             pivot,
                                             &angle,
                                             &scale_step);
    stabilization_calculate_data(ctx,
                                 cfra,
                                 size,
                                 aspect,
                                 do_compensate,
                                 scale_step,
                                 translation,
                                 pivot,
                                 &tmp_scale,
                                 &angle);
    /* Compose transformation matrix. */
    /* NOTE: Here we operate in NON-COMPENSATED coordinates, meaning we have
     * to construct transformation matrix using proper pivot point.
     * Compensation for that will happen later on.
     */
    stabilization_data_to_mat4(pixel_aspect,
                               pivot,
                               translation,
                               tmp_scale,
                               angle,
                               mat);
    /* Investigate the transformed border lines for this frame;
     * find out, where it cuts the original frame.
     */
    for (int edge_index = 0; edge_index < 4; edge_index++) {
      /* Calculate coordinates of stabilized frame edge points.
       * Use matrix multiplication here so we operate in homogeneous
       * coordinates.
       */
      float stable_edge_p1[3], stable_edge_p2[3];
      copy_v2_v2(stable_edge_p1, points[edge_index]);
      copy_v2_v2(stable_edge_p2, points[(edge_index + 1) % 4]);
      stable_edge_p1[2] = stable_edge_p2[2] = 0.0f;
      mul_m4_v3(mat, stable_edge_p1);
      mul_m4_v3(mat, stable_edge_p2);
      /* Now we iterate over all original frame corners (we call them
       * 'point' here) to see if there's black area between stabilized
       * frame edge and original point.
       */
      for (int point_index = 0; point_index < 4; point_index++) {
        const float point[3] = {points[point_index][0],
                                points[point_index][1],
                                0.0f};
        /* Calculate vector which goes from first edge point to
         * second one.
         */
        float stable_edge_vec[3];
        sub_v3_v3v3(stable_edge_vec, stable_edge_p2, stable_edge_p1);
        /* Calculate vector which connects current frame point to
         * first edge point.
         */
        float point_to_edge_start_vec[3];
        sub_v3_v3v3(point_to_edge_start_vec, point, stable_edge_p1);
        /* Use this two vectors to check whether frame point is inside
         * of the stabilized frame or not.
         * If the point is inside, there is no black area happening
         * and no scaling required for it.
         */
        if (cross_v2v2(stable_edge_vec, point_to_edge_start_vec) >= 0.0f) {
          /* We are scaling around motion-compensated pivot point. */
          float scale_pivot[2];
          add_v2_v2v2(scale_pivot, pivot, translation);
          /* Calculate line which goes via `point` and parallel to
           * the stabilized frame edge. This line is coming via
           * `point` and `point2` at the end.
           */
          float point2[2];
          add_v2_v2v2(point2, point, stable_edge_vec);
          /* Calculate actual distance between pivot point and
           * the stabilized frame edge. Then calculate distance
           * between pivot point and line which goes via actual
           * corner and is parallel to the edge.
           *
           * Dividing one by another will give us required scale
           * factor to get rid of black areas.
           */
          float real_dist = dist_to_line_v2(scale_pivot,
                                            stable_edge_p1,
                                            stable_edge_p2);
          float required_dist = dist_to_line_v2(scale_pivot,
                                                point,
                                                point2);
          const float S = required_dist / real_dist;
          scale = max_ff(scale, S);
        }
      }
    }
  }
  /* Honour the user-configured upper limit, when set. */
  if (stab->maxscale > 0.0f) {
    scale = min_ff(scale, stab->maxscale);
  }
  use_values_from_fcurves(ctx, false);
  return scale;
}
  1263. /* Prepare working data and determine reference point for each track.
  1264. *
  1265. * NOTE: These calculations _could_ be cached and reused for all frames of the
  1266. * same clip. However, since proper initialization depends on (weight)
  1267. * animation and setup of tracks, ensuring consistency of cached init data
  1268. * turns out to be tricky, hard to maintain and generally not worth the
  1269. * effort. Thus we'll re-initialize on every frame.
  1270. */
  1271. static StabContext *init_stabilizer(MovieClip *clip, int size, float aspect)
  1272. {
  1273. StabContext *ctx = initialize_stabilization_working_context(clip);
  1274. BLI_assert(ctx != NULL);
  1275. initialize_all_tracks(ctx, aspect);
  1276. if (ctx->stab->flag & TRACKING_AUTOSCALE) {
  1277. ctx->stab->scale = 1.0;
  1278. ctx->stab->scale = calculate_autoscale_factor(ctx, size, aspect);
  1279. }
  1280. /* By default, just use values for the global current frame. */
  1281. use_values_from_fcurves(ctx, false);
  1282. return ctx;
  1283. }
  1284. /* === public interface functions === */
  1285. /* Get stabilization data (translation, scaling and angle) for a given frame.
  1286. * Returned data describes how to compensate the detected movement, but with any
  1287. * chosen scale factor already applied and any target frame position already
  1288. * compensated. In case stabilization fails or is disabled, neutral values are
  1289. * returned.
  1290. *
  1291. * framenr is a frame number, relative to the clip (not relative to the scene
  1292. * timeline)
  1293. * width is an effective width of the canvas (square pixels), used to scale the
  1294. * determined translation
  1295. *
  1296. * Outputs:
  1297. * - translation of the lateral shift, absolute canvas coordinates
  1298. * (square pixels).
  1299. * - scale of the scaling to apply
  1300. * - angle of the rotation angle, relative to the frame center
  1301. */
  1302. /* TODO(sergey): Use r_ prefix for output parameters here. */
  1303. void BKE_tracking_stabilization_data_get(MovieClip *clip,
  1304. int framenr,
  1305. int width,
  1306. int height,
  1307. float translation[2],
  1308. float *scale,
  1309. float *angle)
  1310. {
  1311. StabContext *ctx = NULL;
  1312. MovieTracking *tracking = &clip->tracking;
  1313. bool enabled = (tracking->stabilization.flag & TRACKING_2D_STABILIZATION);
  1314. /* Might become a parameter of a stabilization compositor node. */
  1315. bool do_compensate = true;
  1316. float scale_step = 0.0f;
  1317. float pixel_aspect = tracking->camera.pixel_aspect;
  1318. float aspect = (float)width * pixel_aspect / height;
  1319. int size = height;
  1320. float pivot[2];
  1321. if (enabled) {
  1322. ctx = init_stabilizer(clip, size, aspect);
  1323. }
  1324. if (enabled &&
  1325. stabilization_determine_offset_for_frame(ctx,
  1326. framenr,
  1327. aspect,
  1328. translation,
  1329. pivot,
  1330. angle,
  1331. &scale_step))
  1332. {
  1333. stabilization_calculate_data(ctx,
  1334. framenr,
  1335. size,
  1336. aspect,
  1337. do_compensate,
  1338. scale_step,
  1339. translation,
  1340. pivot,
  1341. scale,
  1342. angle);
  1343. compensate_rotation_center(size,
  1344. aspect,
  1345. *angle,
  1346. *scale,
  1347. pivot,
  1348. translation);
  1349. }
  1350. else {
  1351. zero_v2(translation);
  1352. *scale = 1.0f;
  1353. *angle = 0.0f;
  1354. }
  1355. discard_stabilization_working_context(ctx);
  1356. }
/* Stabilize given image buffer using stabilization data for a specified
 * frame number.
 *
 * Returns a newly allocated, stabilized copy of ibuf (caller owns it), or
 * ibuf itself untouched when stabilization is disabled. The optional
 * translation/scale/angle pointers receive the stabilization data actually
 * applied.
 *
 * NOTE: frame number should be in clip space, not scene space.
 */
/* TODO(sergey): Use r_ prefix for output parameters here. */
ImBuf *BKE_tracking_stabilize_frame(MovieClip *clip,
                                    int framenr,
                                    ImBuf *ibuf,
                                    float translation[2],
                                    float *scale,
                                    float *angle)
{
  float tloc[2], tscale, tangle;
  MovieTracking *tracking = &clip->tracking;
  MovieTrackingStabilization *stab = &tracking->stabilization;
  ImBuf *tmpibuf;
  int width = ibuf->x, height = ibuf->y;
  float pixel_aspect = tracking->camera.pixel_aspect;
  float mat[4][4];
  int j, filter = tracking->stabilization.filter;
  /* Per-pixel resampling callback, chosen from the filter setting below. */
  void (*interpolation)(struct ImBuf *, struct ImBuf *, float, float, int, int) = NULL;
  int ibuf_flags;
  if (translation)
    copy_v2_v2(tloc, translation);
  if (scale)
    tscale = *scale;
  /* Perform early output if no stabilization is used. */
  if ((stab->flag & TRACKING_2D_STABILIZATION) == 0) {
    /* Report neutral stabilization data. */
    if (translation)
      zero_v2(translation);
    if (scale)
      *scale = 1.0f;
    if (angle)
      *angle = 0.0f;
    return ibuf;
  }
  /* Allocate frame for stabilization result, matching the input's
   * byte/float buffer layout. */
  ibuf_flags = 0;
  if (ibuf->rect)
    ibuf_flags |= IB_rect;
  if (ibuf->rect_float)
    ibuf_flags |= IB_rectfloat;
  tmpibuf = IMB_allocImBuf(ibuf->x, ibuf->y, ibuf->planes, ibuf_flags);
  /* Calculate stabilization matrix. */
  BKE_tracking_stabilization_data_get(clip, framenr, width, height, tloc, &tscale, &tangle);
  BKE_tracking_stabilization_data_to_mat4(ibuf->x, ibuf->y, pixel_aspect, tloc, tscale, tangle, mat);
  /* The following code visits each nominal target grid position
   * and picks interpolated data "backwards" from source.
   * thus we need the inverse of the transformation to apply. */
  invert_m4(mat);
  if (filter == TRACKING_FILTER_NEAREST)
    interpolation = nearest_interpolation;
  else if (filter == TRACKING_FILTER_BILINEAR)
    interpolation = bilinear_interpolation;
  else if (filter == TRACKING_FILTER_BICUBIC)
    interpolation = bicubic_interpolation;
  else
    /* fallback to default interpolation method */
    interpolation = nearest_interpolation;
  /* This function is only used for display in clip editor and
   * sequencer only, which would only benefit of using threads
   * here.
   *
   * But need to keep an eye on this if the function will be
   * used in other cases.
   */
#pragma omp parallel for if (tmpibuf->y > 128)
  for (j = 0; j < tmpibuf->y; j++) {
    int i;
    for (i = 0; i < tmpibuf->x; i++) {
      /* Map the target pixel back into the source image and resample. */
      float vec[3] = {i, j, 0.0f};
      mul_v3_m4v3(vec, mat, vec);
      interpolation(ibuf, tmpibuf, vec[0], vec[1], i, j);
    }
  }
  if (tmpibuf->rect_float)
    tmpibuf->userflags |= IB_RECT_INVALID;
  /* Report the stabilization data actually applied. */
  if (translation)
    copy_v2_v2(translation, tloc);
  if (scale)
    *scale = tscale;
  if (angle)
    *angle = tangle;
  return tmpibuf;
}
  1443. /* Build a 4x4 transformation matrix based on the given 2D stabilization data.
  1444. * mat is a 4x4 matrix in homogeneous coordinates, adapted to the
  1445. * final image buffer size and compensated for pixel aspect ratio,
  1446. * ready for direct OpenGL drawing.
  1447. *
  1448. * TODO(sergey): The signature of this function should be changed. we actually
  1449. * don't need the dimensions of the image buffer. Instead we
  1450. * should consider to provide the pivot point of the rotation as a
  1451. * further stabilization data parameter.
  1452. */
  1453. void BKE_tracking_stabilization_data_to_mat4(int buffer_width,
  1454. int buffer_height,
  1455. float pixel_aspect,
  1456. float translation[2],
  1457. float scale,
  1458. float angle,
  1459. float r_mat[4][4])
  1460. {
  1461. /* Since we cannot receive the real pivot point coordinates (API limitation),
  1462. * we perform the rotation/scale around the center of frame.
  1463. * Then we correct by an additional shift, which was calculated in
  1464. * compensate_rotation_center() and "sneaked in" as additional offset
  1465. * in the translation parameter. This works, since translation needs to be
  1466. * applied after rotation/scale anyway. Thus effectively the image gets
  1467. * rotated around the desired pivot point
  1468. */
  1469. /* TODO(sergey) pivot shouldn't be calculated here, rather received
  1470. * as a parameter.
  1471. */
  1472. float pivot[2];
  1473. pivot[0] = 0.5f * pixel_aspect * buffer_width;
  1474. pivot[1] = 0.5f * buffer_height;
  1475. /* Compose transformation matrix. */
  1476. stabilization_data_to_mat4(pixel_aspect,
  1477. pivot,
  1478. translation,
  1479. scale,
  1480. angle,
  1481. r_mat);
  1482. }