OpenShot Library | libopenshot  0.5.0
Timeline.cpp
Go to the documentation of this file.
1 
9 // Copyright (c) 2008-2019 OpenShot Studios, LLC
10 //
11 // SPDX-License-Identifier: LGPL-3.0-or-later
12 
13 #include "Timeline.h"
14 
15 #include "CacheBase.h"
16 #include "CacheDisk.h"
17 #include "CacheMemory.h"
18 #include "CrashHandler.h"
19 #include "FrameMapper.h"
20 #include "Exceptions.h"
21 
22 #include <algorithm>
23 #include <QDir>
24 #include <QFileInfo>
25 #include <unordered_map>
26 #include <cmath>
27 #include <cstdint>
28 
29 using namespace openshot;
30 
31 // Default Constructor for the timeline (which sets the canvas width and height)
// Default Constructor for the timeline (which sets the canvas width and height)
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
	is_open(false), auto_map_clips(true), managed_cache(true), path(""), max_time(0.0)
{
	// Create CrashHandler and Attach (incase of errors)
	// NOTE(review): the attach call itself is not visible in this listing -- confirm against upstream source

	// Init viewport size (curve based, because it can be animated)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);

	// Init background color (all channels 0.0 == black)
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Init FileInfo struct (clear all values)
	info.width = width;
	info.height = height;
	info.fps = fps;
	info.sample_rate = sample_rate;
	info.channels = channels;
	info.channel_layout = channel_layout;
	info.duration = 60 * 30; // 30 minute default duration
	info.has_audio = true;
	info.has_video = true;
	info.display_ratio = openshot::Fraction(width, height);
	// Virtual codec names -- a timeline has no real encoder/decoder behind it
	info.acodec = "openshot::timeline";
	info.vcodec = "openshot::timeline";

	// Init max image size
	// NOTE(review): the SetMaxSize call is not visible in this listing -- confirm against upstream source

	// Init cache (in-memory), sized from this timeline's video/audio properties
	final_cache = new CacheMemory();
	const int cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS * 4);
	final_cache->SetMaxBytesFromInfo(cache_frames, info.width, info.height, info.sample_rate, info.channels);
}
76 
77 // Delegating constructor that copies parameters from a provided ReaderInfo
79  info.width, info.height, info.fps, info.sample_rate,
80  info.channels, info.channel_layout) {}
81 
82 // Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
	is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath), max_time(0.0) {

	// Create CrashHandler and Attach (incase of errors)
	// NOTE(review): the attach call itself is not visible in this listing -- confirm against upstream source

	// Init final cache as NULL (will be created after loading json)
	final_cache = NULL;

	// Init viewport size (curve based, because it can be animated)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);

	// Init background color (all channels 0.0 == black)
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Check if path exists
	QFileInfo filePath(QString::fromStdString(path));
	if (!filePath.exists()) {
		throw InvalidFile("File could not be opened.", path);
	}

	// Check OpenShot Install Path exists
	// NOTE(review): the declaration of 's' (presumably Settings *s = Settings::Instance()) is missing from this listing
	QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
	if (!openshotPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
	}
	QDir openshotTransPath(openshotPath.filePath("transitions"));
	if (!openshotTransPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
	}

	// Determine asset path (project base name truncated to 30 chars + "_assets", as sibling of the project file)
	QString asset_name = filePath.baseName().left(30) + "_assets";
	QDir asset_folder(filePath.dir().filePath(asset_name));
	if (!asset_folder.exists()) {
		// Create directory if needed
		asset_folder.mkpath(".");
	}

	// Load UTF-8 project file into QString
	QFile projectFile(QString::fromStdString(path));
	projectFile.open(QFile::ReadOnly);
	QString projectContents = QString::fromUtf8(projectFile.readAll());

	// Convert all relative paths into absolute paths (if requested)
	if (convert_absolute_paths) {

		// Find all "image" or "path" references in JSON (using regex). Must loop through match results
		// due to our path matching needs, which are not possible with the QString::replace() function.
		QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
		std::vector<QRegularExpressionMatch> matchedPositions;
		QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
		while (i.hasNext()) {
			QRegularExpressionMatch match = i.next();
			if (match.hasMatch()) {
				// Push all match objects into a vector (so we can reverse them later)
				matchedPositions.push_back(match);
			}
		}

		// Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
		std::vector<QRegularExpressionMatch>::reverse_iterator itr;
		for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
			QRegularExpressionMatch match = *itr;
			QString relativeKey = match.captured(1); // image or path
			QString relativePath = match.captured(2); // relative file path
			QString absolutePath = "";

			// Find absolute path of all path, image (including special replacements of @assets and @transitions)
			if (relativePath.startsWith("@assets")) {
				absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
			} else if (relativePath.startsWith("@transitions")) {
				absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
			} else {
				absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
			}

			// Replace path in JSON content, if an absolute path was successfully found
			// (canonicalFilePath() returns an empty string when the file does not exist)
			if (!absolutePath.isEmpty()) {
				projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
			}
		}
		// Clear matches
		matchedPositions.clear();
	}

	// Set JSON of project
	SetJson(projectContents.toStdString());

	// Calculate valid duration and set has_audio and has_video
	// based on content inside this Timeline's clips.
	float calculated_duration = 0.0;
	for (auto clip : clips)
	{
		float clip_last_frame = clip->Position() + clip->Duration();
		if (clip_last_frame > calculated_duration)
			calculated_duration = clip_last_frame;
		if (clip->Reader() && clip->Reader()->info.has_audio)
			info.has_audio = true;
		if (clip->Reader() && clip->Reader()->info.has_video)
			info.has_video = true;

	}
	info.video_length = calculated_duration * info.fps.ToFloat();
	info.duration = calculated_duration;

	// Init FileInfo settings (virtual codec names; a line is missing here in this listing)
	info.acodec = "openshot::timeline";
	info.vcodec = "openshot::timeline";
	info.has_video = true;
	info.has_audio = true;

	// Init max image size
	// NOTE(review): the SetMaxSize call is not visible in this listing -- confirm against upstream source

	// Init cache (in-memory), sized from this timeline's video/audio properties
	final_cache = new CacheMemory();
	const int cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS * 4);
	final_cache->SetMaxBytesFromInfo(cache_frames, info.width, info.height, info.sample_rate, info.channels);
}
209 
// NOTE(review): the destructor signature line (presumably Timeline::~Timeline()) is missing from this listing
	if (is_open) {
		// Auto Close if not already
		Close();
	}

	// Remove all clips, effects, and frame mappers
	Clear();

	// Destroy previous cache (if managed by timeline)
	if (managed_cache && final_cache) {
		delete final_cache;
		final_cache = NULL;
	}
}
225 
226 // Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
227 void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){
228 
229  // Search for the tracked object on the map
230  auto iterator = tracked_objects.find(trackedObject->Id());
231 
232  if (iterator != tracked_objects.end()){
233  // Tracked object's id already present on the map, overwrite it
234  iterator->second = trackedObject;
235  }
236  else{
237  // Tracked object's id not present -> insert it on the map
238  tracked_objects[trackedObject->Id()] = trackedObject;
239  }
240 
241  return;
242 }
243 
244 // Return tracked object pointer by it's id
245 std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{
246 
247  // Search for the tracked object on the map
248  auto iterator = tracked_objects.find(id);
249 
250  if (iterator != tracked_objects.end()){
251  // Id found, return the pointer to the tracked object
252  std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
253  return trackedObject;
254  }
255  else {
256  // Id not found, return a null pointer
257  return nullptr;
258  }
259 }
260 
261 // Return the ID's of the tracked objects as a list of strings
262 std::list<std::string> Timeline::GetTrackedObjectsIds() const{
263 
264  // Create a list of strings
265  std::list<std::string> trackedObjects_ids;
266 
267  // Iterate through the tracked_objects map
268  for (auto const& it: tracked_objects){
269  // Add the IDs to the list
270  trackedObjects_ids.push_back(it.first);
271  }
272 
273  return trackedObjects_ids;
274 }
275 
#ifdef USE_OPENCV
// Return the trackedObject's properties (bounding box corners + rotation) as a JSON string
std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {

	// Initialize the JSON object
	Json::Value trackedObjectJson;

	// Helper: serialize a bounding box (center + size + angle) into the JSON
	// object as corner coordinates (x1,y1)-(x2,y2) plus rotation.
	// Extracted to remove the duplicated assignment blocks.
	const auto writeBox = [&trackedObjectJson](const BBox& box) {
		trackedObjectJson["x1"] = box.cx - (box.width / 2);
		trackedObjectJson["y1"] = box.cy - (box.height / 2);
		trackedObjectJson["x2"] = box.cx + (box.width / 2);
		trackedObjectJson["y2"] = box.cy + (box.height / 2);
		trackedObjectJson["rotation"] = box.angle;
	};

	// Helper: fill the JSON object with all-zero values (unknown id / no data)
	const auto writeZeros = [&trackedObjectJson]() {
		trackedObjectJson["x1"] = 0;
		trackedObjectJson["y1"] = 0;
		trackedObjectJson["x2"] = 0;
		trackedObjectJson["y2"] = 0;
		trackedObjectJson["rotation"] = 0;
	};

	// Search for the tracked object on the map
	auto iterator = tracked_objects.find(id);

	if (iterator != tracked_objects.end())
	{
		// Id found, Get the object pointer and cast it as a TrackedObjectBBox
		std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);

		if (trackedObject->ExactlyContains(frame_number)){
			// Use the box tracked at the exact requested frame
			writeBox(trackedObject->GetBox(frame_number));
		} else if (!trackedObject->BoxVec.empty()) {
			// Requested frame not tracked -- fall back to the first tracked box.
			// (The previous code dereferenced BoxVec.begin() unconditionally,
			// which is undefined behavior when no boxes were tracked yet.)
			writeBox(trackedObject->BoxVec.begin()->second);
		} else {
			// Object exists but has no tracked boxes at all
			writeZeros();
		}
	}
	else {
		// Id not found, return all 0 values
		writeZeros();
	}

	return trackedObjectJson.toStyledString();
}
#endif
334 
// Add an openshot::Clip to the timeline
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::AddClip(Clip* clip))
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Assign timeline to clip, so the clip can reach its parent's settings
	clip->ParentTimeline(this);

	// Clear cache of clip and nested reader (if any), so stale frames are not served
	if (clip->Reader() && clip->Reader()->GetCache())
		clip->Reader()->GetCache()->Clear();

	// All clips should be converted to the frame rate of this timeline
	if (auto_map_clips) {
		// Apply framemapper (or update existing framemapper)
		apply_mapper_to_clip(clip);
	}

	// Add clip to list
	clips.push_back(clip);

	// Sort clips (also refreshes the cached min/max timeline extents)
	sort_clips();
}
360 
// Add an effect to the timeline
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::AddEffect(EffectBase* effect))
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Assign timeline to effect, so the effect can reach its parent's settings
	effect->ParentTimeline(this);

	// Add effect to list
	effects.push_back(effect);

	// Sort effects (also refreshes the cached min/max timeline extents)
	sort_effects();
}
376 
// Remove an effect from the timeline
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::RemoveEffect(EffectBase* effect))
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Drop the effect from the timeline's list (no-op if not present)
	effects.remove(effect);

	// Delete effect object (if timeline allocated it)
	if (allocated_effects.count(effect)) {
		allocated_effects.erase(effect); // erase before nulling the pointer
		delete effect;
		effect = NULL;
	}

	// Sort effects (also refreshes the cached min/max timeline extents)
	sort_effects();
}
395 
// Remove an openshot::Clip to the timeline
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::RemoveClip(Clip* clip))
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Drop the clip from the timeline's list (no-op if not present)
	clips.remove(clip);

	// Delete clip object (if timeline allocated it)
	if (allocated_clips.count(clip)) {
		allocated_clips.erase(clip); // erase before nulling the pointer
		delete clip;
		clip = NULL;
	}

	// Sort clips (also refreshes the cached min/max timeline extents)
	sort_clips();
}
414 
415 // Look up a clip
416 openshot::Clip* Timeline::GetClip(const std::string& id)
417 {
418  // Find the matching clip (if any)
419  for (const auto& clip : clips) {
420  if (clip->Id() == id) {
421  return clip;
422  }
423  }
424  return nullptr;
425 }
426 
// Look up a timeline effect
// NOTE(review): the signature line is missing from this listing (presumably openshot::EffectBase* Timeline::GetEffect(const std::string& id))
{
	// Find the matching effect (if any); nullptr when absent
	for (const auto& effect : effects) {
		if (effect->Id() == id) {
			return effect;
		}
	}
	return nullptr;
}
438 
// NOTE(review): the signature line is missing from this listing (presumably openshot::EffectBase* Timeline::GetClipEffect(const std::string& id))
{
	// Search all clips for matching effect ID; nullptr when no clip owns it
	for (const auto& clip : clips) {
		const auto e = clip->GetEffect(id);
		if (e != nullptr) {
			return e;
		}
	}
	return nullptr;
}
450 
451 // Return the list of effects on all clips
452 std::list<openshot::EffectBase*> Timeline::ClipEffects() const {
453 
454  // Initialize the list
455  std::list<EffectBase*> timelineEffectsList;
456 
457  // Loop through all clips
458  for (const auto& clip : clips) {
459 
460  // Get the clip's list of effects
461  std::list<EffectBase*> clipEffectsList = clip->Effects();
462 
463  // Append the clip's effects to the list
464  timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
465  }
466 
467  return timelineEffectsList;
468 }
469 
// Compute the end time of the latest timeline element
// NOTE(review): the signature line is missing from this listing (presumably double Timeline::GetMaxTime())
	// Return cached max_time variable (threadsafe)
	return max_time;
}
475 
// Compute the highest frame# based on the latest time and FPS
// NOTE(review): the signature line is missing from this listing (presumably int64_t Timeline::GetMaxFrame())
	const double fps = info.fps.ToDouble();
	const double t = GetMaxTime();
	// Inclusive start, exclusive end -> ceil at the end boundary
	return static_cast<int64_t>(std::ceil(t * fps));
}
483 
// Compute the first frame# based on the first clip position
// NOTE(review): the signature line is missing from this listing (presumably int64_t Timeline::GetMinFrame())
	const double fps = info.fps.ToDouble();
	const double t = GetMinTime();
	// Inclusive start -> floor at the start boundary, then 1-index
	return static_cast<int64_t>(std::floor(t * fps)) + 1;
}
491 
// Compute the start time of the first timeline clip
// NOTE(review): the signature line is missing from this listing (presumably double Timeline::GetMinTime())
	// Return cached min_time variable (threadsafe)
	return min_time;
}
497 
// Apply a FrameMapper to a clip which matches the settings of this timeline
void Timeline::apply_mapper_to_clip(Clip* clip)
{
	// Determine type of reader
	ReaderBase* clip_reader = NULL;
	if (clip->Reader()->Name() == "FrameMapper")
	{
		// Get the existing reader (clip is already wrapped in a mapper)
		clip_reader = (ReaderBase*) clip->Reader();

		// Update the mapping
		FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
		// NOTE(review): the ChangeMapping(...) call is missing from this listing -- confirm against upstream source

	} else {

		// Create a new FrameMapper to wrap the current reader
		// NOTE(review): the FrameMapper allocation line declaring 'mapper' is missing from this listing
		allocated_frame_mappers.insert(mapper);
		clip_reader = (ReaderBase*) mapper;
	}

	// Update clip reader (so the clip now reads through the mapper)
	clip->Reader(clip_reader);
}
523 
// Apply the timeline's framerate and samplerate to all clips
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::ApplyMapperToClips())
{
	// Clear all cached frames (mapping changes invalidate previously rendered frames)
	ClearAllCache();

	// Loop through all clips
	for (auto clip : clips)
	{
		// Apply framemapper (or update existing framemapper)
		apply_mapper_to_clip(clip);
	}
}
537 
538 // Calculate time of a frame number, based on a framerate
539 double Timeline::calculate_time(int64_t number, Fraction rate)
540 {
541  // Get float version of fps fraction
542  double raw_fps = rate.ToFloat();
543 
544  // Return the time (in seconds) of this frame
545  return double(number - 1) / raw_fps;
546 }
547 
// Apply effects to the source frame (if any)
std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer, TimelineInfoStruct* options)
{
	// Debug output
	// NOTE(review): the logger call opening this argument list (e.g. ZmqLogger::Instance()->AppendDebugMethod() is missing from this listing
		"Timeline::apply_effects",
		"frame->number", frame->number,
		"timeline_frame_number", timeline_frame_number,
		"layer", layer);

	// Find Effects at this position and layer
	for (auto effect : effects)
	{
		// Does clip intersect the current requested time
		const double fpsD = info.fps.ToDouble();
		int64_t effect_start_position = static_cast<int64_t>(std::llround(effect->Position() * fpsD)) + 1;
		int64_t effect_end_position = static_cast<int64_t>(std::llround((effect->Position() + effect->Duration()) * fpsD));

		bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);

		// Clip is visible
		if (does_effect_intersect)
		{
			// Determine the frame needed for this clip (based on the position on the timeline)
			int64_t effect_start_frame = static_cast<int64_t>(std::llround(effect->Start() * fpsD)) + 1;
			int64_t effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;

			if (!options->is_top_clip)
				continue; // skip effect, if overlapped/covered by another clip on same layer

			if (options->is_before_clip_keyframes != effect->info.apply_before_clip)
				continue; // skip effect, if this filter does not match

			// Debug output
			// NOTE(review): the logger call opening this argument list is missing from this listing
				"Timeline::apply_effects (Process Effect)",
				"effect_frame_number", effect_frame_number,
				"does_effect_intersect", does_effect_intersect);

			// Apply the effect to this frame
			frame = effect->GetFrame(frame, effect_frame_number);
		}

	} // end effect loop

	// Return modified frame
	return frame;
}
596 
// Get or generate a blank frame
std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
{
	std::shared_ptr<Frame> new_frame;

	// Init some basic properties about this frame
	int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);

	try {
		// Debug output
		// NOTE(review): the logger call opening this argument list is missing from this listing
			"Timeline::GetOrCreateFrame (from reader)",
			"number", number,
			"samples_in_frame", samples_in_frame);

		// Attempt to get a frame (but this could fail if a reader has just been closed)
		new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));

		// Return real frame
		return new_frame;

	} catch (const ReaderClosed & e) {
		// Reader was closed mid-request -- fall through to the blank frame below
	} catch (const OutOfBoundsFrame & e) {
		// Requested frame is past the reader's end -- fall through to the blank frame below
	}

	// Debug output
	// NOTE(review): the logger call opening this argument list is missing from this listing
		"Timeline::GetOrCreateFrame (create blank)",
		"number", number,
		"samples_in_frame", samples_in_frame);

	// Create blank frame
	// NOTE(review): the blank-frame construction line appears to be missing from this listing;
	// as written here, new_frame is still empty at this point -- confirm against upstream source
	return new_frame;
}
633 
// Process a new layer of video or audio
void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)
{
	// Create timeline options (with details about this current frame request)
	TimelineInfoStruct options{};
	options.is_top_clip = is_top_clip;
	options.is_before_clip_keyframes = true;

	// Get the clip's frame, composited on top of the current timeline frame
	std::shared_ptr<Frame> source_frame;
	source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, &options);

	// No frame found... so bail
	if (!source_frame)
		return;

	// Debug output
	// NOTE(review): the logger call opening this argument list is missing from this listing
		"Timeline::add_layer",
		"new_frame->number", new_frame->number,
		"clip_frame_number", clip_frame_number);

	/* COPY AUDIO - with correct volume */
	if (source_clip->Reader()->info.has_audio) {
		// Debug output
		// NOTE(review): the logger call opening this argument list is missing from this listing
			"Timeline::add_layer (Copy Audio)",
			"source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
			"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
			"info.channels", info.channels,
			"clip_frame_number", clip_frame_number);

		// Only mix audio when channel counts agree and the clip's audio is enabled on this frame
		if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
		{
			// Ensure timeline frame matches the source samples once per frame
			if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
				new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
			}

			for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
			{
				// Get volume from previous frame and this frame (used to ramp between them)
				float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
				float volume = source_clip->volume.GetValue(clip_frame_number);
				int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
				int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)

				// Apply volume mixing strategy
				if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
					// Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume
					previous_volume = previous_volume / max_volume;
					volume = volume / max_volume;
				}
				else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
					// Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
					previous_volume = previous_volume * 0.77;
					volume = volume * 0.77;
				}

				// If channel filter enabled, check for correct channel (and skip non-matching channels)
				if (channel_filter != -1 && channel_filter != channel)
					continue; // skip to next channel

				// If no volume on this frame or previous frame, do nothing
				if (previous_volume == 0.0 && volume == 0.0)
					continue; // skip to next channel

				// If channel mapping disabled, just use the current channel
				if (channel_mapping == -1)
					channel_mapping = channel;

				// Apply ramp to source frame (if needed)
				if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
					source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

				// Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added together, to
				// be sure to set the gain's correctly, so the sum does not exceed 1.0 (of audio distortion will happen).
				new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
			}
		}
		else
			// Debug output
			// NOTE(review): the logger call opening this argument list is missing from this listing
				"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
				"source_clip->Reader()->info.has_audio",
				source_clip->Reader()->info.has_audio,
				"source_frame->GetAudioChannelsCount()",
				source_frame->GetAudioChannelsCount(),
				"info.channels", info.channels,
				"clip_frame_number", clip_frame_number);
	}

	// Debug output
	// NOTE(review): the logger call opening this argument list is missing from this listing
		"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
		"source_frame->number", source_frame->number,
		"new_frame->GetImage()->width()", new_frame->GetWidth(),
		"new_frame->GetImage()->height()", new_frame->GetHeight());
}
733 
// Update the list of 'opened' clips
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// NOTE(review): the logger call opening this argument list is missing from this listing
		"Timeline::update_open_clips (before)",
		"does_clip_intersect", does_clip_intersect,
		"closing_clips.size()", closing_clips.size(),
		"open_clips.size()", open_clips.size());

	// is clip already in list?
	bool clip_found = open_clips.count(clip);

	if (clip_found && !does_clip_intersect)
	{
		// Remove clip from 'opened' list, because it's closed now
		open_clips.erase(clip);

		// Close clip
		clip->Close();
	}
	else if (!clip_found && does_clip_intersect)
	{
		// Add clip to 'opened' list, because it's missing
		open_clips[clip] = clip;

		try {
			// Open the clip
			clip->Open();

		} catch (const InvalidFile & e) {
			// Deliberately swallowed: a clip pointing at a bad file stays un-renderable but does not abort the frame request
		}
	}

	// Debug output
	// NOTE(review): the logger call opening this argument list is missing from this listing
		"Timeline::update_open_clips (after)",
		"does_clip_intersect", does_clip_intersect,
		"clip_found", clip_found,
		"closing_clips.size()", closing_clips.size(),
		"open_clips.size()", open_clips.size());
}
779 
780 // Calculate the max and min duration (in seconds) of the timeline, based on all the clips, and cache the value
781 void Timeline::calculate_max_duration() {
782  double last_clip = 0.0;
783  double last_effect = 0.0;
784  double first_clip = std::numeric_limits<double>::max();
785  double first_effect = std::numeric_limits<double>::max();
786 
787  // Find the last and first clip
788  if (!clips.empty()) {
789  // Find the clip with the maximum end frame
790  const auto max_clip = std::max_element(
791  clips.begin(), clips.end(), CompareClipEndFrames());
792  last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
793 
794  // Find the clip with the minimum start position (ignoring layer)
795  const auto min_clip = std::min_element(
796  clips.begin(), clips.end(), [](const openshot::Clip* lhs, const openshot::Clip* rhs) {
797  return lhs->Position() < rhs->Position();
798  });
799  first_clip = (*min_clip)->Position();
800  }
801 
802  // Find the last and first effect
803  if (!effects.empty()) {
804  // Find the effect with the maximum end frame
805  const auto max_effect = std::max_element(
806  effects.begin(), effects.end(), CompareEffectEndFrames());
807  last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
808 
809  // Find the effect with the minimum start position
810  const auto min_effect = std::min_element(
811  effects.begin(), effects.end(), [](const openshot::EffectBase* lhs, const openshot::EffectBase* rhs) {
812  return lhs->Position() < rhs->Position();
813  });
814  first_effect = (*min_effect)->Position();
815  }
816 
817  // Calculate the max and min time
818  max_time = std::max(last_clip, last_effect);
819  min_time = std::min(first_clip, first_effect);
820 
821  // If no clips or effects exist, set min_time to 0
822  if (clips.empty() && effects.empty()) {
823  min_time = 0.0;
824  max_time = 0.0;
825  }
826 }
827 
// Sort clips by position on the timeline
void Timeline::sort_clips()
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Debug output
	// NOTE(review): the logger call opening this argument list is missing from this listing
		"Timeline::SortClips",
		"clips.size()", clips.size());

	// sort clips by timeline position
	clips.sort(CompareClips());

	// calculate max timeline duration (refreshes cached min_time/max_time)
	calculate_max_duration();
}
845 
846 // Sort effects by position on the timeline
847 void Timeline::sort_effects()
848 {
849  // Get lock (prevent getting frames while this happens)
850  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
851 
852  // sort clips
853  effects.sort(CompareEffects());
854 
855  // calculate max timeline duration
856  calculate_max_duration();
857 }
858 
// Clear all clips from timeline
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::Clear())
{
	ZmqLogger::Instance()->AppendDebugMethod("Timeline::Clear");

	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Close all open clips
	for (auto clip : clips)
	{
		update_open_clips(clip, false);

		// Delete clip object (if timeline allocated it)
		bool allocated = allocated_clips.count(clip);
		if (allocated) {
			delete clip;
		}
	}
	// Clear all clips
	clips.clear();
	allocated_clips.clear();

	// Close all effects
	for (auto effect : effects)
	{
		// Delete effect object (if timeline allocated it)
		bool allocated = allocated_effects.count(effect);
		if (allocated) {
			delete effect;
		}
	}
	// Clear all effects
	effects.clear();
	allocated_effects.clear();

	// Delete all FrameMappers (detach the wrapped reader first so it is not destroyed with the mapper)
	for (auto mapper : allocated_frame_mappers)
	{
		mapper->Reader(NULL);
		mapper->Close();
		delete mapper;
	}
	allocated_frame_mappers.clear();
}
904 
// Close the reader (and any resources it was consuming)
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::Close())
{
	ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");

	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Close all open clips
	for (auto clip : clips)
	{
		// Open or Close this clip, based on if it's intersecting or not
		update_open_clips(clip, false);
	}

	// Mark timeline as closed
	is_open = false;

	// Clear all cache (deep clear, including nested Readers)
	ClearAllCache(true);
}
926 
// Open the reader (and start consuming resources)
// NOTE(review): the signature line is missing from this listing (presumably void Timeline::Open())
{
	is_open = true;
}
932 
933 // Compare 2 floating point numbers for equality
934 bool Timeline::isEqual(double a, double b)
935 {
936  return fabs(a - b) < 0.000001;
937 }
938 
939 // Get an openshot::Frame object for a specific frame number of this reader.
// NOTE(review): this listing was extracted from generated docs; the opening
// lines of several debug-logging calls (inner lines 951, 968, 981, 996, 1009,
// 1063, 1083, 1095, 1104 — presumably ZmqLogger::Instance()->AppendDebugMethod()
// are missing, leaving bare argument lists below each "// Debug output"
// comment. Restore those lines from upstream before compiling; the remaining
// code is kept byte-identical here.
//
// Flow: check cache lock-free; on miss, take the frame mutex, re-check the
// cache (double-checked pattern), then composite all intersecting clips onto
// a blank frame, cache it, and return it.
940 std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
941 {
942  // Adjust out of bounds frame number
943  if (requested_frame < 1)
944  requested_frame = 1;
945 
946  // Check cache
947  std::shared_ptr<Frame> frame;
948  frame = final_cache->GetFrame(requested_frame);
949  if (frame) {
950  // Debug output
952  "Timeline::GetFrame (Cached frame found)",
953  "requested_frame", requested_frame);
954 
955  // Return cached frame
956  return frame;
957  }
958  else
959  {
960  // Prevent async calls to the following code
961  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
962 
963  // Check cache 2nd time
 // (another thread may have rendered this frame while we waited for the mutex)
964  std::shared_ptr<Frame> frame;
965  frame = final_cache->GetFrame(requested_frame);
966  if (frame) {
967  // Debug output
969  "Timeline::GetFrame (Cached frame found on 2nd check)",
970  "requested_frame", requested_frame);
971 
972  // Return cached frame
973  return frame;
974  } else {
975  // Get a list of clips that intersect with the requested section of timeline
976  // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
977  std::vector<Clip *> nearby_clips;
978  nearby_clips = find_intersecting_clips(requested_frame, 1, true);
979 
980  // Debug output
982  "Timeline::GetFrame (processing frame)",
983  "requested_frame", requested_frame,
984  "omp_get_thread_num()", omp_get_thread_num());
985 
986  // Init some basic properties about this frame
987  int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);
988 
989  // Create blank frame (which will become the requested frame)
990  std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
991  new_frame->AddAudioSilence(samples_in_frame);
992  new_frame->SampleRate(info.sample_rate);
993  new_frame->ChannelsLayout(info.channel_layout);
994 
995  // Debug output
997  "Timeline::GetFrame (Adding solid color)",
998  "requested_frame", requested_frame,
999  "info.width", info.width,
1000  "info.height", info.height);
1001 
1002  // Add Background Color to 1st layer (if animated or not black)
 // (skipping the fill for a static black background avoids a redundant paint)
1003  if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
1004  (color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 ||
1005  color.blue.GetValue(requested_frame) != 0.0))
1006  new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));
1007 
1008  // Debug output
1010  "Timeline::GetFrame (Loop through clips)",
1011  "requested_frame", requested_frame,
1012  "clips.size()", clips.size(),
1013  "nearby_clips.size()", nearby_clips.size());
1014 
1015  // Precompute per-clip timing for this requested frame
 // (positions/durations are in seconds; converted once to frame numbers here)
1016  struct ClipInfo {
1017  Clip* clip;
1018  int64_t start_pos;
1019  int64_t end_pos;
1020  int64_t start_frame;
1021  int64_t frame_number;
1022  bool intersects;
1023  };
1024  std::vector<ClipInfo> clip_infos;
1025  clip_infos.reserve(nearby_clips.size());
1026  const double fpsD = info.fps.ToDouble();
1027 
1028  for (auto clip : nearby_clips) {
1029  int64_t start_pos = static_cast<int64_t>(std::llround(clip->Position() * fpsD)) + 1;
1030  int64_t end_pos = static_cast<int64_t>(std::llround((clip->Position() + clip->Duration()) * fpsD));
1031  bool intersects = (start_pos <= requested_frame && end_pos >= requested_frame);
1032  int64_t start_frame = static_cast<int64_t>(std::llround(clip->Start() * fpsD)) + 1;
1033  int64_t frame_number = requested_frame - start_pos + start_frame;
1034  clip_infos.push_back({clip, start_pos, end_pos, start_frame, frame_number, intersects});
1035  }
1036 
1037  // Determine top clip per layer (linear, no nested loop)
1038  std::unordered_map<int, int64_t> top_start_for_layer;
1039  std::unordered_map<int, Clip*> top_clip_for_layer;
1040  for (const auto& ci : clip_infos) {
1041  if (!ci.intersects) continue;
1042  const int layer = ci.clip->Layer();
1043  auto it = top_start_for_layer.find(layer);
1044  if (it == top_start_for_layer.end() || ci.start_pos > it->second) {
1045  top_start_for_layer[layer] = ci.start_pos; // strictly greater to match prior logic
1046  top_clip_for_layer[layer] = ci.clip;
1047  }
1048  }
1049 
1050  // Compute max_volume across all overlapping clips once
 // (sum of volumes of audible clips at this frame; passed to add_layer —
 //  presumably for volume normalization during mixing, confirm in add_layer)
1051  float max_volume_sum = 0.0f;
1052  for (const auto& ci : clip_infos) {
1053  if (!ci.intersects) continue;
1054  if (ci.clip->Reader() && ci.clip->Reader()->info.has_audio &&
1055  ci.clip->has_audio.GetInt(ci.frame_number) != 0) {
1056  max_volume_sum += static_cast<float>(ci.clip->volume.GetValue(ci.frame_number));
1057  }
1058  }
1059 
1060  // Compose intersecting clips in a single pass
1061  for (const auto& ci : clip_infos) {
1062  // Debug output
1064  "Timeline::GetFrame (Does clip intersect)",
1065  "requested_frame", requested_frame,
1066  "clip->Position()", ci.clip->Position(),
1067  "clip->Duration()", ci.clip->Duration(),
1068  "does_clip_intersect", ci.intersects);
1069 
1070  // Clip is visible
1071  if (ci.intersects) {
1072  // Is this the top clip on its layer?
1073  bool is_top_clip = false;
1074  const int layer = ci.clip->Layer();
1075  auto top_it = top_clip_for_layer.find(layer);
1076  if (top_it != top_clip_for_layer.end())
1077  is_top_clip = (top_it->second == ci.clip);
1078 
1079  // Determine the frame needed for this clip (based on the position on the timeline)
1080  int64_t clip_frame_number = ci.frame_number;
1081 
1082  // Debug output
1084  "Timeline::GetFrame (Calculate clip's frame #)",
1085  "clip->Position()", ci.clip->Position(),
1086  "clip->Start()", ci.clip->Start(),
1087  "info.fps.ToFloat()", info.fps.ToFloat(),
1088  "clip_frame_number", clip_frame_number);
1089 
1090  // Add clip's frame as layer
1091  add_layer(new_frame, ci.clip, clip_frame_number, is_top_clip, max_volume_sum);
1092 
1093  } else {
1094  // Debug output
1096  "Timeline::GetFrame (clip does not intersect)",
1097  "requested_frame", requested_frame,
1098  "does_clip_intersect", ci.intersects);
1099  }
1100 
1101  } // end clip loop
1102 
1103  // Debug output
1105  "Timeline::GetFrame (Add frame to cache)",
1106  "requested_frame", requested_frame,
1107  "info.width", info.width,
1108  "info.height", info.height);
1109 
1110  // Set frame # on mapped frame
1111  new_frame->SetFrameNumber(requested_frame);
1112 
1113  // Add final frame to cache
1114  final_cache->Add(new_frame);
1115 
1116  // Return frame (or blank frame)
1117  return new_frame;
1118  }
1119  }
1120 }
1121 
1122 
1123 // Find intersecting clips (or non intersecting clips)
// Returns clips whose [position, position+duration) range (converted to frame
// numbers) overlaps [requested_frame, requested_frame + number_of_frames - 1].
// include=true returns intersecting clips; include=false returns the rest.
// Side effect: update_open_clips() is called for EVERY clip, opening
// intersecting clips and scheduling non-intersecting ones for closing.
// NOTE(review): inner line 1147 — presumably the
// ZmqLogger::Instance()->AppendDebugMethod( opener for the argument list
// below "// Debug output" — was dropped by the docs extraction; restore it
// from upstream before compiling.
1124 std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
1125 {
1126  // Find matching clips
1127  std::vector<Clip*> matching_clips;
1128 
1129  // Calculate time of frame
1130  const int64_t min_requested_frame = requested_frame;
1131  const int64_t max_requested_frame = requested_frame + (number_of_frames - 1);
1132 
1133  // Find Clips at this time
1134  matching_clips.reserve(clips.size());
1135  const double fpsD = info.fps.ToDouble();
1136  for (auto clip : clips)
1137  {
1138  // Does clip intersect the current requested time
1139  int64_t clip_start_position = static_cast<int64_t>(std::llround(clip->Position() * fpsD)) + 1;
1140  int64_t clip_end_position = static_cast<int64_t>(std::llround((clip->Position() + clip->Duration()) * fpsD)) + 1;
1141 
1142  bool does_clip_intersect =
1143  (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
1144  (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
1145 
1146  // Debug output
1148  "Timeline::find_intersecting_clips (Is clip near or intersecting)",
1149  "requested_frame", requested_frame,
1150  "min_requested_frame", min_requested_frame,
1151  "max_requested_frame", max_requested_frame,
1152  "clip->Position()", clip->Position(),
1153  "does_clip_intersect", does_clip_intersect);
1154 
1155  // Open (or schedule for closing) this clip, based on if it's intersecting or not
1156  update_open_clips(clip, does_clip_intersect);
1157 
1158  // Clip is visible
1159  if (does_clip_intersect && include)
1160  // Add the intersecting clip
1161  matching_clips.push_back(clip);
1162 
1163  else if (!does_clip_intersect && !include)
1164  // Add the non-intersecting clip
1165  matching_clips.push_back(clip);
1166 
1167  } // end clip loop
1168 
1169  // return list
1170  return matching_clips;
1171 }
1172 
1173 // Set the cache object used by this reader
1174 void Timeline::SetCache(CacheBase* new_cache) {
1175  // Get lock (prevent getting frames while this happens)
1176  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1177 
1178  // Destroy previous cache (if managed by timeline)
1179  if (managed_cache && final_cache) {
1180  delete final_cache;
1181  final_cache = NULL;
1182  managed_cache = false;
1183  }
1184 
1185  // Set new cache
1186  final_cache = new_cache;
1187 }
1188 
1189 // Generate JSON string of this object
1190 std::string Timeline::Json() const {
1191 
1192  // Return formatted string
1193  return JsonValue().toStyledString();
1194 }
1195 
1196 // Generate Json::Value for this object
1197 Json::Value Timeline::JsonValue() const {
1198 
1199  // Create root json object
1200  Json::Value root = ReaderBase::JsonValue(); // get parent properties
1201  root["type"] = "Timeline";
1202  root["viewport_scale"] = viewport_scale.JsonValue();
1203  root["viewport_x"] = viewport_x.JsonValue();
1204  root["viewport_y"] = viewport_y.JsonValue();
1205  root["color"] = color.JsonValue();
1206  root["path"] = path;
1207 
1208  // Add array of clips
1209  root["clips"] = Json::Value(Json::arrayValue);
1210 
1211  // Find Clips at this time
1212  for (const auto existing_clip : clips)
1213  {
1214  root["clips"].append(existing_clip->JsonValue());
1215  }
1216 
1217  // Add array of effects
1218  root["effects"] = Json::Value(Json::arrayValue);
1219 
1220  // loop through effects
1221  for (const auto existing_effect: effects)
1222  {
1223  root["effects"].append(existing_effect->JsonValue());
1224  }
1225 
1226  // return JsonValue
1227  return root;
1228 }
1229 
1230 // Load JSON string into this object
1231 void Timeline::SetJson(const std::string value) {
1232 
1233  // Get lock (prevent getting frames while this happens)
1234  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1235 
1236  // Parse JSON string into JSON objects
1237  try
1238  {
1239  const Json::Value root = openshot::stringToJson(value);
1240  // Set all values that match
1241  SetJsonValue(root);
1242  }
1243  catch (const std::exception& e)
1244  {
1245  // Error parsing JSON (or missing keys)
1246  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1247  }
1248 }
1249 
1250 // Load Json::Value into this object
// Rebuilds the timeline from JSON: closes it, replaces clips/effects from the
// "clips"/"effects" arrays, applies scalar properties, then re-opens it if it
// was open before. Caller-visible state (clips, effects, path, duration) is
// fully replaced when the corresponding keys are present.
// NOTE(review): this listing was extracted from generated docs and several
// hyperlinked lines were dropped — see the inline notes at inner lines 1261,
// 1333 and 1337-1338. Restore them from upstream before compiling.
1251 void Timeline::SetJsonValue(const Json::Value root) {
1252 
1253  // Get lock (prevent getting frames while this happens)
1254  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1255 
1256  // Close timeline before we do anything (this closes all clips)
1257  bool was_open = is_open;
1258  Close();
1259 
1260  // Set parent data
 // NOTE(review): inner line 1261 is missing — presumably
 // ReaderBase::SetJsonValue(root); (deserialize base-class properties).
1262 
1263  // Set data from Json (if key is found)
1264  if (!root["path"].isNull())
1265  path = root["path"].asString();
1266 
1267  if (!root["clips"].isNull()) {
1268  // Clear existing clips
1269  clips.clear();
1270 
1271  // loop through clips
1272  for (const Json::Value existing_clip : root["clips"]) {
1273  // Skip NULL nodes
1274  if (existing_clip.isNull()) {
1275  continue;
1276  }
1277 
1278  // Create Clip
1279  Clip *c = new Clip();
1280 
1281  // Keep track of allocated clip objects
1282  allocated_clips.insert(c);
1283 
1284  // When a clip is attached to an object, it searches for the object
1285  // on it's parent timeline. Setting the parent timeline of the clip here
1286  // allows attaching it to an object when exporting the project (because)
1287  // the exporter script initializes the clip and it's effects
1288  // before setting its parent timeline.
1289  c->ParentTimeline(this);
1290 
1291  // Load Json into Clip
1292  c->SetJsonValue(existing_clip);
1293 
1294  // Add Clip to Timeline
1295  AddClip(c);
1296  }
1297  }
1298 
1299  if (!root["effects"].isNull()) {
1300  // Clear existing effects
1301  effects.clear();
1302 
1303  // loop through effects
1304  for (const Json::Value existing_effect :root["effects"]) {
1305  // Skip NULL nodes
1306  if (existing_effect.isNull()) {
1307  continue;
1308  }
1309 
1310  // Create Effect
1311  EffectBase *e = NULL;
1312 
1313  if (!existing_effect["type"].isNull()) {
1314  // Create instance of effect
1315  if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {
1316 
1317  // Keep track of allocated effect objects
1318  allocated_effects.insert(e);
1319 
1320  // Load Json into Effect
1321  e->SetJsonValue(existing_effect);
1322 
1323  // Add Effect to Timeline
1324  AddEffect(e);
1325  }
1326  }
1327  }
1328  }
1329 
1330  if (!root["duration"].isNull()) {
1331  // Update duration of timeline
1332  info.duration = root["duration"].asDouble();
 // NOTE(review): inner line 1333 is missing — likely a recalculation of
 // dependent length/video_length state after the duration change; confirm
 // against upstream.
1334  }
1335 
1336  // Update preview settings
 // NOTE(review): inner lines 1337-1338 are missing — presumably
 // preview_width/preview_height assignments from info; confirm upstream.
1339 
1340  // Resort (and recalculate min/max duration)
1341  sort_clips();
1342  sort_effects();
1343 
1344  // Re-open if needed
1345  if (was_open)
1346  Open();
1347 }
1348 
1349 // Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
1350 void Timeline::ApplyJsonDiff(std::string value) {
1351 
1352  // Get lock (prevent getting frames while this happens)
1353  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1354 
1355  // Parse JSON string into JSON objects
1356  try
1357  {
1358  const Json::Value root = openshot::stringToJson(value);
1359  // Process the JSON change array, loop through each item
1360  for (const Json::Value change : root) {
1361  std::string change_key = change["key"][(uint)0].asString();
1362 
1363  // Process each type of change
1364  if (change_key == "clips")
1365  // Apply to CLIPS
1366  apply_json_to_clips(change);
1367 
1368  else if (change_key == "effects")
1369  // Apply to EFFECTS
1370  apply_json_to_effects(change);
1371 
1372  else
1373  // Apply to TIMELINE
1374  apply_json_to_timeline(change);
1375 
1376  }
1377  }
1378  catch (const std::exception& e)
1379  {
1380  // Error parsing JSON (or missing keys)
1381  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1382  }
1383 }
1384 
1385 // Apply JSON diff to clips
1386 void Timeline::apply_json_to_clips(Json::Value change) {
1387 
1388  // Get key and type of change
1389  std::string change_type = change["type"].asString();
1390  std::string clip_id = "";
1391  Clip *existing_clip = NULL;
1392 
1393  // Find id of clip (if any)
1394  for (auto key_part : change["key"]) {
1395  // Get each change
1396  if (key_part.isObject()) {
1397  // Check for id
1398  if (!key_part["id"].isNull()) {
1399  // Set the id
1400  clip_id = key_part["id"].asString();
1401 
1402  // Find matching clip in timeline (if any)
1403  for (auto c : clips)
1404  {
1405  if (c->Id() == clip_id) {
1406  existing_clip = c;
1407  break; // clip found, exit loop
1408  }
1409  }
1410  break; // id found, exit loop
1411  }
1412  }
1413  }
1414 
1415  // Check for a more specific key (targetting this clip's effects)
1416  // For example: ["clips", {"id:123}, "effects", {"id":432}]
1417  if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
1418  {
1419  // This change is actually targetting a specific effect under a clip (and not the clip)
1420  Json::Value key_part = change["key"][3];
1421 
1422  if (key_part.isObject()) {
1423  // Check for id
1424  if (!key_part["id"].isNull())
1425  {
1426  // Set the id
1427  std::string effect_id = key_part["id"].asString();
1428 
1429  // Find matching effect in timeline (if any)
1430  std::list<EffectBase*> effect_list = existing_clip->Effects();
1431  for (auto e : effect_list)
1432  {
1433  if (e->Id() == effect_id) {
1434  // Apply the change to the effect directly
1435  apply_json_to_effects(change, e);
1436 
1437  // Calculate start and end frames that this impacts, and remove those frames from the cache
1438  int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1439  int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1440  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1441 
1442  return; // effect found, don't update clip
1443  }
1444  }
1445  }
1446  }
1447  }
1448 
1449  // Determine type of change operation
1450  if (change_type == "insert") {
1451 
1452  // Create clip
1453  Clip *clip = new Clip();
1454 
1455  // Keep track of allocated clip objects
1456  allocated_clips.insert(clip);
1457 
1458  // Set properties of clip from JSON
1459  clip->SetJsonValue(change["value"]);
1460 
1461  // Add clip to timeline
1462  AddClip(clip);
1463 
1464  } else if (change_type == "update") {
1465 
1466  // Update existing clip
1467  if (existing_clip) {
1468  // Calculate start and end frames prior to the update
1469  int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1470  int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1471 
1472  // Update clip properties from JSON
1473  existing_clip->SetJsonValue(change["value"]);
1474 
1475  // Calculate new start and end frames after the update
1476  int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1477  int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1478 
1479  // Remove both the old and new ranges from the timeline cache
1480  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1481  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1482 
1483  // Remove cache on clip's Reader (if found)
1484  if (existing_clip->Reader() && existing_clip->Reader()->GetCache()) {
1485  existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
1486  existing_clip->Reader()->GetCache()->Remove(new_starting_frame - 8, new_ending_frame + 8);
1487  }
1488 
1489  // Apply framemapper (or update existing framemapper)
1490  if (auto_map_clips) {
1491  apply_mapper_to_clip(existing_clip);
1492  }
1493  }
1494 
1495  } else if (change_type == "delete") {
1496 
1497  // Remove existing clip
1498  if (existing_clip) {
1499  // Remove clip from timeline
1500  RemoveClip(existing_clip);
1501 
1502  // Calculate start and end frames that this impacts, and remove those frames from the cache
1503  int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1504  int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1505  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1506  }
1507 
1508  }
1509 
1510  // Re-Sort Clips (since they likely changed)
1511  sort_clips();
1512 }
1513 
1514 // Apply JSON diff to effects
1515 void Timeline::apply_json_to_effects(Json::Value change) {
1516 
1517  // Get key and type of change
1518  std::string change_type = change["type"].asString();
1519  EffectBase *existing_effect = NULL;
1520 
1521  // Find id of an effect (if any)
1522  for (auto key_part : change["key"]) {
1523 
1524  if (key_part.isObject()) {
1525  // Check for id
1526  if (!key_part["id"].isNull())
1527  {
1528  // Set the id
1529  std::string effect_id = key_part["id"].asString();
1530 
1531  // Find matching effect in timeline (if any)
1532  for (auto e : effects)
1533  {
1534  if (e->Id() == effect_id) {
1535  existing_effect = e;
1536  break; // effect found, exit loop
1537  }
1538  }
1539  break; // id found, exit loop
1540  }
1541  }
1542  }
1543 
1544  // Now that we found the effect, apply the change to it
1545  if (existing_effect || change_type == "insert") {
1546  // Apply change to effect
1547  apply_json_to_effects(change, existing_effect);
1548  }
1549 }
1550 
1551 // Apply JSON diff to effects (if you already know which effect needs to be updated)
1552 void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {
1553 
1554  // Get key and type of change
1555  std::string change_type = change["type"].asString();
1556 
1557  // Calculate start and end frames that this impacts, and remove those frames from the cache
1558  if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
1559  int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
1560  int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
1561  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1562  }
1563 
1564  // Determine type of change operation
1565  if (change_type == "insert") {
1566 
1567  // Determine type of effect
1568  std::string effect_type = change["value"]["type"].asString();
1569 
1570  // Create Effect
1571  EffectBase *e = NULL;
1572 
1573  // Init the matching effect object
1574  if ( (e = EffectInfo().CreateEffect(effect_type)) ) {
1575 
1576  // Keep track of allocated effect objects
1577  allocated_effects.insert(e);
1578 
1579  // Load Json into Effect
1580  e->SetJsonValue(change["value"]);
1581 
1582  // Add Effect to Timeline
1583  AddEffect(e);
1584  }
1585 
1586  } else if (change_type == "update") {
1587 
1588  // Update existing effect
1589  if (existing_effect) {
1590 
1591  // Calculate start and end frames that this impacts, and remove those frames from the cache
1592  int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1593  int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1594  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1595 
1596  // Update effect properties from JSON
1597  existing_effect->SetJsonValue(change["value"]);
1598  }
1599 
1600  } else if (change_type == "delete") {
1601 
1602  // Remove existing effect
1603  if (existing_effect) {
1604 
1605  // Calculate start and end frames that this impacts, and remove those frames from the cache
1606  int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1607  int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1608  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1609 
1610  // Remove effect from timeline
1611  RemoveEffect(existing_effect);
1612  }
1613 
1614  }
1615 
1616  // Re-Sort Effects (since they likely changed)
1617  sort_effects();
1618 }
1619 
1620 // Apply JSON diff to timeline properties
// Handles insert/update (set a property) and delete (reset a property) for
// timeline-level keys. Any change except a pure duration adjustment clears
// the whole cache at the end. Unknown keys raise InvalidJSONKey.
// NOTE(review): this listing was extracted from generated docs and dropped
// hyperlinked lines — see the inline notes at inner lines 1651, 1659 and
// 1664. Restore them from upstream before compiling.
1621 void Timeline::apply_json_to_timeline(Json::Value change) {
1622  bool cache_dirty = true;
1623 
1624  // Get key and type of change
1625  std::string change_type = change["type"].asString();
1626  std::string root_key = change["key"][(uint)0].asString();
1627  std::string sub_key = "";
1628  if (change["key"].size() >= 2)
1629  sub_key = change["key"][(uint)1].asString();
1630 
1631  // Determine type of change operation
1632  if (change_type == "insert" || change_type == "update") {
1633 
1634  // INSERT / UPDATE
1635  // Check for valid property
1636  if (root_key == "color")
1637  // Set color
1638  color.SetJsonValue(change["value"]);
1639  else if (root_key == "viewport_scale")
1640  // Set viewport scale
1641  viewport_scale.SetJsonValue(change["value"]);
1642  else if (root_key == "viewport_x")
1643  // Set viewport x offset
1644  viewport_x.SetJsonValue(change["value"]);
1645  else if (root_key == "viewport_y")
1646  // Set viewport y offset
1647  viewport_y.SetJsonValue(change["value"]);
1648  else if (root_key == "duration") {
1649  // Update duration of timeline
1650  info.duration = change["value"].asDouble();
 // NOTE(review): inner line 1651 is missing — likely a recalculation of
 // dependent length state after the duration change; confirm upstream.
1652 
1653  // We don't want to clear cache for duration adjustments
1654  cache_dirty = false;
1655  }
1656  else if (root_key == "width") {
1657  // Set width
1658  info.width = change["value"].asInt();
 // NOTE(review): inner line 1659 is missing — presumably an update of
 // dependent preview/display values after the width change; confirm upstream.
1660  }
1661  else if (root_key == "height") {
1662  // Set height
1663  info.height = change["value"].asInt();
 // NOTE(review): inner line 1664 is missing — presumably an update of
 // dependent preview/display values after the height change; confirm upstream.
1665  }
1666  else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
1667  // Set fps fraction
1668  if (!change["value"]["num"].isNull())
1669  info.fps.num = change["value"]["num"].asInt();
1670  if (!change["value"]["den"].isNull())
1671  info.fps.den = change["value"]["den"].asInt();
1672  }
1673  else if (root_key == "fps" && sub_key == "num")
1674  // Set fps.num
1675  info.fps.num = change["value"].asInt();
1676  else if (root_key == "fps" && sub_key == "den")
1677  // Set fps.den
1678  info.fps.den = change["value"].asInt();
1679  else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
1680  // Set display_ratio fraction
1681  if (!change["value"]["num"].isNull())
1682  info.display_ratio.num = change["value"]["num"].asInt();
1683  if (!change["value"]["den"].isNull())
1684  info.display_ratio.den = change["value"]["den"].asInt();
1685  }
1686  else if (root_key == "display_ratio" && sub_key == "num")
1687  // Set display_ratio.num
1688  info.display_ratio.num = change["value"].asInt();
1689  else if (root_key == "display_ratio" && sub_key == "den")
1690  // Set display_ratio.den
1691  info.display_ratio.den = change["value"].asInt();
1692  else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
1693  // Set pixel_ratio fraction
1694  if (!change["value"]["num"].isNull())
1695  info.pixel_ratio.num = change["value"]["num"].asInt();
1696  if (!change["value"]["den"].isNull())
1697  info.pixel_ratio.den = change["value"]["den"].asInt();
1698  }
1699  else if (root_key == "pixel_ratio" && sub_key == "num")
1700  // Set pixel_ratio.num
1701  info.pixel_ratio.num = change["value"].asInt();
1702  else if (root_key == "pixel_ratio" && sub_key == "den")
1703  // Set pixel_ratio.den
1704  info.pixel_ratio.den = change["value"].asInt();
1705 
1706  else if (root_key == "sample_rate")
1707  // Set sample rate
1708  info.sample_rate = change["value"].asInt();
1709  else if (root_key == "channels")
1710  // Set channels
1711  info.channels = change["value"].asInt();
1712  else if (root_key == "channel_layout")
1713  // Set channel layout
1714  info.channel_layout = (ChannelLayout) change["value"].asInt();
1715  else
1716  // Error parsing JSON (or missing keys)
1717  throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1718 
1719 
1720  } else if (change["type"].asString() == "delete") {
1721 
1722  // DELETE / RESET
1723  // Reset the following properties (since we can't delete them)
1724  if (root_key == "color") {
1725  color = Color();
1726  color.red = Keyframe(0.0);
1727  color.green = Keyframe(0.0);
1728  color.blue = Keyframe(0.0);
1729  }
1730  else if (root_key == "viewport_scale")
1731  viewport_scale = Keyframe(1.0);
1732  else if (root_key == "viewport_x")
1733  viewport_x = Keyframe(0.0);
1734  else if (root_key == "viewport_y")
1735  viewport_y = Keyframe(0.0);
1736  else
1737  // Error parsing JSON (or missing keys)
1738  throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1739 
1740  }
1741 
1742  if (cache_dirty) {
1743  // Clear entire cache
1744  ClearAllCache();
1745  }
1746 }
1747 
1748 // Clear all caches
1749 void Timeline::ClearAllCache(bool deep) {
1750 
1751  // Clear primary cache
1752  if (final_cache) {
1753  final_cache->Clear();
1754  }
1755 
1756  // Loop through all clips
1757  try {
1758  for (const auto clip : clips) {
1759  // Clear cache on clip and reader if present
1760  if (clip->Reader()) {
1761  if (auto rc = clip->Reader()->GetCache())
1762  rc->Clear();
1763 
1764  // Clear nested Reader (if deep clear requested)
1765  if (deep && clip->Reader()->Name() == "FrameMapper") {
1766  FrameMapper *nested_reader = static_cast<FrameMapper *>(clip->Reader());
1767  if (nested_reader->Reader()) {
1768  if (auto nc = nested_reader->Reader()->GetCache())
1769  nc->Clear();
1770  }
1771  }
1772  }
1773 
1774  // Clear clip cache
1775  if (auto cc = clip->GetCache())
1776  cc->Clear();
1777  }
1778  } catch (const ReaderClosed & e) {
1779  // ...
1780  }
1781 }
1782 
1783 // Set Max Image Size (used for performance optimization). Convenience function for setting
1784 // Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
1785 void Timeline::SetMaxSize(int width, int height) {
1786  // Maintain aspect ratio regardless of what size is passed in
1787  QSize display_ratio_size = QSize(info.width, info.height);
1788  QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));
1789 
1790  // Scale QSize up to proposed size
1791  display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
1792 
1793  // Update preview settings
1794  preview_width = display_ratio_size.width();
1795  preview_height = display_ratio_size.height();
1796 }
openshot::stringToJson
const Json::Value stringToJson(const std::string value)
Definition: Json.cpp:16
openshot::Timeline::RemoveClip
void RemoveClip(openshot::Clip *clip)
Remove an openshot::Clip from the timeline.
Definition: Timeline.cpp:397
openshot::FrameMapper::ChangeMapping
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
Definition: FrameMapper.cpp:813
openshot::ReaderInfo::sample_rate
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Definition: ReaderBase.h:60
openshot::EffectInfo
This class returns a listing of all effects supported by libopenshot.
Definition: EffectInfo.h:28
openshot::Fraction::ToFloat
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:35
openshot::Timeline::GetFrame
std::shared_ptr< openshot::Frame > GetFrame(int64_t requested_frame) override
Definition: Timeline.cpp:940
openshot::EffectBase
This abstract class is the base class, used by all effects in libopenshot.
Definition: EffectBase.h:53
openshot::ReaderBase::JsonValue
virtual Json::Value JsonValue() const =0
Generate Json::Value for this object.
Definition: ReaderBase.cpp:106
openshot::Timeline::~Timeline
virtual ~Timeline()
Definition: Timeline.cpp:210
openshot::CacheBase::Clear
virtual void Clear()=0
Clear the cache of all frames.
openshot::CacheBase::GetFrame
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
openshot::Timeline::viewport_x
openshot::Keyframe viewport_x
Curve representing the x coordinate for the viewport.
Definition: Timeline.h:324
openshot::TimelineBase::preview_width
int preview_width
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:44
openshot::CompareClipEndFrames
Definition: Timeline.h:75
openshot::Timeline::SetMaxSize
void SetMaxSize(int width, int height)
Definition: Timeline.cpp:1785
openshot::BBox::height
float height
bounding box height
Definition: TrackedObjectBBox.h:42
openshot::CrashHandler::Instance
static CrashHandler * Instance()
Definition: CrashHandler.cpp:27
openshot::EffectInfo::CreateEffect
EffectBase * CreateEffect(std::string effect_type)
Create an instance of an effect (factory style)
Definition: EffectInfo.cpp:27
openshot::ReaderBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ReaderBase.cpp:157
openshot
This namespace is the default namespace for all code in the openshot library.
Definition: Compressor.h:28
openshot::TimelineBase::preview_height
int preview_height
Optional preview height of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:45
openshot::CacheBase::Add
virtual void Add(std::shared_ptr< openshot::Frame > frame)=0
Add a Frame to the cache.
openshot::Timeline::ApplyJsonDiff
void ApplyJsonDiff(std::string value)
Apply a special formatted JSON object, which represents a change to the timeline (add,...
Definition: Timeline.cpp:1350
openshot::Clip
This class represents a clip (used to arrange readers on the timeline)
Definition: Clip.h:89
openshot::Fraction
This class represents a fraction.
Definition: Fraction.h:30
openshot::BBox::cy
float cy
y-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:40
openshot::ReaderBase::info
openshot::ReaderInfo info
Information about the current media file.
Definition: ReaderBase.h:88
openshot::Settings
This class is contains settings used by libopenshot (and can be safely toggled at any point)
Definition: Settings.h:26
openshot::Timeline::GetMinFrame
int64_t GetMinFrame()
Look up the start frame number of the first element on the timeline (first frame is 1)
Definition: Timeline.cpp:485
Timeline.h
Header file for Timeline class.
openshot::Clip::ParentTimeline
void ParentTimeline(openshot::TimelineBase *new_timeline) override
Set associated Timeline pointer.
Definition: Clip.cpp:446
openshot::Timeline::ClearAllCache
void ClearAllCache(bool deep=false)
Definition: Timeline.cpp:1749
openshot::Timeline::GetTrackedObjectsIds
std::list< std::string > GetTrackedObjectsIds() const
Return the ID's of the tracked objects as a list of strings.
Definition: Timeline.cpp:262
openshot::CompareEffectEndFrames
Like CompareClipEndFrames, but for effects.
Definition: Timeline.h:81
openshot::Keyframe::SetJsonValue
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: KeyFrame.cpp:372
openshot::Clip::Effects
std::list< openshot::EffectBase * > Effects()
Return the list of effects on the timeline.
Definition: Clip.h:243
openshot::ReaderInfo::duration
float duration
Length of time (in seconds)
Definition: ReaderBase.h:43
CacheDisk.h
Header file for CacheDisk class.
openshot::Clip::channel_mapping
openshot::Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel)
Definition: Clip.h:348
openshot::ReaderInfo::has_video
bool has_video
Determines if this file has a video stream.
Definition: ReaderBase.h:40
openshot::ReaderInfo::width
int width
The width of the video (in pixesl)
Definition: ReaderBase.h:46
openshot::ClipBase::Position
void Position(float value)
Set the Id of this clip object
Definition: ClipBase.cpp:19
openshot::CacheBase
All cache managers in libopenshot are based on this CacheBase class.
Definition: CacheBase.h:34
openshot::Clip::SetJsonValue
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Clip.cpp:1015
CacheBase.h
Header file for CacheBase class.
openshot::OutOfBoundsFrame
Exception for frames that are out of bounds.
Definition: Exceptions.h:300
openshot::Fraction::ToDouble
double ToDouble() const
Return this fraction as a double (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:40
openshot::Timeline::apply_effects
std::shared_ptr< openshot::Frame > apply_effects(std::shared_ptr< openshot::Frame > frame, int64_t timeline_frame_number, int layer, TimelineInfoStruct *options)
Apply global/timeline effects to the source frame (if any)
Definition: Timeline.cpp:549
openshot::Keyframe::JsonValue
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: KeyFrame.cpp:339
openshot::CacheBase::Remove
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
FrameMapper.h
Header file for the FrameMapper class.
openshot::ReaderBase::clip
openshot::ClipBase * clip
Pointer to the parent clip instance (if any)
Definition: ReaderBase.h:80
openshot::CacheBase::SetMaxBytesFromInfo
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Definition: CacheBase.cpp:28
openshot::Color
This class represents a color (used on the timeline and clips)
Definition: Color.h:27
openshot::ClipBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ClipBase.cpp:80
openshot::ReaderInfo::video_length
int64_t video_length
The number of frames in the video stream.
Definition: ReaderBase.h:53
openshot::ReaderInfo::height
int height
The height of the video (in pixels)
Definition: ReaderBase.h:45
openshot::VOLUME_MIX_REDUCE
@ VOLUME_MIX_REDUCE
Reduce volume by about %25, and then mix (louder, but could cause pops if the sum exceeds 100%)
Definition: Enums.h:71
openshot::BBox::angle
float angle
bounding box rotation angle [degrees]
Definition: TrackedObjectBBox.h:43
openshot::Fraction::num
int num
Numerator for the fraction.
Definition: Fraction.h:32
openshot::Timeline::GetClip
openshot::Clip * GetClip(const std::string &id)
Look up a single clip by ID.
Definition: Timeline.cpp:416
openshot::Fraction::den
int den
Denominator for the fraction.
Definition: Fraction.h:33
OPEN_MP_NUM_PROCESSORS
#define OPEN_MP_NUM_PROCESSORS
Definition: OpenMPUtilities.h:23
openshot::Keyframe
A Keyframe is a collection of Point instances, which is used to vary a number or property over time.
Definition: KeyFrame.h:53
CrashHandler.h
Header file for CrashHandler class.
openshot::Fraction::Reduce
void Reduce()
Reduce this fraction (i.e. 640/480 = 4/3)
Definition: Fraction.cpp:65
openshot::Color::SetJsonValue
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: Color.cpp:117
openshot::Timeline::Open
void Open() override
Open the reader (and start consuming resources)
Definition: Timeline.cpp:928
openshot::Timeline::SetJson
void SetJson(const std::string value) override
Load JSON string into this object.
Definition: Timeline.cpp:1231
openshot::TimelineInfoStruct::is_before_clip_keyframes
bool is_before_clip_keyframes
Is this before clip keyframes are applied.
Definition: TimelineBase.h:35
openshot::Fraction::Reciprocal
Fraction Reciprocal() const
Return the reciprocal as a Fraction.
Definition: Fraction.cpp:78
openshot::ReaderInfo::has_audio
bool has_audio
Determines if this file has an audio stream.
Definition: ReaderBase.h:41
openshot::Timeline::GetTrackedObjectValues
std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const
Return the trackedObject's properties as a JSON string.
Definition: Timeline.cpp:278
openshot::CacheMemory
This class is a memory-based cache manager for Frame objects.
Definition: CacheMemory.h:29
openshot::FrameMapper::Close
void Close() override
Close the openshot::FrameMapper and internal reader.
Definition: FrameMapper.cpp:723
openshot::InvalidJSON
Exception for invalid JSON.
Definition: Exceptions.h:217
openshot::Timeline::GetMaxTime
double GetMaxTime()
Look up the end time of the latest timeline element.
Definition: Timeline.cpp:471
openshot::BBox::width
float width
bounding box width
Definition: TrackedObjectBBox.h:41
openshot::Timeline
This class represents a timeline.
Definition: Timeline.h:154
openshot::Timeline::ClipEffects
std::list< openshot::EffectBase * > ClipEffects() const
Return the list of effects on all clips.
Definition: Timeline.cpp:452
CacheMemory.h
Header file for CacheMemory class.
openshot::Color::green
openshot::Keyframe green
Curve representing the green value (0 - 255)
Definition: Color.h:31
openshot::ReaderInfo
This struct contains info about a media file, such as height, width, frames per second,...
Definition: ReaderBase.h:38
openshot::CompareEffects
Definition: Timeline.h:65
openshot::ReaderInfo::video_timebase
openshot::Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Definition: ReaderBase.h:55
openshot::Settings::Instance
static Settings * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: Settings.cpp:23
openshot::TimelineInfoStruct
This struct contains info about the current Timeline clip instance.
Definition: TimelineBase.h:32
openshot::Timeline::Clear
void Clear()
Clear all clips, effects, and frame mappers from timeline (and free memory)
Definition: Timeline.cpp:860
openshot::Timeline::RemoveEffect
void RemoveEffect(openshot::EffectBase *effect)
Remove an effect from the timeline.
Definition: Timeline.cpp:378
openshot::ClipBase::Start
void Start(float value)
Set start position (in seconds) of clip (trim start of video)
Definition: ClipBase.cpp:42
path
path
Definition: FFmpegWriter.cpp:1469
openshot::Settings::PATH_OPENSHOT_INSTALL
std::string PATH_OPENSHOT_INSTALL
Definition: Settings.h:114
openshot::FrameMapper
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
Definition: FrameMapper.h:193
openshot::Timeline::Close
void Close() override
Close the timeline reader (and any resources it was consuming)
Definition: Timeline.cpp:906
openshot::Frame::GetSamplesPerFrame
int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Definition: Frame.cpp:484
openshot::Timeline::AddClip
void AddClip(openshot::Clip *clip)
Add an openshot::Clip to the timeline.
Definition: Timeline.cpp:336
openshot::InvalidFile
Exception for files that can not be found or opened.
Definition: Exceptions.h:187
openshot::ZmqLogger::Instance
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: ZmqLogger.cpp:35
openshot::ZmqLogger::AppendDebugMethod
void AppendDebugMethod(std::string method_name, std::string arg1_name="", float arg1_value=-1.0, std::string arg2_name="", float arg2_value=-1.0, std::string arg3_name="", float arg3_value=-1.0, std::string arg4_name="", float arg4_value=-1.0, std::string arg5_name="", float arg5_value=-1.0, std::string arg6_name="", float arg6_value=-1.0)
Append debug information.
Definition: ZmqLogger.cpp:178
openshot::Clip::volume
openshot::Keyframe volume
Curve representing the volume (0 to 1)
Definition: Clip.h:331
openshot::ReaderInfo::vcodec
std::string vcodec
The name of the video codec used to encode / decode the video stream.
Definition: ReaderBase.h:52
openshot::Timeline::AddTrackedObject
void AddTrackedObject(std::shared_ptr< openshot::TrackedObjectBase > trackedObject)
Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
Definition: Timeline.cpp:227
openshot::Color::JsonValue
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: Color.cpp:86
openshot::Keyframe::GetInt
int GetInt(int64_t index) const
Get the rounded INT value at a specific index.
Definition: KeyFrame.cpp:282
openshot::BBox
This struct holds the information of a bounding-box.
Definition: TrackedObjectBBox.h:37
openshot::Timeline::color
openshot::Color color
Background color of timeline canvas.
Definition: Timeline.h:328
openshot::ReaderClosed
Exception when a reader is closed, and a frame is requested.
Definition: Exceptions.h:363
openshot::Timeline::GetEffect
openshot::EffectBase * GetEffect(const std::string &id)
Look up a timeline effect by ID.
Definition: Timeline.cpp:428
openshot::ReaderInfo::channel_layout
openshot::ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
Definition: ReaderBase.h:62
openshot::CompareClips
Definition: Timeline.h:49
openshot::Clip::channel_filter
openshot::Keyframe channel_filter
A number representing an audio channel to filter (clears all other channels)
Definition: Clip.h:347
openshot::ClipBase::Id
void Id(std::string value)
Definition: ClipBase.h:94
openshot::Keyframe::GetCount
int64_t GetCount() const
Get the number of points (i.e. # of points)
Definition: KeyFrame.cpp:424
openshot::Timeline::Timeline
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout)
Constructor for the timeline (which configures the default frame properties)
Definition: Timeline.cpp:32
openshot::ReaderInfo::fps
openshot::Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Definition: ReaderBase.h:48
openshot::ReaderBase
This abstract class is the base class, used by all readers in libopenshot.
Definition: ReaderBase.h:75
openshot::Timeline::Json
std::string Json() const override
Generate JSON string of this object.
Definition: Timeline.cpp:1190
openshot::Timeline::GetMaxFrame
int64_t GetMaxFrame()
Look up the end frame number of the latest element on the timeline.
Definition: Timeline.cpp:477
openshot::ClipBase::GetFrame
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
This method is required for all derived classes of ClipBase, and returns a new openshot::Frame object...
openshot::Timeline::SetJsonValue
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Timeline.cpp:1251
openshot::VOLUME_MIX_AVERAGE
@ VOLUME_MIX_AVERAGE
Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100%.
Definition: Enums.h:70
openshot::Timeline::ApplyMapperToClips
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
Definition: Timeline.cpp:525
openshot::Timeline::viewport_y
openshot::Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Definition: Timeline.h:325
openshot::Clip::has_audio
openshot::Keyframe has_audio
An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
Definition: Clip.h:351
openshot::ChannelLayout
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround,...
Definition: ChannelLayouts.h:28
openshot::ReaderInfo::pixel_ratio
openshot::Fraction pixel_ratio
The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
Definition: ReaderBase.h:50
openshot::BBox::cx
float cx
x-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:39
openshot::Clip::Reader
void Reader(openshot::ReaderBase *new_reader)
Set the current reader.
Definition: Clip.cpp:337
openshot::Timeline::GetClipEffect
openshot::EffectBase * GetClipEffect(const std::string &id)
Look up a clip effect by ID.
Definition: Timeline.cpp:439
openshot::Color::red
openshot::Keyframe red
Curve representing the red value (0 - 255)
Definition: Color.h:30
openshot::FrameMapper::Reader
ReaderBase * Reader()
Get the current reader.
Definition: FrameMapper.cpp:67
openshot::ReaderInfo::acodec
std::string acodec
The name of the audio codec used to encode / decode the video stream.
Definition: ReaderBase.h:58
openshot::PULLDOWN_NONE
@ PULLDOWN_NONE
Do not apply pull-down techniques, just repeat or skip entire frames.
Definition: FrameMapper.h:46
openshot::ReaderInfo::display_ratio
openshot::Fraction display_ratio
The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
Definition: ReaderBase.h:51
openshot::TimelineInfoStruct::is_top_clip
bool is_top_clip
Is clip on top (if overlapping another clip)
Definition: TimelineBase.h:34
openshot::InvalidJSONKey
Exception for missing JSON Change key.
Definition: Exceptions.h:262
openshot::Color::GetColorHex
std::string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Definition: Color.cpp:47
openshot::ReaderInfo::channels
int channels
The number of audio channels used in the audio stream.
Definition: ReaderBase.h:61
openshot::Timeline::viewport_scale
openshot::Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Definition: Timeline.h:323
openshot::Timeline::AddEffect
void AddEffect(openshot::EffectBase *effect)
Add an effect to the timeline.
Definition: Timeline.cpp:362
openshot::Timeline::JsonValue
Json::Value JsonValue() const override
Generate Json::Value for this object.
Definition: Timeline.cpp:1197
openshot::ReaderBase::GetCache
virtual openshot::CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
openshot::Color::blue
openshot::Keyframe blue
Curve representing the red value (0 - 255)
Definition: Color.h:32
openshot::Timeline::GetTrackedObject
std::shared_ptr< openshot::TrackedObjectBase > GetTrackedObject(std::string id) const
Return tracked object pointer by it's id.
Definition: Timeline.cpp:245
Exceptions.h
Header file for all Exception classes.
openshot::Clip::mixing
openshot::VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
Definition: Clip.h:180
openshot::Timeline::GetMinTime
double GetMinTime()
Look up the position/start time of the first timeline element.
Definition: Timeline.cpp:493
openshot::EffectBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: EffectBase.cpp:115
openshot::Keyframe::GetValue
double GetValue(int64_t index) const
Get the value at a specific index.
Definition: KeyFrame.cpp:258
openshot::Timeline::SetCache
void SetCache(openshot::CacheBase *new_cache)
Definition: Timeline.cpp:1174
openshot::ReaderBase::getFrameMutex
std::recursive_mutex getFrameMutex
Mutex for multiple threads.
Definition: ReaderBase.h:79