25 #include <QRegularExpression>
26 #include <unordered_map>
// NOTE(review): this chunk is a partial, line-number-prefixed extract of the
// Timeline implementation; many original lines are missing, so comments below
// describe only what the visible fragments demonstrate.
//
// Member-initializer fragments for the default Timeline constructor:
// closed state, automatic clip mapping, and an internally managed cache.
34 is_open(false), auto_map_clips(true), managed_cache(true),
path(
""), max_time(0.0), cache_epoch(0), safe_edit_frames_remaining(0)
// Delegating-constructor fragment forwarding reader info fields.
80 info.width, info.height, info.fps, info.sample_rate,
81 info.channels, info.channel_layout) {}
// Project-file constructor: same defaults, but `path` is the project path.
85 is_open(false), auto_map_clips(true), managed_cache(true),
path(projectPath), max_time(0.0), cache_epoch(0), safe_edit_frames_remaining(0) {
// Validate that the project file exists before reading it.
104 QFileInfo filePath(QString::fromStdString(path));
105 if (!filePath.exists()) {
106 throw InvalidFile(
"Timeline project file could not be opened.", path);
// Verify the OpenShot install path and its "transitions" subfolder exist.
112 if (!openshotPath.exists()) {
115 QDir openshotTransPath(openshotPath.filePath(
"transitions"));
116 if (!openshotTransPath.exists()) {
117 throw InvalidFile(
"PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
// Resolve (and create, if needed) the project's "_assets" folder next to the
// project file; folder name is capped at 30 chars of the project base name.
121 QString asset_name = filePath.baseName().left(30) +
"_assets";
122 QDir asset_folder(filePath.dir().filePath(asset_name));
123 if (!asset_folder.exists()) {
125 asset_folder.mkpath(
".");
// Read the whole project file as UTF-8 text.
129 QFile projectFile(QString::fromStdString(path));
130 projectFile.open(QFile::ReadOnly);
131 QString projectContents = QString::fromUtf8(projectFile.readAll());
// Optionally rewrite relative "@assets"/"@transitions" paths to absolute ones.
134 if (convert_absolute_paths) {
// Match every "image" or "path" JSON property and capture key + value.
138 QRegularExpression allPathsRegex(QStringLiteral(
"\"(image|path)\":.*?\"(.*?)\""));
139 std::vector<QRegularExpressionMatch> matchedPositions;
140 QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
141 while (i.hasNext()) {
142 QRegularExpressionMatch match = i.next();
143 if (match.hasMatch()) {
145 matchedPositions.push_back(match);
// Replace matches in reverse order so earlier capture offsets stay valid.
150 std::vector<QRegularExpressionMatch>::reverse_iterator itr;
151 for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
152 QRegularExpressionMatch match = *itr;
153 QString relativeKey = match.captured(1);
154 QString relativePath = match.captured(2);
155 QString absolutePath =
"";
// "@assets" paths resolve against the project's asset folder.
158 if (relativePath.startsWith(
"@assets")) {
159 absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace(
"@assets",
"."))).canonicalFilePath();
160 }
// "@transitions" paths resolve against the OpenShot install folder.
else if (relativePath.startsWith(
"@transitions")) {
161 absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace(
"@transitions",
"."))).canonicalFilePath();
// Otherwise resolve relative to the project file's own directory.
163 absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
// Splice the absolute path back into the JSON text in place.
167 if (!absolutePath.isEmpty()) {
168 projectContents.replace(match.capturedStart(0), match.capturedLength(0),
"\"" + relativeKey +
"\": \"" + absolutePath +
"\"");
172 matchedPositions.clear();
// Load the (possibly rewritten) project JSON into this timeline.
176 SetJson(projectContents.toStdString());
// Derive timeline duration from the last clip's end, and flag audio/video
// presence from the clip readers (assignment lines not visible here).
180 float calculated_duration = 0.0;
181 for (
auto clip : clips)
184 if (clip_last_frame > calculated_duration)
185 calculated_duration = clip_last_frame;
186 if (
clip->Reader() &&
clip->Reader()->info.has_audio)
188 if (
clip->Reader() &&
clip->Reader()->info.has_video)
// Fragment (likely destructor/teardown): only delete the final cache when
// this timeline allocated it itself (managed_cache) — TODO confirm context.
221 if (managed_cache && final_cache) {
// AddTrackedObject: insert or update a tracked object keyed by its Id.
231 auto iterator = tracked_objects.find(trackedObject->Id());
233 if (iterator != tracked_objects.end()){
// Id already present: replace the stored object.
235 iterator->second = trackedObject;
// New Id: insert it.
239 tracked_objects[trackedObject->Id()] = trackedObject;
// GetTrackedObject: look up a tracked object by Id (miss branch not shown).
249 auto iterator = tracked_objects.find(
id);
251 if (iterator != tracked_objects.end()){
253 std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
254 return trackedObject;
// GetTrackedObjectsIds: collect every tracked-object Id into a list.
266 std::list<std::string> trackedObjects_ids;
269 for (
auto const& it: tracked_objects){
271 trackedObjects_ids.push_back(it.first);
274 return trackedObjects_ids;
// GetTrackedObjectValues: serialize one tracked object's bounding box
// (x1/y1/x2/y2/rotation) for a given frame as styled JSON text.
282 Json::Value trackedObjectJson;
285 auto iterator = tracked_objects.find(
id);
287 if (iterator != tracked_objects.end())
// Downcast to the concrete bounding-box implementation.
290 std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);
// Exact keyframe exists for this frame number: use its box.
293 if (trackedObject->ExactlyContains(frame_number)){
294 BBox box = trackedObject->GetBox(frame_number);
// Convert center + size into left/right edge coordinates.
295 float x1 = box.
cx - (box.
width/2);
297 float x2 = box.
cx + (box.
width/2);
299 float rotation = box.
angle;
301 trackedObjectJson[
"x1"] = x1;
302 trackedObjectJson[
"y1"] = y1;
303 trackedObjectJson[
"x2"] = x2;
304 trackedObjectJson[
"y2"] = y2;
305 trackedObjectJson[
"rotation"] = rotation;
// No exact keyframe: fall back to the first stored box.
308 BBox box = trackedObject->BoxVec.begin()->second;
309 float x1 = box.
cx - (box.
width/2);
311 float x2 = box.
cx + (box.
width/2);
313 float rotation = box.
angle;
315 trackedObjectJson[
"x1"] = x1;
316 trackedObjectJson[
"y1"] = y1;
317 trackedObjectJson[
"x2"] = x2;
318 trackedObjectJson[
"y2"] = y2;
319 trackedObjectJson[
"rotation"] = rotation;
// Unknown Id: return an all-zero box so callers always get valid JSON.
325 trackedObjectJson[
"x1"] = 0;
326 trackedObjectJson[
"y1"] = 0;
327 trackedObjectJson[
"x2"] = 0;
328 trackedObjectJson[
"y2"] = 0;
329 trackedObjectJson[
"rotation"] = 0;
332 return trackedObjectJson.toStyledString();
// AddClip: attach a clip to this timeline under the frame lock.
340 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
// Let the clip know which timeline owns it.
343 clip->ParentTimeline(
this);
// Drop any stale frames already cached by the clip's reader.
346 if (
clip->Reader() &&
clip->Reader()->GetCache())
347 clip->Reader()->GetCache()->Clear();
// Optionally wrap the clip's reader in a FrameMapper matching this timeline.
350 if (auto_map_clips) {
352 apply_mapper_to_clip(
clip);
356 clips.push_back(
clip);
// AddEffect: attach a timeline-level effect under the frame lock.
366 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
369 effect->ParentTimeline(
this);
372 effects.push_back(effect);
// RemoveEffect: detach an effect; forget it if this timeline allocated it.
382 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
384 effects.remove(effect);
387 if (allocated_effects.count(effect)) {
388 allocated_effects.erase(effect);
// RemoveClip: detach a clip; forget it if this timeline allocated it.
401 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
406 if (allocated_clips.count(
clip)) {
407 allocated_clips.erase(
clip);
// GetClip fragment: linear scan over the clip list.
420 for (
const auto&
clip : clips) {
// GetEffect fragment: linear scan of timeline-level effects by Id.
432 for (
const auto& effect : effects) {
433 if (effect->Id() ==
id) {
// GetClipEffect fragment: search every clip for an effect with this Id.
443 for (
const auto&
clip : clips) {
444 const auto e =
clip->GetEffect(
id);
// ClipEffects: concatenate the effect lists of all clips into one list.
456 std::list<EffectBase*> timelineEffectsList;
459 for (
const auto&
clip : clips) {
462 std::list<EffectBase*> clipEffectsList =
clip->Effects();
465 timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
468 return timelineEffectsList;
// Time-to-frame helpers: ceil for an end frame, floor + 1 for a start frame.
482 return static_cast<int64_t
>(std::ceil(t * fps));
490 return static_cast<int64_t
>(std::floor(t * fps)) + 1;
// apply_mapper_to_clip: wrap a clip's reader in a FrameMapper that matches
// this timeline's output settings (mapper construction lines not shown).
500 void Timeline::apply_mapper_to_clip(
Clip* clip)
// Readers that are already FrameMappers are handled specially.
504 if (
clip->Reader()->Name() ==
"FrameMapper")
// Track the mapper so it can be freed when the timeline closes.
517 allocated_frame_mappers.insert(mapper);
522 clip->Reader(clip_reader);
// ApplyMapperToClips: re-run the mapper over every clip on the timeline.
532 for (
auto clip : clips)
535 apply_mapper_to_clip(
clip);
540 double Timeline::calculate_time(int64_t number,
Fraction rate)
543 double raw_fps = rate.
ToFloat();
546 return double(number - 1) / raw_fps;
// apply_effects fragment: run every timeline-level effect that intersects
// this frame on the given layer (signature and some lines not shown).
"Timeline::apply_effects",
555 "frame->number", frame->number,
556 "timeline_frame_number", timeline_frame_number,
560 for (
auto effect : effects)
// Convert the effect's position/duration (seconds) into frame numbers.
564 int64_t effect_start_position =
static_cast<int64_t
>(std::llround(effect->Position() * fpsD)) + 1;
565 int64_t effect_end_position =
static_cast<int64_t
>(std::llround((effect->Position() + effect->Duration()) * fpsD));
// Effect applies only when it overlaps the frame on the same layer.
567 bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);
570 if (does_effect_intersect)
// Translate the timeline frame number into the effect's local frame number.
573 int64_t effect_start_frame =
static_cast<int64_t
>(std::llround(effect->Start() * fpsD)) + 1;
574 int64_t effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;
584 "Timeline::apply_effects (Process Effect)",
585 "effect_frame_number", effect_frame_number,
586 "does_effect_intersect", does_effect_intersect);
// Let the effect transform (and replace) the frame being built.
589 frame = effect->ProcessFrame(frame, effect_frame_number);
// GetOrCreateFrame: return the clip's rendered frame for `number`, or (in a
// branch not shown here) create a blank frame of the right size/samples.
599 std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame,
Clip* clip, int64_t number,
openshot::TimelineInfoStruct* options)
601 std::shared_ptr<Frame> new_frame;
609 "Timeline::GetOrCreateFrame (from reader)",
611 "samples_in_frame", samples_in_frame);
// Ask the clip to render its frame on top of the supplied background frame.
614 new_frame = std::shared_ptr<Frame>(
clip->
GetFrame(background_frame, number, options));
627 "Timeline::GetOrCreateFrame (create blank)",
629 "samples_in_frame", samples_in_frame);
// add_layer: composite one clip's frame (video and audio) onto new_frame.
636 void Timeline::add_layer(std::shared_ptr<Frame> new_frame,
Clip* source_clip, int64_t clip_frame_number,
bool is_top_clip,
bool force_safe_composite,
float max_volume)
// Render the clip's frame over the current composite.
645 std::shared_ptr<Frame> source_frame;
646 source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, &options);
654 "Timeline::add_layer",
655 "new_frame->number", new_frame->number,
656 "clip_frame_number", clip_frame_number);
// Mix the clip's audio into the timeline frame, if it has any.
659 if (source_clip->
Reader()->info.has_audio) {
662 "Timeline::add_layer (Copy Audio)",
663 "source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
664 "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
666 "clip_frame_number", clip_frame_number);
// Sample counts must agree before mixing (adjustment lines not shown).
671 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
675 for (
int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
// Volume at the previous frame, used to ramp smoothly into this frame.
678 float previous_volume = source_clip->
volume.
GetValue(clip_frame_number - 1);
// Normalize by the summed volume of audible clips to avoid clipping.
686 previous_volume = previous_volume / max_volume;
687 volume = volume / max_volume;
// Extra headroom factor (0.77) applied on this path.
691 previous_volume = previous_volume * 0.77;
692 volume = volume * 0.77;
// Honor the clip's channel filter (-1 means all channels pass).
696 if (channel_filter != -1 && channel_filter != channel)
// Fully muted across the whole ramp: skip this channel entirely.
700 if (previous_volume == 0.0 && volume == 0.0)
// Default mapping keeps the source channel index.
704 if (channel_mapping == -1)
705 channel_mapping = channel;
// Apply a gain ramp only when volume differs from unity at either end.
708 if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
709 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
// Mix (not replace: first arg false) samples into the destination channel.
713 new_frame->AddAudio(
false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
719 "Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
720 "source_clip->Reader()->info.has_audio",
721 source_clip->
Reader()->info.has_audio,
722 "source_frame->GetAudioChannelsCount()",
723 source_frame->GetAudioChannelsCount(),
725 "clip_frame_number", clip_frame_number);
730 "Timeline::add_layer (Transform: Composite Image Layer: Completed)",
731 "source_frame->number", source_frame->number,
732 "new_frame->GetImage()->width()", new_frame->GetWidth(),
733 "new_frame->GetImage()->height()", new_frame->GetHeight());
// update_open_clips: open/close clip readers as clips enter/leave the range
// being rendered (the actual open/close call lines are not shown here).
737 void Timeline::update_open_clips(
Clip *clip,
bool does_clip_intersect)
740 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
743 "Timeline::update_open_clips (before)",
744 "does_clip_intersect", does_clip_intersect,
745 "closing_clips.size()", closing_clips.size(),
746 "open_clips.size()", open_clips.size());
// Is this clip currently tracked as open?
749 bool clip_found = open_clips.count(
clip);
// Open but no longer needed: remove it from the open set.
751 if (clip_found && !does_clip_intersect)
754 open_clips.erase(
clip);
// Needed but not yet open: handled in lines not shown here.
759 else if (!clip_found && does_clip_intersect)
775 "Timeline::update_open_clips (after)",
776 "does_clip_intersect", does_clip_intersect,
777 "clip_found", clip_found,
778 "closing_clips.size()", closing_clips.size(),
779 "open_clips.size()", open_clips.size());
// calculate_max_duration: recompute min_time/max_time from the extremes of
// all clips and effects (comparator lambdas only partially visible).
783 void Timeline::calculate_max_duration() {
784 double last_clip = 0.0;
785 double last_effect = 0.0;
786 double first_clip = std::numeric_limits<double>::max();
787 double first_effect = std::numeric_limits<double>::max();
790 if (!clips.empty()) {
// Latest-ending clip determines last_clip.
792 const auto max_clip = std::max_element(
794 last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
// Earliest-starting clip determines first_clip.
797 const auto min_clip = std::min_element(
799 return lhs->Position() < rhs->Position();
801 first_clip = (*min_clip)->Position();
805 if (!effects.empty()) {
807 const auto max_effect = std::max_element(
809 last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
812 const auto min_effect = std::min_element(
814 return lhs->Position() < rhs->Position();
816 first_effect = (*min_effect)->Position();
// Overall bounds across both clips and effects.
820 max_time = std::max(last_clip, last_effect);
821 min_time = std::min(first_clip, first_effect);
// Empty timeline: reset handling is in lines not shown here.
824 if (clips.empty() && effects.empty()) {
// sort_clips: re-sort the clip list under the frame lock, then refresh the
// cached timeline duration (the sort call itself is not shown).
831 void Timeline::sort_clips()
834 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
838 "Timeline::SortClips",
839 "clips.size()", clips.size());
845 calculate_max_duration();
// sort_effects: same pattern for the effect list.
849 void Timeline::sort_effects()
852 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
858 calculate_max_duration();
// Teardown fragment: close every clip, then free everything this timeline
// allocated itself (clips, effects, frame mappers).
867 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
870 for (
auto clip : clips)
// Mark every clip as no longer intersecting so it gets closed.
872 update_open_clips(
clip,
false);
// Only delete clips this timeline allocated (others are caller-owned).
875 bool allocated = allocated_clips.count(
clip);
882 allocated_clips.clear();
// Same ownership rule for effects.
885 for (
auto effect : effects)
888 bool allocated = allocated_effects.count(effect);
895 allocated_effects.clear();
// Free the FrameMappers created by apply_mapper_to_clip().
898 for (
auto mapper : allocated_frame_mappers)
904 allocated_frame_mappers.clear();
// Separate fragment (likely Close()): close all open clips under the lock.
913 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
916 for (
auto clip : clips)
919 update_open_clips(
clip,
false);
936 bool Timeline::isEqual(
double a,
double b)
938 return fabs(a - b) < 0.000001;
// GetFrame fragment: return one composited timeline frame, serving from the
// final cache when possible and rebuilding from intersecting clips on a miss.
945 if (requested_frame < 1)
// Frames past the timeline's end are generated but never cached.
948 const bool past_timeline_end = (max_frame > 0 && requested_frame > max_frame);
// Fast path: probe the final cache without taking the frame lock.
951 std::shared_ptr<Frame> frame;
952 if (!past_timeline_end)
953 frame = final_cache->
GetFrame(requested_frame);
957 "Timeline::GetFrame (Cached frame found)",
958 "requested_frame", requested_frame);
// Slow path: take the lock and probe the cache a second time, so a frame
// built by another thread while we waited is reused.
966 const std::lock_guard<std::recursive_mutex> lock(
getFrameMutex);
969 std::shared_ptr<Frame> frame;
970 if (!past_timeline_end)
971 frame = final_cache->
GetFrame(requested_frame);
975 "Timeline::GetFrame (Cached frame found on 2nd check)",
976 "requested_frame", requested_frame);
// Find clips near/intersecting the frame (also updates open/closed state).
983 std::vector<Clip *> nearby_clips;
984 nearby_clips = find_intersecting_clips(requested_frame, 1,
true);
988 "Timeline::GetFrame (processing frame)",
989 "requested_frame", requested_frame,
990 "omp_get_thread_num()", omp_get_thread_num());
// Start from silence (background color handling is in lines not shown).
997 new_frame->AddAudioSilence(samples_in_frame);
1003 "Timeline::GetFrame (Adding solid color)",
1004 "requested_frame", requested_frame,
1016 "Timeline::GetFrame (Loop through clips)",
1017 "requested_frame", requested_frame,
1018 "clips.size()", clips.size(),
1019 "nearby_clips.size()", nearby_clips.size());
1026 int64_t start_frame;
1027 int64_t frame_number;
// Precompute each clip's frame range once, instead of per pass below.
1030 std::vector<ClipInfo> clip_infos;
1031 clip_infos.reserve(nearby_clips.size());
1034 for (
auto clip : nearby_clips) {
// Clip's first/last timeline frame derived from position and duration.
1035 int64_t start_pos =
static_cast<int64_t
>(std::llround(
clip->
Position() * fpsD)) + 1;
1036 int64_t end_pos =
static_cast<int64_t
>(std::llround((
clip->
Position() +
clip->Duration()) * fpsD));
1037 bool intersects = (start_pos <= requested_frame && end_pos >= requested_frame);
// Map the timeline frame number to the clip's local frame number.
1038 int64_t start_frame =
static_cast<int64_t
>(std::llround(
clip->
Start() * fpsD)) + 1;
1039 int64_t frame_number = requested_frame - start_pos + start_frame;
1040 clip_infos.push_back({
clip, start_pos, end_pos, start_frame, frame_number, intersects});
// Per layer, the intersecting clip that starts latest is the "top" clip.
1044 std::unordered_map<int, int64_t> top_start_for_layer;
1045 std::unordered_map<int, Clip*> top_clip_for_layer;
1046 for (
const auto& ci : clip_infos) {
1047 if (!ci.intersects)
continue;
1048 const int layer = ci.clip->Layer();
1049 auto it = top_start_for_layer.find(layer);
1050 if (it == top_start_for_layer.end() || ci.start_pos > it->second) {
1051 top_start_for_layer[layer] = ci.start_pos;
1052 top_clip_for_layer[layer] = ci.clip;
// Sum volumes of all audible clips so audio mixing can normalize by it.
1057 float max_volume_sum = 0.0f;
1058 for (
const auto& ci : clip_infos) {
1059 if (!ci.intersects)
continue;
1060 if (ci.clip->Reader() && ci.clip->Reader()->info.has_audio &&
1061 ci.clip->has_audio.GetInt(ci.frame_number) != 0) {
1062 max_volume_sum +=
static_cast<float>(ci.clip->volume.GetValue(ci.frame_number));
// Safe-composite mode stays on for a countdown of frames after JSON edits;
// each rendered frame decrements the counter.
1067 const int safe_remaining = safe_edit_frames_remaining.load(std::memory_order_relaxed);
1068 const bool force_safe_composite = (safe_remaining > 0);
1069 if (force_safe_composite) {
1070 safe_edit_frames_remaining.fetch_sub(1, std::memory_order_relaxed);
// Composite every intersecting clip into the frame.
1072 for (
const auto& ci : clip_infos) {
1075 "Timeline::GetFrame (Does clip intersect)",
1076 "requested_frame", requested_frame,
1077 "clip->Position()", ci.clip->Position(),
1078 "clip->Duration()", ci.clip->Duration(),
1079 "does_clip_intersect", ci.intersects);
1082 if (ci.intersects) {
// Top-of-layer flag controls compositing behavior inside add_layer.
1084 bool is_top_clip =
false;
1085 const int layer = ci.clip->Layer();
1086 auto top_it = top_clip_for_layer.find(layer);
1087 if (top_it != top_clip_for_layer.end())
1088 is_top_clip = (top_it->second == ci.clip);
1091 int64_t clip_frame_number = ci.frame_number;
1095 "Timeline::GetFrame (Calculate clip's frame #)",
1096 "clip->Position()", ci.clip->Position(),
1097 "clip->Start()", ci.clip->Start(),
1099 "clip_frame_number", clip_frame_number);
// Blend this clip's video/audio into the frame being built.
1102 add_layer(new_frame, ci.clip, clip_frame_number, is_top_clip, force_safe_composite, max_volume_sum);
1107 "Timeline::GetFrame (clip does not intersect)",
1108 "requested_frame", requested_frame,
1109 "does_clip_intersect", ci.intersects);
1116 "Timeline::GetFrame (Add frame to cache)",
1117 "requested_frame", requested_frame,
1122 new_frame->SetFrameNumber(requested_frame);
// Cache the finished frame unless it is past the timeline's end.
1125 if (!past_timeline_end)
1126 final_cache->
Add(new_frame);
// find_intersecting_clips: collect clips that do (include=true) or do not
// (include=false) overlap the requested frame range; also updates each
// clip's open/closed state as a side effect.
1135 std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame,
int number_of_frames,
bool include)
1138 std::vector<Clip*> matching_clips;
// Inclusive frame range to test against.
1141 const int64_t min_requested_frame = requested_frame;
1142 const int64_t max_requested_frame = requested_frame + (number_of_frames - 1);
1145 matching_clips.reserve(clips.size());
1147 for (
auto clip : clips)
// Clip's first/last timeline frame derived from position and duration.
1150 int64_t clip_start_position =
static_cast<int64_t
>(std::llround(
clip->
Position() * fpsD)) + 1;
1151 int64_t clip_end_position =
static_cast<int64_t
>(std::llround((
clip->
Position() +
clip->Duration()) * fpsD)) + 1;
// Overlap test between the clip's span and the requested range.
1153 bool does_clip_intersect =
1154 (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
1155 (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
1159 "Timeline::find_intersecting_clips (Is clip near or intersecting)",
1160 "requested_frame", requested_frame,
1161 "min_requested_frame", min_requested_frame,
1162 "max_requested_frame", max_requested_frame,
1164 "does_clip_intersect", does_clip_intersect);
// Open or close the clip based on whether it is currently needed.
1167 update_open_clips(
clip, does_clip_intersect);
1170 if (does_clip_intersect && include)
1172 matching_clips.push_back(
clip);
1174 else if (!does_clip_intersect && !include)
1176 matching_clips.push_back(
clip);
1181 return matching_clips;
// SetCache fragment: swap in a caller-owned cache. The old cache is deleted
// only if it was internally managed; afterwards the timeline stops managing.
1187 const std::lock_guard<std::recursive_mutex> lock(
getFrameMutex);
1190 if (managed_cache && final_cache) {
// Caller now owns the cache lifetime.
1193 managed_cache =
false;
1197 final_cache = new_cache;
// JsonValue fragment: serialize this timeline (type, path, clips, effects).
1212 root[
"type"] =
"Timeline";
1217 root[
"path"] = path;
// Serialize every clip into the "clips" array.
1220 root[
"clips"] = Json::Value(Json::arrayValue);
1223 for (
const auto existing_clip : clips)
1225 root[
"clips"].append(existing_clip->JsonValue());
// Serialize every timeline-level effect into the "effects" array.
1229 root[
"effects"] = Json::Value(Json::arrayValue);
1232 for (
const auto existing_effect: effects)
1234 root[
"effects"].append(existing_effect->JsonValue());
// SetJson fragment: parse JSON text under the frame lock; any parsing or
// shape error is rethrown as InvalidJSON.
1245 const std::lock_guard<std::recursive_mutex> lock(
getFrameMutex);
1254 catch (
const std::exception& e)
1257 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)");
// SetJsonValue fragment: rebuild timeline state (path, clips, effects,
// duration) from a parsed JSON root, under the frame lock.
1265 const std::lock_guard<std::recursive_mutex> lock(
getFrameMutex);
// Remember open state so it can be restored after the rebuild.
1268 bool was_open = is_open;
1275 if (!root[
"path"].isNull())
1276 path = root[
"path"].asString();
// Recreate clips from the "clips" array (construction lines not shown).
1278 if (!root[
"clips"].isNull()) {
1283 for (
const Json::Value existing_clip : root[
"clips"]) {
1285 if (existing_clip.isNull()) {
// Track clips created here so teardown can free them.
1293 allocated_clips.insert(c);
// Recreate effects from the "effects" array via the EffectInfo factory.
1310 if (!root[
"effects"].isNull()) {
1315 for (
const Json::Value existing_effect :root[
"effects"]) {
1317 if (existing_effect.isNull()) {
1324 if (!existing_effect[
"type"].isNull()) {
1326 if ( (e =
EffectInfo().CreateEffect(existing_effect[
"type"].asString())) ) {
// Track effects created here so teardown can free them.
1329 allocated_effects.insert(e);
// Optional explicit duration override from the JSON.
1341 if (!root[
"duration"].isNull()) {
// ApplyJsonDiff fragment: apply an array of {type, key, value} diffs,
// routing each change to clips, effects, or timeline properties.
1367 const std::lock_guard<std::recursive_mutex> lock(
getFrameMutex);
1374 for (
const Json::Value change : root) {
// The first element of "key" names the change target.
1375 std::string change_key = change[
"key"][(uint)0].asString();
1378 if (change_key ==
"clips")
1380 apply_json_to_clips(change);
1382 else if (change_key ==
"effects")
1384 apply_json_to_effects(change);
// Anything else is a top-level timeline property change.
1388 apply_json_to_timeline(change);
// After any edit, force safe compositing for the next 240 rendered frames.
1393 if (!root.empty()) {
1395 safe_edit_frames_remaining.store(240, std::memory_order_relaxed);
1399 catch (
const std::exception& e)
1402 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)");
// BumpCacheEpoch: atomically advance the cache generation counter (relaxed
// ordering — the counter is standalone, not used to publish other data).
1406 void Timeline::BumpCacheEpoch() {
1407 cache_epoch.fetch_add(1, std::memory_order_relaxed);
// apply_json_to_clips: handle an insert/update/delete diff targeting clips,
// invalidating the affected cache range (±8 frames) around each change.
1411 void Timeline::apply_json_to_clips(Json::Value change) {
1414 std::string change_type = change[
"type"].asString();
1415 std::string clip_id =
"";
1416 Clip *existing_clip = NULL;
// Find the clip id embedded in the change key.
1419 for (
auto key_part : change[
"key"]) {
1421 if (key_part.isObject()) {
1423 if (!key_part[
"id"].isNull()) {
1425 clip_id = key_part[
"id"].asString();
// Resolve the id to an existing clip pointer.
1428 for (
auto c : clips)
1430 if (c->Id() == clip_id) {
// A 4-part key ending in "effects"/{id} targets an effect on this clip.
1442 if (existing_clip && change[
"key"].size() == 4 && change[
"key"][2] ==
"effects")
1445 Json::Value key_part = change[
"key"][3];
1447 if (key_part.isObject()) {
1449 if (!key_part[
"id"].isNull())
1452 std::string effect_id = key_part[
"id"].asString();
// Locate the clip's effect and delegate the change to it.
1455 std::list<EffectBase*> effect_list = existing_clip->
Effects();
1456 for (
auto e : effect_list)
1458 if (e->Id() == effect_id) {
1460 apply_json_to_effects(change, e);
// Invalidate cached frames overlapping the clip's (new) extent.
1469 int64_t new_ending_frame = ((existing_clip->
Position() + existing_clip->Duration()) *
info.
fps.
ToDouble()) + 1;
1470 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1480 if (change_type ==
"insert") {
// Clips created from JSON are owned (and later freed) by the timeline.
1486 allocated_clips.insert(
clip);
1497 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1499 }
else if (change_type ==
"update") {
1502 if (existing_clip) {
// Invalidate both the clip's old and new frame ranges.
1505 int64_t old_ending_frame = ((existing_clip->
Position() + existing_clip->Duration()) *
info.
fps.
ToDouble()) + 1;
1512 int64_t new_ending_frame = ((existing_clip->
Position() + existing_clip->Duration()) *
info.
fps.
ToDouble()) + 1;
1515 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1516 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
// Re-map the updated clip to this timeline's output settings.
1519 if (auto_map_clips) {
1520 apply_mapper_to_clip(existing_clip);
1524 }
else if (change_type ==
"delete") {
1527 if (existing_clip) {
// Invalidate the deleted clip's old frame range.
1533 int64_t old_ending_frame = ((existing_clip->
Position() + existing_clip->Duration()) *
info.
fps.
ToDouble()) + 1;
1534 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
// apply_json_to_effects (1-arg): resolve the timeline-level effect named in
// the change key, then delegate to the two-argument overload.
1544 void Timeline::apply_json_to_effects(Json::Value change) {
1547 std::string change_type = change[
"type"].asString();
// Pull the effect id out of the change key.
1551 for (
auto key_part : change[
"key"]) {
1553 if (key_part.isObject()) {
1555 if (!key_part[
"id"].isNull())
1558 std::string effect_id = key_part[
"id"].asString();
// Resolve the id against the timeline's effect list.
1561 for (
auto e : effects)
1563 if (e->Id() == effect_id) {
1564 existing_effect = e;
// Inserts have no existing effect; all other change types need a match.
1574 if (existing_effect || change_type ==
"insert") {
1576 apply_json_to_effects(change, existing_effect);
// apply_json_to_effects (2-arg): apply an insert/update/delete diff to one
// effect, invalidating the affected cache range (±8 frames).
1581 void Timeline::apply_json_to_effects(Json::Value change,
EffectBase* existing_effect) {
1584 std::string change_type = change[
"type"].asString();
// If the change carries a position, pre-invalidate the new frame range
// computed from position + (end - start) in seconds.
1587 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1588 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1589 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1590 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1594 if (change_type ==
"insert") {
// Create the effect from its "type" via the factory; timeline owns it.
1597 std::string effect_type = change[
"value"][
"type"].asString();
1606 allocated_effects.insert(e);
1615 }
else if (change_type ==
"update") {
1618 if (existing_effect) {
// Invalidate the effect's old frame range before applying the update.
1622 int64_t old_ending_frame = ((existing_effect->
Position() + existing_effect->Duration()) *
info.
fps.
ToDouble()) + 1;
1623 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1629 }
else if (change_type ==
"delete") {
1632 if (existing_effect) {
// Invalidate the deleted effect's old frame range.
1636 int64_t old_ending_frame = ((existing_effect->
Position() + existing_effect->Duration()) *
info.
fps.
ToDouble()) + 1;
1637 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
// apply_json_to_timeline: apply a diff to a top-level timeline property
// (color, viewport, duration, size, fps, ratios, audio settings).
1650 void Timeline::apply_json_to_timeline(Json::Value change) {
// Most property changes invalidate the cache; duration alone does not.
1651 bool cache_dirty =
true;
1654 std::string change_type = change[
"type"].asString();
1655 std::string root_key = change[
"key"][(uint)0].asString();
1656 std::string sub_key =
"";
1657 if (change[
"key"].size() >= 2)
1658 sub_key = change[
"key"][(uint)1].asString();
// Inserts and updates share the same property dispatch chain.
1661 if (change_type ==
"insert" || change_type ==
"update") {
1665 if (root_key ==
"color")
1668 else if (root_key ==
"viewport_scale")
1671 else if (root_key ==
"viewport_x")
1674 else if (root_key ==
"viewport_y")
1677 else if (root_key ==
"duration") {
// Duration-only changes do not affect already-rendered frames.
1683 cache_dirty =
false;
1685 else if (root_key ==
"width") {
1690 else if (root_key ==
"height") {
// Whole-object fps update ({num, den}).
1695 else if (root_key ==
"fps" && sub_key ==
"" && change[
"value"].isObject()) {
1697 if (!change[
"value"][
"num"].isNull())
1698 info.
fps.
num = change[
"value"][
"num"].asInt();
1699 if (!change[
"value"][
"den"].isNull())
1700 info.
fps.
den = change[
"value"][
"den"].asInt();
// fps updated one component at a time.
1702 else if (root_key ==
"fps" && sub_key ==
"num")
1705 else if (root_key ==
"fps" && sub_key ==
"den")
// display_ratio, as an object or by component.
1708 else if (root_key ==
"display_ratio" && sub_key ==
"" && change[
"value"].isObject()) {
1710 if (!change[
"value"][
"num"].isNull())
1712 if (!change[
"value"][
"den"].isNull())
1715 else if (root_key ==
"display_ratio" && sub_key ==
"num")
1718 else if (root_key ==
"display_ratio" && sub_key ==
"den")
// pixel_ratio, as an object or by component.
1721 else if (root_key ==
"pixel_ratio" && sub_key ==
"" && change[
"value"].isObject()) {
1723 if (!change[
"value"][
"num"].isNull())
1725 if (!change[
"value"][
"den"].isNull())
1728 else if (root_key ==
"pixel_ratio" && sub_key ==
"num")
1731 else if (root_key ==
"pixel_ratio" && sub_key ==
"den")
// Audio output settings.
1735 else if (root_key ==
"sample_rate")
1738 else if (root_key ==
"channels")
1741 else if (root_key ==
"channel_layout")
// Unknown property names are rejected loudly.
1746 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
// Deletes only support resetting a small set of properties.
1749 }
else if (change[
"type"].asString() ==
"delete") {
1753 if (root_key ==
"color") {
1759 else if (root_key ==
"viewport_scale")
1761 else if (root_key ==
"viewport_x")
1763 else if (root_key ==
"viewport_y")
1767 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
// ClearAllCache fragment: clear the final cache, every clip reader's cache,
// and (in deep mode) the reader nested inside each FrameMapper.
1780 const std::lock_guard<std::recursive_mutex> guard(
getFrameMutex);
1784 final_cache->
Clear();
1789 for (
const auto clip : clips) {
1791 if (
clip->Reader()) {
// Clear the clip reader's own cache, if it has one.
1792 if (
auto rc =
clip->Reader()->GetCache())
// Deep mode also clears the reader wrapped inside a FrameMapper.
1796 if (deep &&
clip->Reader()->Name() ==
"FrameMapper") {
1798 if (nested_reader->
Reader()) {
// Clear the clip's own cache as well, if present.
1806 if (
auto cc =
clip->GetCache())
// Unrelated trailing fragment: scale a size preserving its aspect ratio.
1825 display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);