Remove crash debug information from streaming.
parent b5ae492f5e
commit 2694cb76a7

2 changed files with 2 additions and 179 deletions
@@ -1602,7 +1602,6 @@ void ListWidget::paintEvent(QPaintEvent *e) {
 		width(),
 		std::min(st::msgMaxWidth / 2, width() / 2));
 
-	auto ms = crl::now();
 	auto clip = e->rect();
 
 	auto from = std::lower_bound(begin(_items), end(_items), clip.top(), [this](auto &elem, int top) {
@@ -12,15 +12,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "base/concurrent_timer.h"
 #include "core/crash_reports.h"
 
-#include "zlib.h"
 #include <cfenv>
 
-extern "C" {
-extern int __isa_available;
-}
-
-#define TO_LOG(x) debugLog(QString x)
-
 namespace Media {
 namespace Streaming {
 namespace {
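The TO_LOG macro deleted above was the entry point for all of the logging lines removed in the rest of this diff. As a reading aid (a reconstruction from the removed definition, not part of the commit), it expanded like this:

	// Removed macro: #define TO_LOG(x) debugLog(QString x)
	// A call site such as
	//     TO_LOG(("pause,time:%1").arg(time));
	// therefore expanded to
	//     debugLog(QString("pause,time:%1").arg(time));
	// with debugLog() (also removed below) appending the entry to the
	// mutable _debugLog string list kept for crash reporting.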
@@ -174,9 +167,6 @@ private:
 
 	[[nodiscard]] TimePoint trackTime() const;
 
-	void debugAssertKnownTime(int step, crl::time time) const;
-	void debugLog(const QString &entry) const;
-
 	const crl::weak_on_queue<VideoTrackObject> _weak;
 	PlaybackOptions _options;
 
@@ -204,8 +194,6 @@ private:
 	// For initial frame skipping for an exact seek.
 	FFmpeg::FramePointer _initialSkippingFrame;
 
-	mutable QStringList _debugLog;
-
 };
 
 VideoTrackObject::VideoTrackObject(
@@ -227,18 +215,6 @@ VideoTrackObject::VideoTrackObject(
 	Expects(_stream.duration > 1);
 	Expects(_ready != nullptr);
 	Expects(_error != nullptr);
-
-	TO_LOG(("created,speed:%1,mode:%2,position:%3,sync:%4,"
-		"loop:%5,wait:%6,duration:%7,initialized:%8,isa:%9"
-		).arg(options.speed
-		).arg(int(options.mode)
-		).arg(options.position
-		).arg(options.syncVideoByAudio ? "true" : "false"
-		).arg(options.loop ? "true" : "false"
-		).arg(options.waitForMarkAsShown ? "true" : "false"
-		).arg(_stream.duration
-		).arg(_shared->initialized() ? "true" : "false"
-		).arg(__isa_available));
 }
 
 rpl::producer<> VideoTrackObject::checkNextFrame() const {
@@ -260,11 +236,9 @@ void VideoTrackObject::process(std::vector<FFmpeg::Packet> &&packets) {
 		return;
 	}
 	if (packets.front().empty()) {
-		TO_LOG(("process,packets:%1,till_end").arg(packets.size()));
 		Assert(packets.size() == 1);
 		_readTillEnd = true;
 	} else if (!_readTillEnd) {
-		TO_LOG(("process,packets:%1,till:%2").arg(packets.size()).arg(durationByPacket(packets.back())));
 		//for (const auto &packet : packets) {
 		//	// Maybe it is enough to count by list.back()?.. hope so.
 		//	accumulate_max(
@@ -283,7 +257,6 @@ void VideoTrackObject::process(std::vector<FFmpeg::Packet> &&packets) {
 	}
 	for (auto i = begin(packets), e = end(packets); i != e; ++i) {
 		if (_shared->initialized()) {
-			TO_LOG(("queueing_packets,count:%1").arg(e - i));
 			_stream.queue.insert(
 				end(_stream.queue),
 				std::make_move_iterator(i),
@@ -291,7 +264,6 @@ void VideoTrackObject::process(std::vector<FFmpeg::Packet> &&packets) {
 			queueReadFrames();
 			break;
 		} else if (!tryReadFirstFrame(std::move(*i))) {
-			TO_LOG(("fail_read_first_frame"));
 			fail(Error::InvalidData);
 			break;
 		}
@@ -315,106 +287,21 @@ int VideoTrackObject::durationByPacket(const FFmpeg::Packet &packet) {
 
 void VideoTrackObject::queueReadFrames(crl::time delay) {
 	if (delay > 0) {
-		TO_LOG(("queue_with_delay:%1").arg(delay));
 		_readFramesTimer.callOnce(delay);
 	} else if (!_queued) {
-		TO_LOG(("queue_without_delay"));
 		_queued = true;
 		_weak.with([](VideoTrackObject &that) {
-			that.TO_LOG(("unqueued_without_delay"));
 			that._queued = false;
 			that.readFrames();
 		});
-	} else {
-		TO_LOG(("queued_already"));
 	}
 }
 
-void VideoTrackObject::debugAssertKnownTime(int step, crl::time time) const {
-	if (time < kTimeUnknown / 2) {
-		CrashReports::SetAnnotation("DebugStep", QString::number(step));
-		CrashReports::SetAnnotation("CheckedValue", QString::number(time));
-		CrashReports::SetAnnotation(
-			"_syncTimePoint.trackTime",
-			QString::number(_syncTimePoint.trackTime));
-		CrashReports::SetAnnotation(
-			"_syncTimePoint.worldTime",
-			QString::number(_syncTimePoint.worldTime));
-		CrashReports::SetAnnotation(
-			"_pausedTime",
-			QString::number(_pausedTime));
-		CrashReports::SetAnnotation(
-			"_resumedTime",
-			QString::number(_resumedTime));
-		if (!_shared) {
-			CrashReports::SetAnnotation("_shared", "NULL");
-		} else {
-			CrashReports::SetAnnotation(
-				"_shared->initialized",
-				_shared->initialized() ? "true" : "false");
-		}
-		CrashReports::SetAnnotation("Now", QString::number(crl::now()));
-
-		const auto log = _debugLog.join('\n').toUtf8();
-		const auto compressed = [&] {
-			auto result = QByteArray(log.size() + 1024, char(0));
-			z_stream stream;
-			stream.zalloc = nullptr;
-			stream.zfree = nullptr;
-			stream.opaque = nullptr;
-			stream.avail_in = log.size();
-			stream.next_in = reinterpret_cast<Bytef*>(const_cast<char*>(log.data()));
-			stream.avail_out = result.size();
-			stream.next_out = reinterpret_cast<Bytef*>(result.data());
-			int res = deflateInit2(&stream, Z_BEST_COMPRESSION, Z_DEFLATED, -MAX_WBITS, MAX_MEM_LEVEL, Z_DEFAULT_STRATEGY);
-			if (res != Z_OK) {
-				CrashReports::SetAnnotation("Log", "deflatInit2:" + QString::number(res));
-				return QByteArray();
-			}
-			{
-				const auto guard = gsl::finally([&] { deflateEnd(&stream); });
-				int res = deflate(&stream, Z_FINISH);
-				if (res != Z_OK && res != Z_STREAM_END) {
-					CrashReports::SetAnnotation("Log", "deflate:" + QString::number(res));
-					return QByteArray();
-				} else if (!stream.avail_out) {
-					CrashReports::SetAnnotation("Log", "deflate:no_avail_out");
-					return QByteArray();
-				}
-			}
-			result.resize(result.size() - stream.avail_out);
-			return result;
-		}();
-		if (!compressed.isEmpty()) {
-			CrashReports::SetAnnotation("Log", QString::fromUtf8(compressed.toBase64()));
-		}
-
-		Unexpected("Bad time value.");
-	}
-}
-
-void VideoTrackObject::debugLog(const QString &entry) const {
-	constexpr auto kMaxEntries = 2048;
-	if (_debugLog.size() >= kMaxEntries) {
-		return;
-	} else if (_debugLog.size() == kMaxEntries - 1) {
-		_debugLog.push_back("...");
-		return;
-	}
-	_debugLog.push_back("stp.worldTime:"
-		+ QString::number(_syncTimePoint.worldTime)
-		+ ";stp.trackTime:" + QString::number(_syncTimePoint.trackTime)
-		+ ";fe:" + QString::number(std::fetestexcept(FE_ALL_EXCEPT))
-		+ ";" + entry);
-}
-
 void VideoTrackObject::readFrames() {
 	if (interrupted()) {
 		return;
 	}
 	auto time = trackTime().trackTime;
-	TO_LOG(("reading_frames,time:%1").arg(time));
-	debugAssertKnownTime(1, time);
 	while (true) {
 		const auto result = readEnoughFrames(time);
 		v::match(result, [&](FrameResult result) {
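The deleted debugAssertKnownTime() above attached the accumulated _debugLog entries to the crash report as a raw-deflated, base64-encoded "Log" annotation before hitting Unexpected(). For anyone digging such an annotation out of an old crash report, a minimal decoding sketch (a hypothetical standalone helper, assuming Qt and zlib; it is not part of the codebase):

	#include <QByteArray>
	#include <zlib.h>

	// Reverse of the removed compression: base64-decode, then inflate the
	// raw deflate stream (negative window bits) back into the log text.
	QByteArray DecodeLogAnnotation(const QByteArray &base64, int maxSize = 1024 * 1024) {
		const auto compressed = QByteArray::fromBase64(base64);
		auto result = QByteArray(maxSize, char(0));
		z_stream stream = {}; // zero-initialized: no custom allocators
		stream.avail_in = compressed.size();
		stream.next_in = reinterpret_cast<Bytef*>(const_cast<char*>(compressed.data()));
		stream.avail_out = result.size();
		stream.next_out = reinterpret_cast<Bytef*>(result.data());
		if (inflateInit2(&stream, -MAX_WBITS) != Z_OK) {
			return QByteArray();
		}
		const auto res = inflate(&stream, Z_FINISH);
		inflateEnd(&stream);
		if (res != Z_STREAM_END) {
			return QByteArray();
		}
		result.resize(result.size() - stream.avail_out);
		return result;
	}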
@@ -425,7 +312,6 @@ void VideoTrackObject::readFrames() {
 				const auto duration = computeDuration();
 				Assert(duration != kDurationUnavailable);
 				time -= duration;
-				debugAssertKnownTime(2, time);
 			}
 		}, [&](Shared::PrepareNextCheck delay) {
 			Expects(delay == kTimeUnknown || delay > 0);
@@ -443,7 +329,6 @@ void VideoTrackObject::readFrames() {
 
 auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
 -> ReadEnoughState {
-	TO_LOG(("reading_enough_frames,time:%1").arg(trackTime));
 	const auto dropStaleFrames = !_options.waitForMarkAsShown;
 	const auto state = _shared->prepareState(trackTime, dropStaleFrames);
 	return v::match(state, [&](Shared::PrepareFrame frame)
@@ -451,18 +336,13 @@ auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
 		while (true) {
 			const auto result = readFrame(frame);
 			if (result != FrameResult::Done) {
-				TO_LOG(("read_enough_state,frame_result:%1").arg(int(result)));
 				return result;
 			} else if (!dropStaleFrames
 				|| !VideoTrack::IsStale(frame, trackTime)) {
-				TO_LOG(("read_enough_state,null"));
 				return v::null;
 			}
 		}
 	}, [&](Shared::PrepareNextCheck delay) -> ReadEnoughState {
-		TO_LOG(("read_enough_state,delay:%1").arg(delay));
-		Expects(delay == kTimeUnknown || delay > 0); // Debugging crash.
-
 		return delay;
 	}, [&](v::null_t) -> ReadEnoughState {
 		return FrameResult::Done;
@@ -472,7 +352,6 @@ auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
 bool VideoTrackObject::loopAround() {
 	const auto duration = computeDuration();
 	if (duration == kDurationUnavailable) {
-		TO_LOG(("loop_around,error_duration_unavailable"));
 		LOG(("Streaming Error: "
 			"Couldn't find out the real video stream duration."));
 		return false;
@@ -480,7 +359,6 @@ bool VideoTrackObject::loopAround() {
 	avcodec_flush_buffers(_stream.codec.get());
 	_loopingShift += duration;
 	_readTillEnd = false;
-	TO_LOG(("loop_around,duration:%1,shift:%2").arg(duration).arg(_loopingShift));
 	return true;
 }
 
@@ -498,38 +376,31 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
 	if (const auto error = ReadNextFrame(_stream)) {
 		if (error.code() == AVERROR_EOF) {
 			if (!_options.loop) {
-				TO_LOG(("read_frame,error_eof"));
 				frame->position = kFinishedPosition;
 				frame->displayed = kTimeUnknown;
 				return FrameResult::Finished;
 			} else if (loopAround()) {
-				TO_LOG(("read_frame,error_looped"));
 				return FrameResult::Looped;
 			} else {
-				TO_LOG(("read_frame,error_bad_data"));
 				fail(Error::InvalidData);
 				return FrameResult::Error;
 			}
 		} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
-			TO_LOG(("read_frame,error_bad_data:%1,till_end:%2").arg(error.code()).arg(_readTillEnd ? "true" : "false"));
 			fail(Error::InvalidData);
 			return FrameResult::Error;
 		}
-		TO_LOG(("read_frame,error_again"));
 		Assert(_stream.queue.empty());
 		_waitingForData.fire({});
 		return FrameResult::Waiting;
 	}
 	const auto position = currentFramePosition();
 	if (position == kTimeUnknown) {
-		TO_LOG(("read_frame,error_current_position:%1").arg(position));
 		fail(Error::InvalidData);
 		return FrameResult::Error;
 	}
 	std::swap(frame->decoded, _stream.frame);
 	frame->position = position;
 	frame->displayed = kTimeUnknown;
-	TO_LOG(("read_frame,current_position:%1").arg(position));
 	return FrameResult::Done;
 }
 
@@ -629,13 +500,11 @@ void VideoTrackObject::presentFrameIfNeeded() {
 	}
 	const auto dropStaleFrames = !_options.waitForMarkAsShown;
 	const auto time = trackTime();
-	TO_LOG(("present_frame_check,world:%1,track:%2").arg(time.worldTime).arg(time.trackTime));
 	const auto presented = _shared->presentFrame(
 		this,
 		time,
 		_options.speed,
 		dropStaleFrames);
-	TO_LOG(("present_frame_check,add:%1,position:%2,next:%3").arg(presented.addedWorldTimeDelay).arg(presented.displayPosition).arg(presented.nextCheckDelay));
 	addTimelineDelay(presented.addedWorldTimeDelay);
 	if (presented.displayPosition == kFinishedPosition) {
 		interrupt();
@@ -653,7 +522,6 @@ void VideoTrackObject::presentFrameIfNeeded() {
 void VideoTrackObject::pause(crl::time time) {
 	Expects(_syncTimePoint.valid());
 
-	TO_LOG(("pause,time:%1,paused:%2").arg(time).arg(_pausedTime));
 	if (interrupted()) {
 		return;
 	} else if (_pausedTime == kTimeUnknown) {
@@ -664,8 +532,6 @@ void VideoTrackObject::pause(crl::time time) {
 void VideoTrackObject::resume(crl::time time) {
 	Expects(_syncTimePoint.trackTime != kTimeUnknown);
 
-	TO_LOG(("resume,time:%1,paused:%2,resumed:%3").arg(time).arg(_pausedTime).arg(_resumedTime));
-
 	if (interrupted()) {
 		return;
 	}
@@ -686,15 +552,12 @@ void VideoTrackObject::resume(crl::time time) {
 }
 
 void VideoTrackObject::setSpeed(float64 speed) {
-	TO_LOG(("set_speed,speed:%1").arg(speed));
 	if (interrupted()) {
 		return;
 	}
 	if (_syncTimePoint.valid()) {
 		const auto time = trackTime();
-		TO_LOG(("set_speed_changing_time,world:%1,track:%2").arg(time.worldTime).arg(time.trackTime));
 		_syncTimePoint = time;
-		debugAssertKnownTime(3, _syncTimePoint.trackTime);
 	}
 	_options.speed = speed;
 }
@@ -703,23 +566,17 @@ void VideoTrackObject::setWaitForMarkAsShown(bool wait) {
 	if (interrupted()) {
 		return;
 	}
-	TO_LOG(("set_wait_for_mark,wait:%1").arg(wait ? "true" : "false"));
 	_options.waitForMarkAsShown = wait;
 }
 
 bool VideoTrackObject::interrupted() const {
-	if (_shared == nullptr) {
-		TO_LOG(("interruped_true"));
-		return true;
-	}
-	return false;
+	return !_shared;
 }
 
 void VideoTrackObject::frameShown() {
 	if (interrupted()) {
 		return;
 	}
-	TO_LOG(("frame_shown"));
 	queueReadFrames();
 }
 
@@ -729,7 +586,6 @@ void VideoTrackObject::addTimelineDelay(crl::time delayed) {
 	if (!delayed) {
 		return;
 	}
-	TO_LOG(("adding_delay:%1").arg(delayed));
 	_syncTimePoint.worldTime += delayed;
 }
 
@@ -745,34 +601,26 @@ void VideoTrackObject::removeFrameRequest(const Instance *instance) {
 
 bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
 	if (ProcessPacket(_stream, std::move(packet)).failed()) {
-		TO_LOG(("try_read_first_frame_process_failed"));
 		return false;
 	}
-	TO_LOG(("try_read_first_frame"));
 	while (true) {
 		if (const auto error = ReadNextFrame(_stream)) {
 			if (error.code() == AVERROR_EOF) {
 				if (!_initialSkippingFrame) {
-					TO_LOG(("try_read_first_frame_eof_bad"));
 					return false;
 				}
 				// Return the last valid frame if we seek too far.
 				_stream.frame = std::move(_initialSkippingFrame);
-				TO_LOG(("try_read_first_frame_eof_to_initial"));
 				return processFirstFrame();
 			} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
-				TO_LOG(("try_read_first_frame_error,end:%1").arg(_readTillEnd ? "true" : "false"));
 				return false;
 			} else {
 				// Waiting for more packets.
-				TO_LOG(("try_read_first_frame_waiting"));
 				return true;
 			}
 		} else if (!fillStateFromFrame()) {
-			TO_LOG(("try_read_first_frame_bad_state"));
 			return false;
 		} else if (_syncTimePoint.trackTime >= _options.position) {
-			TO_LOG(("try_read_first_frame_process"));
 			return processFirstFrame();
 		}
 
@@ -797,8 +645,6 @@ bool VideoTrackObject::processFirstFrame() {
 	if (frame.isNull()) {
 		return false;
 	}
-	debugAssertKnownTime(4, _syncTimePoint.trackTime);
-	TO_LOG(("process_first_frame"));
 	_shared->init(std::move(frame), _syncTimePoint.trackTime);
 	callReady();
 	queueReadFrames();
@@ -819,12 +665,9 @@ crl::time VideoTrackObject::currentFramePosition() const {
 bool VideoTrackObject::fillStateFromFrame() {
 	const auto position = currentFramePosition();
 	if (position == kTimeUnknown) {
-		TO_LOG(("fill_state_from_frame_bad"));
 		return false;
 	}
-	TO_LOG(("fill_state_from_frame,position:%1").arg(position));
 	_syncTimePoint.trackTime = position;
-	debugAssertKnownTime(5, _syncTimePoint.trackTime);
 	return true;
 }
 
@@ -847,51 +690,32 @@ void VideoTrackObject::callReady() {
 	data.state.receivedTill = _readTillEnd
 		? _stream.duration
 		: _syncTimePoint.trackTime;
-	TO_LOG(("call_ready,till:%1").arg(data.state.receivedTill));
 	base::take(_ready)({ data });
 }
 
 TimePoint VideoTrackObject::trackTime() const {
-	debugAssertKnownTime(7, _syncTimePoint.trackTime);
-
 	auto result = TimePoint();
 	result.worldTime = (_pausedTime != kTimeUnknown)
 		? _pausedTime
 		: crl::now();
 	if (!_syncTimePoint) {
 		result.trackTime = _syncTimePoint.trackTime;
-		TO_LOG(("track_time,paused:%1,result_world:%2").arg(_pausedTime).arg(result.worldTime));
 		return result;
 	}
-	debugAssertKnownTime(8, _syncTimePoint.worldTime);
-	debugAssertKnownTime(9, result.worldTime);
 
 	Assert(_resumedTime != kTimeUnknown);
 	if (_options.syncVideoByAudio && _audioId.externalPlayId()) {
 		const auto mixer = Media::Player::mixer();
 		const auto point = mixer->getExternalSyncTimePoint(_audioId);
 		if (point && point.worldTime > _resumedTime) {
-			TO_LOG(("track_time_sync,world:%1,track:%2,resumed:%3").arg(point.worldTime).arg(point.trackTime).arg(_resumedTime));
 			_syncTimePoint = point;
-			debugAssertKnownTime(6, _syncTimePoint.trackTime);
-			debugAssertKnownTime(10, _syncTimePoint.worldTime);
 		}
 	}
 	const auto adjust = (result.worldTime - _syncTimePoint.worldTime);
 	const auto adjustSpeed = adjust * _options.speed;
 	const auto roundAdjustSpeed = SafeRound(adjustSpeed);
 	const auto timeRoundAdjustSpeed = crl::time(roundAdjustSpeed);
-	const auto trackTime = _syncTimePoint.trackTime + timeRoundAdjustSpeed;
-	TO_LOG(("track_time_adjusted,world:%1,adjust:%2,speed:%3,delta:%4,rounded:%5,casted:%6,final:%7"
-		).arg(result.worldTime
-		).arg(adjust
-		).arg(_options.speed
-		).arg(adjustSpeed
-		).arg(roundAdjustSpeed
-		).arg(timeRoundAdjustSpeed
-		).arg(trackTime));
-	result.trackTime = trackTime;
-	debugAssertKnownTime(11, result.trackTime);
+	result.trackTime = _syncTimePoint.trackTime + timeRoundAdjustSpeed;
 	return result;
 }
 
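With the logging gone, the surviving trackTime() logic reduces to mapping wall-clock time onto track time around the last sync point. A schematic restatement (a sketch only, with hypothetical plain types standing in for crl::time and the tdesktop helpers):

	#include <cmath>
	#include <cstdint>

	using Time = std::int64_t; // milliseconds, like crl::time

	// trackTime = syncTrackTime + round((worldNow - syncWorldTime) * speed)
	Time TrackTimeAt(Time worldNow, Time syncWorldTime, Time syncTrackTime, double speed) {
		// Elapsed world time since the last sync point, scaled by playback speed.
		const auto adjust = static_cast<double>(worldNow - syncWorldTime) * speed;
		return syncTrackTime + static_cast<Time>(std::llround(adjust));
	}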