
AK: Rename Time to Duration

That's what this class really is; in fact that's what the first line of
the comment says it is.

This commit does not rename the main files, since those will contain
other time-related classes in a little bit.
Authored by kleines Filmröllchen on 2023-03-13 16:30:34 +01:00; committed by Jelle Raaijmakers
parent 82ddc813d5
commit 213025f210
140 changed files with 634 additions and 628 deletions
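For orientation before the per-file hunks, here is a minimal, hedged sketch of how call sites read after the rename. It only uses operations that appear in the hunks below (from_nanoseconds, now_monotonic, zero, to_milliseconds); the header name AK/Time.h and the helper function itself are assumptions for illustration, since the commit message says the files are not renamed yet.

    #include <AK/Format.h>
    #include <AK/Time.h>

    // Hypothetical helper, not part of this commit: build a timestamp and measure an elapsed span.
    static void print_timing_example()
    {
        auto start = Duration::now_monotonic();                      // was Time::now_monotonic()
        auto timestamp = Duration::from_nanoseconds(1'500'000'000);  // was Time::from_nanoseconds()
        dbgln("sample timestamp: {}ms", timestamp.to_milliseconds());
        auto elapsed = Duration::now_monotonic() - start;            // Duration - Duration -> Duration
        dbgln("elapsed: {}ms", elapsed.to_milliseconds());
    }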

View file

@@ -31,9 +31,9 @@ public:
// Returns the timestamp of the keyframe that was seeked to.
// The value is `Optional` to allow the demuxer to decide not to seek so that it can keep its position
// in the case that the timestamp is closer to the current time than the nearest keyframe.
virtual DecoderErrorOr<Optional<Time>> seek_to_most_recent_keyframe(Track track, Time timestamp, Optional<Time> earliest_available_sample = OptionalNone()) = 0;
virtual DecoderErrorOr<Optional<Duration>> seek_to_most_recent_keyframe(Track track, Duration timestamp, Optional<Duration> earliest_available_sample = OptionalNone()) = 0;
virtual DecoderErrorOr<Time> duration() = 0;
virtual DecoderErrorOr<Duration> duration() = 0;
protected:
virtual DecoderErrorOr<NonnullOwnPtr<Sample>> get_next_sample_for_track(Track track) = 0;
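The Optional return documented in the comment above lets a caller distinguish "jumped back to a keyframe" from "kept the current position". A hedged sketch of a caller, mirroring the PlaybackManager call site further down in this commit; the local variable names here are placeholders, not names from the diff.

    // `demuxer`, `track`, `target`, `earliest_buffered_sample` and `last_presented_media_time` are hypothetical locals.
    auto keyframe_timestamp = TRY(demuxer.seek_to_most_recent_keyframe(track, target, earliest_buffered_sample));
    if (keyframe_timestamp.has_value()) {
        // The demuxer moved; resume presentation from the keyframe's timestamp.
        last_presented_media_time = keyframe_timestamp.value();
    } else {
        // The demuxer stayed put: its position was already closer to `target` than the nearest keyframe.
    }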

View file

@@ -32,11 +32,11 @@ public:
void set_writing_app(DeprecatedString writing_app) { m_writing_app = move(writing_app); }
Optional<double> duration_unscaled() const { return m_duration_unscaled; }
void set_duration_unscaled(double duration) { m_duration_unscaled.emplace(duration); }
Optional<Time> duration() const
Optional<Duration> duration() const
{
if (!duration_unscaled().has_value())
return {};
return Time::from_nanoseconds(static_cast<i64>(static_cast<double>(timestamp_scale()) * duration_unscaled().value()));
return Duration::from_nanoseconds(static_cast<i64>(static_cast<double>(timestamp_scale()) * duration_unscaled().value()));
}
private:
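A worked example of the scaling above, assuming the Matroska default TimestampScale of 1,000,000 nanoseconds per segment tick: an unscaled Duration element of 240,000 ticks yields Duration::from_nanoseconds(240'000'000'000), i.e. a 240-second (four-minute) segment.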
@@ -167,8 +167,8 @@ public:
u64 track_number() const { return m_track_number; }
void set_track_number(u64 track_number) { m_track_number = track_number; }
Time timestamp() const { return m_timestamp; }
void set_timestamp(Time timestamp) { m_timestamp = timestamp; }
Duration timestamp() const { return m_timestamp; }
void set_timestamp(Duration timestamp) { m_timestamp = timestamp; }
bool only_keyframes() const { return m_only_keyframes; }
void set_only_keyframes(bool only_keyframes) { m_only_keyframes = only_keyframes; }
bool invisible() const { return m_invisible; }
@@ -185,7 +185,7 @@ public:
private:
u64 m_track_number { 0 };
Time m_timestamp { Time::zero() };
Duration m_timestamp { Duration::zero() };
bool m_only_keyframes { false };
bool m_invisible { false };
Lacing m_lacing { None };
@@ -195,11 +195,11 @@ private:
class Cluster {
public:
Time timestamp() const { return m_timestamp; }
void set_timestamp(Time timestamp) { m_timestamp = timestamp; }
Duration timestamp() const { return m_timestamp; }
void set_timestamp(Duration timestamp) { m_timestamp = timestamp; }
private:
Time m_timestamp { Time::zero() };
Duration m_timestamp { Duration::zero() };
};
class CueTrackPosition {
@@ -219,14 +219,14 @@ private:
class CuePoint {
public:
Time timestamp() const { return m_timestamp; }
void set_timestamp(Time timestamp) { m_timestamp = timestamp; }
Duration timestamp() const { return m_timestamp; }
void set_timestamp(Duration timestamp) { m_timestamp = timestamp; }
OrderedHashMap<u64, CueTrackPosition>& track_positions() { return m_track_positions; }
OrderedHashMap<u64, CueTrackPosition> const& track_positions() const { return m_track_positions; }
Optional<CueTrackPosition const&> position_for_track(u64 track_number) const { return m_track_positions.get(track_number); }
private:
Time m_timestamp = Time::min();
Duration m_timestamp = Duration::min();
OrderedHashMap<u64, CueTrackPosition> m_track_positions;
};

View file

@@ -71,7 +71,7 @@ DecoderErrorOr<MatroskaDemuxer::TrackStatus*> MatroskaDemuxer::get_track_status(
return &m_track_statuses.get(track).release_value();
}
DecoderErrorOr<Optional<Time>> MatroskaDemuxer::seek_to_most_recent_keyframe(Track track, Time timestamp, Optional<Time> earliest_available_sample)
DecoderErrorOr<Optional<Duration>> MatroskaDemuxer::seek_to_most_recent_keyframe(Track track, Duration timestamp, Optional<Duration> earliest_available_sample)
{
// Removing the track status will cause us to start from the beginning.
if (timestamp.is_zero()) {
@@ -113,10 +113,10 @@ DecoderErrorOr<NonnullOwnPtr<Sample>> MatroskaDemuxer::get_next_sample_for_track
return make<VideoSample>(status.block->frame(status.frame_index++), cicp, status.block->timestamp());
}
DecoderErrorOr<Time> MatroskaDemuxer::duration()
DecoderErrorOr<Duration> MatroskaDemuxer::duration()
{
auto duration = TRY(m_reader.segment_information()).duration();
return duration.value_or(Time::zero());
return duration.value_or(Duration::zero());
}
}

View file

@@ -29,9 +29,9 @@ public:
DecoderErrorOr<Vector<Track>> get_tracks_for_type(TrackType type) override;
DecoderErrorOr<Optional<Time>> seek_to_most_recent_keyframe(Track track, Time timestamp, Optional<Time> earliest_available_sample = OptionalNone()) override;
DecoderErrorOr<Optional<Duration>> seek_to_most_recent_keyframe(Track track, Duration timestamp, Optional<Duration> earliest_available_sample = OptionalNone()) override;
DecoderErrorOr<Time> duration() override;
DecoderErrorOr<Duration> duration() override;
protected:
DecoderErrorOr<NonnullOwnPtr<Sample>> get_next_sample_for_track(Track track) override;

View file

@@ -543,11 +543,11 @@ static DecoderErrorOr<Cluster> parse_cluster(Streamer& streamer, u64 timestamp_s
TRY_READ(streamer.seek_to_position(first_element_position));
Cluster cluster;
cluster.set_timestamp(Time::from_nanoseconds(timestamp.release_value() * timestamp_scale));
cluster.set_timestamp(Duration::from_nanoseconds(timestamp.release_value() * timestamp_scale));
return cluster;
}
static DecoderErrorOr<Block> parse_simple_block(Streamer& streamer, Time cluster_timestamp, u64 segment_timestamp_scale, TrackEntry track)
static DecoderErrorOr<Block> parse_simple_block(Streamer& streamer, Duration cluster_timestamp, u64 segment_timestamp_scale, TrackEntry track)
{
Block block;
@@ -567,11 +567,11 @@ static DecoderErrorOr<Block> parse_simple_block(Streamer& streamer, Time cluster
// of that track. To get the timestamp in nanoseconds of the first frame in a Block or
// SimpleBlock, the formula becomes:
// `( ( Cluster\Timestamp + ( block timestamp * TrackTimestampScale ) ) * TimestampScale ) - CodecDelay`
Time timestamp_offset = Time::from_nanoseconds(static_cast<i64>(static_cast<double>(TRY_READ(streamer.read_i16()) * segment_timestamp_scale) * track.timestamp_scale()));
timestamp_offset -= Time::from_nanoseconds(static_cast<i64>(track.codec_delay()));
Duration timestamp_offset = Duration::from_nanoseconds(static_cast<i64>(static_cast<double>(TRY_READ(streamer.read_i16()) * segment_timestamp_scale) * track.timestamp_scale()));
timestamp_offset -= Duration::from_nanoseconds(static_cast<i64>(track.codec_delay()));
// This is only mentioned in the elements specification under TrackOffset.
// https://www.matroska.org/technical/elements.html
timestamp_offset += Time::from_nanoseconds(static_cast<i64>(track.timestamp_offset()));
timestamp_offset += Duration::from_nanoseconds(static_cast<i64>(track.timestamp_offset()));
block.set_timestamp(cluster_timestamp + timestamp_offset);
auto flags = TRY_READ(streamer.read_octet());
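A worked example of the formula in the comment above, with hypothetical values: a raw block timestamp of 40 ticks from read_i16(), segment TimestampScale = 1,000,000 ns, TrackTimestampScale = 1.0, CodecDelay = 0, TrackOffset = 0, and a cluster timestamp already scaled to 5,000,000,000 ns by parse_cluster above. The offset works out to 40 × 1,000,000 × 1.0 = 40,000,000 ns (40 ms), so the block's absolute timestamp becomes 5.04 seconds.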
@@ -704,7 +704,7 @@ static DecoderErrorOr<CuePoint> parse_cue_point(Streamer& streamer, u64 timestam
// https://github.com/mozilla/nestegg/tree/ec6adfbbf979678e3058cc4695257366f39e290b/src/nestegg.c#L2411-L2416
// https://github.com/mozilla/nestegg/tree/ec6adfbbf979678e3058cc4695257366f39e290b/src/nestegg.c#L1383-L1392
// Other fields that specify Matroska Ticks may also use Segment Ticks instead, who knows :^(
auto timestamp = Time::from_nanoseconds(static_cast<i64>(TRY_READ(streamer.read_u64()) * timestamp_scale));
auto timestamp = Duration::from_nanoseconds(static_cast<i64>(TRY_READ(streamer.read_u64()) * timestamp_scale));
cue_point.set_timestamp(timestamp);
dbgln_if(MATROSKA_DEBUG, "Read CuePoint timestamp {}ms", cue_point.timestamp().to_milliseconds());
break;
@@ -775,7 +775,7 @@ DecoderErrorOr<void> Reader::ensure_cues_are_parsed()
return {};
}
DecoderErrorOr<void> Reader::seek_to_cue_for_timestamp(SampleIterator& iterator, Time const& timestamp)
DecoderErrorOr<void> Reader::seek_to_cue_for_timestamp(SampleIterator& iterator, Duration const& timestamp)
{
auto const& cue_points = MUST(cue_points_for_track(iterator.m_track.track_number())).release_value();
@@ -814,7 +814,7 @@ DecoderErrorOr<void> Reader::seek_to_cue_for_timestamp(SampleIterator& iterator,
return {};
}
static DecoderErrorOr<void> search_clusters_for_keyframe_before_timestamp(SampleIterator& iterator, Time const& timestamp)
static DecoderErrorOr<void> search_clusters_for_keyframe_before_timestamp(SampleIterator& iterator, Duration const& timestamp)
{
#if MATROSKA_DEBUG
size_t inter_frames_count;
@@ -856,7 +856,7 @@ DecoderErrorOr<bool> Reader::has_cues_for_track(u64 track_number)
return m_cues.contains(track_number);
}
DecoderErrorOr<SampleIterator> Reader::seek_to_random_access_point(SampleIterator iterator, Time timestamp)
DecoderErrorOr<SampleIterator> Reader::seek_to_random_access_point(SampleIterator iterator, Duration timestamp)
{
if (TRY(has_cues_for_track(iterator.m_track.track_number()))) {
TRY(seek_to_cue_for_timestamp(iterator, timestamp));
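Summarizing the dispatch above as a hedged sketch: the cue index is preferred when present, and the linear cluster scan declared earlier in this file is presumably the fallback (the else branch is not visible in this hunk).

    if (TRY(has_cues_for_track(iterator.m_track.track_number())))
        TRY(seek_to_cue_for_timestamp(iterator, timestamp));                      // indexed seek via CuePoints
    else
        TRY(search_clusters_for_keyframe_before_timestamp(iterator, timestamp));  // assumed fallback: scan clusters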

View file

@@ -39,7 +39,7 @@ public:
DecoderErrorOr<size_t> track_count();
DecoderErrorOr<SampleIterator> create_sample_iterator(u64 track_number);
DecoderErrorOr<SampleIterator> seek_to_random_access_point(SampleIterator, Time);
DecoderErrorOr<SampleIterator> seek_to_random_access_point(SampleIterator, Duration);
DecoderErrorOr<Optional<Vector<CuePoint> const&>> cue_points_for_track(u64 track_number);
DecoderErrorOr<bool> has_cues_for_track(u64 track_number);
@@ -58,7 +58,7 @@ private:
DecoderErrorOr<void> parse_cues(Streamer&);
DecoderErrorOr<void> ensure_cues_are_parsed();
DecoderErrorOr<void> seek_to_cue_for_timestamp(SampleIterator&, Time const&);
DecoderErrorOr<void> seek_to_cue_for_timestamp(SampleIterator&, Duration const&);
RefPtr<Core::MappedFile> m_mapped_file;
ReadonlyBytes m_data;
@@ -84,7 +84,7 @@ class SampleIterator {
public:
DecoderErrorOr<Block> next_block();
Cluster const& current_cluster() { return *m_current_cluster; }
Optional<Time> const& last_timestamp() { return m_last_timestamp; }
Optional<Duration> const& last_timestamp() { return m_last_timestamp; }
private:
friend class Reader;
@@ -108,7 +108,7 @@ private:
// Must always point to an element ID or the end of the stream.
size_t m_position { 0 };
Optional<Time> m_last_timestamp;
Optional<Duration> m_last_timestamp;
Optional<Cluster> m_current_cluster;
};

View file

@@ -107,12 +107,12 @@ void PlaybackManager::pause_playback()
TRY_OR_FATAL_ERROR(m_playback_handler->pause());
}
Time PlaybackManager::current_playback_time()
Duration PlaybackManager::current_playback_time()
{
return m_playback_handler->current_time();
}
Time PlaybackManager::duration()
Duration PlaybackManager::duration()
{
auto duration_result = ({
auto demuxer_locker = Threading::MutexLocker(m_demuxer_mutex);
@@ -179,12 +179,12 @@ void PlaybackManager::timer_callback()
TRY_OR_FATAL_ERROR(m_playback_handler->do_timed_state_update());
}
void PlaybackManager::seek_to_timestamp(Time target_timestamp, SeekMode seek_mode)
void PlaybackManager::seek_to_timestamp(Duration target_timestamp, SeekMode seek_mode)
{
TRY_OR_FATAL_ERROR(m_playback_handler->seek(target_timestamp, seek_mode));
}
Optional<Time> PlaybackManager::seek_demuxer_to_most_recent_keyframe(Time timestamp, Optional<Time> earliest_available_sample)
Optional<Duration> PlaybackManager::seek_demuxer_to_most_recent_keyframe(Duration timestamp, Optional<Duration> earliest_available_sample)
{
auto result = m_demuxer->seek_to_most_recent_keyframe(m_selected_video_track, timestamp, move(earliest_available_sample));
if (result.is_error())
@@ -211,13 +211,13 @@ void PlaybackManager::set_state_update_timer(int delay_ms)
void PlaybackManager::restart_playback()
{
seek_to_timestamp(Time::zero());
seek_to_timestamp(Duration::zero());
}
void PlaybackManager::decode_and_queue_one_sample()
{
#if PLAYBACK_MANAGER_DEBUG
auto start_time = Time::now_monotonic();
auto start_time = Duration::now_monotonic();
#endif
FrameQueueItem item_to_enqueue;
@@ -326,12 +326,12 @@ void PlaybackManager::decode_and_queue_one_sample()
m_buffer_is_full.exchange(false);
}
Time PlaybackManager::PlaybackStateHandler::current_time() const
Duration PlaybackManager::PlaybackStateHandler::current_time() const
{
return m_manager.m_last_present_in_media_time;
}
ErrorOr<void> PlaybackManager::PlaybackStateHandler::seek(Time target_timestamp, SeekMode seek_mode)
ErrorOr<void> PlaybackManager::PlaybackStateHandler::seek(Duration target_timestamp, SeekMode seek_mode)
{
return replace_handler_and_delete_this<SeekingStateHandler>(is_playing(), target_timestamp, seek_mode);
}
@@ -408,7 +408,7 @@ public:
private:
ErrorOr<void> on_enter() override
{
m_last_present_in_real_time = Time::now_monotonic();
m_last_present_in_real_time = Duration::now_monotonic();
return do_timed_state_update();
}
@@ -427,9 +427,9 @@ private:
return replace_handler_and_delete_this<BufferingStateHandler>(true);
}
Time current_time() const override
Duration current_time() const override
{
return manager().m_last_present_in_media_time + (Time::now_monotonic() - m_last_present_in_real_time);
return manager().m_last_present_in_media_time + (Duration::now_monotonic() - m_last_present_in_real_time);
}
ErrorOr<void> do_timed_state_update() override
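Put differently, while playing the reported position is the media time of the last presented frame plus the wall-clock time elapsed since it was presented: for example, if the last frame went out at media time 2.000 s and 30 ms of monotonic time have passed, current_time() reports 2.030 s.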
@@ -498,7 +498,7 @@ private:
// If we have a frame, send it for presentation.
if (should_present_frame) {
auto now = Time::now_monotonic();
auto now = Duration::now_monotonic();
manager().m_last_present_in_media_time += now - m_last_present_in_real_time;
m_last_present_in_real_time = now;
@@ -520,7 +520,7 @@ private:
return {};
}
Time m_last_present_in_real_time = Time::zero();
Duration m_last_present_in_real_time = Duration::zero();
};
class PlaybackManager::PausedStateHandler : public PlaybackManager::PlaybackStateHandler {
@@ -574,7 +574,7 @@ class PlaybackManager::BufferingStateHandler : public PlaybackManager::ResumingS
class PlaybackManager::SeekingStateHandler : public PlaybackManager::ResumingStateHandler {
public:
SeekingStateHandler(PlaybackManager& manager, bool playing, Time target_timestamp, SeekMode seek_mode)
SeekingStateHandler(PlaybackManager& manager, bool playing, Duration target_timestamp, SeekMode seek_mode)
: ResumingStateHandler(manager, playing)
, m_target_timestamp(target_timestamp)
, m_seek_mode(seek_mode)
@@ -655,14 +655,14 @@ private:
StringView name() override { return "Seeking"sv; }
ErrorOr<void> seek(Time target_timestamp, SeekMode seek_mode) override
ErrorOr<void> seek(Duration target_timestamp, SeekMode seek_mode) override
{
m_target_timestamp = target_timestamp;
m_seek_mode = seek_mode;
return on_enter();
}
Time current_time() const override
Duration current_time() const override
{
return m_target_timestamp;
}
@@ -676,7 +676,7 @@ private:
PlaybackState get_state() const override { return PlaybackState::Seeking; }
Time m_target_timestamp { Time::zero() };
Duration m_target_timestamp { Duration::zero() };
SeekMode m_seek_mode { SeekMode::Accurate };
};
@@ -700,7 +700,7 @@ private:
{
// When Stopped, the decoder thread will be waiting for a signal to start its loop going again.
manager().m_decode_wait_condition.broadcast();
return replace_handler_and_delete_this<SeekingStateHandler>(true, Time::zero(), SeekMode::Fast);
return replace_handler_and_delete_this<SeekingStateHandler>(true, Duration::zero(), SeekMode::Fast);
}
bool is_playing() const override { return false; };
PlaybackState get_state() const override { return PlaybackState::Stopped; }

View file

@@ -27,30 +27,30 @@ class FrameQueueItem {
public:
FrameQueueItem()
: m_data(Empty())
, m_timestamp(Time::zero())
, m_timestamp(Duration::zero())
{
}
static constexpr Time no_timestamp = Time::min();
static constexpr Duration no_timestamp = Duration::min();
enum class Type {
Frame,
Error,
};
static FrameQueueItem frame(RefPtr<Gfx::Bitmap> bitmap, Time timestamp)
static FrameQueueItem frame(RefPtr<Gfx::Bitmap> bitmap, Duration timestamp)
{
return FrameQueueItem(move(bitmap), timestamp);
}
static FrameQueueItem error_marker(DecoderError&& error, Time timestamp)
static FrameQueueItem error_marker(DecoderError&& error, Duration timestamp)
{
return FrameQueueItem(move(error), timestamp);
}
bool is_frame() const { return m_data.has<RefPtr<Gfx::Bitmap>>(); }
RefPtr<Gfx::Bitmap> bitmap() const { return m_data.get<RefPtr<Gfx::Bitmap>>(); }
Time timestamp() const { return m_timestamp; }
Duration timestamp() const { return m_timestamp; }
bool is_error() const { return m_data.has<DecoderError>(); }
DecoderError const& error() const { return m_data.get<DecoderError>(); }
@@ -71,21 +71,21 @@ public:
}
private:
FrameQueueItem(RefPtr<Gfx::Bitmap> bitmap, Time timestamp)
FrameQueueItem(RefPtr<Gfx::Bitmap> bitmap, Duration timestamp)
: m_data(move(bitmap))
, m_timestamp(timestamp)
{
VERIFY(m_timestamp != no_timestamp);
}
FrameQueueItem(DecoderError&& error, Time timestamp)
FrameQueueItem(DecoderError&& error, Duration timestamp)
: m_data(move(error))
, m_timestamp(timestamp)
{
}
Variant<Empty, RefPtr<Gfx::Bitmap>, DecoderError> m_data { Empty() };
Time m_timestamp { no_timestamp };
Duration m_timestamp { no_timestamp };
};
static constexpr size_t frame_buffer_count = 4;
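A hedged usage sketch of the queue item above, using only the factory function and accessors visible in this hunk; the bitmap variable is a placeholder, not a name from this commit.

    // `bitmap` is a hypothetical RefPtr<Gfx::Bitmap> obtained from the decoder.
    auto item = FrameQueueItem::frame(bitmap, Duration::from_nanoseconds(40'000'000));
    VERIFY(item.is_frame());
    VERIFY(item.timestamp() != FrameQueueItem::no_timestamp);
    dbgln("decoded frame for presentation at {}ms", item.timestamp().to_milliseconds());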
@@ -122,7 +122,7 @@ public:
void resume_playback();
void pause_playback();
void restart_playback();
void seek_to_timestamp(Time, SeekMode = DEFAULT_SEEK_MODE);
void seek_to_timestamp(Duration, SeekMode = DEFAULT_SEEK_MODE);
bool is_playing() const
{
return m_playback_handler->is_playing();
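Hypothetical call sites for the public seek API above; `manager` is a placeholder, and both calls rely on the DEFAULT_SEEK_MODE default argument rather than spelling out a SeekMode.

    manager.seek_to_timestamp(Duration::zero());                             // e.g. restart playback from the beginning
    manager.seek_to_timestamp(Duration::from_nanoseconds(30'000'000'000));   // jump to the 30-second mark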
@@ -134,8 +134,8 @@ public:
u64 number_of_skipped_frames() const { return m_skipped_frames; }
Time current_playback_time();
Time duration();
Duration current_playback_time();
Duration duration();
Function<void(RefPtr<Gfx::Bitmap>)> on_video_frame;
Function<void()> on_playback_state_change;
@@ -158,7 +158,7 @@ private:
void timer_callback();
// This must be called with m_demuxer_mutex locked!
Optional<Time> seek_demuxer_to_most_recent_keyframe(Time timestamp, Optional<Time> earliest_available_sample = OptionalNone());
Optional<Duration> seek_demuxer_to_most_recent_keyframe(Duration timestamp, Optional<Duration> earliest_available_sample = OptionalNone());
Optional<FrameQueueItem> dequeue_one_frame();
void set_state_update_timer(int delay_ms);
@@ -172,7 +172,7 @@ private:
void dispatch_state_change();
void dispatch_fatal_error(Error);
Time m_last_present_in_media_time = Time::zero();
Duration m_last_present_in_media_time = Duration::zero();
NonnullOwnPtr<Demuxer> m_demuxer;
Threading::Mutex m_demuxer_mutex;
@@ -212,10 +212,10 @@ private:
virtual PlaybackState get_state() const = 0;
virtual ErrorOr<void> pause() { return {}; };
virtual ErrorOr<void> buffer() { return {}; };
virtual ErrorOr<void> seek(Time target_timestamp, SeekMode);
virtual ErrorOr<void> seek(Duration target_timestamp, SeekMode);
virtual ErrorOr<void> stop();
virtual Time current_time() const;
virtual Duration current_time() const;
virtual ErrorOr<void> do_timed_state_update() { return {}; };

View file

@@ -21,7 +21,7 @@ public:
class VideoSample : public Sample {
public:
VideoSample(ReadonlyBytes data, CodingIndependentCodePoints container_cicp, Time timestamp)
VideoSample(ReadonlyBytes data, CodingIndependentCodePoints container_cicp, Duration timestamp)
: m_data(data)
, m_container_cicp(container_cicp)
, m_timestamp(timestamp)
@@ -31,12 +31,12 @@ public:
bool is_video_sample() const override { return true; }
ReadonlyBytes const& data() const { return m_data; }
CodingIndependentCodePoints container_cicp() const { return m_container_cicp; }
Time timestamp() const { return m_timestamp; }
Duration timestamp() const { return m_timestamp; }
private:
ReadonlyBytes m_data;
CodingIndependentCodePoints m_container_cicp;
Time m_timestamp;
Duration m_timestamp;
};
// FIXME: Add samples for audio, subtitles, etc.

View file

@@ -22,7 +22,7 @@ enum class TrackType : u32 {
class Track {
struct VideoData {
Time duration {};
Duration duration {};
u64 pixel_width { 0 };
u64 pixel_height { 0 };
};