Upstream version 7.36.149.0
diff --git a/src/media/filters/source_buffer_stream.cc b/src/media/filters/source_buffer_stream.cc
index bf867ca..9247a25 100644
--- a/src/media/filters/source_buffer_stream.cc
+++ b/src/media/filters/source_buffer_stream.cc
 #include "base/bind.h"
 #include "base/debug/trace_event.h"
 #include "base/logging.h"
+#include "media/base/audio_splicer.h"
 
 namespace media {
 
+typedef StreamParser::BufferQueue BufferQueue;
+
 // Buffers with the same timestamp are only allowed under certain conditions.
-// Video: Allowed when the previous frame and current frame are NOT keyframes.
-//        This is the situation for VP8 Alt-Ref frames.
-// Otherwise: Allowed in all situations except where a non-keyframe is followed
-//            by a keyframe.
+// More precisely, it is allowed in all situations except when the previous
+// frame is not a key frame and the current one is a key frame.
+// Examples of situations where DTS of two consecutive frames can be equal:
+// - Video: VP8 Alt-Ref frames.
+// - Video: IPBPBP...: DTS for I frame and for P frame can be equal.
+// - Text track cues that start at the same time.
 // Returns true if |prev_is_keyframe| and |current_is_keyframe| indicate a
 // same timestamp situation that is allowed. False is returned otherwise.
 static bool AllowSameTimestamp(
-    bool prev_is_keyframe, bool current_is_keyframe, bool is_video) {
-  if (is_video)
-    return !prev_is_keyframe && !current_is_keyframe;
-
+    bool prev_is_keyframe, bool current_is_keyframe,
+    SourceBufferStream::Type type) {
   return prev_is_keyframe || !current_is_keyframe;
 }
 
+// Returns the config ID of |buffer| if |buffer| has no splice buffers or
+// |index| is out of range.  Otherwise returns the config ID for the fade out
+// preroll buffer at position |index|.
+static int GetConfigId(StreamParserBuffer* buffer, size_t index) {
+  return index < buffer->get_splice_buffers().size()
+             ? buffer->get_splice_buffers()[index]->GetConfigId()
+             : buffer->GetConfigId();
+}
+
 // Helper class representing a range of buffered data. All buffers in a
 // SourceBufferRange are ordered sequentially in presentation order with no
 // gaps.
 class SourceBufferRange {
  public:
-  typedef std::deque<scoped_refptr<StreamParserBuffer> > BufferQueue;
-
   // Returns the maximum distance in time between any buffer seen in this
   // stream. Used to estimate the duration of a buffer if its duration is not
   // known.
@@ -44,7 +54,7 @@ class SourceBufferRange {
   // empty and the front of |new_buffers| must be a keyframe.
   // |media_segment_start_time| refers to the starting timestamp for the media
   // segment to which these buffers belong.
-  SourceBufferRange(bool is_video,
+  SourceBufferRange(SourceBufferStream::Type type,
                     const BufferQueue& new_buffers,
                     base::TimeDelta media_segment_start_time,
                     const InterbufferDistanceCB& interbuffer_distance_cb);
@@ -94,7 +104,9 @@ class SourceBufferRange {
   // were removed.
   // |deleted_buffers| contains the buffers that were deleted from this range,
   // starting at the buffer that had been at |next_buffer_index_|.
-  void TruncateAt(base::TimeDelta timestamp,
+  // Returns true if everything in the range was deleted. Otherwise
+  // returns false.
+  bool TruncateAt(base::TimeDelta timestamp,
                   BufferQueue* deleted_buffers, bool is_exclusive);
   // Deletes all buffers in range.
   void DeleteAll(BufferQueue* deleted_buffers);
@@ -185,6 +197,11 @@ class SourceBufferRange {
   // sequence after |buffers_.back()|, false otherwise.
   bool IsNextInSequence(base::TimeDelta timestamp, bool is_keyframe) const;
 
+  // Adds all buffers which overlap [start, end) to the end of |buffers|.
+  // Returns false if no buffers exist in the range, true otherwise.
+  bool GetBuffersInRange(base::TimeDelta start, base::TimeDelta end,
+                         BufferQueue* buffers);
+
   int size_in_bytes() const { return size_in_bytes_; }
 
  private:
@@ -213,7 +230,9 @@ class SourceBufferRange {
 
   // Helper method to delete buffers in |buffers_| starting at
   // |starting_point|, an iterator in |buffers_|.
-  void TruncateAt(const BufferQueue::iterator& starting_point,
+  // Returns true if everything in the range was removed. Returns
+  // false if the range still contains buffers.
+  bool TruncateAt(const BufferQueue::iterator& starting_point,
                   BufferQueue* deleted_buffers);
 
   // Frees the buffers in |buffers_| from [|start_point|,|ending_point|) and
@@ -228,8 +247,8 @@ class SourceBufferRange {
   // Returns the approximate duration of a buffer in this range.
   base::TimeDelta GetApproximateDuration() const;
 
-  // True if this object stores video data.
-  bool is_video_;
+  // Type of this stream.
+  const SourceBufferStream::Type type_;
 
   // An ordered list of buffers in this range.
   BufferQueue buffers_;
@@ -281,11 +300,16 @@ static bool IsRangeListSorted(
   return true;
 }
 
-// Comparison function for two Buffers based on timestamp.
-static bool BufferComparator(
-    const scoped_refptr<media::StreamParserBuffer>& first,
-    const scoped_refptr<media::StreamParserBuffer>& second) {
-  return first->GetDecodeTimestamp() < second->GetDecodeTimestamp();
+// Comparison operators for std::upper_bound() and std::lower_bound().
+static bool CompareTimeDeltaToStreamParserBuffer(
+    const base::TimeDelta& decode_timestamp,
+    const scoped_refptr<media::StreamParserBuffer>& buffer) {
+  return decode_timestamp < buffer->GetDecodeTimestamp();
+}
+static bool CompareStreamParserBufferToTimeDelta(
+    const scoped_refptr<media::StreamParserBuffer>& buffer,
+    const base::TimeDelta& decode_timestamp) {
+  return buffer->GetDecodeTimestamp() < decode_timestamp;
 }
 
 // Returns an estimate of how far from the beginning or end of a range a buffer
@@ -308,24 +332,16 @@ static base::TimeDelta kSeekToStartFudgeRoom() {
   return base::TimeDelta::FromMilliseconds(1000);
 }
 // The maximum amount of data in bytes the stream will keep in memory.
-#if defined(GOOGLE_TV)
-// In Google TV, set the size of the buffer to 1 min because of
-// the limited memory of the embedded system.
-// 2MB: approximately 1 minutes of 256Kbps content.
-// 30MB: approximately 1 minutes of 4Mbps content.
-static int kDefaultAudioMemoryLimit = 2 * 1024 * 1024;
-static int kDefaultVideoMemoryLimit = 30 * 1024 * 1024;
-#else
 // 12MB: approximately 5 minutes of 320Kbps content.
 // 150MB: approximately 5 minutes of 4Mbps content.
 static int kDefaultAudioMemoryLimit = 12 * 1024 * 1024;
 static int kDefaultVideoMemoryLimit = 150 * 1024 * 1024;
-#endif
 
 namespace media {
 
 SourceBufferStream::SourceBufferStream(const AudioDecoderConfig& audio_config,
-                                       const LogCB& log_cb)
+                                       const LogCB& log_cb,
+                                       bool splice_frames_enabled)
     : log_cb_(log_cb),
       current_config_index_(0),
       append_config_index_(0),
@@ -341,13 +357,17 @@ SourceBufferStream::SourceBufferStream(const AudioDecoderConfig& audio_config,
       last_output_buffer_timestamp_(kNoTimestamp()),
       max_interbuffer_distance_(kNoTimestamp()),
       memory_limit_(kDefaultAudioMemoryLimit),
-      config_change_pending_(false) {
+      config_change_pending_(false),
+      splice_buffers_index_(0),
+      pre_splice_complete_(false),
+      splice_frames_enabled_(splice_frames_enabled) {
   DCHECK(audio_config.IsValidConfig());
   audio_configs_.push_back(audio_config);
 }
 
 SourceBufferStream::SourceBufferStream(const VideoDecoderConfig& video_config,
-                                       const LogCB& log_cb)
+                                       const LogCB& log_cb,
+                                       bool splice_frames_enabled)
     : log_cb_(log_cb),
       current_config_index_(0),
       append_config_index_(0),
@@ -363,11 +383,38 @@ SourceBufferStream::SourceBufferStream(const VideoDecoderConfig& video_config,
       last_output_buffer_timestamp_(kNoTimestamp()),
       max_interbuffer_distance_(kNoTimestamp()),
       memory_limit_(kDefaultVideoMemoryLimit),
-      config_change_pending_(false) {
+      config_change_pending_(false),
+      splice_buffers_index_(0),
+      pre_splice_complete_(false),
+      splice_frames_enabled_(splice_frames_enabled) {
   DCHECK(video_config.IsValidConfig());
   video_configs_.push_back(video_config);
 }
 
+SourceBufferStream::SourceBufferStream(const TextTrackConfig& text_config,
+                                       const LogCB& log_cb,
+                                       bool splice_frames_enabled)
+    : log_cb_(log_cb),
+      current_config_index_(0),
+      append_config_index_(0),
+      text_track_config_(text_config),
+      seek_pending_(false),
+      end_of_stream_(false),
+      seek_buffer_timestamp_(kNoTimestamp()),
+      selected_range_(NULL),
+      media_segment_start_time_(kNoTimestamp()),
+      range_for_next_append_(ranges_.end()),
+      new_media_segment_(false),
+      last_appended_buffer_timestamp_(kNoTimestamp()),
+      last_appended_buffer_is_keyframe_(false),
+      last_output_buffer_timestamp_(kNoTimestamp()),
+      max_interbuffer_distance_(kNoTimestamp()),
+      memory_limit_(kDefaultAudioMemoryLimit),
+      config_change_pending_(false),
+      splice_buffers_index_(0),
+      pre_splice_complete_(false),
+      splice_frames_enabled_(splice_frames_enabled) {}
+
 SourceBufferStream::~SourceBufferStream() {
   while (!ranges_.empty()) {
     delete ranges_.front();
@@ -396,14 +443,14 @@ void SourceBufferStream::OnNewMediaSegment(
   }
 }
 
-bool SourceBufferStream::Append(
-    const SourceBufferStream::BufferQueue& buffers) {
-  TRACE_EVENT2("mse", "SourceBufferStream::Append",
+bool SourceBufferStream::Append(const BufferQueue& buffers) {
+  TRACE_EVENT2("media", "SourceBufferStream::Append",
                "stream type", GetStreamTypeName(),
                "buffers to append", buffers.size());
 
   DCHECK(!buffers.empty());
   DCHECK(media_segment_start_time_ != kNoTimestamp());
+  DCHECK(media_segment_start_time_ <= buffers.front()->GetDecodeTimestamp());
   DCHECK(!end_of_stream_);
 
   // New media segments must begin with a keyframe.
@@ -447,7 +494,8 @@ bool SourceBufferStream::Append(
     last_appended_buffer_timestamp_ = buffers.back()->GetDecodeTimestamp();
     last_appended_buffer_is_keyframe_ = buffers.back()->IsKeyframe();
   } else {
-    base::TimeDelta new_range_start_time = media_segment_start_time_;
+    base::TimeDelta new_range_start_time = std::min(
+        media_segment_start_time_, buffers.front()->GetDecodeTimestamp());
     const BufferQueue* buffers_for_new_range = &buffers;
     BufferQueue trimmed_buffers;
 
@@ -480,7 +528,7 @@ bool SourceBufferStream::Append(
 
     range_for_next_append_ =
         AddToRanges(new SourceBufferRange(
-            is_video(), *buffers_for_new_range, new_range_start_time,
+            GetType(), *buffers_for_new_range, new_range_start_time,
             base::Bind(&SourceBufferStream::GetMaxInterbufferDistance,
                        base::Unretained(this))));
     last_appended_buffer_timestamp_ =
@@ -588,15 +636,10 @@ void SourceBufferStream::RemoveInternal(
         SetSelectedRange(new_range);
     }
 
-    // If the current range now is completely covered by the removal
-    // range then we want to delete it.
-    bool delete_range = start < range->GetStartTimestamp() ||
-        (!is_exclusive && start == range->GetStartTimestamp());
-
     // Truncate the current range so that it only contains data before
     // the removal range.
     BufferQueue saved_buffers;
-    range->TruncateAt(start, &saved_buffers, is_exclusive);
+    bool delete_range = range->TruncateAt(start, &saved_buffers, is_exclusive);
 
     // Check to see if the current playback position was removed and
     // update the selected range appropriately.
@@ -649,6 +692,9 @@ void SourceBufferStream::ResetSeekState() {
   track_buffer_.clear();
   config_change_pending_ = false;
   last_output_buffer_timestamp_ = kNoTimestamp();
+  splice_buffers_index_ = 0;
+  splice_buffer_ = NULL;
+  pre_splice_complete_ = false;
 }
 
 bool SourceBufferStream::ShouldSeekToStartOfBuffered(
@@ -680,7 +726,7 @@ bool SourceBufferStream::IsMonotonicallyIncreasing(
 
       if (current_timestamp == prev_timestamp &&
           !AllowSameTimestamp(prev_is_keyframe, current_is_keyframe,
-                              is_video())) {
+                              GetType())) {
         MEDIA_LOG(log_cb_) << "Unexpected combination of buffers with the"
                            << " same timestamp detected at "
                            << current_timestamp.InSecondsF();
@@ -699,7 +745,7 @@ bool SourceBufferStream::IsNextTimestampValid(
   return (last_appended_buffer_timestamp_ != next_timestamp) ||
       new_media_segment_ ||
       AllowSameTimestamp(last_appended_buffer_is_keyframe_, next_is_keyframe,
-                         is_video());
+                         GetType());
 }
 
 
@@ -822,7 +868,7 @@ int SourceBufferStream::GetRemovalRange(
 
 int SourceBufferStream::FreeBuffers(int total_bytes_to_free,
                                     bool reverse_direction) {
-  TRACE_EVENT2("mse", "SourceBufferStream::FreeBuffers",
+  TRACE_EVENT2("media", "SourceBufferStream::FreeBuffers",
                "total bytes to free", total_bytes_to_free,
                "reverse direction", reverse_direction);
 
@@ -862,7 +908,7 @@ int SourceBufferStream::FreeBuffers(int total_bytes_to_free,
       DCHECK(!new_range_for_append);
       // Create a new range containing these buffers.
       new_range_for_append = new SourceBufferRange(
-          is_video(), buffers, kNoTimestamp(),
+          GetType(), buffers, kNoTimestamp(),
           base::Bind(&SourceBufferStream::GetMaxInterbufferDistance,
                      base::Unretained(this)));
       range_for_next_append_ = ranges_.end();
@@ -927,6 +973,12 @@ void SourceBufferStream::PrepareRangesForNextAppend(
     }
   }
 
+  // Handle splices between the existing buffers and the new buffers.  If a
+  // splice is generated the timestamp and duration of the first buffer in
+  // |new_buffers| will be modified.
+  if (splice_frames_enabled_)
+    GenerateSpliceFrame(new_buffers);
+
   base::TimeDelta prev_timestamp = last_appended_buffer_timestamp_;
   bool prev_is_keyframe = last_appended_buffer_is_keyframe_;
   base::TimeDelta next_timestamp = new_buffers.front()->GetDecodeTimestamp();
@@ -942,8 +994,15 @@ void SourceBufferStream::PrepareRangesForNextAppend(
   // timestamp situation. This prevents the first buffer in the current append
   // from deleting the last buffer in the previous append if both buffers
   // have the same timestamp.
-  bool is_exclusive = (prev_timestamp == next_timestamp) &&
-      AllowSameTimestamp(prev_is_keyframe, next_is_keyframe, is_video());
+  //
+  // The delete range should never be exclusive if a splice frame was generated
+  // because we don't generate splice frames for same timestamp situations.
+  DCHECK(new_buffers.front()->splice_timestamp() !=
+         new_buffers.front()->timestamp());
+  const bool is_exclusive =
+      new_buffers.front()->get_splice_buffers().empty() &&
+      prev_timestamp == next_timestamp &&
+      AllowSameTimestamp(prev_is_keyframe, next_is_keyframe, GetType());
 
   // Delete the buffers that |new_buffers| overlaps.
   base::TimeDelta start = new_buffers.front()->GetDecodeTimestamp();
@@ -1053,8 +1112,15 @@ void SourceBufferStream::OnSetDuration(base::TimeDelta duration) {
 
   // Need to partially truncate this range.
   if ((*itr)->GetStartTimestamp() < duration) {
-    (*itr)->TruncateAt(duration, NULL, false);
-    ++itr;
+    bool delete_range = (*itr)->TruncateAt(duration, NULL, false);
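+    // If the truncation removed the selected range's current playback
+    // position, clear the selection.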
+    if ((*itr == selected_range_) && !selected_range_->HasNextBufferPosition())
+      SetSelectedRange(NULL);
+
+    if (delete_range) {
+      DeleteAndRemoveRange(&itr);
+    } else {
+      ++itr;
+    }
   }
 
   // Delete all ranges that begin after |duration|.
@@ -1069,17 +1135,84 @@ void SourceBufferStream::OnSetDuration(base::TimeDelta duration) {
 
 SourceBufferStream::Status SourceBufferStream::GetNextBuffer(
     scoped_refptr<StreamParserBuffer>* out_buffer) {
+  if (!splice_buffer_) {
+    const SourceBufferStream::Status status = GetNextBufferInternal(out_buffer);
+
+    // If GetNextBufferInternal() failed or there's no fade out preroll, just
+    // return; there's nothing else to do.
+    if (status != SourceBufferStream::kSuccess ||
+        (*out_buffer)->get_splice_buffers().empty()) {
+      return status;
+    }
+
+    // Fall through into splice buffer processing.
+    splice_buffers_index_ = 0;
+    splice_buffer_.swap(*out_buffer);
+  }
+
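+  // Handing out a splice proceeds in three phases: each fade out preroll
+  // buffer is returned in turn (issuing config changes as needed), then a
+  // config change is forced for the fade in, and finally the post-splice
+  // buffer itself is returned.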
+  DCHECK(splice_buffer_);
+  const BufferQueue& splice_buffers = splice_buffer_->get_splice_buffers();
+  const size_t last_splice_buffer_index = splice_buffers.size() - 1;
+
+  // Are there any splice buffers left to hand out?  The last buffer should be
+  // handed out separately since it represents the first post-splice buffer.
+  if (splice_buffers_index_ < last_splice_buffer_index) {
+    // Account for config changes which occur between fade out buffers.
+    if (current_config_index_ !=
+        splice_buffers[splice_buffers_index_]->GetConfigId()) {
+      config_change_pending_ = true;
+      DVLOG(1) << "Config change (splice buffer config ID does not match).";
+      return SourceBufferStream::kConfigChange;
+    }
+
+    // Every pre-splice buffer must have the same splice_timestamp().
+    DCHECK(splice_buffer_->splice_timestamp() ==
+           splice_buffers[splice_buffers_index_]->splice_timestamp());
+
+    *out_buffer = splice_buffers[splice_buffers_index_++];
+    return SourceBufferStream::kSuccess;
+  }
+
+  // Did we hand out the last pre-splice buffer on the previous call?
+  if (!pre_splice_complete_) {
+    DCHECK_EQ(splice_buffers_index_, last_splice_buffer_index);
+    pre_splice_complete_ = true;
+    config_change_pending_ = true;
+    DVLOG(1) << "Config change (forced for fade in of splice frame).";
+    return SourceBufferStream::kConfigChange;
+  }
+
+  // All pre-splice buffers have been handed out and a config change completed,
+  // so hand out the final buffer for fade in.  Because a config change is
+  // always issued prior to handing out this buffer, any changes in config id
+  // have been inherently handled.
+  DCHECK(pre_splice_complete_);
+  DCHECK_EQ(splice_buffers_index_, splice_buffers.size() - 1);
+  DCHECK(splice_buffers.back()->splice_timestamp() == kNoTimestamp());
+  *out_buffer = splice_buffers.back();
+  splice_buffer_ = NULL;
+  splice_buffers_index_ = 0;
+  pre_splice_complete_ = false;
+  return SourceBufferStream::kSuccess;
+}
+
+SourceBufferStream::Status SourceBufferStream::GetNextBufferInternal(
+    scoped_refptr<StreamParserBuffer>* out_buffer) {
   CHECK(!config_change_pending_);
 
   if (!track_buffer_.empty()) {
     DCHECK(!selected_range_);
-    if (track_buffer_.front()->GetConfigId() != current_config_index_) {
+    scoped_refptr<StreamParserBuffer>& next_buffer = track_buffer_.front();
+
+    // If the next buffer is an audio splice frame, the next effective config id
+    // comes from the first splice buffer.
+    if (GetConfigId(next_buffer, 0) != current_config_index_) {
       config_change_pending_ = true;
       DVLOG(1) << "Config change (track buffer config ID does not match).";
       return kConfigChange;
     }
 
-    *out_buffer = track_buffer_.front();
+    *out_buffer = next_buffer;
     track_buffer_.pop_front();
     last_output_buffer_timestamp_ = (*out_buffer)->GetDecodeTimestamp();
 
@@ -1181,6 +1314,13 @@ Ranges<base::TimeDelta> SourceBufferStream::GetBufferedTime() const {
   return ranges;
 }
 
+base::TimeDelta SourceBufferStream::GetBufferedDuration() const {
+  if (ranges_.empty())
+    return base::TimeDelta();
+
+  return ranges_.back()->GetBufferedEndTimestamp();
+}
+
 void SourceBufferStream::MarkEndOfStream() {
   DCHECK(!end_of_stream_);
   end_of_stream_ = true;
@@ -1213,6 +1353,10 @@ const VideoDecoderConfig& SourceBufferStream::GetCurrentVideoDecoderConfig() {
   return video_configs_[current_config_index_];
 }
 
+const TextTrackConfig& SourceBufferStream::GetCurrentTextTrackConfig() {
+  return text_track_config_;
+}
+
 base::TimeDelta SourceBufferStream::GetMaxInterbufferDistance() const {
   if (max_interbuffer_distance_ == kNoTimestamp())
     return base::TimeDelta::FromMilliseconds(kDefaultBufferDurationInMs);
@@ -1229,21 +1373,6 @@ bool SourceBufferStream::UpdateAudioConfig(const AudioDecoderConfig& config) {
     return false;
   }
 
-  if (audio_configs_[0].samples_per_second() != config.samples_per_second()) {
-    MEDIA_LOG(log_cb_) << "Audio sample rate changes not allowed.";
-    return false;
-  }
-
-  if (audio_configs_[0].channel_layout() != config.channel_layout()) {
-    MEDIA_LOG(log_cb_) << "Audio channel layout changes not allowed.";
-    return false;
-  }
-
-  if (audio_configs_[0].bits_per_channel() != config.bits_per_channel()) {
-    MEDIA_LOG(log_cb_) << "Audio bits per channel changes not allowed.";
-    return false;
-  }
-
   if (audio_configs_[0].is_encrypted() != config.is_encrypted()) {
     MEDIA_LOG(log_cb_) << "Audio encryption changes not allowed.";
     return false;
@@ -1270,11 +1399,6 @@ bool SourceBufferStream::UpdateVideoConfig(const VideoDecoderConfig& config) {
   DCHECK(audio_configs_.empty());
   DVLOG(3) << "UpdateVideoConfig.";
 
-  if (video_configs_[0].is_encrypted() != config.is_encrypted()) {
-    MEDIA_LOG(log_cb_) << "Video Encryption changes not allowed.";
-    return false;
-  }
-
   if (video_configs_[0].codec() != config.codec()) {
     MEDIA_LOG(log_cb_) << "Video codec changes not allowed.";
     return false;
@@ -1304,8 +1428,14 @@ bool SourceBufferStream::UpdateVideoConfig(const VideoDecoderConfig& config) {
 void SourceBufferStream::CompleteConfigChange() {
   config_change_pending_ = false;
 
+  // If a splice frame is currently being handed out, the new config comes
+  // from the splice buffer at the current index.
+  if (splice_buffer_) {
+    current_config_index_ =
+        GetConfigId(splice_buffer_, splice_buffers_index_);
+    return;
+  }
+
   if (!track_buffer_.empty()) {
-    current_config_index_ = track_buffer_.front()->GetConfigId();
+    current_config_index_ = GetConfigId(track_buffer_.front(), 0);
     return;
   }
 
@@ -1435,13 +1565,25 @@ base::TimeDelta SourceBufferStream::FindKeyframeAfterTimestamp(
 }
 
 std::string SourceBufferStream::GetStreamTypeName() const {
-  if (!video_configs_.empty()) {
-    DCHECK(audio_configs_.empty());
-    return "VIDEO";
+  switch (GetType()) {
+    case kAudio:
+      return "AUDIO";
+    case kVideo:
+      return "VIDEO";
+    case kText:
+      return "TEXT";
   }
+  NOTREACHED();
+  return "";
+}
 
-  DCHECK(!audio_configs_.empty());
-  return "AUDIO";
+SourceBufferStream::Type SourceBufferStream::GetType() const {
+  if (!audio_configs_.empty())
+    return kAudio;
+  if (!video_configs_.empty())
+    return kVideo;
+  DCHECK_NE(text_track_config_.kind(), kTextNone);
+  return kText;
 }
 
 void SourceBufferStream::DeleteAndRemoveRange(RangeList::iterator* itr) {
@@ -1462,11 +1604,73 @@ void SourceBufferStream::DeleteAndRemoveRange(RangeList::iterator* itr) {
   *itr = ranges_.erase(*itr);
 }
 
+void SourceBufferStream::GenerateSpliceFrame(const BufferQueue& new_buffers) {
+  DCHECK(!new_buffers.empty());
+
+  // Splice frames are only supported for audio.
+  if (GetType() != kAudio)
+    return;
+
+  // Find the overlapped range (if any).
+  const base::TimeDelta splice_timestamp = new_buffers.front()->timestamp();
+  RangeList::iterator range_itr = FindExistingRangeFor(splice_timestamp);
+  if (range_itr == ranges_.end())
+    return;
+
+  const base::TimeDelta max_splice_end_timestamp =
+      splice_timestamp + base::TimeDelta::FromMilliseconds(
+                             AudioSplicer::kCrossfadeDurationInMilliseconds);
+
+  // Collect the existing buffers that overlap the splice range; they will
+  // become the fade out preroll.
+  BufferQueue pre_splice_buffers;
+  if (!(*range_itr)->GetBuffersInRange(
+          splice_timestamp, max_splice_end_timestamp, &pre_splice_buffers)) {
+    return;
+  }
+
+  // If there are gaps in the timeline, it's possible that we only find buffers
+  // after the splice point but within the splice range.  For simplicity, we do
+  // not generate splice frames in this case.
+  //
+  // We also do not want to generate splices if the first new buffer replaces an
+  // existing buffer exactly.
+  if (pre_splice_buffers.front()->timestamp() >= splice_timestamp)
+    return;
+
+  // If any |pre_splice_buffers| are already splices, do not generate a splice.
+  for (size_t i = 0; i < pre_splice_buffers.size(); ++i) {
+    const BufferQueue& original_splice_buffers =
+        pre_splice_buffers[i]->get_splice_buffers();
+    if (!original_splice_buffers.empty()) {
+      DVLOG(1) << "Can't generate splice: overlapped buffers contain a "
+                  "pre-existing splice.";
+      return;
+    }
+  }
+
+  // Don't generate splice frames which represent less than two frames, since we
+  // need at least that much to generate a crossfade.  Per the spec, make this
+  // check using the sample rate of the overlapping buffers.
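+  // At a 44100 Hz sample rate, for example, two frames correspond to roughly
+  // 45 microseconds.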
+  const base::TimeDelta splice_duration =
+      pre_splice_buffers.back()->timestamp() +
+      pre_splice_buffers.back()->duration() - splice_timestamp;
+  const base::TimeDelta minimum_splice_duration = base::TimeDelta::FromSecondsD(
+      2.0 / audio_configs_[append_config_index_].samples_per_second());
+  if (splice_duration < minimum_splice_duration) {
+    DVLOG(1) << "Can't generate splice: not enough samples for crossfade; have "
+             << splice_duration.InMicroseconds() << " us, but need "
+             << minimum_splice_duration.InMicroseconds() << " us.";
+    return;
+  }
+
+  new_buffers.front()->ConvertToSpliceBuffer(pre_splice_buffers);
+}
+
 SourceBufferRange::SourceBufferRange(
-    bool is_video, const BufferQueue& new_buffers,
+    SourceBufferStream::Type type, const BufferQueue& new_buffers,
     base::TimeDelta media_segment_start_time,
     const InterbufferDistanceCB& interbuffer_distance_cb)
-    : is_video_(is_video),
+    : type_(type),
       keyframe_map_index_base_(0),
       next_buffer_index_(-1),
       media_segment_start_time_(media_segment_start_time),
@@ -1480,9 +1684,12 @@ SourceBufferRange::SourceBufferRange(
 
 void SourceBufferRange::AppendBuffersToEnd(const BufferQueue& new_buffers) {
   DCHECK(buffers_.empty() || CanAppendBuffersToEnd(new_buffers));
-
+  DCHECK(media_segment_start_time_ == kNoTimestamp() ||
+         media_segment_start_time_ <=
+             new_buffers.front()->GetDecodeTimestamp());
   for (BufferQueue::const_iterator itr = new_buffers.begin();
-       itr != new_buffers.end(); ++itr) {
+       itr != new_buffers.end();
+       ++itr) {
     DCHECK((*itr)->GetDecodeTimestamp() != kNoTimestamp());
     buffers_.push_back(*itr);
     size_in_bytes_ += (*itr)->data_size();
@@ -1558,7 +1765,7 @@ SourceBufferRange* SourceBufferRange::SplitRange(
   // Create a new range with |removed_buffers|.
   SourceBufferRange* split_range =
       new SourceBufferRange(
-          is_video_, removed_buffers, kNoTimestamp(), interbuffer_distance_cb_);
+          type_, removed_buffers, kNoTimestamp(), interbuffer_distance_cb_);
 
   // If the next buffer position is now in |split_range|, update the state of
   // this range and |split_range| accordingly.
@@ -1570,20 +1777,18 @@ SourceBufferRange* SourceBufferRange::SplitRange(
   return split_range;
 }
 
-SourceBufferRange::BufferQueue::iterator SourceBufferRange::GetBufferItrAt(
-    base::TimeDelta timestamp, bool skip_given_timestamp) {
-  // Need to make a dummy buffer with timestamp |timestamp| in order to search
-  // the |buffers_| container.
-  scoped_refptr<StreamParserBuffer> dummy_buffer =
-      StreamParserBuffer::CopyFrom(NULL, 0, false);
-  dummy_buffer->SetDecodeTimestamp(timestamp);
-
-  if (skip_given_timestamp) {
-    return std::upper_bound(
-        buffers_.begin(), buffers_.end(), dummy_buffer, BufferComparator);
-  }
-  return std::lower_bound(
-      buffers_.begin(), buffers_.end(), dummy_buffer, BufferComparator);
+BufferQueue::iterator SourceBufferRange::GetBufferItrAt(
+    base::TimeDelta timestamp,
+    bool skip_given_timestamp) {
+  return skip_given_timestamp
+             ? std::upper_bound(buffers_.begin(),
+                                buffers_.end(),
+                                timestamp,
+                                CompareTimeDeltaToStreamParserBuffer)
+             : std::lower_bound(buffers_.begin(),
+                                buffers_.end(),
+                                timestamp,
+                                CompareStreamParserBufferToTimeDelta);
 }
 
 SourceBufferRange::KeyframeMap::iterator
@@ -1611,13 +1816,13 @@ void SourceBufferRange::DeleteAll(BufferQueue* removed_buffers) {
   TruncateAt(buffers_.begin(), removed_buffers);
 }
 
-void SourceBufferRange::TruncateAt(
+bool SourceBufferRange::TruncateAt(
     base::TimeDelta timestamp, BufferQueue* removed_buffers,
     bool is_exclusive) {
   // Find the place in |buffers_| where we will begin deleting data.
   BufferQueue::iterator starting_point =
       GetBufferItrAt(timestamp, is_exclusive);
-  TruncateAt(starting_point, removed_buffers);
+  return TruncateAt(starting_point, removed_buffers);
 }
 
 int SourceBufferRange::DeleteGOPFromFront(BufferQueue* deleted_buffers) {
@@ -1773,13 +1978,13 @@ void SourceBufferRange::FreeBufferRange(
   buffers_.erase(starting_point, ending_point);
 }
 
-void SourceBufferRange::TruncateAt(
+bool SourceBufferRange::TruncateAt(
     const BufferQueue::iterator& starting_point, BufferQueue* removed_buffers) {
   DCHECK(!removed_buffers || removed_buffers->empty());
 
   // Return if we're not deleting anything.
   if (starting_point == buffers_.end())
-    return;
+    return false;
 
   // Reset the next buffer index if we will be deleting the buffer that's next
   // in sequence.
@@ -1805,6 +2010,7 @@ void SourceBufferRange::TruncateAt(
 
   // Remove everything from |starting_point| onward.
   FreeBufferRange(starting_point, buffers_.end());
+  return buffers_.empty();
 }
 
 bool SourceBufferRange::GetNextBuffer(
@@ -1812,7 +2018,7 @@ bool SourceBufferRange::GetNextBuffer(
   if (!HasNextBuffer())
     return false;
 
-  *out_buffer = buffers_.at(next_buffer_index_);
+  *out_buffer = buffers_[next_buffer_index_];
   next_buffer_index_++;
   return true;
 }
@@ -1824,7 +2030,9 @@ bool SourceBufferRange::HasNextBuffer() const {
 
 int SourceBufferRange::GetNextConfigId() const {
   DCHECK(HasNextBuffer());
-  return buffers_.at(next_buffer_index_)->GetConfigId();
+  // If the next buffer is an audio splice frame, the next effective config id
+  // comes from the first fade out preroll buffer.
+  return GetConfigId(buffers_[next_buffer_index_], 0);
 }
 
 base::TimeDelta SourceBufferRange::GetNextTimestamp() const {
@@ -1835,7 +2043,7 @@ base::TimeDelta SourceBufferRange::GetNextTimestamp() const {
     return kNoTimestamp();
   }
 
-  return buffers_.at(next_buffer_index_)->GetDecodeTimestamp();
+  return buffers_[next_buffer_index_]->GetDecodeTimestamp();
 }
 
 bool SourceBufferRange::HasNextBufferPosition() const {
@@ -1941,9 +2149,14 @@ base::TimeDelta SourceBufferRange::KeyframeBeforeTimestamp(
 bool SourceBufferRange::IsNextInSequence(
     base::TimeDelta timestamp, bool is_keyframe) const {
   base::TimeDelta end = buffers_.back()->GetDecodeTimestamp();
-  return (end < timestamp && timestamp <= end + GetFudgeRoom()) ||
-      (timestamp == end && AllowSameTimestamp(
-          buffers_.back()->IsKeyframe(), is_keyframe, is_video_));
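+  // Text buffers need not fall within the fudge room; any buffer starting
+  // after the end of this range is treated as next in sequence.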
+  if (end < timestamp &&
+      (type_ == SourceBufferStream::kText ||
+          timestamp <= end + GetFudgeRoom())) {
+    return true;
+  }
+
+  return timestamp == end && AllowSameTimestamp(
+      buffers_.back()->IsKeyframe(), is_keyframe, type_);
 }
 
 base::TimeDelta SourceBufferRange::GetFudgeRoom() const {
@@ -1956,4 +2169,32 @@ base::TimeDelta SourceBufferRange::GetApproximateDuration() const {
   return max_interbuffer_distance;
 }
 
+bool SourceBufferRange::GetBuffersInRange(base::TimeDelta start,
+                                          base::TimeDelta end,
+                                          BufferQueue* buffers) {
+  // Find the nearest keyframe with a decode timestamp <= start.
+  const base::TimeDelta first_timestamp = KeyframeBeforeTimestamp(start);
+  if (first_timestamp == kNoTimestamp())
+    return false;
+
+  // Find all buffers involved in the range.
+  const size_t previous_size = buffers->size();
+  for (BufferQueue::iterator it = GetBufferItrAt(first_timestamp, false);
+       it != buffers_.end();
+       ++it) {
+    const scoped_refptr<StreamParserBuffer>& buffer = *it;
+    // Buffers without duration are not supported, so bail if we encounter any.
+    if (buffer->duration() == kNoTimestamp() ||
+        buffer->duration() <= base::TimeDelta()) {
+      return false;
+    }
+    if (buffer->end_of_stream() || buffer->timestamp() >= end)
+      break;
+    if (buffer->timestamp() + buffer->duration() <= start)
+      continue;
+    buffers->push_back(buffer);
+  }
+  return previous_size < buffers->size();
+}
+
 }  // namespace media