Upstream version 7.36.149.0
[platform/framework/web/crosswalk.git] src/media/cast/test/sender.cc
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 // Test application that simulates a Cast sender. Media data can either be
6 // generated synthetically or read from a WebM file.
7
8 #include <queue>
9
10 #include "base/at_exit.h"
11 #include "base/base_paths.h"
12 #include "base/command_line.h"
13 #include "base/file_util.h"
14 #include "base/files/file_path.h"
15 #include "base/files/memory_mapped_file.h"
16 #include "base/files/scoped_file.h"
17 #include "base/json/json_writer.h"
18 #include "base/logging.h"
19 #include "base/memory/scoped_ptr.h"
20 #include "base/path_service.h"
21 #include "base/strings/string_number_conversions.h"
22 #include "base/threading/thread.h"
23 #include "base/time/default_tick_clock.h"
24 #include "base/values.h"
25 #include "media/audio/audio_parameters.h"
26 #include "media/base/audio_buffer.h"
27 #include "media/base/audio_bus.h"
28 #include "media/base/audio_fifo.h"
29 #include "media/base/audio_timestamp_helper.h"
30 #include "media/base/media.h"
31 #include "media/base/multi_channel_resampler.h"
32 #include "media/base/video_frame.h"
33 #include "media/base/video_util.h"
34 #include "media/cast/cast_config.h"
35 #include "media/cast/cast_environment.h"
36 #include "media/cast/cast_sender.h"
37 #include "media/cast/logging/encoding_event_subscriber.h"
38 #include "media/cast/logging/log_serializer.h"
39 #include "media/cast/logging/logging_defines.h"
40 #include "media/cast/logging/proto/raw_events.pb.h"
41 #include "media/cast/logging/receiver_time_offset_estimator_impl.h"
42 #include "media/cast/logging/stats_event_subscriber.h"
43 #include "media/cast/test/utility/audio_utility.h"
44 #include "media/cast/test/utility/default_config.h"
45 #include "media/cast/test/utility/input_builder.h"
46 #include "media/cast/test/utility/video_utility.h"
47 #include "media/cast/transport/cast_transport_defines.h"
48 #include "media/cast/transport/cast_transport_sender.h"
49 #include "media/cast/transport/transport/udp_transport.h"
50 #include "media/ffmpeg/ffmpeg_common.h"
51 #include "media/ffmpeg/ffmpeg_deleters.h"
52 #include "media/filters/audio_renderer_algorithm.h"
53 #include "media/filters/ffmpeg_demuxer.h"
54 #include "media/filters/ffmpeg_glue.h"
55 #include "media/filters/in_memory_url_protocol.h"
56 #include "ui/gfx/size.h"
57
58 namespace {
59 static const int kAudioChannels = 2;
60 static const int kAudioSamplingFrequency = 48000;
61 static const int kSoundFrequency = 1234;  // Frequency of sinusoid wave.
62 static const float kSoundVolume = 0.5f;
63 static const int kAudioFrameMs = 10;  // Each audio frame is exactly 10ms.
64 static const int kAudioPacketsPerSecond = 1000 / kAudioFrameMs;
65
66 // The maximum allowed size of a serialized event log, in bytes.
67 const int kMaxSerializedLogBytes = 10 * 1000 * 1000;
68
69 // Flags for this program:
70 //
71 // --address=xx.xx.xx.xx
72 //   IP address of receiver.
73 //
74 // --port=xxxx
75 //   Port number of receiver.
76 //
77 // --source-file=xxx.webm
78 //   WebM file as source of video frames.
79 //
80 // --fps=xx
81 //   Override framerate of the video stream.
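//
// Example invocation (binary name and values below are illustrative):
//   cast_sender --address=192.168.1.50 --port=2344 --source-file=clip.webm --fps=30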
82
83 const char kSwitchAddress[] = "address";
84 const char kSwitchPort[] = "port";
85 const char kSwitchSourceFile[] = "source-file";
86 const char kSwitchFps[] = "fps";
87
88 }  // namespace
89
90 namespace media {
91 namespace cast {
92
93 AudioSenderConfig GetAudioSenderConfig() {
94   AudioSenderConfig audio_config;
95
96   audio_config.rtcp_c_name = "audio_sender@a.b.c.d";
97
98   audio_config.use_external_encoder = false;
99   audio_config.frequency = kAudioSamplingFrequency;
100   audio_config.channels = kAudioChannels;
101   audio_config.bitrate = 64000;
102   audio_config.codec = transport::kOpus;
103   audio_config.rtp_config.ssrc = 1;
104   audio_config.incoming_feedback_ssrc = 2;
105   audio_config.rtp_config.payload_type = 127;
106   audio_config.rtp_config.max_delay_ms = 300;
107   return audio_config;
108 }
109
110 VideoSenderConfig GetVideoSenderConfig() {
111   VideoSenderConfig video_config;
112
113   video_config.rtcp_c_name = "video_sender@a.b.c.d";
114   video_config.use_external_encoder = false;
115
116   // Resolution.
117   video_config.width = 1280;
118   video_config.height = 720;
119   video_config.max_frame_rate = 30;
120
121   // Bitrates.
122   video_config.max_bitrate = 2500000;
123   video_config.min_bitrate = 100000;
124   video_config.start_bitrate = video_config.min_bitrate;
125
126   // Codec.
127   video_config.codec = transport::kVp8;
128   video_config.max_number_of_video_buffers_used = 1;
129   video_config.number_of_encode_threads = 2;
130
131   // Quality options.
132   video_config.min_qp = 4;
133   video_config.max_qp = 40;
134
135   // SSRCs and payload type. Don't change them.
136   video_config.rtp_config.ssrc = 11;
137   video_config.incoming_feedback_ssrc = 12;
138   video_config.rtp_config.payload_type = 96;
139   video_config.rtp_config.max_delay_ms = 300;
140   return video_config;
141 }
142
143 void AVFreeFrame(AVFrame* frame) { avcodec_free_frame(&frame); }
144
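// Drives the test sender. It either generates synthetic audio/video test
// patterns, or demuxes and decodes a WebM source file with FFmpeg, and feeds
// the resulting frames to the cast sender on a timed schedule.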
145 class SendProcess {
146  public:
147   SendProcess(scoped_refptr<base::SingleThreadTaskRunner> thread_proxy,
148               base::TickClock* clock,
149               const VideoSenderConfig& video_config)
150       : test_app_thread_proxy_(thread_proxy),
151         video_config_(video_config),
152         synthetic_count_(0),
153         clock_(clock),
154         audio_frame_count_(0),
155         video_frame_count_(0),
156         weak_factory_(this),
157         av_format_context_(NULL),
158         audio_stream_index_(-1),
159         playback_rate_(1.0),
160         video_stream_index_(-1),
161         video_frame_rate_numerator_(video_config.max_frame_rate),
162         video_frame_rate_denominator_(1),
163         video_first_pts_(0),
164         video_first_pts_set_(false) {
165     audio_bus_factory_.reset(new TestAudioBusFactory(kAudioChannels,
166                                                      kAudioSamplingFrequency,
167                                                      kSoundFrequency,
168                                                      kSoundVolume));
169     const CommandLine* cmd = CommandLine::ForCurrentProcess();
170     int override_fps = 0;
171     if (base::StringToInt(cmd->GetSwitchValueASCII(kSwitchFps),
172                           &override_fps)) {
173       video_config_.max_frame_rate = override_fps;
174       video_frame_rate_numerator_ = override_fps;
175     }
176
177     // Load source file and prepare FFmpeg demuxer.
178     base::FilePath source_path = cmd->GetSwitchValuePath(kSwitchSourceFile);
179     if (source_path.empty())
180       return;
181
182     LOG(INFO) << "Source: " << source_path.value();
183     if (!file_data_.Initialize(source_path)) {
184       LOG(ERROR) << "Cannot load file.";
185       return;
186     }
187     protocol_.reset(
188         new InMemoryUrlProtocol(file_data_.data(), file_data_.length(), false));
189     glue_.reset(new FFmpegGlue(protocol_.get()));
190
191     if (!glue_->OpenContext()) {
192       LOG(ERROR) << "Cannot open file.";
193       return;
194     }
195
196     // AVFormatContext is owned by the glue.
197     av_format_context_ = glue_->format_context();
198     if (avformat_find_stream_info(av_format_context_, NULL) < 0) {
199       LOG(ERROR) << "Cannot find stream information.";
200       return;
201     }
202
203     // Prepare FFmpeg decoders.
204     for (unsigned int i = 0; i < av_format_context_->nb_streams; ++i) {
205       AVStream* av_stream = av_format_context_->streams[i];
206       AVCodecContext* av_codec_context = av_stream->codec;
207       AVCodec* av_codec = avcodec_find_decoder(av_codec_context->codec_id);
208
209       if (!av_codec) {
210         LOG(ERROR) << "Cannot find decoder for the codec: "
211                    << av_codec_context->codec_id;
212         continue;
213       }
214
215       // Number of threads for decoding.
216       av_codec_context->thread_count = 2;
217       av_codec_context->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
218       av_codec_context->request_sample_fmt = AV_SAMPLE_FMT_S16;
219
220       if (avcodec_open2(av_codec_context, av_codec, NULL) < 0) {
221         LOG(ERROR) << "Cannot open AVCodecContext for the codec: "
222                    << av_codec_context->codec_id;
223         return;
224       }
225
226       if (av_codec->type == AVMEDIA_TYPE_AUDIO) {
227         if (av_codec_context->sample_fmt == AV_SAMPLE_FMT_S16P) {
228           LOG(ERROR) << "Audio format not supported.";
229           continue;
230         }
231         ChannelLayout layout = ChannelLayoutToChromeChannelLayout(
232             av_codec_context->channel_layout,
233             av_codec_context->channels);
234         if (layout == CHANNEL_LAYOUT_UNSUPPORTED) {
235           LOG(ERROR) << "Unsupported audio channels layout.";
236           continue;
237         }
238         if (audio_stream_index_ != -1) {
239           LOG(WARNING) << "Found multiple audio streams.";
240         }
241         audio_stream_index_ = static_cast<int>(i);
242         audio_params_.Reset(
243             AudioParameters::AUDIO_PCM_LINEAR,
244             layout,
245             av_codec_context->channels,
246             av_codec_context->channels,
247             av_codec_context->sample_rate,
248             8 * av_get_bytes_per_sample(av_codec_context->sample_fmt),
249             av_codec_context->sample_rate / kAudioPacketsPerSecond);
250         LOG(INFO) << "Source file has audio.";
251       } else if (av_codec->type == AVMEDIA_TYPE_VIDEO) {
252         VideoFrame::Format format =
253             PixelFormatToVideoFormat(av_codec_context->pix_fmt);
254         if (format != VideoFrame::YV12) {
255           LOG(ERROR) << "Cannot handle non-YV12 video format: " << format;
256           continue;
257         }
258         if (video_stream_index_ != -1) {
259           LOG(WARNING) << "Found multiple video streams.";
260         }
261         video_stream_index_ = static_cast<int>(i);
262         if (!override_fps) {
263           video_frame_rate_numerator_ = av_stream->r_frame_rate.num;
264           video_frame_rate_denominator_ = av_stream->r_frame_rate.den;
265           // Max frame rate is rounded up, i.e. ceil(numerator / denominator).
266           video_config_.max_frame_rate =
267               video_frame_rate_denominator_ +
268               video_frame_rate_numerator_ - 1;
269           video_config_.max_frame_rate /= video_frame_rate_denominator_;
270         } else {
271           // If video is played at a manually overridden rate, audio must match.
272           playback_rate_ = 1.0 * override_fps *
273                av_stream->r_frame_rate.den /  av_stream->r_frame_rate.num;
274         }
275         LOG(INFO) << "Source file has video.";
276       } else {
277         LOG(ERROR) << "Unknown stream type; ignore.";
278       }
279     }
280
281     Rewind();
282   }
283
284   ~SendProcess() {
285   }
286
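  // Starts sending. If a source file was loaded, transcoded frames from it are
  // sent; otherwise synthetic test patterns are sent.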
287   void Start(scoped_refptr<AudioFrameInput> audio_frame_input,
288              scoped_refptr<VideoFrameInput> video_frame_input) {
289     audio_frame_input_ = audio_frame_input;
290     video_frame_input_ = video_frame_input;
291
292     LOG(INFO) << "Max Frame rate: " << video_config_.max_frame_rate;
293     LOG(INFO) << "Real Frame rate: "
294               << video_frame_rate_numerator_ << "/"
295               << video_frame_rate_denominator_ << " fps.";
296     LOG(INFO) << "Audio playback rate: " << playback_rate_;
297
298     if (!is_transcoding_audio() && !is_transcoding_video()) {
299       // Send fake patterns.
300       test_app_thread_proxy_->PostTask(
301           FROM_HERE,
302           base::Bind(
303               &SendProcess::SendNextFakeFrame,
304               base::Unretained(this)));
305       return;
306     }
307
308     // Send transcoding streams.
309     audio_algo_.Initialize(playback_rate_, audio_params_);
310     audio_algo_.FlushBuffers();
311     audio_fifo_input_bus_ =
312         AudioBus::Create(
313             audio_params_.channels(), audio_params_.frames_per_buffer());
314     // Audio FIFO can carry all data from AudioRendererAlgorithm.
315     audio_fifo_.reset(
316         new AudioFifo(audio_params_.channels(),
317                       audio_algo_.QueueCapacity()));
318     audio_resampler_.reset(new media::MultiChannelResampler(
319         audio_params_.channels(),
320         static_cast<double>(audio_params_.sample_rate()) /
321         kAudioSamplingFrequency,
322         audio_params_.frames_per_buffer(),
323         base::Bind(&SendProcess::ProvideData, base::Unretained(this))));
324     test_app_thread_proxy_->PostTask(
325         FROM_HERE,
326         base::Bind(
327             &SendProcess::SendNextFrame,
328             base::Unretained(this)));
329   }
330
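  // Sends one synthetic video frame (a black frame populated by
  // PopulateVideoFrame() using |synthetic_count_|) plus enough 10 ms audio
  // frames to cover it, then schedules the next invocation at the video frame
  // interval.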
331   void SendNextFakeFrame() {
332     gfx::Size size(video_config_.width, video_config_.height);
333     scoped_refptr<VideoFrame> video_frame =
334         VideoFrame::CreateBlackFrame(size);
335     PopulateVideoFrame(video_frame, synthetic_count_);
336     ++synthetic_count_;
337
338     base::TimeTicks now = clock_->NowTicks();
339     if (start_time_.is_null())
340       start_time_ = now;
341
342     base::TimeDelta video_time = VideoFrameTime(video_frame_count_);
343     video_frame->set_timestamp(video_time);
344     video_frame_input_->InsertRawVideoFrame(video_frame,
345                                             start_time_ + video_time);
346
347     // Send just enough audio data to match next video frame's time.
348     base::TimeDelta audio_time = AudioFrameTime(audio_frame_count_);
349     while (audio_time < video_time) {
350       if (is_transcoding_audio()) {
351         Decode(true);
352         CHECK(!audio_bus_queue_.empty()) << "No audio decoded.";
353         scoped_ptr<AudioBus> bus(audio_bus_queue_.front());
354         audio_bus_queue_.pop();
355         audio_frame_input_->InsertAudio(
356             bus.Pass(), start_time_ + audio_time);
357       } else {
358         audio_frame_input_->InsertAudio(
359             audio_bus_factory_->NextAudioBus(
360                 base::TimeDelta::FromMilliseconds(kAudioFrameMs)),
361             start_time_ + audio_time);
362       }
363       audio_time = AudioFrameTime(++audio_frame_count_);
364     }
365
366     // This is the time since the stream started.
367     const base::TimeDelta elapsed_time = now - start_time_;
368
369     // Handle the case when frame generation cannot keep up.
370     // Move the time ahead to match the next frame.
371     while (video_time < elapsed_time) {
372       LOG(WARNING) << "Skipping one frame.";
373       video_time = VideoFrameTime(++video_frame_count_);
374     }
375
376     test_app_thread_proxy_->PostDelayedTask(
377         FROM_HERE,
378         base::Bind(&SendProcess::SendNextFakeFrame,
379                    weak_factory_.GetWeakPtr()),
380         video_time - elapsed_time);
381   }
382
383   // Return true if a frame was sent.
384   bool SendNextTranscodedVideo(base::TimeDelta elapsed_time) {
385     if (!is_transcoding_video())
386       return false;
387
388     Decode(false);
389     if (video_frame_queue_.empty())
390       return false;
391
392     scoped_refptr<VideoFrame> decoded_frame =
393         video_frame_queue_.front();
394     if (elapsed_time < decoded_frame->timestamp())
395       return false;
396
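    // Copy the decoded planes into a freshly allocated frame, presumably so the
    // frame handed to the sender does not reference FFmpeg-owned memory and is
    // sized to the configured sender resolution.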
397     gfx::Size size(video_config_.width, video_config_.height);
398     scoped_refptr<VideoFrame> video_frame =
399         VideoFrame::CreateBlackFrame(size);
400     video_frame_queue_.pop();
401     media::CopyPlane(VideoFrame::kYPlane,
402                      decoded_frame->data(VideoFrame::kYPlane),
403                      decoded_frame->stride(VideoFrame::kYPlane),
404                      decoded_frame->rows(VideoFrame::kYPlane),
405                      video_frame);
406     media::CopyPlane(VideoFrame::kUPlane,
407                      decoded_frame->data(VideoFrame::kUPlane),
408                      decoded_frame->stride(VideoFrame::kUPlane),
409                      decoded_frame->rows(VideoFrame::kUPlane),
410                      video_frame);
411     media::CopyPlane(VideoFrame::kVPlane,
412                      decoded_frame->data(VideoFrame::kVPlane),
413                      decoded_frame->stride(VideoFrame::kVPlane),
414                      decoded_frame->rows(VideoFrame::kVPlane),
415                      video_frame);
416
417     base::TimeDelta video_time;
418     // Use the timestamp from the file if we're transcoding.
419     video_time = ScaleTimestamp(decoded_frame->timestamp());
420     video_frame_input_->InsertRawVideoFrame(
421         video_frame, start_time_ + video_time);
422
423     // Make sure queue is not empty.
424     Decode(false);
425     return true;
426   }
427
428   // Return true if a frame was sent.
429   bool SendNextTranscodedAudio(base::TimeDelta elapsed_time) {
430     if (!is_transcoding_audio())
431       return false;
432
433     Decode(true);
434     if (audio_bus_queue_.empty())
435       return false;
436
437     base::TimeDelta audio_time = audio_sent_ts_->GetTimestamp();
438     if (elapsed_time < audio_time)
439       return false;
440     scoped_ptr<AudioBus> bus(audio_bus_queue_.front());
441     audio_bus_queue_.pop();
442     audio_sent_ts_->AddFrames(bus->frames());
443     audio_frame_input_->InsertAudio(
444         bus.Pass(), start_time_ + audio_time);
445
446     // Make sure queue is not empty.
447     Decode(true);
448     return true;
449   }
450
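  // Pump for the transcoding path: sends all audio that is due by wall-clock
  // time, then all video that is due relative to the audio timestamp, rewinds
  // at end of stream, and re-posts itself every 10 ms.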
451   void SendNextFrame() {
452     if (start_time_.is_null())
453       start_time_ = clock_->NowTicks();
456
457     // Send as much as possible. Audio is sent according to
458     // system time.
459     while (SendNextTranscodedAudio(clock_->NowTicks() - start_time_));
460
461     // Video is sync'ed to audio.
462     while (SendNextTranscodedVideo(audio_sent_ts_->GetTimestamp()));
463
464     if (audio_bus_queue_.empty() && video_frame_queue_.empty()) {
465       // Both queues being empty can only mean that we have reached
466       // the end of the stream.
467       LOG(INFO) << "Rewind.";
468       Rewind();
469       start_time_ = base::TimeTicks();
470       audio_sent_ts_.reset();
471       video_first_pts_set_ = false;
472     }
473
474     // Schedule the next send.
475     test_app_thread_proxy_->PostDelayedTask(
476         FROM_HERE,
477         base::Bind(
478             &SendProcess::SendNextFrame,
479             base::Unretained(this)),
480         base::TimeDelta::FromMilliseconds(kAudioFrameMs));
481   }
482
483   const VideoSenderConfig& get_video_config() const { return video_config_; }
484
485  private:
486   bool is_transcoding_audio() { return audio_stream_index_ >= 0; }
487   bool is_transcoding_video() { return video_stream_index_ >= 0; }
488
489   // Helper methods to compute timestamps for the frame number specified.
490   base::TimeDelta VideoFrameTime(int frame_number) {
491     return frame_number * base::TimeDelta::FromSeconds(1) *
492         video_frame_rate_denominator_ / video_frame_rate_numerator_;
493   }
494
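  // Scales a source-file timestamp by the playback rate so that audio stays in
  // sync when the video frame rate is overridden.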
495   base::TimeDelta ScaleTimestamp(base::TimeDelta timestamp) {
496     return base::TimeDelta::FromMicroseconds(
497         timestamp.InMicroseconds() / playback_rate_);
498   }
499
500   base::TimeDelta AudioFrameTime(int frame_number) {
501     return frame_number * base::TimeDelta::FromMilliseconds(kAudioFrameMs);
502   }
503
504   // Go to the beginning of the stream.
505   void Rewind() {
506     CHECK(av_seek_frame(av_format_context_, -1, 0, AVSEEK_FLAG_BACKWARD) >= 0)
507         << "Failed to rewind to the beginning.";
508   }
509
510   // Call FFmpeg to fetch one packet.
511   ScopedAVPacket DemuxOnePacket(bool* audio) {
512     ScopedAVPacket packet(new AVPacket());
513     if (av_read_frame(av_format_context_, packet.get()) < 0) {
514       LOG(ERROR) << "Failed to read one AVPacket.";
515       packet.reset();
516       return packet.Pass();
517     }
518
519     int stream_index = static_cast<int>(packet->stream_index);
520     if (stream_index == audio_stream_index_) {
521       *audio = true;
522     } else if (stream_index == video_stream_index_) {
523       *audio = false;
524     } else {
525       // Ignore unknown packet.
526       LOG(INFO) << "Unknown packet.";
527       packet.reset();
528     }
529     return packet.Pass();
530   }
531
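  // Decodes one audio packet, feeds the decoded buffers through
  // AudioRendererAlgorithm (playback-rate scaling) and the resampler, and
  // pushes the resulting 10 ms AudioBus chunks onto |audio_bus_queue_|.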
532   void DecodeAudio(ScopedAVPacket packet) {
533     // Audio.
534     AVFrame* avframe = av_frame_alloc();
535
536     // Shallow copy of the packet.
537     AVPacket packet_temp = *packet.get();
538
539     do {
540       avcodec_get_frame_defaults(avframe);
541       int frame_decoded = 0;
542       int result = avcodec_decode_audio4(
543           av_audio_context(), avframe, &frame_decoded, &packet_temp);
544       CHECK(result >= 0) << "Failed to decode audio.";
545       packet_temp.size -= result;
546       packet_temp.data += result;
547       if (!frame_decoded)
548         continue;
549
550       int frames_read = avframe->nb_samples;
551       if (frames_read < 0)
552         break;
553
554       if (!audio_sent_ts_) {
555         // Initialize the base time from the first packet in the file.
556         // The helper is created with the frequency we send to the receiver,
557         // not the frequency of the source file, because the frame count is
558         // incremented by the number of samples we send.
559         audio_sent_ts_.reset(
560             new AudioTimestampHelper(kAudioSamplingFrequency));
561         // The pts can be invalid for some files, so start from a zero base timestamp.
562         base::TimeDelta base_ts;
563         audio_sent_ts_->SetBaseTimestamp(base_ts);
564       }
565
566       scoped_refptr<AudioBuffer> buffer =
567           AudioBuffer::CopyFrom(
568               AVSampleFormatToSampleFormat(
569                   av_audio_context()->sample_fmt),
570               ChannelLayoutToChromeChannelLayout(
571                   av_audio_context()->channel_layout,
572                   av_audio_context()->channels),
573               av_audio_context()->channels,
574               av_audio_context()->sample_rate,
575               frames_read,
576               &avframe->data[0],
577               // Note: Not all files have correct values for pkt_pts.
578               base::TimeDelta::FromMilliseconds(avframe->pkt_pts));
579       audio_algo_.EnqueueBuffer(buffer);
580     } while (packet_temp.size > 0);
581     avcodec_free_frame(&avframe);
582
583     const int frames_needed_to_scale =
584         playback_rate_ * av_audio_context()->sample_rate /
585         kAudioPacketsPerSecond;
586     while (frames_needed_to_scale <= audio_algo_.frames_buffered()) {
587       if (!audio_algo_.FillBuffer(audio_fifo_input_bus_.get(),
588                                   audio_fifo_input_bus_->frames())) {
589         // Nothing can be scaled. Decode some more.
590         return;
591       }
592
593       // Prevent overflow of audio data in the FIFO.
594       if (audio_fifo_input_bus_->frames() + audio_fifo_->frames()
595           <= audio_fifo_->max_frames()) {
596         audio_fifo_->Push(audio_fifo_input_bus_.get());
597       } else {
598         LOG(WARNING) << "Audio FIFO full; dropping samples.";
599       }
600
601       // Make sure there's enough data to resample audio.
602       if (audio_fifo_->frames() <
603           2 * audio_params_.sample_rate() / kAudioPacketsPerSecond) {
604         continue;
605       }
606
607       scoped_ptr<media::AudioBus> resampled_bus(
608           media::AudioBus::Create(
609               audio_params_.channels(),
610               kAudioSamplingFrequency / kAudioPacketsPerSecond));
611       audio_resampler_->Resample(resampled_bus->frames(),
612                                  resampled_bus.get());
613       audio_bus_queue_.push(resampled_bus.release());
614     }
615   }
616
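  // Decodes one video packet and, if a picture is produced, wraps the planes
  // as a YV12 VideoFrame whose timestamp is relative to the first pts seen,
  // then queues it in |video_frame_queue_|.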
617   void DecodeVideo(ScopedAVPacket packet) {
618     // Video.
619     int got_picture;
620     AVFrame* avframe = av_frame_alloc();
621     avcodec_get_frame_defaults(avframe);
622     // Tell the decoder to reorder for us.
623     avframe->reordered_opaque =
624         av_video_context()->reordered_opaque = packet->pts;
625     CHECK(avcodec_decode_video2(
626         av_video_context(), avframe, &got_picture, packet.get()) >= 0)
627         << "Video decode error.";
628     if (!got_picture)
629       return;
630     gfx::Size size(av_video_context()->width, av_video_context()->height);
631     if (!video_first_pts_set_ ||
632         avframe->reordered_opaque < video_first_pts_) {
633       video_first_pts_set_ = true;
634       video_first_pts_ = avframe->reordered_opaque;
635     }
636     int64 pts = avframe->reordered_opaque - video_first_pts_;
637     video_frame_queue_.push(
638         VideoFrame::WrapExternalYuvData(
639             media::VideoFrame::YV12,
640             size,
641             gfx::Rect(size),
642             size,
643             avframe->linesize[0],
644             avframe->linesize[1],
645             avframe->linesize[2],
646             avframe->data[0],
647             avframe->data[1],
648             avframe->data[2],
649             base::TimeDelta::FromMilliseconds(pts),
650             base::Bind(&AVFreeFrame, avframe)));
651   }
652
653   void Decode(bool decode_audio) {
654     // Read the stream until a frame of the requested type has been decoded.
655     while (true) {
656       if (decode_audio && !audio_bus_queue_.empty())
657         return;
658       if (!decode_audio && !video_frame_queue_.empty())
659         return;
660
661       bool audio_packet = false;
662       ScopedAVPacket packet = DemuxOnePacket(&audio_packet);
663       if (!packet) {
664         LOG(INFO) << "End of stream.";
665         return;
666       }
667
668       if (audio_packet)
669         DecodeAudio(packet.Pass());
670       else
671         DecodeVideo(packet.Pass());
672     }
673   }
674
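  // Read callback for the MultiChannelResampler: pulls frames from
  // |audio_fifo_|, or emits silence when the FIFO does not have enough data.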
675   void ProvideData(int frame_delay, media::AudioBus* output_bus) {
676     if (audio_fifo_->frames() >= output_bus->frames()) {
677       audio_fifo_->Consume(output_bus, 0, output_bus->frames());
678     } else {
679       LOG(WARNING) << "Not enough audio data for resampling.";
680       output_bus->Zero();
681     }
682   }
683
684   AVStream* av_audio_stream() {
685     return av_format_context_->streams[audio_stream_index_];
686   }
687   AVStream* av_video_stream() {
688     return av_format_context_->streams[video_stream_index_];
689   }
690   AVCodecContext* av_audio_context() { return av_audio_stream()->codec; }
691   AVCodecContext* av_video_context() { return av_video_stream()->codec; }
692
693   scoped_refptr<base::SingleThreadTaskRunner> test_app_thread_proxy_;
694   VideoSenderConfig video_config_;
695   scoped_refptr<AudioFrameInput> audio_frame_input_;
696   scoped_refptr<VideoFrameInput> video_frame_input_;
697   uint8 synthetic_count_;
698   base::TickClock* const clock_;  // Not owned by this class.
699
700   // Time when the stream starts.
701   base::TimeTicks start_time_;
702
703   // The following three members are used only for fake frames.
704   int audio_frame_count_;  // Each audio frame is exactly 10ms.
705   int video_frame_count_;
706   scoped_ptr<TestAudioBusFactory> audio_bus_factory_;
707
708   // NOTE: Weak pointers must be invalidated before all other member variables.
709   base::WeakPtrFactory<SendProcess> weak_factory_;
710
711   base::MemoryMappedFile file_data_;
712   scoped_ptr<InMemoryUrlProtocol> protocol_;
713   scoped_ptr<FFmpegGlue> glue_;
714   AVFormatContext* av_format_context_;
715
716   int audio_stream_index_;
717   AudioParameters audio_params_;
718   double playback_rate_;
719
720   int video_stream_index_;
721   int video_frame_rate_numerator_;
722   int video_frame_rate_denominator_;
723
724   // These are used for audio resampling.
725   scoped_ptr<media::MultiChannelResampler> audio_resampler_;
726   scoped_ptr<media::AudioFifo> audio_fifo_;
727   scoped_ptr<media::AudioBus> audio_fifo_input_bus_;
728   media::AudioRendererAlgorithm audio_algo_;
729
730   // Track the timestamp of audio sent to the receiver.
731   scoped_ptr<media::AudioTimestampHelper> audio_sent_ts_;
732
733   std::queue<scoped_refptr<VideoFrame> > video_frame_queue_;
734   int64 video_first_pts_;
735   bool video_first_pts_set_;
736
737   std::queue<AudioBus*> audio_bus_queue_;
738
739   DISALLOW_COPY_AND_ASSIGN(SendProcess);
740 };
741
742 }  // namespace cast
743 }  // namespace media
744
745 namespace {
746 void UpdateCastTransportStatus(
747     media::cast::transport::CastTransportStatus status) {
748   VLOG(21) << "Transport status: " << status;
749 }
750
751 void LogRawEvents(
752     const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
753     const std::vector<media::cast::PacketEvent>& packet_events) {
754   VLOG(1) << "Got packet events from transport, size: " << packet_events.size();
755   for (std::vector<media::cast::PacketEvent>::const_iterator it =
756            packet_events.begin();
757        it != packet_events.end();
758        ++it) {
759     cast_environment->Logging()->InsertPacketEvent(it->timestamp,
760                                                    it->type,
761                                                    it->rtp_timestamp,
762                                                    it->frame_id,
763                                                    it->packet_id,
764                                                    it->max_packet_id,
765                                                    it->size);
766   }
767 }
768
769 void InitializationResult(media::cast::CastInitializationStatus result) {
770   bool end_result = result == media::cast::STATUS_AUDIO_INITIALIZED ||
771                     result == media::cast::STATUS_VIDEO_INITIALIZED;
772   CHECK(end_result) << "Cast sender uninitialized";
773 }
774
775 net::IPEndPoint CreateUDPAddress(std::string ip_str, int port) {
776   net::IPAddressNumber ip_number;
777   CHECK(net::ParseIPLiteralToNumber(ip_str, &ip_number));
778   return net::IPEndPoint(ip_number, port);
779 }
780
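// Serializes the collected frame and packet events and writes the result to
// |log_file|. The .gz log file names in main() suggest the serialized output
// is compressed.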
781 void DumpLoggingData(const media::cast::proto::LogMetadata& log_metadata,
782                      const media::cast::FrameEventList& frame_events,
783                      const media::cast::PacketEventList& packet_events,
784                      base::ScopedFILE log_file) {
785   VLOG(0) << "Frame map size: " << frame_events.size();
786   VLOG(0) << "Packet map size: " << packet_events.size();
787
788   scoped_ptr<char[]> event_log(new char[kMaxSerializedLogBytes]);
789   int event_log_bytes;
790   if (!media::cast::SerializeEvents(log_metadata,
791                                     frame_events,
792                                     packet_events,
793                                     true,
794                                     kMaxSerializedLogBytes,
795                                     event_log.get(),
796                                     &event_log_bytes)) {
797     VLOG(0) << "Failed to serialize events.";
798     return;
799   }
800
801   VLOG(0) << "Events serialized length: " << event_log_bytes;
802
803   int ret = fwrite(event_log.get(), 1, event_log_bytes, log_file.get());
804   if (ret != event_log_bytes)
805     VLOG(0) << "Failed to write logs to file.";
806 }
807
808 void WriteLogsToFileAndDestroySubscribers(
809     const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
810     scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber,
811     scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber,
812     base::ScopedFILE video_log_file,
813     base::ScopedFILE audio_log_file) {
814   cast_environment->Logging()->RemoveRawEventSubscriber(
815       video_event_subscriber.get());
816   cast_environment->Logging()->RemoveRawEventSubscriber(
817       audio_event_subscriber.get());
818
819   VLOG(0) << "Dumping logging data for video stream.";
820   media::cast::proto::LogMetadata log_metadata;
821   media::cast::FrameEventList frame_events;
822   media::cast::PacketEventList packet_events;
823   video_event_subscriber->GetEventsAndReset(
824       &log_metadata, &frame_events, &packet_events);
825
826   DumpLoggingData(log_metadata,
827                   frame_events,
828                   packet_events,
829                   video_log_file.Pass());
830
831   VLOG(0) << "Dumping logging data for audio stream.";
832   audio_event_subscriber->GetEventsAndReset(
833       &log_metadata, &frame_events, &packet_events);
834
835   DumpLoggingData(log_metadata,
836                   frame_events,
837                   packet_events,
838                   audio_log_file.Pass());
839 }
840
841 void WriteStatsAndDestroySubscribers(
842     const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
843     scoped_ptr<media::cast::StatsEventSubscriber> video_event_subscriber,
844     scoped_ptr<media::cast::StatsEventSubscriber> audio_event_subscriber,
845     scoped_ptr<media::cast::ReceiverTimeOffsetEstimatorImpl> estimator) {
846   cast_environment->Logging()->RemoveRawEventSubscriber(
847       video_event_subscriber.get());
848   cast_environment->Logging()->RemoveRawEventSubscriber(
849       audio_event_subscriber.get());
850   cast_environment->Logging()->RemoveRawEventSubscriber(estimator.get());
851
852   scoped_ptr<base::DictionaryValue> stats = video_event_subscriber->GetStats();
853   std::string json;
854   base::JSONWriter::WriteWithOptions(
855       stats.get(), base::JSONWriter::OPTIONS_PRETTY_PRINT, &json);
856   VLOG(0) << "Video stats: " << json;
857
858   stats = audio_event_subscriber->GetStats();
859   json.clear();
860   base::JSONWriter::WriteWithOptions(
861       stats.get(), base::JSONWriter::OPTIONS_PRETTY_PRINT, &json);
862   VLOG(0) << "Audio stats: " << json;
863 }
864
865 }  // namespace
866
867 int main(int argc, char** argv) {
868   base::AtExitManager at_exit;
869   CommandLine::Init(argc, argv);
870   InitLogging(logging::LoggingSettings());
871
872   // Load the media module for FFmpeg decoding.
873   base::FilePath path;
874   PathService::Get(base::DIR_MODULE, &path);
875   if (!media::InitializeMediaLibrary(path)) {
876     LOG(ERROR) << "Could not initialize media library.";
877     return 1;
878   }
879
880   base::Thread test_thread("Cast sender test app thread");
881   base::Thread audio_thread("Cast audio encoder thread");
882   base::Thread video_thread("Cast video encoder thread");
883   test_thread.Start();
884   audio_thread.Start();
885   video_thread.Start();
886
887   base::MessageLoopForIO io_message_loop;
888
889   // Default parameters.
890   CommandLine* cmd = CommandLine::ForCurrentProcess();
891   std::string remote_ip_address = cmd->GetSwitchValueASCII(kSwitchAddress);
892   if (remote_ip_address.empty())
893     remote_ip_address = "127.0.0.1";
894   int remote_port = 0;
895   if (!base::StringToInt(cmd->GetSwitchValueASCII(kSwitchPort),
896                          &remote_port)) {
897     remote_port = 2344;
898   }
899   LOG(INFO) << "Sending to " << remote_ip_address << ":" << remote_port
900             << ".";
901
902   media::cast::AudioSenderConfig audio_config =
903       media::cast::GetAudioSenderConfig();
904   media::cast::VideoSenderConfig video_config =
905       media::cast::GetVideoSenderConfig();
906
907   // The transport runs on the main (IO) thread.
908   // Set up the transport configuration.
909   net::IPEndPoint remote_endpoint =
910       CreateUDPAddress(remote_ip_address, remote_port);
911
912   // Enable raw event and stats logging.
914   scoped_refptr<media::cast::CastEnvironment> cast_environment(
915       new media::cast::CastEnvironment(
916           make_scoped_ptr<base::TickClock>(new base::DefaultTickClock()),
917           io_message_loop.message_loop_proxy(),
918           audio_thread.message_loop_proxy(),
919           video_thread.message_loop_proxy()));
920
921   // SendProcess initialization.
922   scoped_ptr<media::cast::SendProcess> send_process(
923       new media::cast::SendProcess(test_thread.message_loop_proxy(),
924                                    cast_environment->Clock(),
925                                    video_config));
926
927   // CastTransportSender initialization.
928   scoped_ptr<media::cast::transport::CastTransportSender> transport_sender =
929       media::cast::transport::CastTransportSender::Create(
930           NULL,  // net log.
931           cast_environment->Clock(),
932           remote_endpoint,
933           base::Bind(&UpdateCastTransportStatus),
934           base::Bind(&LogRawEvents, cast_environment),
935           base::TimeDelta::FromSeconds(1),
936           io_message_loop.message_loop_proxy());
937
938   // CastSender initialization.
939   scoped_ptr<media::cast::CastSender> cast_sender =
940       media::cast::CastSender::Create(cast_environment, transport_sender.get());
941   cast_sender->InitializeVideo(
942       send_process->get_video_config(),
943       base::Bind(&InitializationResult),
944       media::cast::CreateDefaultVideoEncodeAcceleratorCallback(),
945       media::cast::CreateDefaultVideoEncodeMemoryCallback());
946   cast_sender->InitializeAudio(audio_config, base::Bind(&InitializationResult));
947   transport_sender->SetPacketReceiver(cast_sender->packet_receiver());
948
949   // Set up event subscribers.
950   scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber;
951   scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber;
952   std::string video_log_file_name("/tmp/video_events.log.gz");
953   std::string audio_log_file_name("/tmp/audio_events.log.gz");
954   LOG(INFO) << "Logging audio events to: " << audio_log_file_name;
955   LOG(INFO) << "Logging video events to: " << video_log_file_name;
956   video_event_subscriber.reset(new media::cast::EncodingEventSubscriber(
957       media::cast::VIDEO_EVENT, 10000));
958   audio_event_subscriber.reset(new media::cast::EncodingEventSubscriber(
959       media::cast::AUDIO_EVENT, 10000));
960   cast_environment->Logging()->AddRawEventSubscriber(
961       video_event_subscriber.get());
962   cast_environment->Logging()->AddRawEventSubscriber(
963       audio_event_subscriber.get());
964
965   // Subscribers for stats.
966   scoped_ptr<media::cast::ReceiverTimeOffsetEstimatorImpl> offset_estimator(
967       new media::cast::ReceiverTimeOffsetEstimatorImpl);
968   cast_environment->Logging()->AddRawEventSubscriber(offset_estimator.get());
969   scoped_ptr<media::cast::StatsEventSubscriber> video_stats_subscriber(
970       new media::cast::StatsEventSubscriber(media::cast::VIDEO_EVENT,
971                                             cast_environment->Clock(),
972                                             offset_estimator.get()));
973   scoped_ptr<media::cast::StatsEventSubscriber> audio_stats_subscriber(
974       new media::cast::StatsEventSubscriber(media::cast::AUDIO_EVENT,
975                                             cast_environment->Clock(),
976                                             offset_estimator.get()));
977   cast_environment->Logging()->AddRawEventSubscriber(
978       video_stats_subscriber.get());
979   cast_environment->Logging()->AddRawEventSubscriber(
980       audio_stats_subscriber.get());
981
982   base::ScopedFILE video_log_file(fopen(video_log_file_name.c_str(), "w"));
983   if (!video_log_file) {
984     VLOG(1) << "Failed to open video log file for writing.";
985     exit(-1);
986   }
987
988   base::ScopedFILE audio_log_file(fopen(audio_log_file_name.c_str(), "w"));
989   if (!audio_log_file) {
990     VLOG(1) << "Failed to open audio log file for writing.";
991     exit(-1);
992   }
993
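  // After |logging_duration_seconds|, dump the raw event logs and the
  // aggregated stats, then remove and destroy the subscribers.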
994   const int logging_duration_seconds = 10;
995   io_message_loop.message_loop_proxy()->PostDelayedTask(
996       FROM_HERE,
997       base::Bind(&WriteLogsToFileAndDestroySubscribers,
998                  cast_environment,
999                  base::Passed(&video_event_subscriber),
1000                  base::Passed(&audio_event_subscriber),
1001                  base::Passed(&video_log_file),
1002                  base::Passed(&audio_log_file)),
1003       base::TimeDelta::FromSeconds(logging_duration_seconds));
1004
1005   io_message_loop.message_loop_proxy()->PostDelayedTask(
1006       FROM_HERE,
1007       base::Bind(&WriteStatsAndDestroySubscribers,
1008                  cast_environment,
1009                  base::Passed(&video_stats_subscriber),
1010                  base::Passed(&audio_stats_subscriber),
1011                  base::Passed(&offset_estimator)),
1012       base::TimeDelta::FromSeconds(logging_duration_seconds));
1013
1014   send_process->Start(cast_sender->audio_frame_input(),
1015                       cast_sender->video_frame_input());
1016
1017   io_message_loop.Run();
1018   return 0;
1019 }