Upstream version 10.39.225.0
[platform/framework/web/crosswalk.git] / src / chrome / browser / extensions / api / cast_streaming / cast_streaming_apitest.cc
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <algorithm>
6 #include <cmath>
7 #include <vector>
8
9 #include "base/callback_helpers.h"
10 #include "base/command_line.h"
11 #include "base/memory/scoped_ptr.h"
12 #include "base/run_loop.h"
13 #include "base/strings/string_number_conversions.h"
14 #include "base/strings/stringprintf.h"
15 #include "chrome/browser/extensions/extension_apitest.h"
16 #include "chrome/common/chrome_switches.h"
17 #include "content/public/common/content_switches.h"
18 #include "extensions/common/switches.h"
19 #include "media/base/bind_to_current_loop.h"
20 #include "media/base/video_frame.h"
21 #include "media/cast/cast_config.h"
22 #include "media/cast/cast_environment.h"
23 #include "media/cast/test/utility/audio_utility.h"
24 #include "media/cast/test/utility/default_config.h"
25 #include "media/cast/test/utility/in_process_receiver.h"
26 #include "media/cast/test/utility/net_utility.h"
27 #include "media/cast/test/utility/standalone_cast_environment.h"
28 #include "net/base/net_errors.h"
29 #include "net/base/net_util.h"
30 #include "net/base/rand_callback.h"
31 #include "net/udp/udp_socket.h"
32 #include "testing/gtest/include/gtest/gtest.h"
33
34 using media::cast::test::GetFreeLocalPort;
35
36 namespace extensions {
37
38 class CastStreamingApiTest : public ExtensionApiTest {
39  public:
40   virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
41     ExtensionApiTest::SetUpCommandLine(command_line);
42     command_line->AppendSwitchASCII(
43         extensions::switches::kWhitelistedExtensionID,
44         "ddchlicdkolnonkihahngkmmmjnjlkkf");
45     command_line->AppendSwitchASCII(::switches::kWindowSize, "300,300");
46   }
47 };
48
// Test running the test extension for Cast Mirroring API.  Exercises basic
// session setup/teardown via the extension subtest page.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Basics) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html")) << message_;
}
53
// Runs the extension subtest that queries the API's RTP stats.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Stats) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stats.html")) << message_;
}
57
// Runs the extension subtest that exercises invalid logging calls.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, BadLogging) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"))
      << message_;
}
62
// Runs the extension subtest covering starting a stream before a destination
// has been set.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, DestinationNotSet) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "destination_not_set.html"))
      << message_;
}
67
// Runs the extension subtest covering stopping a stream that was never
// started.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, StopNoStart) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stop_no_start.html"))
      << message_;
}
72
// Runs the extension subtest covering passing a null media stream to the API.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, NullStream) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "null_stream.html"))
      << message_;
}
77
78 namespace {
79
// A color expressed as Y'UV plane intensities, used to describe the solid
// test-pattern colors the receiver expects to observe in decoded frames.
struct YUVColor {
  int y;
  int u;
  int v;

  // All components default to zero.
  YUVColor() { y = u = v = 0; }
  YUVColor(int y_val, int u_val, int v_val) {
    y = y_val;
    u = u_val;
    v = v_val;
  }
};
88
89
90 media::cast::FrameReceiverConfig WithFakeAesKeyAndIv(
91     media::cast::FrameReceiverConfig config) {
92   config.aes_key = "0123456789abcdef";
93   config.aes_iv_mask = "fedcba9876543210";
94   return config;
95 }
96
97 // An in-process Cast receiver that examines the audio/video frames being
98 // received for expected colors and tones.  Used in
99 // CastStreamingApiTest.EndToEnd, below.
class TestPatternReceiver : public media::cast::InProcessReceiver {
 public:
  // Constructs a receiver bound to |local_end_point|, using the default
  // audio/video receiver configs augmented with the fake AES key/IV mask that
  // the sender page is also given.
  explicit TestPatternReceiver(
      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
      const net::IPEndPoint& local_end_point)
      : InProcessReceiver(
            cast_environment,
            local_end_point,
            net::IPEndPoint(),
            WithFakeAesKeyAndIv(media::cast::GetDefaultAudioReceiverConfig()),
            WithFakeAesKeyAndIv(media::cast::GetDefaultVideoReceiverConfig())) {
  }

  virtual ~TestPatternReceiver() {}

  // Registers a tone frequency (Hz) that must be heard in the received audio
  // before WaitForExpectedTonesAndColors() will return.
  void AddExpectedTone(int tone_frequency) {
    expected_tones_.push_back(tone_frequency);
  }

  // Registers a solid color that must be seen in the received video before
  // WaitForExpectedTonesAndColors() will return.
  void AddExpectedColor(const YUVColor& yuv_color) {
    expected_yuv_colors_.push_back(yuv_color);
  }

  // Blocks the caller until all expected tones and colors have been observed.
  // The done closure is bounced through the CastEnvironment MAIN thread (where
  // the frame callbacks run) and back to this thread via BindToCurrentLoop.
  void WaitForExpectedTonesAndColors() {
    base::RunLoop run_loop;
    cast_env()->PostTask(
        media::cast::CastEnvironment::MAIN,
        FROM_HERE,
        base::Bind(&TestPatternReceiver::NotifyOnceObservedAllTonesAndColors,
                   base::Unretained(this),
                   media::BindToCurrentLoop(run_loop.QuitClosure())));
    run_loop.Run();
  }

 private:
  // Runs on the MAIN cast thread.  Stores |done_callback| and immediately
  // checks whether all expectations have already been met.
  void NotifyOnceObservedAllTonesAndColors(const base::Closure& done_callback) {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
    done_callback_ = done_callback;
    MaybeRunDoneCallback();
  }

  // Runs |done_callback_| (at most once, via ResetAndReturn) when both
  // expectation lists have been emptied; otherwise logs remaining work.
  // No-op until NotifyOnceObservedAllTonesAndColors() has set the callback.
  void MaybeRunDoneCallback() {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
    if (done_callback_.is_null())
      return;
    if (expected_tones_.empty() && expected_yuv_colors_.empty()) {
      base::ResetAndReturn(&done_callback_).Run();
    } else {
      LOG(INFO) << "Waiting to encounter " << expected_tones_.size()
                << " more tone(s) and " << expected_yuv_colors_.size()
                << " more color(s).";
    }
  }

  // Invoked by InProcessReceiver for each received audio frame.
  virtual void OnAudioFrame(scoped_ptr<media::AudioBus> audio_frame,
                            const base::TimeTicks& playout_time,
                            bool is_continuous) OVERRIDE {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    if (audio_frame->frames() <= 0) {
      NOTREACHED() << "OnAudioFrame called with no samples?!?";
      return;
    }

    if (done_callback_.is_null() || expected_tones_.empty())
      return;  // No need to waste CPU doing analysis on the signal.

    // Assume the audio signal is a single sine wave (it can have some
    // low-amplitude noise).  Count zero crossings, and extrapolate the
    // frequency of the sine wave in |audio_frame|.
    int crossings = 0;
    for (int ch = 0; ch < audio_frame->channels(); ++ch) {
      crossings += media::cast::CountZeroCrossings(audio_frame->channel(ch),
                                                   audio_frame->frames());
    }
    crossings /= audio_frame->channels();  // Take the average.
    const float seconds_per_frame =
        audio_frame->frames() / static_cast<float>(audio_config().frequency);
    // Two zero crossings per sine-wave cycle, hence the divide by 2.
    const float frequency = crossings / seconds_per_frame / 2.0f;
    VLOG(1) << "Current audio tone frequency: " << frequency;

    // Accept a tone if the estimate is within +/- kTargetWindowHz of an
    // expected frequency; each expected tone is consumed at most once.
    const int kTargetWindowHz = 20;
    for (std::vector<int>::iterator it = expected_tones_.begin();
         it != expected_tones_.end(); ++it) {
      if (abs(static_cast<int>(frequency) - *it) < kTargetWindowHz) {
        LOG(INFO) << "Heard tone at frequency " << *it << " Hz.";
        expected_tones_.erase(it);
        MaybeRunDoneCallback();
        break;  // |it| is invalid after erase(); stop scanning.
      }
    }
  }

  // Invoked by InProcessReceiver for each received video frame.
  virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
                            const base::TimeTicks& playout_time,
                            bool is_continuous) OVERRIDE {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    // Only planar 4:2:0 YUV formats are analyzed below.
    CHECK(video_frame->format() == media::VideoFrame::YV12 ||
          video_frame->format() == media::VideoFrame::I420 ||
          video_frame->format() == media::VideoFrame::YV12A);

    if (done_callback_.is_null() || expected_yuv_colors_.empty())
      return;  // No need to waste CPU doing analysis on the frame.

    // Take the median value of each plane because the test image will contain a
    // letterboxed content region of mostly a solid color plus a small piece of
    // "something" that's animating to keep the tab capture pipeline generating
    // new frames.
    const gfx::Rect region = FindLetterboxedContentRegion(video_frame.get());
    YUVColor current_color;
    current_color.y = ComputeMedianIntensityInRegionInPlane(
        region,
        video_frame->stride(media::VideoFrame::kYPlane),
        video_frame->data(media::VideoFrame::kYPlane));
    // U/V planes are half-resolution in 4:2:0, so scale the region by 0.5.
    current_color.u = ComputeMedianIntensityInRegionInPlane(
        gfx::ScaleToEnclosedRect(region, 0.5f),
        video_frame->stride(media::VideoFrame::kUPlane),
        video_frame->data(media::VideoFrame::kUPlane));
    current_color.v = ComputeMedianIntensityInRegionInPlane(
        gfx::ScaleToEnclosedRect(region, 0.5f),
        video_frame->stride(media::VideoFrame::kVPlane),
        video_frame->data(media::VideoFrame::kVPlane));
    VLOG(1) << "Current video color: yuv(" << current_color.y << ", "
            << current_color.u << ", " << current_color.v << ')';

    // Accept a color when each component is within +/- kTargetWindow of an
    // expected color; each expected color is consumed at most once.
    const int kTargetWindow = 10;
    for (std::vector<YUVColor>::iterator it = expected_yuv_colors_.begin();
         it != expected_yuv_colors_.end(); ++it) {
      if (abs(current_color.y - it->y) < kTargetWindow &&
          abs(current_color.u - it->u) < kTargetWindow &&
          abs(current_color.v - it->v) < kTargetWindow) {
        LOG(INFO) << "Saw color yuv(" << it->y << ", " << it->u << ", "
                  << it->v << ").";
        expected_yuv_colors_.erase(it);
        MaybeRunDoneCallback();
        break;  // |it| is invalid after erase(); stop scanning.
      }
    }
  }

  // Return the region that excludes the black letterboxing borders surrounding
  // the content within |frame|, if any.
  static gfx::Rect FindLetterboxedContentRegion(
      const media::VideoFrame* frame) {
    const int kNonBlackIntensityThreshold = 20;  // 16 plus some fuzz.
    const int width = frame->row_bytes(media::VideoFrame::kYPlane);
    const int height = frame->rows(media::VideoFrame::kYPlane);
    const int stride = frame->stride(media::VideoFrame::kYPlane);

    gfx::Rect result;

    // Scan from the bottom-right until the first non-black pixel is
    // encountered.  This establishes the far (right/bottom) edges of the
    // content region.
    // NOTE(review): when a row is entirely black, |p| is decremented one past
    // |start| before the loop condition fails -- technically a one-before-
    // begin pointer; confirm this is acceptable here.
    for (int y = height - 1; y >= 0; --y) {
      const uint8* const start =
          frame->data(media::VideoFrame::kYPlane) + y * stride;
      const uint8* const end = start + width;
      for (const uint8* p = end - 1; p >= start; --p) {
        if (*p > kNonBlackIntensityThreshold) {
          result.set_width(p - start + 1);
          result.set_height(y + 1);
          y = 0;  // Discontinue outer loop.
          break;
        }
      }
    }

    // Scan from the upper-left until the first non-black pixel is encountered.
    // This trims the near (left/top) edges off the region found above.
    for (int y = 0; y < result.height(); ++y) {
      const uint8* const start =
          frame->data(media::VideoFrame::kYPlane) + y * stride;
      const uint8* const end = start + result.width();
      for (const uint8* p = start; p < end; ++p) {
        if (*p > kNonBlackIntensityThreshold) {
          result.set_x(p - start);
          result.set_width(result.width() - result.x());
          result.set_y(y);
          result.set_height(result.height() - result.y());
          y = result.height();  // Discontinue outer loop.
          break;
        }
      }
    }

    return result;
  }

  // Returns the median intensity of the pixels of |region| within a single
  // plane given by |data|/|stride|, or 0 for an empty region.  For an even
  // pixel count this yields the upper median (nth_element at num_values / 2).
  static uint8 ComputeMedianIntensityInRegionInPlane(const gfx::Rect& region,
                                                     int stride,
                                                     const uint8* data) {
    if (region.IsEmpty())
      return 0;
    const size_t num_values = region.size().GetArea();
    scoped_ptr<uint8[]> values(new uint8[num_values]);
    // Copy the region row-by-row into a contiguous scratch buffer.
    for (int y = 0; y < region.height(); ++y) {
      memcpy(values.get() + y * region.width(),
             data + (region.y() + y) * stride + region.x(),
             region.width());
    }
    const size_t middle_idx = num_values / 2;
    std::nth_element(values.get(),
                     values.get() + middle_idx,
                     values.get() + num_values);
    return values[middle_idx];
  }

  // Tones/colors not yet observed; entries are erased as each is seen.
  std::vector<int> expected_tones_;
  std::vector<YUVColor> expected_yuv_colors_;
  // Set by WaitForExpectedTonesAndColors(); run once all expectations are met.
  base::Closure done_callback_;

  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};
315
316 }  // namespace
317
// Fixture variant that enables real pixel output so tab capture produces
// actual frames for the end-to-end tests below.
class CastStreamingApiTestWithPixelOutput : public CastStreamingApiTest {
  virtual void SetUp() OVERRIDE {
    EnablePixelOutput();
    CastStreamingApiTest::SetUp();
  }

  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
    // NOTE(review): the parent's SetUpCommandLine is called after this and
    // also appends kWindowSize ("300,300"); if the last occurrence of a
    // switch wins, that value overrides the "128,128" below -- confirm the
    // intended window size.
    command_line->AppendSwitchASCII(::switches::kWindowSize, "128,128");
    CastStreamingApiTest::SetUpCommandLine(command_line);
  }
};
329
330 // Tests the Cast streaming API and its basic functionality end-to-end.  An
331 // extension subtest is run to generate test content, capture that content, and
332 // use the API to send it out.  At the same time, this test launches an
333 // in-process Cast receiver, listening on a localhost UDP socket, to receive the
334 // content and check whether it matches expectations.
335 //
336 // TODO(miu): In order to get this test up-and-running again, we will first
337 // confirm it is stable on Release build bots, then later we will enable it for
338 // the Debug build bots.  http://crbug.com/396413
339 // Also, it seems that the test fails to generate any video (audio is fine) on
340 // the ChromeOS bot.  Need to root-cause and resolve that issue.
341 #if defined(NDEBUG) && !defined(OS_CHROMEOS)
342 #define MAYBE_EndToEnd EndToEnd
343 #else
344 #define MAYBE_EndToEnd DISABLED_EndToEnd
345 #endif
346 IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, MAYBE_EndToEnd) {
347   scoped_ptr<net::UDPSocket> receive_socket(
348       new net::UDPSocket(net::DatagramSocket::DEFAULT_BIND,
349                          net::RandIntCallback(),
350                          NULL,
351                          net::NetLog::Source()));
352   receive_socket->AllowAddressReuse();
353   ASSERT_EQ(net::OK, receive_socket->Bind(GetFreeLocalPort()));
354   net::IPEndPoint receiver_end_point;
355   ASSERT_EQ(net::OK, receive_socket->GetLocalAddress(&receiver_end_point));
356   receive_socket.reset();
357
358   // Start the in-process receiver that examines audio/video for the expected
359   // test patterns.
360   const scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
361       new media::cast::StandaloneCastEnvironment());
362   TestPatternReceiver* const receiver =
363       new TestPatternReceiver(cast_environment, receiver_end_point);
364
365   // Launch the page that: 1) renders the source content; 2) uses the
366   // chrome.tabCapture and chrome.cast.streaming APIs to capture its content and
367   // stream using Cast; and 3) calls chrome.test.succeed() once it is
368   // operational.
369   const std::string page_url = base::StringPrintf(
370       "end_to_end_sender.html?port=%d&aesKey=%s&aesIvMask=%s",
371       receiver_end_point.port(),
372       base::HexEncode(receiver->audio_config().aes_key.data(),
373                       receiver->audio_config().aes_key.size()).c_str(),
374       base::HexEncode(receiver->audio_config().aes_iv_mask.data(),
375                       receiver->audio_config().aes_iv_mask.size()).c_str());
376   ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;
377
378   // Examine the Cast receiver for expected audio/video test patterns.  The
379   // colors and tones specified here must match those in end_to_end_sender.js.
380   // Note that we do not check that the color and tone are received
381   // simultaneously since A/V sync should be measured in perf tests.
382   receiver->AddExpectedTone(200 /* Hz */);
383   receiver->AddExpectedTone(500 /* Hz */);
384   receiver->AddExpectedTone(1800 /* Hz */);
385   receiver->AddExpectedColor(YUVColor(82, 90, 240));  // rgb(255, 0, 0)
386   receiver->AddExpectedColor(YUVColor(145, 54, 34));  // rgb(0, 255, 0)
387   receiver->AddExpectedColor(YUVColor(41, 240, 110));  // rgb(0, 0, 255)
388   receiver->Start();
389   receiver->WaitForExpectedTonesAndColors();
390   receiver->Stop();
391
392   delete receiver;
393   cast_environment->Shutdown();
394 }
395
396 IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, RtpStreamError) {
397   ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "rtp_stream_error.html"));
398 }
399
400 }  // namespace extensions