1 /*
2  *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10
11 #include "rtp_sender_h264.h"
12
13 #include "rtp_utility.h"
14
15 namespace webrtc {
16 RTPSenderH264::RTPSenderH264(RTPSenderInterface* rtpSender) :
17     // H264
18     _rtpSender(*rtpSender),
19     _h264Mode(H264_SINGLE_NAL_MODE),
20     _h264SendPPS_SPS(true),
21     _h264SVCPayloadType(-1),
22     _h264SVCRelaySequenceNumber(0),
23     _h264SVCRelayTimeStamp(0),
24     _h264SVCRelayLayerComplete(false),
25
26     _useHighestSendLayer(false),
27     _highestDependencyLayerOld(MAX_NUMBER_OF_TEMPORAL_ID-1),
28     _highestDependencyQualityIDOld(MAX_NUMBER_OF_DEPENDENCY_QUALITY_ID-1),
29     _highestDependencyLayer(0),
30     _highestDependencyQualityID(0),
31     _highestTemporalLayer(0)
32 {
33 }
34
35 RTPSenderH264::~RTPSenderH264()
36 {
37 }
38
39 int32_t
40 RTPSenderH264::Init()
41 {
42     _h264SendPPS_SPS = true;
43     _h264Mode = H264_SINGLE_NAL_MODE;
44     return 0;
45 }
46
47 /*
48     multi-session
49     3 modes supported
50     NI-T        timestamps
51     NI-TC        timestamps/CS-DON
52     NI-C        CS-DON
53
54     Non-interleaved timestamp based mode (NI-T)
55     Non-interleaved cross-session decoding order number (CS-DON) based mode (NI-C)
56     Non-interleaved combined timestamp and CS-DON mode (NI-TC)
57
58     NOT supported  Interleaved CS-DON (I-C) mode.
59
60     NI-T and NI-TC modes both use timestamps to recover the decoding
61     order.  In order to be able to do so, it is necessary for the RTP
62     packet stream to contain data for all sampling instances of a given
63     RTP session in all enhancement RTP sessions that depend on the given
64     RTP session.  The NI-C and I-C modes do not have this limitation,
65     and use the CS-DON values as a means to explicitly indicate decoding
66     order, either directly coded in PACSI NAL units, or inferred from
67     them using the packetization rules.  It is noted that the NI-TC mode
68     offers both alternatives and it is up to the receiver to select
69     which one to use.
70 */
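/*
    Note: this sender never writes CS-DON values by default (addDONC is hard-coded to
    false in AddH264PACSINALU below, see the TODO there), so multi-session operation
    effectively corresponds to the timestamp-based NI-T mode.
*/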
71
72 bool
73 RTPSenderH264::AddH264SVCNALUHeader(const H264_SVC_NALUHeader& svc,
74                                     uint8_t* databuffer,
75                                     int32_t& curByte) const
76 {
77    // +---------------+---------------+---------------+
78    // |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
79    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
80    // |R|I|   PRID    |N| DID |  QID  | TID |U|D|O| RR|
81    // +---------------+---------------+---------------+
82
83    // R    - Reserved for future extensions (MUST be 1). Receivers SHOULD ignore the value of R.
84    // I    - Is layer representation an IDR layer (1) or not (0).
85    // PRID - Priority identifier for the NAL unit.
86    // N    - Specifies whether inter-layer prediction may be used for decoding the coded slice (1) or not (0).
87    // DID  - Indicates the inter-layer coding dependency level of a layer representation.
88    // QID  - Indicates the quality level of an MGS layer representation.
89    // TID  - Indicates the temporal level of a layer representation.
90    // U    - Use only reference base pictures during the inter prediction process (1) or not (0).
91    // D    - Discardable flag.
92    // O    - Output_flag. Affects the decoded picture output process as defined in Annex C of [H.264].
93    // RR   - Reserved_three_2bits (MUST be '11'). Receivers SHOULD ignore the value of RR.
94
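   // Example (hypothetical values): r=1, idr=1, priorityID=5, interLayerPred=1,
   // dependencyID=2, qualityID=3, temporalID=1, useRefBasePic=0, discardable=1,
   // output=1, rr=3 packs into the three bytes 0xC5, 0xA3, 0x2F.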
95    // Add header data
96    databuffer[curByte++] = (svc.r << 7)              + (svc.idr << 6)           + (svc.priorityID & 0x3F);
97    databuffer[curByte++] = (svc.interLayerPred << 7) + (svc.dependencyID << 4)  + (svc.qualityID & 0x0F);
98    databuffer[curByte++] = (svc.temporalID << 5)     + (svc.useRefBasePic << 4) + (svc.discardable << 3) +
99                            (svc.output << 2)         + (svc.rr & 0x03);
100    return true;
101 }
102
103 int32_t
104 RTPSenderH264::AddH264PACSINALU(const bool firstPacketInNALU,
105                                 const bool lastPacketInNALU,
106                                 const H264_PACSI_NALU& pacsi,
107                                 const H264_SVC_NALUHeader& svc,
108                                 const uint16_t DONC,
109                                 uint8_t* databuffer,
110                                 int32_t& curByte) const
111 {
112     //  0                   1                   2                   3
113     //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
114     // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
115     // |F|NRI|Type(30) |              SVC NAL unit header              |
116     // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
117     // |X|Y|T|A|P|C|S|E| TL0PICIDX (o.)|        IDRPICID (o.)          |
118     // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
119     // |          DONC (o.)            |        NAL unit size 1        |
120     // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
121     // |                                                               |
122     // |                 SEI NAL unit 1                                |
123     // |                                                               |
124     // |                         +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
125     // |                         |        NAL unit size 2        |     |
126     // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+     |
127     // |                                                               |
128     // |            SEI NAL unit 2                                     |
129     // |                                           +-+-+-+-+-+-+-+-+-+-+
130     // |                                           |
131     // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
132
133
134     // If present, the PACSI NAL unit MUST be the first NAL unit in the aggregation packet, and there MUST
135     // be at least one additional NAL unit in the same packet. The RTP header and payload header are set as
136     // if the second NAL unit (the first non-PACSI NAL unit) were encapsulated in the same packet.
137     // The PACSI NAL unit contains scalability information common to all remaining NAL units.
138
139     // TODO: add an API to configure this; it is required for multi-session operation.
140     const bool addDONC = false;
141
142     if (svc.length == 0 || pacsi.NALlength == 0)
143     {
144       return 0;
145     }
146
147     int32_t startByte = curByte;
148
149     // NAL unit header
150     databuffer[curByte++] = 30; // NRI will be added later
151
152     // Extended SVC header
153     AddH264SVCNALUHeader(svc, databuffer, curByte);
154
155     // Flags
156     databuffer[curByte++] = (pacsi.X << 7) +
157                             (pacsi.Y << 6) +
158                             (addDONC << 5) +
159                             (pacsi.A << 4) +
160                             (pacsi.P << 3) +
161                             (pacsi.C << 2) +
162                             (firstPacketInNALU ? (pacsi.S << 1) : 0) +
163                             (lastPacketInNALU ? pacsi.E : 0);
164
165     // Optional fields
166     if (pacsi.Y)
167     {
168         databuffer[curByte++] = pacsi.TL0picIDx;
169         databuffer[curByte++] = (uint8_t)(pacsi.IDRpicID >> 8);
170         databuffer[curByte++] = (uint8_t)(pacsi.IDRpicID);
171     }
172     // Decoding order number
173     if (addDONC) // pacsi.T
174     {
175         databuffer[curByte++] = (uint8_t)(DONC >> 8);
176         databuffer[curByte++] = (uint8_t)(DONC);
177     }
178
179     // SEI NALU
180     if(firstPacketInNALU) // IMPROVEMENT duplicate it to make sure it arrives...
181     {
182         // we only set this for NALU 0 to make sure we send it only once per frame
183         for (uint32_t i = 0; i < pacsi.numSEINALUs; i++)
184         {
185             // NALU size
186             databuffer[curByte++] = (uint8_t)(pacsi.seiMessageLength[i] >> 8);
187             databuffer[curByte++] = (uint8_t)(pacsi.seiMessageLength[i]);
188
189             // NALU data
190             memcpy(databuffer + curByte, pacsi.seiMessageData[i], pacsi.seiMessageLength[i]);
191             curByte += pacsi.seiMessageLength[i];
192         }
193     }
194     return curByte - startByte;
195 }
196
197 int32_t
198 RTPSenderH264::SetH264RelaySequenceNumber(const uint16_t seqNum)
199 {
200     _h264SVCRelaySequenceNumber = seqNum;
201     return 0;
202 }
203
204 int32_t
205 RTPSenderH264::SetH264RelayCompleteLayer(const bool complete)
206 {
207     _h264SVCRelayLayerComplete = complete;
208     return 0;
209 }
210
211 /*
212     12  Filler data
213
214         The only restriction of filler data NAL units within an
215         access unit is that they shall not precede the first VCL
216         NAL unit with the same access unit.
217 */
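// Filler data NAL units have nal_unit_type 12 and a payload of 0xff bytes; that is
// what the two SendH264FillerData() variants below generate.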
218 int32_t
219 RTPSenderH264::SendH264FillerData(const WebRtcRTPHeader* rtpHeader,
220                                   const uint16_t bytesToSend,
221                                   const uint32_t ssrc)
222 {
223     uint16_t fillerLength = bytesToSend - 12 - 1;
224
225     if (fillerLength > WEBRTC_IP_PACKET_SIZE - 12 - 1)
226     {
227         return 0;
228     }
229
230     if (fillerLength == 0)
231     {
232         // do not send an empty packet, will not reach JB
233         fillerLength = 1;
234     }
235
236     // send codec-valid data; H.264 defines the filler payload bytes as binary 11111111 (0xff)
237     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
238
239     dataBuffer[0] = static_cast<uint8_t>(0x80);            // version 2
240     dataBuffer[1] = rtpHeader->header.payloadType;
241     ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // returns the current sequence number and increments it after returning
242     ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, rtpHeader->header.timestamp);
243     ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, rtpHeader->header.ssrc);
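    // The 12-byte fixed RTP header is written by hand here:
    //   byte 0    : 0x80 -> version 2, no padding, no extension, CC = 0
    //   byte 1    : marker bit (0 here) + 7-bit payload type
    //   bytes 2-3 : sequence number
    //   bytes 4-7 : timestamp
    //   bytes 8-11: SSRC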
244
245     // set filler NALU type
246     dataBuffer[12] = 12;        // NRI field = 0, type 12
247
248     // fill with 0xff
249     memset(dataBuffer + 12 + 1, 0xff, fillerLength);
250
251     return _rtpSender.SendToNetwork(dataBuffer,
252                         fillerLength,
253                         12 + 1);
254 }
255
256 int32_t
257 RTPSenderH264::SendH264FillerData(const uint32_t captureTimestamp,
258                                   const uint8_t payloadType,
259                                   const uint32_t bytes
260                                   )
261 {
262
263     const uint16_t rtpHeaderLength = _rtpSender.RTPHeaderLength();
264     uint16_t maxLength = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - _rtpSender.RTPHeaderLength();
265
266     int32_t bytesToSend=bytes;
267     uint16_t fillerLength=0;
268
269     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
270
271     while(bytesToSend>0)
272     {
273         fillerLength=maxLength;
274         if(bytesToSend<maxLength)
275         {
276             fillerLength = (uint16_t) bytesToSend;
277         }
278
279         bytesToSend-=fillerLength;
280
281         if (fillerLength > WEBRTC_IP_PACKET_SIZE - 12 - 1)
282         {
283             return 0;
284         }
285
286         if (fillerLength == 0)
287         {
288             // do not send an empty packet, will not reach JB
289             fillerLength = 1;
290         }
291
292         // send padded data with the correct sequence number,
293         // timestamp and payload type
294         _rtpSender.BuildRTPheader(dataBuffer, payloadType, false,captureTimestamp, true, true);
295
296         // set filler NALU type
297         dataBuffer[12] = 12;        // NRI field = 0, type 12
298
299         // send codec-valid data; H.264 defines the filler payload bytes as binary 11111111
300         // fill with 0xff
301         memset(dataBuffer + 12 + 1, 0xff, fillerLength-1);
302
303         if( _rtpSender.SendToNetwork(dataBuffer,
304                             fillerLength,
305                             12)<0)
306         {
307
308             return -1;
309         }
310     }
311     return 0;
312 }
313
314 int32_t
315 RTPSenderH264::SendH264SVCRelayPacket(const WebRtcRTPHeader* rtpHeader,
316                                       const uint8_t* incomingRTPPacket,
317                                       const uint16_t incomingRTPPacketSize,
318                                       const uint32_t ssrc,
319                                       const bool higestLayer)
320 {
321     if (rtpHeader->header.sequenceNumber != (uint16_t)(_h264SVCRelaySequenceNumber + 1))
322     {
323          // not continuous; signal the loss by leaving a gap in our sequence numbers
324          _rtpSender.IncrementSequenceNumber();
325     }
326     _h264SVCRelaySequenceNumber = rtpHeader->header.sequenceNumber;
327
328
329     if (rtpHeader->header.timestamp != _h264SVCRelayTimeStamp)
330     {
331         // new frame
332         _h264SVCRelayLayerComplete = false;
333     }
334
335     if (rtpHeader->header.timestamp == _h264SVCRelayTimeStamp &&
336         _h264SVCRelayLayerComplete)
337     {
338         // sanity, end of layer already sent
339         // Could happen for a fragmented packet with missing PACSI info (PACSI packet reordered and received after the packet it belongs to)
340         // a fragmented packet has no layer info set (default info 0)
341         return 0;
342     }
343     _h264SVCRelayTimeStamp = rtpHeader->header.timestamp;
344
345     // re-packetize H.264-SVC packets
346     // we keep the timestamp unchanged
347     // make a copy and only change the SSRC and seqNum
348
349     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
350     memcpy(dataBuffer, incomingRTPPacket, incomingRTPPacketSize);
351
352     // _sequenceNumber initiated in Init()
353     // _ssrc initiated in constructor
354
355     // re-write payload type
356     if(_h264SVCPayloadType != -1)
357     {
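        // RTP byte 1 holds the marker bit (MSB) and the 7-bit payload type;
        // masking with kRtpMarkerBitMask keeps the relayed marker bit and
        // clears the old payload type before the new one is added.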
358         dataBuffer[1] &= kRtpMarkerBitMask;
359         dataBuffer[1] += _h264SVCPayloadType;
360     }
361
362     // _sequenceNumber will not work for re-ordering by NACK from the original sender;
363     // the engine is responsible for this
364     ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // returns the current sequence number and increments it after returning
365     //ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, ssrc);
366
367     // how do we know it's the last relayed packet in a frame?
368     // 1) packets arrive in order, the engine manages that
369     // 2) highest layer that we relay
370     // 3) the end bit is set for the highest layer
371
372     if(higestLayer && rtpHeader->type.Video.codecHeader.H264.relayE)
373     {
374         // set marker bit
375         dataBuffer[1] |= kRtpMarkerBitMask;
376
377         // set relayed layer as complete
378         _h264SVCRelayLayerComplete = true;
379     }
380     return _rtpSender.SendToNetwork(dataBuffer,
381                          incomingRTPPacketSize - rtpHeader->header.headerLength,
382                          rtpHeader->header.headerLength);
383 }
384
385 int32_t
386 RTPSenderH264::SendH264_STAP_A(const FrameType frameType,
387                                 const H264Info* ptrH264Info,
388                                 uint16_t &idxNALU,
389                                 const int8_t payloadType,
390                                 const uint32_t captureTimeStamp,
391                                 bool& switchToFUA,
392                                 int32_t &payloadBytesToSend,
393                                 const uint8_t*& data,
394                                 const uint16_t rtpHeaderLength)
395 {
396     const int32_t H264_NALU_LENGTH = 2;
397
398     uint16_t h264HeaderLength = 1; // normal header length
399     uint16_t maxPayloadLengthSTAP_A = _rtpSender.MaxPayloadLength() -
400                                           FECPacketOverhead() - rtpHeaderLength -
401                                           h264HeaderLength - H264_NALU_LENGTH;
402
403     int32_t dataOffset = rtpHeaderLength + h264HeaderLength;
404     uint8_t NRI = 0;
405     uint16_t payloadBytesInPacket = 0;
406     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
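    // The STAP-A packet built below has the layout (RFC 3984/6184):
    //   [RTP header][STAP-A NAL header, 1 byte, type 24]
    //   [NALU 1 size, 2 bytes][NALU 1] ... [NALU n size, 2 bytes][NALU n]
    // Note: ptrH264Info->NRI[] appears to hold the NRI bits already in their final
    // positions (bits 5-6), so it can be added directly to the NAL type byte.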
407
408     if (ptrH264Info->payloadSize[idxNALU] > maxPayloadLengthSTAP_A)
409     {
410         // we need to fragment the NAL unit; switch to FU-A mode
411         switchToFUA = true;
412     } else
413     {
414         // combine as many NAL units as possible in every IP packet
415         do
416         {
417             if(!_h264SendPPS_SPS)
418             {
419                 // don't send NAL units of type 7 (SPS) or 8 (PPS)
420                 if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
421                 {
422                     payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
423                     data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
424                     idxNALU++;
425                     continue;
426                 }
427             }
428             if(ptrH264Info->payloadSize[idxNALU] + payloadBytesInPacket <= maxPayloadLengthSTAP_A)
429             {
430                 if(ptrH264Info->NRI[idxNALU] > NRI)
431                 {
432                     NRI = ptrH264Info->NRI[idxNALU];
433                 }
434                 // put NAL size into packet
435                 dataBuffer[dataOffset] = (uint8_t)(ptrH264Info->payloadSize[idxNALU] >> 8);
436                 dataOffset++;
437                 dataBuffer[dataOffset] = (uint8_t)(ptrH264Info->payloadSize[idxNALU] & 0xff);
438                 dataOffset++;
439                 // Put payload in packet
440                 memcpy(&dataBuffer[dataOffset], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
441                 dataOffset += ptrH264Info->payloadSize[idxNALU];
442                 data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
443                 payloadBytesInPacket += (uint16_t)(ptrH264Info->payloadSize[idxNALU] + H264_NALU_LENGTH);
444                 payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
445             } else
446             {
447                 // the next NALU does not fit in this packet
448                 break;
449             }
450             idxNALU++;
451         }while(payloadBytesToSend);
452     }
453
454     // sanity
455     // don't send empty packets
456     if (payloadBytesInPacket)
457     {
458         // add RTP header
459         _rtpSender.BuildRTPheader(dataBuffer, payloadType, (payloadBytesToSend==0)?true:false, captureTimeStamp);
460         dataBuffer[rtpHeaderLength] = 24 + NRI; // STAP-A == 24
461         uint16_t payloadLength = payloadBytesInPacket + h264HeaderLength;
462
463         if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength))
464         {
465             return -1;
466         }
467     }
468     return 0;
469 }  // end STAP-A
470
471 // STAP-A for H.264 SVC
472 int32_t
473 RTPSenderH264::SendH264_STAP_A_PACSI(const FrameType frameType,
474                                       const H264Info* ptrH264Info,
475                                       uint16_t &idxNALU,
476                                       const int8_t payloadType,
477                                       const uint32_t captureTimeStamp,
478                                       bool& switchToFUA,
479                                       int32_t &payloadBytesToSend,
480                                       const uint8_t*& data,
481                                       const uint16_t rtpHeaderLength,
482                                       uint16_t& decodingOrderNumber)
483 {
484     const int32_t H264_NALU_LENGTH = 2;
485
486     uint16_t h264HeaderLength = 1; // normal header length
487     uint16_t maxPayloadLengthSTAP_A = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - rtpHeaderLength - h264HeaderLength - H264_NALU_LENGTH;
488     int32_t dataOffset = rtpHeaderLength + h264HeaderLength;
489     uint8_t NRI = 0;
490     uint16_t payloadBytesInPacket = 0;
491     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
492     bool firstNALUNotIDR = true; // assume a delta (non-IDR) first NALU
493
494     // Put PACSI NAL unit into packet
495     int32_t lengthPACSI = 0;
496     uint32_t PACSI_NALlength = ptrH264Info->PACSI[idxNALU].NALlength;
497     if (PACSI_NALlength > maxPayloadLengthSTAP_A)
498     {
499         return -1;
500     }
501     dataBuffer[dataOffset++] = (uint8_t)(PACSI_NALlength >> 8);
502     dataBuffer[dataOffset++] = (uint8_t)(PACSI_NALlength & 0xff);
503
504     // end bit will be updated later, since another NALU in this packet might be the last
505     int32_t lengthPASCINALU = AddH264PACSINALU(true,
506                                                false,
507                                                ptrH264Info->PACSI[idxNALU],
508                                                ptrH264Info->SVCheader[idxNALU],
509                                                decodingOrderNumber,
510                                                dataBuffer,
511                                                dataOffset);
512     if (lengthPASCINALU <= 0)
513     {
514         return -1;
515     }
516     decodingOrderNumber++;
517
518     lengthPACSI = H264_NALU_LENGTH + lengthPASCINALU;
519     maxPayloadLengthSTAP_A -= (uint16_t)lengthPACSI;
520     if (ptrH264Info->payloadSize[idxNALU] > maxPayloadLengthSTAP_A)
521     {
522         // we need to fragment NAL switch to mode FU-A
523         switchToFUA = true;
524         return 0;
525     }
526     if(!ptrH264Info->SVCheader[idxNALU].idr)
527     {
528         firstNALUNotIDR = true;
529     }
530
531     uint32_t layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
532                          (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
533                           ptrH264Info->SVCheader[idxNALU].temporalID;
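    // Pack the dependency/quality/temporal IDs into a single key so that NAL units are
    // only aggregated with others from the same layer; layer == 0 denotes the base layer.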
534
535     {
536         // combine as many NAL units as possible from the same layer in every IP packet
537         // Improvement: we could allow several very small MGS NALUs from different layers to be sent in one packet
538
539         do
540         {
541             if(!_h264SendPPS_SPS)
542             {
543                 // Don't send NAL units of type 7 (SPS) or 8 (PPS),
544                 // they can be signaled out-of-band
545                 if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
546                 {
547                     payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
548                     data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
549                     idxNALU++;
550                     continue;
551                 }
552             }
553             // don't send NAL units of type 6 (SEI message); not allowed when we send it in the PACSI
554             if(ptrH264Info->type[idxNALU] == 6)
555             {
556                 // SEI NALU: don't send, not allowed when we already send it in the PACSI
557                 payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
558                 data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
559                 idxNALU++;
560                 continue;
561             }
562
563             const uint32_t layerNALU = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
564                                            (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
565                                             ptrH264Info->SVCheader[idxNALU].temporalID;
566
567             // we need to break on a new layer
568             if( ptrH264Info->payloadSize[idxNALU] + payloadBytesInPacket <= maxPayloadLengthSTAP_A &&
569                 layerNALU == layer)
570             {
571                 if(ptrH264Info->NRI[idxNALU] > NRI)
572                 {
573                     NRI = ptrH264Info->NRI[idxNALU];
574                 }
575                 // put NAL size into packet
576                 dataBuffer[dataOffset] = (uint8_t)(ptrH264Info->payloadSize[idxNALU] >> 8);
577                 dataOffset++;
578                 dataBuffer[dataOffset] = (uint8_t)(ptrH264Info->payloadSize[idxNALU] & 0xff);
579                 dataOffset++;
580                 // Put payload in packet
581                 memcpy(&dataBuffer[dataOffset], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
582                 dataOffset += ptrH264Info->payloadSize[idxNALU];
583                 data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
584                 payloadBytesInPacket += (uint16_t)(ptrH264Info->payloadSize[idxNALU] + H264_NALU_LENGTH);
585                 payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
586             } else
587             {
588                 // the next NALU does not fit in this packet, or
589                 // it belongs to the next layer
590
591                 // check if we should send this NALU
592                 // based on the layer
593
594                 if(_useHighestSendLayer && layerNALU != layer)
595                 {
596                     // we don't send this NALU because it belongs to a new layer
597                     // check if we should send the next or if this is the last
598                     const uint8_t dependencyQualityID = (ptrH264Info->SVCheader[idxNALU].dependencyID << 4) + ptrH264Info->SVCheader[idxNALU].qualityID;
599
600                     bool highestLayer;
601                     if(SendH264SVCLayer(frameType,
602                                         ptrH264Info->SVCheader[idxNALU].temporalID,
603                                         dependencyQualityID,
604                                         highestLayer) == false)
605                     {
606                         // will trigger markerbit and stop sending this frame
607                         payloadBytesToSend = 0;
608                     }
609                 }
610                 break;
611             }
612             idxNALU++;
613
614         }while(payloadBytesToSend);
615     }
616
617     // sanity, don't send empty packets
618     if (payloadBytesInPacket)
619     {
620         // add RTP header
621         _rtpSender.BuildRTPheader(dataBuffer, payloadType, (payloadBytesToSend==0)?true:false, captureTimeStamp);
622
623         dataBuffer[rtpHeaderLength] = 24 + NRI; // STAP-A == 24
624
625         // NRI for PACSI
626         dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 1] &= 0x1f;   // zero out NRI field
627         dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 1] |= NRI;
628
629         if(ptrH264Info->PACSI[idxNALU-1].E)
630         {
631             // update end bit
632             dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 5] |= 0x01;
633         }
634         if(firstNALUNotIDR)
635         {
636             // we have to check if any of the NALU in this packet is an IDR NALU
637             bool setIBit = false;
638             for(int i = 0; i < idxNALU; i++)
639             {
640                 if(ptrH264Info->SVCheader[i].idr)
641                 {
642                     setIBit = true;
643                     break;
644                 }
645             }
646             if(setIBit)
647             {
648                 // update I bit
649                 dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 2] |= 0x40;
650             }
651         }
652         const uint16_t payloadLength = payloadBytesInPacket + h264HeaderLength + (uint16_t)lengthPACSI;
653         if(-1 == SendVideoPacket(frameType,
654                                  dataBuffer,
655                                  payloadLength,
656                                  rtpHeaderLength,
657                                  layer==0))
658         {
659             return -1;
660         }
661     }
662     return 0;
663 }  // end STAP-A
664
665 int32_t
666 RTPSenderH264::SendH264_FU_A(const FrameType frameType,
667                               const H264Info* ptrH264Info,
668                               uint16_t &idxNALU,
669                               const int8_t payloadType,
670                               const uint32_t captureTimeStamp,
671                               int32_t &payloadBytesToSend,
672                               const uint8_t*& data,
673                               const uint16_t rtpHeaderLength,
674                               uint16_t& decodingOrderNumber,
675                               const bool sendSVCPACSI)
676 {
677
678     // FUA for the rest of the frame
679     uint16_t maxPayloadLength = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - rtpHeaderLength;
680     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
681     uint32_t payloadBytesRemainingInNALU = ptrH264Info->payloadSize[idxNALU];
682
683     bool isBaseLayer=false;
684
685     if(payloadBytesRemainingInNALU > maxPayloadLength)
686     {
687         // we need to fragment NALU
688         const uint16_t H264_FUA_LENGTH = 2; // FU-A H.264 header is 2 bytes
689
690         if(sendSVCPACSI)
691         {
692             SendH264_SinglePACSI(frameType,
693                                  ptrH264Info,
694                                  idxNALU,
695                                  payloadType,
696                                  captureTimeStamp,
697                                  true,
698                                  false);
699
700             uint32_t layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
701                                  (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
702                                   ptrH264Info->SVCheader[idxNALU].temporalID;
703             isBaseLayer=(layer==0);
704         }
705
706         // First packet
707         _rtpSender.BuildRTPheader(dataBuffer,payloadType, false, captureTimeStamp);
708
709         uint16_t maxPayloadLengthFU_A = maxPayloadLength - H264_FUA_LENGTH ;
710         uint8_t fuaIndc = 28 + ptrH264Info->NRI[idxNALU];
711         dataBuffer[rtpHeaderLength] = fuaIndc;                                                     // FU-A indicator
712         dataBuffer[rtpHeaderLength+1] = (uint8_t)(ptrH264Info->type[idxNALU] + 0x80)/*start*/; // FU-A header
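        // FU-A indicator byte: F | NRI | type 28 (FU-A); FU-A header byte: S | E | R | original NAL type.
        // 0x80 sets the S (start) bit on the first fragment; the last fragment sets the E (end) bit (0x40) instead.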
713
714         memcpy(&dataBuffer[rtpHeaderLength + H264_FUA_LENGTH], &data[ptrH264Info->startCodeSize[idxNALU]+1], maxPayloadLengthFU_A);
715         uint16_t payloadLength = maxPayloadLengthFU_A + H264_FUA_LENGTH;
716         if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength, isBaseLayer))
717         {
718             return -1;
719         }
720
721         // +1 is for the NAL type byte that is carried in the FU-A header
722         data += maxPayloadLengthFU_A + 1 + ptrH264Info->startCodeSize[idxNALU];             // inc data ptr
723         payloadBytesToSend -= maxPayloadLengthFU_A+1+ptrH264Info->startCodeSize[idxNALU];
724         payloadBytesRemainingInNALU -= maxPayloadLengthFU_A+1;
725
726         // all non first/last packets
727         while(payloadBytesRemainingInNALU  > maxPayloadLengthFU_A)
728         {
729             if(sendSVCPACSI)
730             {
731                 SendH264_SinglePACSI(frameType,
732                                      ptrH264Info,
733                                      idxNALU,
734                                      payloadType,
735                                      captureTimeStamp,
736                                      false,
737                                      false);
738             }
739
740             // prepare next header
741             _rtpSender.BuildRTPheader(dataBuffer, payloadType, false, captureTimeStamp);
742
743             dataBuffer[rtpHeaderLength] = (uint8_t)fuaIndc;           // FU-A indicator
744             dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU];   // FU-A header
745
746             memcpy(&dataBuffer[rtpHeaderLength+H264_FUA_LENGTH], data, maxPayloadLengthFU_A);
747             payloadLength = maxPayloadLengthFU_A + H264_FUA_LENGTH;
748
749             if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength,isBaseLayer))
750             {
751                 return -1;
752             }
753             data += maxPayloadLengthFU_A; // inc data ptr
754             payloadBytesToSend -= maxPayloadLengthFU_A;
755             payloadBytesRemainingInNALU -= maxPayloadLengthFU_A;
756             dataBuffer[rtpHeaderLength] = fuaIndc;                         // FU-A indicator
757             dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU];    // FU-A header
758         }
759         if(sendSVCPACSI)
760         {
761             SendH264_SinglePACSI(frameType,
762                                  ptrH264Info,
763                                  idxNALU,
764                                  payloadType,
765                                  captureTimeStamp,
766                                  false,
767                                  true); // last packet in NALU
768
769             if(_useHighestSendLayer && idxNALU+1 < ptrH264Info->numNALUs)
770             {
771                 // not the last NALU in the frame
772                 // check whether the next layer should be sent or not
773
774                 // check if we should send the next layer or if this is the last
775                 const uint8_t dependencyQualityID = (ptrH264Info->SVCheader[idxNALU+1].dependencyID << 4) +
776                                                          ptrH264Info->SVCheader[idxNALU+1].qualityID;
777
778                 bool highestLayer;
779                 if(SendH264SVCLayer(frameType,
780                                     ptrH264Info->SVCheader[idxNALU+1].temporalID,
781                                     dependencyQualityID,
782                                     highestLayer) == false)
783                 {
784                     // will trigger markerbit and stop sending this frame
785                     payloadBytesToSend = payloadBytesRemainingInNALU;
786                 }
787             }
788         }
789         // last packet in NALU
790         _rtpSender.BuildRTPheader(dataBuffer, payloadType,(payloadBytesToSend == (int32_t)payloadBytesRemainingInNALU)?true:false, captureTimeStamp);
791         dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU] + 0x40/*stop*/; // FU-A header
792
793         memcpy(&dataBuffer[rtpHeaderLength+H264_FUA_LENGTH], data, payloadBytesRemainingInNALU);
794         payloadLength = (uint16_t)payloadBytesRemainingInNALU + H264_FUA_LENGTH;
795         payloadBytesToSend -= payloadBytesRemainingInNALU;
796         if(payloadBytesToSend != 0)
797         {
798             data += payloadBytesRemainingInNALU; // inc data ptr
799         }
800         idxNALU++;
801         if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength,isBaseLayer))
802         {
803             return -1;
804         }
805     } else
806     {
807         // send NAL unit in single NAL unit mode
808         return SendH264_SingleMode(frameType,
809                                    ptrH264Info,
810                                    idxNALU,
811                                    payloadType,
812                                    captureTimeStamp,
813                                    payloadBytesToSend,
814                                    data,
815                                    rtpHeaderLength,
816                                    sendSVCPACSI);
817     }
818     // end FU-a
819     return 0;
820 }
821
822 int32_t
823 RTPSenderH264::SendH264_SingleMode(const FrameType frameType,
824                                     const H264Info* ptrH264Info,
825                                     uint16_t &idxNALU,
826                                     const int8_t payloadType,
827                                     const uint32_t captureTimeStamp,
828                                     int32_t &payloadBytesToSend,
829                                     const uint8_t*& data,
830                                     const uint16_t rtpHeaderLength,
831                                     uint16_t& decodingOrderNumber,
832                                     const bool sendSVCPACSI)
833 {
834     // no H.264 header length in single NAL unit mode
835     // we use WEBRTC_IP_PACKET_SIZE instead of the configured MTU since it's better to send a fragmented UDP packet than not to send at all
836     const uint16_t maxPayloadLength = WEBRTC_IP_PACKET_SIZE - _rtpSender.PacketOverHead() - FECPacketOverhead() - rtpHeaderLength;
837     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
838     bool isBaseLayer=false;
839
840     if(ptrH264Info->payloadSize[idxNALU] > maxPayloadLength)
841     {
842         return -3;
843     }
844     if(!_h264SendPPS_SPS)
845     {
846         // don't send NAL units of type 7 (SPS) or 8 (PPS)
847         if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
848         {
849             payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
850             data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
851             idxNALU++;
852             return 0;
853         }
854     }
855     if(sendSVCPACSI)
856     {
857         SendH264_SinglePACSI(frameType,
858                              ptrH264Info,
859                              idxNALU,
860                              payloadType,
861                              captureTimeStamp,
862                              true,
863                              true);
864
865         uint32_t layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
866                              (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
867                               ptrH264Info->SVCheader[idxNALU].temporalID;
868         isBaseLayer=(layer==0);
869     }
870
871     // Put payload in packet
872     memcpy(&dataBuffer[rtpHeaderLength], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
873
874     uint16_t payloadBytesInPacket = (uint16_t)ptrH264Info->payloadSize[idxNALU];
875     payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU]; // left to send
876
877     //
878     _rtpSender.BuildRTPheader(dataBuffer,payloadType,(payloadBytesToSend ==0)?true:false, captureTimeStamp);
879
880     dataBuffer[rtpHeaderLength] &= 0x1f; // zero out NRI field
881     dataBuffer[rtpHeaderLength] |= ptrH264Info->NRI[idxNALU]; // nri
882     if(payloadBytesToSend > 0)
883     {
884         data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
885     }
886     idxNALU++;
887     if(-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket, rtpHeaderLength,isBaseLayer))
888     {
889         return -1;
890     }
891     return 0;
892 }
893
894 int32_t
895 RTPSenderH264::SendH264_SinglePACSI(const FrameType frameType,
896                                     const H264Info* ptrH264Info,
897                                      const uint16_t idxNALU,
898                                      const int8_t payloadType,
899                                      const uint32_t captureTimeStamp,
900                                      const bool firstPacketInNALU,
901                                      const bool lastPacketInNALU)
902 {
903     // Send PACSI in single mode
904     uint8_t dataBuffer[WEBRTC_IP_PACKET_SIZE];
905     uint16_t rtpHeaderLength = (uint16_t)_rtpSender.BuildRTPheader(dataBuffer, payloadType,false, captureTimeStamp);
906     int32_t dataOffset = rtpHeaderLength;
907
908     int32_t lengthPASCINALU = AddH264PACSINALU(firstPacketInNALU,
909                                                lastPacketInNALU,
910                                                ptrH264Info->PACSI[idxNALU],
911                                                ptrH264Info->SVCheader[idxNALU],
912                                                decodingOrderNumber,
913                                                dataBuffer,
914                                                dataOffset);
915
916     if (lengthPASCINALU <= 0)
917     {
918         return -1;
919     }
920     decodingOrderNumber++;
921
922     uint16_t payloadBytesInPacket = (uint16_t)lengthPASCINALU;
923
924     // Set payload header (first payload byte co-serves as the payload header)
925     dataBuffer[rtpHeaderLength] &= 0x1f;        // zero out NRI field
926     dataBuffer[rtpHeaderLength] |= ptrH264Info->NRI[idxNALU]; // nri
927
928     const uint32_t layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
929                                (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
930                                 ptrH264Info->SVCheader[idxNALU].temporalID;
931
932     if (-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket, rtpHeaderLength,layer==0))
933     {
934         return -1;
935     }
936     return 0;
937 }
938
939
940
941
942 int32_t
943 RTPSenderH264::SendH264SVC(const FrameType frameType,
944                             const int8_t payloadType,
945                             const uint32_t captureTimeStamp,
946                             const uint8_t* payloadData,
947                             const uint32_t payloadSize,
948                             H264Information& h264Information,
949                             uint16_t& decodingOrderNumber)
950 {
951     int32_t payloadBytesToSend = payloadSize;
952     const uint16_t rtpHeaderLength = _rtpSender.RTPHeaderLength();
953
954     const H264Info* ptrH264Info = NULL;
955     if (h264Information.GetInfo(payloadData,payloadSize, ptrH264Info) == -1)
956     {
957         return -1;
958     }
959     if(_useHighestSendLayer)
960     {
961         // we need to check if we should drop the frame
962         // it could be a temporal layer (aka a temporal frame)
963         const uint8_t dependencyQualityID = (ptrH264Info->SVCheader[0].dependencyID << 4) + ptrH264Info->SVCheader[0].qualityID;
964
965         bool dummyHighestLayer;
966         if(SendH264SVCLayer(frameType,
967                             ptrH264Info->SVCheader[0].temporalID,
968                             dependencyQualityID,
969                             dummyHighestLayer) == false)
970         {
971             // skip sending this frame
972             return 0;
973         }
974     }
975
976     uint16_t idxNALU = 0;
977     while (payloadBytesToSend > 0)
978     {
979         bool switchToFUA = false;
980         if (SendH264_STAP_A_PACSI(frameType,
981                                   ptrH264Info,
982                                   idxNALU,
983                                   payloadType,
984                                   captureTimeStamp,
985                                   switchToFUA,
986                                   payloadBytesToSend,
987                                   payloadData,
988                                   rtpHeaderLength,
989                                   decodingOrderNumber) != 0)
990         {
991             return -1;
992         }
993         if(switchToFUA)
994         {
995             // FU_A for this NALU
996             if (SendH264_FU_A(frameType,
997                               ptrH264Info,
998                               idxNALU,
999                               payloadType,
1000                               captureTimeStamp,
1001                               payloadBytesToSend,
1002                               payloadData,
1003                               rtpHeaderLength,
1004                               true) != 0)
1005             {
1006                 return -1;
1007             }
1008         }
1009     }
1010     return 0;
1011 }
1012
1013 int32_t
1014 RTPSenderH264::SetH264PacketizationMode(const H264PacketizationMode mode)
1015 {
1016     _h264Mode = mode;
1017     return 0;
1018 }
1019
1020 int32_t
1021 RTPSenderH264::SetH264SendModeNALU_PPS_SPS(const bool dontSend)
1022 {
1023     _h264SendPPS_SPS = !dontSend;
1024     return 0;
1025 }
1026
1027 bool
1028 RTPSenderH264::SendH264SVCLayer(const FrameType frameType,
1029                                   const uint8_t temporalID,
1030                                   const uint8_t dependencyQualityID,
1031                                   bool& higestLayer)
1032 {
1033     uint8_t dependencyID  = dependencyQualityID >> 4;
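    // dependencyQualityID packs the dependency ID in the high nibble and the quality ID
    // in the low nibble, e.g. 0x21 -> dependency layer 2, quality 1 (hypothetical values).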
1034
1035     // a key frame is required to switch between dependency layers, but not between quality or temporal layers
1036     if( _highestDependencyLayer != _highestDependencyLayerOld)
1037     {
1038         // we want to switch dependency layer
1039         if(frameType == kVideoFrameKey)
1040         {
1041             // key frame: we can change layer if this packet belongs to the correct layer
1042             if(_highestDependencyLayer > _highestDependencyLayerOld)
1043             {
1044                 // we want to switch up
1045                 // does this packet belong to a new layer?
1046
1047                 if( dependencyID > _highestDependencyLayerOld &&
1048                     dependencyID <= _highestDependencyLayer)
1049                 {
1050                     _highestDependencyLayerOld = dependencyID;
1051                     _highestDependencyQualityIDOld = _highestDependencyQualityID;
1052
1053                     if( dependencyID == _highestDependencyLayer &&
1054                         dependencyQualityID == _highestDependencyQualityID)
1055                     {
1056                         higestLayer = true;
1057                     }
1058                     // relay
1059                     return true;
1060                 }
1061             }
1062             if(_highestDependencyLayer < _highestDependencyLayerOld)
1063             {
1064                 // we want to switch down
1065                 // does this packet belong to a lower layer?
1066                 if( dependencyID <= _highestDependencyLayer)
1067                 {
1068                     _highestDependencyLayerOld = dependencyID;
1069                     _highestDependencyQualityIDOld = _highestDependencyQualityID;
1070                     if( dependencyID == _highestDependencyLayer &&
1071                         dependencyQualityID == _highestDependencyQualityID)
1072                     {
1073                         higestLayer = true;
1074                     }
1075                     // relay
1076                     return true;
1077                 }
1078             }
1079         } else
1080         {
1081             // Delta frame and we are waiting to switch dependency layer
1082             if(_highestDependencyLayer > _highestDependencyLayerOld)
1083             {
1084                 // we want to switch up to a higher dependency layer
1085                 // use old setting until we get a key-frame
1086
1087                 // filter based on the old dependency
1088                 // we could have allowed adding an MGS layer lower than the dependency ID,
1089                 // but then we can't know the highest layer relayed; we assume that the user
1090                 // will add one layer at a time
1091                 if( _highestTemporalLayer < temporalID ||
1092                     _highestDependencyLayerOld < dependencyID ||
1093                     _highestDependencyQualityIDOld < dependencyQualityID)
1094                 {
1095                     // drop
1096                     return false;
1097                 }
1098                 // highest layer based on old
1099                 if( dependencyID == _highestDependencyLayerOld &&
1100                     dependencyQualityID == _highestDependencyQualityIDOld)
1101                 {
1102                     higestLayer = true;
1103                 }
1104             } else
1105             {
1106                 // we want to switch down to a lower dependency layer,
1107                 // use the old setting, done below
1108                 // drop all temporal layers while waiting for the key-frame
1109                 if(temporalID > 0)
1110                 {
1111                     // drop
1112                     return false;
1113                 }
1114                 // we can't drop a lower MGS layer since this one might depend on it,
1115                 // however we can drop MGS layers larger than the dependencyQualityID
1116                 // formed by the old dependency and quality 0
1117                 if( _highestDependencyLayerOld < dependencyID ||
1118                     (_highestDependencyQualityIDOld & 0xf0) < dependencyQualityID)
1119                 {
1120                     // drop
1121                     return false;
1122                 }
1123                 if( dependencyID == _highestDependencyLayerOld &&
1124                     dependencyQualityID == (_highestDependencyQualityIDOld & 0xf0))
1125                 {
1126                     higestLayer = true;
1127                 }
1128             }
1129         }
1130     } else
1131     {
1132         // filter based on current state
1133         if( _highestTemporalLayer < temporalID ||
1134             _highestDependencyLayer < dependencyID ||
1135             _highestDependencyQualityID < dependencyQualityID)
1136         {
1137             // drop
1138             return false;
1139         }
1140         if( dependencyID == _highestDependencyLayer &&
1141             dependencyQualityID == _highestDependencyQualityID)
1142         {
1143             higestLayer = true;
1144         }
1145     }
1146     return true;
1147 }
1148
1149 int32_t
1150 RTPSenderH264::SetHighestSendLayer(const uint8_t dependencyQualityLayer,
1151                                    const uint8_t temporalLayer)
1152 {
1153     const uint8_t dependencyLayer = (dependencyQualityLayer >> 4);
1154
1155     if(_highestDependencyLayerOld != _highestDependencyLayer)
1156     {
1157         // we have not switched to the new dependency yet
1158     } else
1159     {
1160         if(_highestDependencyLayer == dependencyLayer)
1161         {
1162             // no change of dependency
1163             // switch _highestDependencyQualityIDOld now
1164             _highestDependencyQualityIDOld = dependencyQualityLayer;
1165         }else
1166         {
1167             // change of dependency; store the current _highestDependencyQualityID as the old value
1168             _highestDependencyQualityIDOld = _highestDependencyQualityID;
1169         }
1170     }
1171     _useHighestSendLayer = true;
1172     _highestDependencyLayer = dependencyLayer;
1173     _highestDependencyQualityID = dependencyQualityLayer;
1174     _highestTemporalLayer = temporalLayer;
1175     return 0;
1176 }
1177
1178 int32_t
1179 RTPSenderH264::HighestSendLayer(uint8_t& dependencyQualityLayer,
1180                                 uint8_t& temporalLayer)
1181 {
1182     if (!_useHighestSendLayer)
1183     {
1184         // No information set
1185         return -1;
1186     }
1187     dependencyQualityLayer = _highestDependencyQualityID;
1188     temporalLayer = _highestTemporalLayer;
1189     return 0;
1190 }
1191 /*
1192 *   H.264
1193 */
1194 int32_t
1195 RTPSenderH264::SendH264(const FrameType frameType,
1196                         const int8_t payloadType,
1197                         const uint32_t captureTimeStamp,
1198                         const uint8_t* payloadData,
1199                         const uint32_t payloadSize,
1200                         H264Information& h264Information)
1201 {
1202     int32_t payloadBytesToSend = payloadSize;
1203     const uint8_t* data = payloadData;
1204     bool switchToFUA = false;
1205     const uint16_t rtpHeaderLength = _rtpSender.RTPHeaderLength();
1206
1207     const H264Info* ptrH264Info = NULL;
1208     if (h264Information.GetInfo(payloadData,payloadSize, ptrH264Info) == -1)
1209     {
1210         return -1;
1211     }
1212     uint16_t idxNALU = 0;
1213     uint16_t DONCdummy = 0;
1214
1215     while (payloadBytesToSend > 0)
1216     {
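        // H264_SINGLE_NAL_MODE sends one NAL unit per packet; H264_NON_INTERLEAVED_MODE
        // aggregates NAL units with STAP-A and falls back to FU-A fragmentation for large
        // NAL units; the interleaved mode is not supported.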
1217         switch(_h264Mode)
1218         {
1219         case H264_NON_INTERLEAVED_MODE:
1220
1221             if(!switchToFUA)
1222             {
1223                 if(SendH264_STAP_A(frameType,
1224                                    ptrH264Info,
1225                                    idxNALU,
1226                                    payloadType,
1227                                    captureTimeStamp,
1228                                    switchToFUA,
1229                                    payloadBytesToSend,
1230                                    data,
1231                                    rtpHeaderLength) != 0)
1232                 {
1233                     return -1;
1234                 }
1235             }
1236             else
1237             {
1238                 // FUA for the rest of the frame
1239                 if(SendH264_FU_A(frameType,
1240                                  ptrH264Info,
1241                                  idxNALU,
1242                                  payloadType,
1243                                  captureTimeStamp,
1244                                  payloadBytesToSend,
1245                                  data,
1246                                  rtpHeaderLength,
1247                                  DONCdummy) != 0)
1248                 {
1249                     return -1;
1250                 }
1251                 // try to go back to STAP_A
1252                 switchToFUA = false;
1253             }
1254             break;
1255         case H264_SINGLE_NAL_MODE:
1256             {
1257                 // modeSingleU
1258                 // single NAL unit mode
1259                                        ptrH264Info,
1260                                        idxNALU,
1261                                        payloadType,
1262                                        captureTimeStamp,
1263                                        payloadBytesToSend,
1264                                        data,
1265                                        rtpHeaderLength,
1266                                        DONCdummy) != 0)
1267                 {
1268                     return -1;
1269                 }
1270                 break;
1271             }
1272         case H264_INTERLEAVED_MODE:
1273             // not supported
1274             assert(false);
1275             return -1;
1276         }
1277     }
1278     return 0;
1279 }
1280 }  // namespace webrtc