/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla code.
 *
 * The Initial Developer of the Original Code is the Mozilla Foundation.
 * Portions created by the Initial Developer are Copyright (C) 2010
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *  Chris Double <chris.double@double.co.nz>
 *  Chris Pearce <chris@pearce.org.nz>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

#include "nsBuiltinDecoder.h"
#include "nsBuiltinDecoderReader.h"
#include "nsBuiltinDecoderStateMachine.h"
#include "VideoUtils.h"

#include "mozilla/mozalloc.h"
#include "mozilla/StandardInteger.h"

using namespace mozilla;
using mozilla::layers::ImageContainer;
using mozilla::layers::PlanarYCbCrImage;

// Verify these values are sane. Once we've checked the frame sizes, we then
// can do less integer overflow checking.
PR_STATIC_ASSERT(MAX_VIDEO_WIDTH < PlanarYCbCrImage::MAX_DIMENSION);
PR_STATIC_ASSERT(MAX_VIDEO_HEIGHT < PlanarYCbCrImage::MAX_DIMENSION);
PR_STATIC_ASSERT(PlanarYCbCrImage::MAX_DIMENSION < PR_UINT32_MAX / PlanarYCbCrImage::MAX_DIMENSION);

// Un-comment to enable logging of seek bisections.
//#define SEEK_LOGGING

#ifdef PR_LOGGING
extern PRLogModuleInfo* gBuiltinDecoderLog;
#define LOG(type, msg) PR_LOG(gBuiltinDecoderLog, type, msg)
#ifdef SEEK_LOGGING
#define SEEK_LOG(type, msg) PR_LOG(gBuiltinDecoderLog, type, msg)
#else
#define SEEK_LOG(type, msg)
#endif
#else
#define LOG(type, msg)
#define SEEK_LOG(type, msg)
#endif

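// Returns true if the plane's dimensions fit within the limits asserted
// above and its stride is positive, i.e. the plane is safe to copy from.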
static bool
ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane)
{
  return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aPlane.mStride > 0;
}

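// Sanity-check the frame, picture region and display size reported in the
// stream headers: every dimension must be non-zero, no larger than
// PlanarYCbCrImage::MAX_DIMENSION, and small enough that width * height
// stays within MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT.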
bool
nsVideoInfo::ValidateVideoRegion(const nsIntSize& aFrame,
                                 const nsIntRect& aPicture,
                                 const nsIntSize& aDisplay)
{
  return
    aFrame.width <= PlanarYCbCrImage::MAX_DIMENSION &&
    aFrame.height <= PlanarYCbCrImage::MAX_DIMENSION &&
    aFrame.width * aFrame.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
    aFrame.width * aFrame.height != 0 &&
    aPicture.width <= PlanarYCbCrImage::MAX_DIMENSION &&
    aPicture.x < PlanarYCbCrImage::MAX_DIMENSION &&
    aPicture.x + aPicture.width < PlanarYCbCrImage::MAX_DIMENSION &&
    aPicture.height <= PlanarYCbCrImage::MAX_DIMENSION &&
    aPicture.y < PlanarYCbCrImage::MAX_DIMENSION &&
    aPicture.y + aPicture.height < PlanarYCbCrImage::MAX_DIMENSION &&
    aPicture.width * aPicture.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
    aPicture.width * aPicture.height != 0 &&
    aDisplay.width <= PlanarYCbCrImage::MAX_DIMENSION &&
    aDisplay.height <= PlanarYCbCrImage::MAX_DIMENSION &&
    aDisplay.width * aDisplay.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
    aDisplay.width * aDisplay.height != 0;
}

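// Creates a VideoData wrapping a new PLANAR_YCBCR image allocated from
// aContainer. The YCbCr planes in aBuffer are validated against the picture
// region before being copied, so a nsnull return means either allocation
// failed or the input was rejected as invalid.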
VideoData* VideoData::Create(nsVideoInfo& aInfo,
                             ImageContainer* aContainer,
                             PRInt64 aOffset,
                             PRInt64 aTime,
                             PRInt64 aEndTime,
                             const YCbCrBuffer& aBuffer,
                             bool aKeyframe,
                             PRInt64 aTimecode,
                             nsIntRect aPicture)
{
  if (!aContainer) {
    return nsnull;
  }

  // The following situation should never happen unless there is a bug
  // in the decoder.
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    NS_ERROR("C planes with different sizes");
    return nsnull;
  }

  // The following situations could be triggered by invalid input.
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    NS_WARNING("Empty picture rect");
    return nsnull;
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) || !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    NS_WARNING("Invalid plane size");
    return nsnull;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.valid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.valid() || yLimit.value() > aBuffer.mPlanes[0].mHeight)
  {
    // The specified picture dimensions can't be contained inside the video
    // frame; we'll stomp memory if we try to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return nsnull;
  }

  nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                       aTime,
                                       aEndTime,
                                       aKeyframe,
                                       aTimecode,
                                       aInfo.mDisplay));
  // Currently our decoder only knows how to output to PLANAR_YCBCR
  // format.
  Image::Format format = Image::PLANAR_YCBCR;
  v->mImage = aContainer->CreateImage(&format, 1);
  if (!v->mImage) {
    return nsnull;
  }
  NS_ASSERTION(v->mImage->GetFormat() == Image::PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());

  PlanarYCbCrImage::Data data;
  data.mYChannel = aBuffer.mPlanes[0].mData;
  data.mYSize = gfxIntSize(aBuffer.mPlanes[0].mWidth, aBuffer.mPlanes[0].mHeight);
  data.mYStride = aBuffer.mPlanes[0].mStride;
  data.mCbChannel = aBuffer.mPlanes[1].mData;
  data.mCrChannel = aBuffer.mPlanes[2].mData;
  data.mCbCrSize = gfxIntSize(aBuffer.mPlanes[1].mWidth, aBuffer.mPlanes[1].mHeight);
  data.mCbCrStride = aBuffer.mPlanes[1].mStride;
  data.mPicX = aPicture.x;
  data.mPicY = aPicture.y;
  data.mPicSize = gfxIntSize(aPicture.width, aPicture.height);
  data.mStereoMode = aInfo.mStereoMode;

  videoImage->SetData(data); // Copies buffer
  return v.forget();
}

nsBuiltinDecoderReader::nsBuiltinDecoderReader(nsBuiltinDecoder* aDecoder)
  : mDecoder(aDecoder)
{
  MOZ_COUNT_CTOR(nsBuiltinDecoderReader);
}

nsBuiltinDecoderReader::~nsBuiltinDecoderReader()
{
  ResetDecode();
  MOZ_COUNT_DTOR(nsBuiltinDecoderReader);
}

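// Discards any decoded samples which are still queued but not yet consumed.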
nsresult nsBuiltinDecoderReader::ResetDecode()
{
  nsresult res = NS_OK;

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  return res;
}

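// Decodes forward until the first frame of each available stream has been
// seen and reports the earlier of the two start times via aOutStartTime.
// Returns the first video frame, if any, so the caller can display it.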
VideoData* nsBuiltinDecoderReader::FindStartTime(PRInt64& aOutStartTime)
{
  NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
               "Should be on state machine or decode thread.");

  // Extract the start times of the bitstreams in order to calculate
  // the duration.
  PRInt64 videoStartTime = INT64_MAX;
  PRInt64 audioStartTime = INT64_MAX;
  VideoData* videoData = nsnull;

  if (HasVideo()) {
    videoData = DecodeToFirstData(&nsBuiltinDecoderReader::DecodeVideoFrame,
                                  mVideoQueue);
    if (videoData) {
      videoStartTime = videoData->mTime;
    }
  }
  if (HasAudio()) {
    AudioData* audioData = DecodeToFirstData(&nsBuiltinDecoderReader::DecodeAudioData,
                                             mAudioQueue);
    if (audioData) {
      audioStartTime = audioData->mTime;
    }
  }

  PRInt64 startTime = NS_MIN(videoStartTime, audioStartTime);
  if (startTime != INT64_MAX) {
    aOutStartTime = startTime;
  }

  return videoData;
}

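// Repeatedly calls aDecodeFn (DecodeVideoFrame or DecodeAudioData) until
// either aQueue holds at least one sample or the decode function reports end
// of stream. Returns nsnull if the decoder shuts down while we're decoding.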
template<class Data>
Data* nsBuiltinDecoderReader::DecodeToFirstData(DecodeFn aDecodeFn,
                                                MediaQueue<Data>& aQueue)
{
  bool eof = false;
  while (!eof && aQueue.GetSize() == 0) {
    {
      ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
      if (mDecoder->GetDecodeState() == nsDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
        return nsnull;
      }
    }
    eof = !(this->*aDecodeFn)();
  }
  Data* d = nsnull;
  return (d = aQueue.PeekFront()) ? d : nsnull;
}

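// Decodes and discards samples until the seek target is reached: video
// frames ending at or before aTarget are dropped (keeping the last frame if
// we hit end of stream), and the audio block containing aTarget is trimmed
// so that playback resumes exactly at the target time.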
nsresult nsBuiltinDecoderReader::DecodeToTarget(PRInt64 aTarget)
{
  // Decode forward to the target frame. Start with video, if we have it.
  if (HasVideo()) {
    bool eof = false;
    PRInt64 startTime = -1;
    nsAutoPtr<VideoData> video;
    while (HasVideo() && !eof) {
      while (mVideoQueue.GetSize() == 0 && !eof) {
        bool skip = false;
        eof = !DecodeVideoFrame(skip, 0);
        {
          ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
          if (mDecoder->GetDecodeState() == nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
            return NS_ERROR_FAILURE;
          }
        }
      }
      if (mVideoQueue.GetSize() == 0) {
        // Hit end of file, we want to display the last frame of the video.
        if (video) {
          mVideoQueue.PushFront(video.forget());
        }
        break;
      }
      video = mVideoQueue.PeekFront();
      // If the frame end time is less than the seek target, we won't want
      // to display this frame after the seek, so discard it.
      if (video && video->mEndTime <= aTarget) {
        if (startTime == -1) {
          startTime = video->mTime;
        }
        mVideoQueue.PopFront();
      } else {
        video.forget();
        break;
      }
    }
    {
      ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
      if (mDecoder->GetDecodeState() == nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
        return NS_ERROR_FAILURE;
      }
    }
    LOG(PR_LOG_DEBUG, ("First video frame after decode is %lld", startTime));
  }

  if (HasAudio()) {
    // Decode audio forward to the seek target.
    bool eof = false;
    while (HasAudio() && !eof) {
      while (!eof && mAudioQueue.GetSize() == 0) {
        eof = !DecodeAudioData();
        {
          ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
          if (mDecoder->GetDecodeState() == nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
            return NS_ERROR_FAILURE;
          }
        }
      }
      const AudioData* audio = mAudioQueue.PeekFront();
      if (!audio)
        break;
      CheckedInt64 startFrame = UsecsToFrames(audio->mTime, mInfo.mAudioRate);
      CheckedInt64 targetFrame = UsecsToFrames(aTarget, mInfo.mAudioRate);
      if (!startFrame.valid() || !targetFrame.valid()) {
        return NS_ERROR_FAILURE;
      }
      if (startFrame.value() + audio->mFrames <= targetFrame.value()) {
        // Our seek target lies after the frames in this AudioData. Pop it
        // off the queue, and keep decoding forwards.
        delete mAudioQueue.PopFront();
        audio = nsnull;
        continue;
      }
      if (startFrame.value() > targetFrame.value()) {
        // The seek target doesn't lie in the audio block just after the last
        // audio frames we've seen which were before the seek target. This
        // could have been the first audio data we've seen after seek, i.e. the
        // seek terminated after the seek target in the audio stream. Just
        // abort the audio decode-to-target, the state machine will play
        // silence to cover the gap. Typically this happens in poorly muxed
        // files.
        NS_WARNING("Audio not synced after seek, maybe a poorly muxed file?");
        break;
      }

      // The seek target lies somewhere in this AudioData's frames, strip off
      // any frames which lie before the seek target, so we'll begin playback
      // exactly at the seek target.
      NS_ASSERTION(targetFrame.value() >= startFrame.value(),
                   "Target must be at or after data start.");
      NS_ASSERTION(targetFrame.value() < startFrame.value() + audio->mFrames,
                   "Data must end after target.");

      PRInt64 framesToPrune = targetFrame.value() - startFrame.value();
      if (framesToPrune > audio->mFrames) {
        // We've messed up somehow. Don't try to trim frames; the |frames|
        // variable below will overflow.
        NS_WARNING("Can't prune more frames than we have!");
        break;
      }
      PRUint32 frames = audio->mFrames - static_cast<PRUint32>(framesToPrune);
      PRUint32 channels = audio->mChannels;
      nsAutoArrayPtr<AudioDataValue> audioData(new AudioDataValue[frames * channels]);
      memcpy(audioData.get(),
             audio->mAudioData.get() + (framesToPrune * channels),
             frames * channels * sizeof(AudioDataValue));
      CheckedInt64 duration = FramesToUsecs(frames, mInfo.mAudioRate);
      if (!duration.valid()) {
        return NS_ERROR_FAILURE;
      }
      nsAutoPtr<AudioData> data(new AudioData(audio->mOffset,
                                              aTarget,
                                              duration.value(),
                                              frames,
                                              audioData.forget(),
                                              channels));
      delete mAudioQueue.PopFront();
      mAudioQueue.PushFront(data.forget());
      break;
    }
  }
  return NS_OK;
}