AVCaptureInput *input;
AVCaptureVideoDataOutput *output;
AVCaptureDevice *device;
+ AVCaptureConnection *connection;
CMClockRef inputClock;
dispatch_queue_t mainQueue;
[session addInput:input];
[session addOutput:output];
+ /* retained by session */
+ connection = [[output connections] firstObject];
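+ /* cache the input clock up front instead of lazily in captureOutput */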
+ inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
+
*successPtr = YES;
});
dispatch_sync (mainQueue, ^{
g_assert (![session isRunning]);
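+ /* clear the cached connection and clock before tearing the session down */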
+ connection = nil;
+ inputClock = nil;
+
[session removeInput:input];
[session removeOutput:output];
offset = 0;
latency = GST_CLOCK_TIME_NONE;
- inputClock = nil;
lastSampling = GST_CLOCK_TIME_NONE;
count = 0;
bufQueueLock = nil;
[bufQueue release];
bufQueue = nil;
- inputClock = nil;
if (textureCache)
gst_core_video_texture_cache_free (textureCache);
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
- fromConnection:(AVCaptureConnection *)connection
+ fromConnection:(AVCaptureConnection *)aConnection
{
GstClockTime timestamp, duration;
return;
}
- if (inputClock == nil)
- inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
[self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];
if ([bufQueue count] == BUFFER_QUEUE_SIZE)