Thursday, 29 August 2013

Simple Video Recorder

StopNGo



  • Documentation

========================================================================
DESCRIPTION:

StopNGo is a simple stop-motion animation QuickTime movie recorder that uses AVFoundation.

It creates an AVCaptureSession, AVCaptureDevice, AVCaptureVideoPreviewLayer, and AVCaptureStillImageOutput to preview and capture still images from a video capture device, then re-times each sample buffer to a frame rate of 5 fps and writes the frames to disk using AVAssetWriter.

A frame rate of 5 fps means that five still images result in a one-second-long movie. This value is hard-coded in the sample but may be changed as required.
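
For reference, the frame duration used throughout the sample is simply the inverse of the frame rate, expressed on a 90 kHz timescale. A minimal sketch (the slider wiring mentioned in the comment is hypothetical; the sample itself hard-codes 5 fps):

double fps = 5.0;  // could instead be read from the fpsSlider outlet (hypothetical wiring)
CMTime frameDuration = CMTimeMakeWithSeconds(1.0 / fps, 90000);
// each captured still advances the movie by frameDuration,
// so fps stills yield exactly one second of output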

========================================================================
BUILD REQUIREMENTS:

Xcode 4.2 or later; iOS SDK 5.0 or later.

========================================================================
RUNTIME REQUIREMENTS:

iOS 5.0 or later. This app will not produce camera output when running in the iOS Simulator.



========================================================================
Copyright (C) 2011 Apple Inc. All rights reserved.





  • Code

StopNGoAppDelegate.h

#import <UIKit/UIKit.h>

@interface StopNGoAppDelegate : UIResponder <UIApplicationDelegate>

@property (strong, nonatomic) UIWindow *window;

@end




------------------------------------------------------------
StopNGoAppDelegate.m


#import "StopNGoAppDelegate.h"

@implementation StopNGoAppDelegate

@synthesize window = _window;

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    // Override point for customization after application launch.
    [self.window makeKeyAndVisible];
    return YES;
}


@end

------------------------------------------------------------

StopNGoViewController.h

 
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface StopNGoViewController : UIViewController
{
    BOOL started;
    CMTime frameDuration;
    CMTime nextPTS;
    AVAssetWriter *assetWriter;
    AVAssetWriterInput *assetWriterInput;
    AVCaptureStillImageOutput *stillImageOutput;
    NSURL *outputURL;
}

@property (nonatomic, retain) IBOutlet UIView *previewView;
@property (nonatomic, retain) IBOutlet UISlider *fpsSlider;
@property (nonatomic, retain) IBOutlet UIBarButtonItem *startFinishButton;
@property (nonatomic, retain) IBOutlet UIBarButtonItem *takePictureButton;

- (IBAction)takePicture:(id)sender;
- (IBAction)startStop:(id)sender;
@end

------------------------------------------------------------
StopNGoViewController.m


 
#import "StopNGoViewController.h"
#include <mach/mach_time.h>
#import <AssetsLibrary/AssetsLibrary.h>

@implementation StopNGoViewController

@synthesize previewView, fpsSlider, startFinishButton, takePictureButton;

- (BOOL)setupAVCapture
{
    NSError *error = nil;

    // 5 fps - taking 5 pictures will equal 1 second of video
    frameDuration = CMTimeMakeWithSeconds(1./5., 90000);

    AVCaptureSession *session = [AVCaptureSession new];
    [session setSessionPreset:AVCaptureSessionPresetHigh];

    // Select a video device, make an input
    AVCaptureDevice *backCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (backCamera == nil)
        return NO; // no camera available (e.g. when running in the Simulator)
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
    if (error)
        return NO;
    if ([session canAddInput:input])
        [session addInput:input];

    // Make a still image output
    stillImageOutput = [AVCaptureStillImageOutput new];
    if ([session canAddOutput:stillImageOutput])
        [session addOutput:stillImageOutput];

    // Make a preview layer so we can see the visual output of an AVCaptureSession
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
    [previewLayer setFrame:[previewView bounds]];

    // add the preview layer to the layer hierarchy
    CALayer *rootLayer = [previewView layer];
    [rootLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
    [rootLayer addSublayer:previewLayer];

    // start the capture session running; note this is an async operation,
    // status is provided via notifications such as AVCaptureSessionDidStartRunningNotification/AVCaptureSessionDidStopRunningNotification
    [session startRunning];
    return YES;
}
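
// Aside (not part of Apple's sample): since -startRunning is asynchronous, here is a
// sketch of how one might observe the session-state notifications mentioned above.
// The helper below is an illustrative addition, assuming access to the session object.
static void ObserveSessionState(AVCaptureSession *session)
{
    [[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionDidStartRunningNotification
                                                      object:session
                                                       queue:[NSOperationQueue mainQueue]
                                                  usingBlock:^(NSNotification *note) {
        NSLog(@"capture session started running");
    }];
    [[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification
                                                      object:session
                                                       queue:[NSOperationQueue mainQueue]
                                                  usingBlock:^(NSNotification *note) {
        NSLog(@"capture session runtime error: %@", [[note userInfo] objectForKey:AVCaptureSessionErrorKey]);
    }];
}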

static CGFloat DegreesToRadians(CGFloat degrees) { return degrees * M_PI / 180; }

- (BOOL)setupAssetWriterForURL:(NSURL *)fileURL formatDescription:(CMFormatDescriptionRef)formatDescription
{
    // allocate the writer object with our output file URL
    NSError *error = nil;
    assetWriter = [[AVAssetWriter alloc] initWithURL:fileURL fileType:AVFileTypeQuickTimeMovie error:&error];
    if (error)
        return NO;

    // initialize a new input for video to receive sample buffers for writing
    // passing nil for outputSettings instructs the input to pass through appended samples, doing no processing before they are written
    assetWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:nil];
    [assetWriterInput setExpectsMediaDataInRealTime:YES];
    if ([assetWriter canAddInput:assetWriterInput])
        [assetWriter addInput:assetWriterInput];

    // specify the preferred transform for the output file
    CGFloat rotationDegrees;
    switch ([[UIDevice currentDevice] orientation]) {
        case UIDeviceOrientationPortraitUpsideDown:
            rotationDegrees = -90.;
            break;
        case UIDeviceOrientationLandscapeLeft: // no rotation
            rotationDegrees = 0.;
            break;
        case UIDeviceOrientationLandscapeRight:
            rotationDegrees = 180.;
            break;
        case UIDeviceOrientationPortrait:
        case UIDeviceOrientationUnknown:
        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
        default:
            rotationDegrees = 90.;
            break;
    }
    CGFloat rotationRadians = DegreesToRadians(rotationDegrees);
    [assetWriterInput setTransform:CGAffineTransformMakeRotation(rotationRadians)];

    // initiate sample writing at time 0
    nextPTS = kCMTimeZero;
    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:nextPTS];

    return YES;
}
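
// Aside (not part of Apple's sample): instead of passing nil for outputSettings
// (pass-through), the writer input could be asked to re-encode appended frames
// itself. A sketch; the H.264 codec and 1280x720 dimensions here are illustrative
// assumptions, not values taken from the sample:
static NSDictionary *CompressedVideoSettings(void)
{
    // pass this dictionary as outputSettings when creating the AVAssetWriterInput
    // to have appended frames compressed as H.264 at the given dimensions
    return [NSDictionary dictionaryWithObjectsAndKeys:
            AVVideoCodecH264, AVVideoCodecKey,
            [NSNumber numberWithInt:1280], AVVideoWidthKey,
            [NSNumber numberWithInt:720], AVVideoHeightKey,
            nil];
}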

- (IBAction)takePicture:(id)sender
{
    // initiate a still image capture and return immediately;
    // the completionHandler is called when a sample buffer has been captured
    AVCaptureConnection *stillImageConnection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *__strong error) {

        // set up the AVAssetWriter using the format description from the first sample buffer captured
        if ( !assetWriter ) {
            outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%llu.mov", NSTemporaryDirectory(), mach_absolute_time()]];
            //NSLog(@"Writing movie to \"%@\"", outputURL);
            CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(imageDataSampleBuffer);
            if ( NO == [self setupAssetWriterForURL:outputURL formatDescription:formatDescription] )
                return;
        }

        // re-time the sample buffer - in this sample frameDuration is set to 5 fps
        CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid;
        timingInfo.duration = frameDuration;
        timingInfo.presentationTimeStamp = nextPTS;
        CMSampleBufferRef sbufWithNewTiming = NULL;
        OSStatus err = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
                                                             imageDataSampleBuffer,
                                                             1, // numSampleTimingEntries
                                                             &timingInfo,
                                                             &sbufWithNewTiming);
        if (err)
            return;

        // append the sample buffer if we can and increment the presentation time
        if ( [assetWriterInput isReadyForMoreMediaData] ) {
            if ([assetWriterInput appendSampleBuffer:sbufWithNewTiming]) {
                nextPTS = CMTimeAdd(frameDuration, nextPTS);
            }
            else {
                NSError *error = [assetWriter error];
                NSLog(@"failed to append sbuf: %@", error);
            }
        }

        // release the copy of the sample buffer we made
        CFRelease(sbufWithNewTiming);
    }];
}

- (void)saveMovieToCameraRoll
{
    // save the movie to the camera roll
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    //NSLog(@"writing \"%@\" to photos album", outputURL);
    [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"assets library failed (%@)", error);
        }
        else {
            [[NSFileManager defaultManager] removeItemAtURL:outputURL error:&error];
            if (error)
                NSLog(@"Couldn't remove temporary movie file \"%@\"", outputURL);
        }
        outputURL = nil;
    }];
}

- (IBAction)startStop:(id)sender
{
    if (started) {
        if (assetWriter) {
            [assetWriterInput markAsFinished];
            [assetWriter finishWriting];
            assetWriterInput = nil;
            assetWriter = nil;
            [self saveMovieToCameraRoll];
        }
        [sender setTitle:@"Start"];
        [takePictureButton setEnabled:NO];
    }
    else {
        [sender setTitle:@"Finish"];
        [takePictureButton setEnabled:YES];
    }
    started = !started;
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc that aren't in use.
}

#pragma mark - View lifecycle

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self setupAVCapture];
// Do any additional setup after loading the view, typically from a nib.
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    // Return YES for supported orientations
    return ( UIInterfaceOrientationPortrait == interfaceOrientation );
}


@end

------------------------------------------------------------

 Copyright (C) 2011 Apple Inc. All Rights Reserved.


