@cyrildiagne
Created November 1, 2012 16:44
iOS native video overlay inside a Unity3d app
/*
add this at the end of prepareRunLoop
*/
[_displayLink setPaused:YES];
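/*
for context, this is roughly where that line lands in Unity's generated
AppController.mm - the body of prepareRunLoop differs between Unity
versions, so treat this as a sketch rather than the literal method:
*/
- (void) prepareRunLoop
{
    // ... Unity's existing run-loop / CADisplayLink setup ...

    // keep Unity's render loop paused until the intro video is dismissed
    [_displayLink setPaused:YES];
}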
/*
add this at the end of (void) applicationDidBecomeActive:
*/
video = [[InteractiveVideo alloc] initWithFrame:_window.bounds];
video.delegate = self;
[UnityGetGLView() addSubview:video];
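/*
these snippets also assume two additions to AppController.mm that the gist
leaves implicit: the header import and a variable holding the overlay
(a minimal sketch - only the name "video" has to match the snippets):
*/
#import "InteractiveVideo.h"

static InteractiveVideo* video = nil;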
/*
add this somewhere in the AppController implementation
*/
-(void) videoEnded
{
    [video removeFromSuperview];
    [video removeObservers];
    [video release];
    video = nil;
    [_displayLink setPaused:NO];
}
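/*
optionally, a Unity-side script can be notified from the same callback via
Unity's messaging bridge. The GameObject name "Intro" and the method name
"OnVideoEnded" below are assumptions for illustration - the receiving C#
script just needs a public method with that name taking a string:
*/
// inside -videoEnded, after restarting the display link:
UnitySendMessage("Intro", "OnVideoEnded", "");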
//
// InteractiveVideo.h
// Unity-iPhone
//
// Created by kikko on 01/11/12.
//
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@class AppController;
@interface InteractiveVideo : UIView {
    AVPlayer* _player;
    AVPlayerLayer* _playerLayer;
    id delegate;
    id periodicObserver;
    id boundaryObserver;
}

@property (assign) id delegate;

-(void) removeObservers;

@end
//
// InteractiveVideo.m
// Unity-iPhone
//
// Created by kikko on 01/11/12.
//
//
#import "InteractiveVideo.h"
#import "AppController.h"
@implementation InteractiveVideo
@synthesize delegate;
- (id)initWithFrame:(CGRect)frame
{
    // manually swap orientation
    NSLog(@"%f %f %f %f", frame.origin.x, frame.origin.y, frame.size.width, frame.size.height);
    double w = frame.size.width;
    frame.size.width = frame.size.height;
    frame.size.height = w;

    self = [super initWithFrame:frame];
    if (self) {
        // get asset path (a bundle path is already absolute, no tilde expansion needed)
        NSString* videoPath = [[NSBundle mainBundle] pathForResource:@"intro" ofType:@"m4v"];
        NSURL* videoUrl = [NSURL fileURLWithPath:videoPath];

        // create player and listen for status changes
        _player = [[AVPlayer alloc] initWithURL:videoUrl];
        [_player addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:NULL];

        // observe progress once per second
        periodicObserver = [[_player addPeriodicTimeObserverForInterval: CMTimeMakeWithSeconds(1.0, 600)
                                                                  queue: NULL
                                                             usingBlock: ^(CMTime time){ [self updateUI]; }] retain];

        // observe timed events - note: a timescale of 600 is needed here,
        // with a timescale of 1 these fractional seconds would be rounded
        // to whole seconds
        NSMutableArray* timedEvents = [NSMutableArray arrayWithObjects:
                                       [NSValue valueWithCMTime:CMTimeMakeWithSeconds(10.76, 600)],
                                       [NSValue valueWithCMTime:CMTimeMakeWithSeconds(17.24, 600)],
                                       [NSValue valueWithCMTime:CMTimeMakeWithSeconds(27.24, 600)],
                                       nil];
        boundaryObserver = [[_player addBoundaryTimeObserverForTimes:timedEvents
                                                               queue:NULL
                                                          usingBlock:^{ [self processTimedEvent]; }] retain];

        // observe end of video
        _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
        [[NSNotificationCenter defaultCenter] addObserver: self
                                                 selector: @selector(videoEnded:)
                                                     name: AVPlayerItemDidPlayToEndTimeNotification
                                                   object: [_player currentItem]];

        // create player layer
        _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
        _playerLayer.frame = self.bounds;
        _playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
        [self.layer addSublayer:_playerLayer];

        [self setMultipleTouchEnabled:YES];
    }
    return self;
}
// observation callback for the video player
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if(object==_player) {
        if(_player.status==AVPlayerStatusFailed) {
            NSLog(@"fail!");
        } else if(_player.status==AVPlayerStatusReadyToPlay) {
            NSLog(@"success!");
            // asset is loaded and playable: start playback
            [_player play];
        } else if(_player.status==AVPlayerStatusUnknown) {
            NSLog(@"not ready!");
        }
    } else {
        // forward notifications we didn't register for
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
-(void)updateUI {
    //float progress = CMTimeGetSeconds(_player.currentTime)/CMTimeGetSeconds(_player.currentItem.duration);
    //NSLog(@"video percent : %f", progress);
}
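/*
a sketch of what updateUI could do with that progress value if the periodic
observer should drive a progress bar - the _progressView ivar is
hypothetical, not part of this gist:
*/
// inside -updateUI:
float progress = CMTimeGetSeconds(_player.currentTime)
               / CMTimeGetSeconds(_player.currentItem.duration);
_progressView.progress = progress; // e.g. a UIProgressView added as a subview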
-(void)processTimedEvent {
    //float seconds = _player.currentTime.value / _player.currentTime.timescale;
    // pause at each timed event; playback resumes on the next touch
    [_player pause];
}
-(void)videoEnded:(NSNotification *)notification {
    if(delegate) {
        [delegate performSelectorOnMainThread:@selector(videoEnded) withObject:nil waitUntilDone:NO];
    }
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
    [super touchesEnded:touches withEvent:event];
    // any touch resumes playback after a timed-event pause
    [_player play];
    /*
    UITouch *touch = [touches anyObject];
    CGPoint currentPosition = [touch locationInView:self];
    CGFloat deltaX = fabsf(gestureStartPoint.x - currentPosition.x); // will always be positive
    CGFloat deltaY = fabsf(gestureStartPoint.y - currentPosition.y); // will always be positive
    if (deltaY == 0 && deltaX == 0) {
    }
    */
}
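/*
the commented block above reads a gestureStartPoint that the gist never
declares; if tap-vs-drag filtering is wanted, a minimal sketch of the
missing piece (gestureStartPoint would be an extra CGPoint ivar on
InteractiveVideo):
*/
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    [super touchesBegan:touches withEvent:event];
    // remember where the gesture started so touchesEnded can measure movement
    gestureStartPoint = [[touches anyObject] locationInView:self];
}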
-(void) removeObservers
{
    [_player removeTimeObserver:periodicObserver];
    [periodicObserver release];
    periodicObserver = nil;
    [_player removeTimeObserver:boundaryObserver];
    [boundaryObserver release];
    boundaryObserver = nil;
    // balance the KVO registration made in initWithFrame
    [_player removeObserver:self forKeyPath:@"status"];
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
-(void)dealloc {
    NSLog(@"video dealloc");
    [self removeFromSuperview];
    if(periodicObserver) {
        [self removeObservers];
    }
    [_playerLayer removeFromSuperlayer];
    [_player release];
    [super dealloc];
}
@end
@esnho commented Apr 4, 2016

This looks cool, but I've never developed for native iOS. Do you have an example project?

@ajaysinghthakur commented

I would also like to have a working sample.
