Skip to content
This repository was archived by the owner on Jun 15, 2019. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
72 changes: 51 additions & 21 deletions AVPlayerExample/ViewController.m
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ typedef NS_ENUM(NSUInteger, ViewControllerState) {
NSString *const kVideoMovURL = @"https://s3-us-west-1.amazonaws.com/avplayervideo/What+Is+Cloud+Communications.mov";
NSString *const kStatusKey = @"status";

@interface ViewController () <UITextFieldDelegate, TVIRemoteParticipantDelegate, TVIRoomDelegate, TVIVideoViewDelegate, TVICameraCapturerDelegate>
@interface ViewController () <UITextFieldDelegate, TVIRemoteParticipantDelegate, TVIRoomDelegate, TVIVideoViewDelegate, TVICameraSourceDelegate>

// Configure access token manually for testing in `viewDidLoad`, if desired! Create one manually in the console.
@property (nonatomic, strong) NSString *accessToken;
Expand All @@ -41,7 +41,7 @@ @interface ViewController () <UITextFieldDelegate, TVIRemoteParticipantDelegate,

@property (nonatomic, strong) TVIRoom *room;
@property (nonatomic, strong) TVIDefaultAudioDevice *audioDevice;
@property (nonatomic, strong) TVICameraCapturer *camera;
@property (nonatomic, strong) TVICameraSource *camera;
@property (nonatomic, strong) TVILocalVideoTrack *localVideoTrack;
@property (nonatomic, strong) TVILocalAudioTrack *localAudioTrack;
@property (nonatomic, strong) TVIRemoteParticipant *remoteParticipant;
Expand Down Expand Up @@ -157,31 +157,60 @@ - (IBAction)micButtonPressed:(id)sender {
#pragma mark - Private

- (void)startPreview {
    // TVICameraSource is not supported with the Simulator.
    if ([PlatformUtils isSimulator]) {
        [self.previewView removeFromSuperview];
        return;
    }

    AVCaptureDevice *frontCamera = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionFront];
    AVCaptureDevice *backCamera = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionBack];

    if (frontCamera != nil || backCamera != nil) {
        self.camera = [[TVICameraSource alloc] initWithDelegate:self];
        self.localVideoTrack = [TVILocalVideoTrack trackWithSource:self.camera
                                                           enabled:YES
                                                              name:@"Camera"];
        // Add renderer to video track for local preview
        [self.localVideoTrack addRenderer:self.previewView];

        [self logMessage:@"Video track created"];

        // Only offer tap-to-flip when both camera positions are available.
        if (frontCamera != nil && backCamera != nil) {
            UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self
                                                                                  action:@selector(flipCamera)];
            [self.previewView addGestureRecognizer:tap];
        }

        // Prefer the front camera, falling back to the back camera.
        [self.camera startCaptureWithDevice:frontCamera != nil ? frontCamera : backCamera
                                 completion:^(AVCaptureDevice *device, TVIVideoFormat *format, NSError *error) {
                                     if (error != nil) {
                                         // NSError.code is NSInteger; format with %ld and an explicit (long) cast.
                                         [self logMessage:[NSString stringWithFormat:@"Start capture failed with error.\ncode = %ld error = %@", (long)error.code, error.localizedDescription]];
                                     } else {
                                         // Mirror the local preview only for the front-facing camera.
                                         self.previewView.mirror = (device.position == AVCaptureDevicePositionFront);
                                     }
                                 }];
    } else {
        [self logMessage:@"No front or back capture device found!"];
    }
}

// Toggles capture between the front and back cameras; updates preview mirroring on success.
- (void)flipCamera {
    AVCaptureDevice *newDevice = nil;

    if (self.camera.device.position == AVCaptureDevicePositionFront) {
        newDevice = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionBack];
    } else {
        newDevice = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionFront];
    }

    // The opposite-facing device may be absent (e.g. single-camera hardware); do nothing then.
    if (newDevice != nil) {
        [self.camera selectCaptureDevice:newDevice completion:^(AVCaptureDevice *device, TVIVideoFormat *format, NSError *error) {
            if (error != nil) {
                // NSError.code is NSInteger; format with %ld and an explicit (long) cast.
                [self logMessage:[NSString stringWithFormat:@"Error selecting capture device.\ncode = %ld error = %@", (long)error.code, error.localizedDescription]];
            } else {
                // Mirror the local preview only for the front-facing camera.
                self.previewView.mirror = (device.position == AVCaptureDevicePositionFront);
            }
        }];
    }
}

Expand Down Expand Up @@ -536,18 +565,19 @@ - (void)videoView:(TVIVideoView *)view videoDimensionsDidChange:(CMVideoDimensio
[self.view setNeedsLayout];
}

#pragma mark - TVICameraCapturerDelegate

- (void)cameraCapturer:(TVICameraCapturer *)capturer didStartWithSource:(TVICameraCaptureSource)source {
self.previewView.mirror = (source == TVICameraCaptureSourceFrontCamera);

self.localVideoTrack.enabled = YES;
#pragma mark - TVICameraSourceDelegate
// The camera source failed irrecoverably; log the failure for diagnostics.
- (void)cameraSource:(TVICameraSource *)source didFailWithError:(NSError *)error {
    // NSError.code is NSInteger; format with %ld and an explicit (long) cast.
    [self logMessage:[NSString stringWithFormat:@"Capture failed with error.\ncode = %ld error = %@", (long)error.code, error.localizedDescription]];
}

- (void)cameraCapturerWasInterrupted:(TVICameraCapturer *)capturer reason:(AVCaptureSessionInterruptionReason)reason {
- (void)cameraSourceWasInterrupted:(TVICameraSource *)source reason:(AVCaptureSessionInterruptionReason)reason {
    // We will disable `self.localVideoTrack` when the TVICameraSource is interrupted.
    // This prevents other Participants from seeing a frozen frame while the Client is backgrounded.
    self.localVideoTrack.enabled = NO;
}

- (void)cameraSourceInterruptionEnded:(TVICameraSource *)source {
    // Capture has resumed; re-enable the local track so remote Participants receive frames again.
    self.localVideoTrack.enabled = YES;
}

@end
70 changes: 47 additions & 23 deletions ObjCVideoQuickstart/ViewController.m
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,15 @@

#import <TwilioVideo/TwilioVideo.h>

@interface ViewController () <UITextFieldDelegate, TVIRemoteParticipantDelegate, TVIRoomDelegate, TVIVideoViewDelegate, TVICameraCapturerDelegate>
@interface ViewController () <UITextFieldDelegate, TVIRemoteParticipantDelegate, TVIRoomDelegate, TVIVideoViewDelegate, TVICameraSourceDelegate>

// Configure access token manually for testing in `ViewDidLoad`, if desired! Create one manually in the console.
@property (nonatomic, strong) NSString *accessToken;
@property (nonatomic, strong) NSString *tokenUrl;

#pragma mark Video SDK components

@property (nonatomic, strong) TVICameraCapturer *camera;
@property (nonatomic, strong) TVICameraSource *camera;
@property (nonatomic, strong) TVILocalVideoTrack *localVideoTrack;
@property (nonatomic, strong) TVILocalAudioTrack *localAudioTrack;
@property (nonatomic, strong) TVIRemoteParticipant *remoteParticipant;
Expand Down Expand Up @@ -116,36 +116,60 @@ - (IBAction)micButtonPressed:(id)sender {
#pragma mark - Private

- (void)startPreview {
    // TVICameraSource is not supported with the Simulator.
    if ([PlatformUtils isSimulator]) {
        [self.previewView removeFromSuperview];
        return;
    }

    AVCaptureDevice *frontCamera = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionFront];
    AVCaptureDevice *backCamera = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionBack];

    if (frontCamera != nil || backCamera != nil) {
        self.camera = [[TVICameraSource alloc] initWithDelegate:self];
        self.localVideoTrack = [TVILocalVideoTrack trackWithSource:self.camera
                                                           enabled:YES
                                                              name:@"Camera"];
        // Add renderer to video track for local preview
        [self.localVideoTrack addRenderer:self.previewView];

        [self logMessage:@"Video track created"];

        // Only offer tap-to-flip when both camera positions are available.
        if (frontCamera != nil && backCamera != nil) {
            UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self
                                                                                  action:@selector(flipCamera)];
            [self.previewView addGestureRecognizer:tap];
        }

        // Prefer the front camera, falling back to the back camera.
        [self.camera startCaptureWithDevice:frontCamera != nil ? frontCamera : backCamera
                                 completion:^(AVCaptureDevice *device, TVIVideoFormat *format, NSError *error) {
                                     if (error != nil) {
                                         // NSError.code is NSInteger; format with %ld and an explicit (long) cast.
                                         [self logMessage:[NSString stringWithFormat:@"Start capture failed with error.\ncode = %ld error = %@", (long)error.code, error.localizedDescription]];
                                     } else {
                                         // Mirror the local preview only for the front-facing camera.
                                         self.previewView.mirror = (device.position == AVCaptureDevicePositionFront);
                                     }
                                 }];
    } else {
        [self logMessage:@"No front or back capture device found!"];
    }
}

// Toggles capture between the front and back cameras; updates preview mirroring on success.
- (void)flipCamera {
    AVCaptureDevice *newDevice = nil;

    if (self.camera.device.position == AVCaptureDevicePositionFront) {
        newDevice = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionBack];
    } else {
        newDevice = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionFront];
    }

    // The opposite-facing device may be absent (e.g. single-camera hardware); do nothing then.
    if (newDevice != nil) {
        [self.camera selectCaptureDevice:newDevice completion:^(AVCaptureDevice *device, TVIVideoFormat *format, NSError *error) {
            if (error != nil) {
                // NSError.code is NSInteger; format with %ld and an explicit (long) cast.
                [self logMessage:[NSString stringWithFormat:@"Error selecting capture device.\ncode = %ld error = %@", (long)error.code, error.localizedDescription]];
            } else {
                // Mirror the local preview only for the front-facing camera.
                self.previewView.mirror = (device.position == AVCaptureDevicePositionFront);
            }
        }];
    }
}

Expand Down Expand Up @@ -467,10 +491,10 @@ - (void)videoView:(TVIVideoView *)view videoDimensionsDidChange:(CMVideoDimensio
[self.view setNeedsLayout];
}

#pragma mark - TVICameraCapturerDelegate
#pragma mark - TVICameraSourceDelegate

- (void)cameraCapturer:(TVICameraCapturer *)capturer didStartWithSource:(TVICameraCaptureSource)source {
self.previewView.mirror = (source == TVICameraCaptureSourceFrontCamera);
// The camera source failed irrecoverably; log the failure for diagnostics.
- (void)cameraSource:(TVICameraSource *)source didFailWithError:(NSError *)error {
    // NSError.code is NSInteger; format with %ld and an explicit (long) cast.
    [self logMessage:[NSString stringWithFormat:@"Capture failed with error.\ncode = %ld error = %@", (long)error.code, error.localizedDescription]];
}

@end
2 changes: 1 addition & 1 deletion Podfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ workspace 'VideoQuickStart'
platform :ios, '9.0'

abstract_target 'TwilioVideo' do
pod 'TwilioVideo', '~> 2.5'
pod 'TwilioVideo', '~> 2.6'

target 'ObjCVideoQuickstart' do
project 'ObjCVideoQuickstart.xcproject'
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

# Twilio Video Quickstart for Objective-C

> NOTE: These sample applications use the Twilio Video 2.x APIs. For examples using our 2.6.0-preview APIs, please see the [2.6.0-preview](https://github.com/twilio/video-quickstart-objc/tree/2.6.0-preview) branch, and for 1.x APIs, please see the [1.x](https://github.com/twilio/video-quickstart-objc/tree/1.x) branch.
> NOTE: These sample applications use the Twilio Video 2.x APIs. For examples using our 1.x APIs, please see the [1.x](https://github.com/twilio/video-quickstart-objc/tree/1.x) branch.

Get started with Video on iOS:

Expand Down