Skip to content

Instantly share code, notes, and snippets.

@kapejod
Created December 9, 2016 06:50
Show Gist options
  • Save kapejod/d99ec6e6ce206bd584055ec75ad63740 to your computer and use it in GitHub Desktop.
Save kapejod/d99ec6e6ce206bd584055ec75ad63740 to your computer and use it in GitHub Desktop.
How I built WebRTC for tvOS (written some time ago — it might not apply anymore)
diff --git a/webrtc/base/posix.cc b/webrtc/base/posix.cc
index 0eb24ee..7e48273 100644
--- a/webrtc/base/posix.cc
+++ b/webrtc/base/posix.cc
@@ -44,7 +44,7 @@ enum {
bool RunAsDaemon(const char *file, const char *const argv[]) {
// Fork intermediate child to daemonize.
- pid_t pid = fork();
+ pid_t pid = -1;
if (pid < 0) {
LOG_ERR(LS_ERROR) << "fork()";
return false;
@@ -68,7 +68,7 @@ bool RunAsDaemon(const char *file, const char *const argv[]) {
#endif
// Fork again to become a daemon.
- pid = fork();
+ pid = -1;
// It is important that everything here use _exit() and not exit(), because
// exit() would call the destructors of all global variables in the whole
// process, which is both unnecessary and unsafe.
@@ -81,7 +81,7 @@ bool RunAsDaemon(const char *file, const char *const argv[]) {
// WEBRTC_POSIX requires the args to be typed as non-const for historical
// reasons, but it mandates that the actual implementation be const, so
// the cast is safe.
- execvp(file, const_cast<char *const *>(argv));
+ // execvp(file, const_cast<char *const *>(argv));
_exit(255); // if execvp failed
}
diff --git a/webrtc/build/ios/client.webrtc/iOS64_Debug.json b/webrtc/build/ios/client.webrtc/iOS64_Debug.json
index 657ba91..110f80f 100644
--- a/webrtc/build/ios/client.webrtc/iOS64_Debug.json
+++ b/webrtc/build/ios/client.webrtc/iOS64_Debug.json
@@ -12,7 +12,7 @@
},
"compiler": "ninja",
"configuration": "Debug",
- "sdk": "iphoneos9.0",
+ "sdk": "appletvos9.0",
"tests": [
]
}
diff --git a/webrtc/build/ios/client.webrtc/iOS64_Release.json b/webrtc/build/ios/client.webrtc/iOS64_Release.json
index 097a1b1..d9e1095 100644
--- a/webrtc/build/ios/client.webrtc/iOS64_Release.json
+++ b/webrtc/build/ios/client.webrtc/iOS64_Release.json
@@ -12,7 +12,7 @@
},
"compiler": "ninja",
"configuration": "Release",
- "sdk": "iphoneos9.0",
+ "sdk": "appletvos9.1",
"tests": [
]
}
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
index 6f52657..e855efb 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
@@ -120,9 +120,7 @@ static CGFloat const kAppLabelHeight = 20;
UILabel *_appLabel;
ARDRoomTextField *_roomText;
UILabel *_callOptionsLabel;
- UISwitch *_audioOnlySwitch;
UILabel *_audioOnlyLabel;
- UISwitch *_loopbackSwitch;
UILabel *_loopbackLabel;
UIButton *_startCallButton;
UIButton *_audioLoopButton;
@@ -162,9 +160,6 @@ static CGFloat const kAppLabelHeight = 20;
[_callOptionsLabel sizeToFit];
[self addSubview:_callOptionsLabel];
- _audioOnlySwitch = [[UISwitch alloc] initWithFrame:CGRectZero];
- [_audioOnlySwitch sizeToFit];
- [self addSubview:_audioOnlySwitch];
_audioOnlyLabel = [[UILabel alloc] initWithFrame:CGRectZero];
_audioOnlyLabel.text = @"Audio only";
@@ -173,9 +168,6 @@ static CGFloat const kAppLabelHeight = 20;
[_audioOnlyLabel sizeToFit];
[self addSubview:_audioOnlyLabel];
- _loopbackSwitch = [[UISwitch alloc] initWithFrame:CGRectZero];
- [_loopbackSwitch sizeToFit];
- [self addSubview:_loopbackSwitch];
_loopbackLabel = [[UILabel alloc] initWithFrame:CGRectZero];
_loopbackLabel.text = @"Loopback mode";
@@ -242,34 +234,6 @@ static CGFloat const kAppLabelHeight = 20;
CGFloat audioOnlyTop =
CGRectGetMaxY(_callOptionsLabel.frame) + kCallControlMargin * 2;
- CGRect audioOnlyRect = CGRectMake(kCallControlMargin * 3,
- audioOnlyTop,
- _audioOnlySwitch.frame.size.width,
- _audioOnlySwitch.frame.size.height);
- _audioOnlySwitch.frame = audioOnlyRect;
- CGFloat audioOnlyLabelCenterX = CGRectGetMaxX(audioOnlyRect) +
- kCallControlMargin + _audioOnlyLabel.frame.size.width / 2;
- _audioOnlyLabel.center = CGPointMake(audioOnlyLabelCenterX,
- CGRectGetMidY(audioOnlyRect));
-
- CGFloat loopbackModeTop =
- CGRectGetMaxY(_audioOnlySwitch.frame) + kCallControlMargin;
- CGRect loopbackModeRect = CGRectMake(kCallControlMargin * 3,
- loopbackModeTop,
- _loopbackSwitch.frame.size.width,
- _loopbackSwitch.frame.size.height);
- _loopbackSwitch.frame = loopbackModeRect;
- CGFloat loopbackModeLabelCenterX = CGRectGetMaxX(loopbackModeRect) +
- kCallControlMargin + _loopbackLabel.frame.size.width / 2;
- _loopbackLabel.center = CGPointMake(loopbackModeLabelCenterX,
- CGRectGetMidY(loopbackModeRect));
-
- CGFloat audioLoopTop =
- CGRectGetMaxY(loopbackModeRect) + kCallControlMargin * 3;
- _audioLoopButton.frame = CGRectMake(kCallControlMargin,
- audioLoopTop,
- _audioLoopButton.frame.size.width,
- _audioLoopButton.frame.size.height);
CGFloat startCallTop =
CGRectGetMaxY(_audioLoopButton.frame) + kCallControlMargin * 3;
@@ -307,14 +271,14 @@ static CGFloat const kAppLabelHeight = 20;
- (void)onStartCall:(id)sender {
NSString *room = _roomText.roomText;
// If this is a loopback call, allow a generated room name.
- if (!room.length && _loopbackSwitch.isOn) {
+ if (!room.length) {
room = [[NSUUID UUID] UUIDString];
}
room = [room stringByReplacingOccurrencesOfString:@"-" withString:@""];
[_delegate mainView:self
didInputRoom:room
- isLoopback:_loopbackSwitch.isOn
- isAudioOnly:_audioOnlySwitch.isOn];
+ isLoopback:YES
+ isAudioOnly:NO];
}
@end
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m
index 8de6f6a..b883eea 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m
@@ -80,12 +80,6 @@
#pragma mark - Private
- (void)showAlertWithMessage:(NSString*)message {
- UIAlertView* alertView = [[UIAlertView alloc] initWithTitle:nil
- message:message
- delegate:nil
- cancelButtonTitle:@"OK"
- otherButtonTitles:nil];
- [alertView show];
}
@end
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
index 51290a0..5b2a7a3 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
@@ -185,12 +185,6 @@
}
- (void)showAlertWithMessage:(NSString*)message {
- UIAlertView* alertView = [[UIAlertView alloc] initWithTitle:nil
- message:message
- delegate:nil
- cancelButtonTitle:@"OK"
- otherButtonTitles:nil];
- [alertView show];
}
@end
diff --git a/webrtc/media/engine/webrtcvideoengine2.cc b/webrtc/media/engine/webrtcvideoengine2.cc
index 02de6b8..c80113b 100644
--- a/webrtc/media/engine/webrtcvideoengine2.cc
+++ b/webrtc/media/engine/webrtcvideoengine2.cc
@@ -163,8 +163,7 @@ bool CodecIsInternallySupported(const std::string& codec_name) {
return true;
}
if (CodecNamesEq(codec_name, kH264CodecName)) {
- return webrtc::H264Encoder::IsSupported() &&
- webrtc::H264Decoder::IsSupported();
+ return false;
}
return false;
}
diff --git a/webrtc/modules/audio_device/ios/audio_device_ios.mm b/webrtc/modules/audio_device/ios/audio_device_ios.mm
index 4390f49..0411f53 100644
--- a/webrtc/modules/audio_device/ios/audio_device_ios.mm
+++ b/webrtc/modules/audio_device/ios/audio_device_ios.mm
@@ -120,13 +120,9 @@ static bool VerifyAudioSession(RTCAudioSession* session) {
}
// Ensure that the required category and mode are actually activated.
- if (![session.category isEqualToString:AVAudioSessionCategoryPlayAndRecord]) {
+ if (![session.category isEqualToString:AVAudioSessionCategoryPlayback]) {
LOG(LS_ERROR)
- << "Failed to set category to AVAudioSessionCategoryPlayAndRecord";
- return false;
- }
- if (![session.mode isEqualToString:AVAudioSessionModeVoiceChat]) {
- LOG(LS_ERROR) << "Failed to set mode to AVAudioSessionModeVoiceChat";
+ << "Failed to set category to AVAudioSessionCategoryPlayback";
return false;
}
return true;
@@ -156,19 +152,12 @@ static bool ActivateAudioSession(RTCAudioSession* session, bool activate) {
// audio sessions which are also nonmixable.
if (session.category != AVAudioSessionCategoryPlayAndRecord) {
error = nil;
- success = [session setCategory:AVAudioSessionCategoryPlayAndRecord
- withOptions:AVAudioSessionCategoryOptionAllowBluetooth
+ success = [session setCategory:AVAudioSessionCategoryPlayback
+ withOptions: 0
error:&error];
RTC_DCHECK(CheckAndLogError(success, error));
}
- // Specify mode for two-way voice communication (e.g. VoIP).
- if (session.mode != AVAudioSessionModeVoiceChat) {
- error = nil;
- success = [session setMode:AVAudioSessionModeVoiceChat error:&error];
- RTC_DCHECK(CheckAndLogError(success, error));
- }
-
// Set the session's sample rate or the hardware sample rate.
// It is essential that we use the same sample rate as stream format
// to ensure that the I/O unit does not have to do sample rate conversion.
@@ -442,37 +431,12 @@ int32_t AudioDeviceIOS::StopRecording() {
// Change the default receiver playout route to speaker.
int32_t AudioDeviceIOS::SetLoudspeakerStatus(bool enable) {
LOGI() << "SetLoudspeakerStatus(" << enable << ")";
-
- RTCAudioSession* session = [RTCAudioSession sharedInstance];
- [session lockForConfiguration];
- NSString* category = session.category;
- AVAudioSessionCategoryOptions options = session.categoryOptions;
- // Respect old category options if category is
- // AVAudioSessionCategoryPlayAndRecord. Otherwise reset it since old options
- // might not be valid for this category.
- if ([category isEqualToString:AVAudioSessionCategoryPlayAndRecord]) {
- if (enable) {
- options |= AVAudioSessionCategoryOptionDefaultToSpeaker;
- } else {
- options &= ~AVAudioSessionCategoryOptionDefaultToSpeaker;
- }
- } else {
- options = AVAudioSessionCategoryOptionDefaultToSpeaker;
- }
- NSError* error = nil;
- BOOL success = [session setCategory:AVAudioSessionCategoryPlayAndRecord
- withOptions:options
- error:&error];
- ios::CheckAndLogError(success, error);
- [session unlockForConfiguration];
- return (error == nil) ? 0 : -1;
+ return 0;
}
int32_t AudioDeviceIOS::GetLoudspeakerStatus(bool& enabled) const {
LOGI() << "GetLoudspeakerStatus";
- RTCAudioSession* session = [RTCAudioSession sharedInstance];
- AVAudioSessionCategoryOptions options = session.categoryOptions;
- enabled = options & AVAudioSessionCategoryOptionDefaultToSpeaker;
+ enabled = 1;
return 0;
}
diff --git a/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm b/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
index ea7c546..1fa6182 100644
--- a/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
+++ b/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
@@ -266,7 +266,7 @@ NSInteger const kRTCAudioSessionErrorLockRequired = -1;
if (![self checkLock:outError]) {
return NO;
}
- return [self.session setCategory:category withOptions:options error:outError];
+ return [self.session setCategory:category error:outError];
}
- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
diff --git a/webrtc/modules/video_capture/video_capture_factory.cc b/webrtc/modules/video_capture/video_capture_factory.cc
index 618c08b..8cc9222 100644
--- a/webrtc/modules/video_capture/video_capture_factory.cc
+++ b/webrtc/modules/video_capture/video_capture_factory.cc
@@ -17,25 +17,17 @@ namespace webrtc
VideoCaptureModule* VideoCaptureFactory::Create(const int32_t id,
const char* deviceUniqueIdUTF8) {
-#if defined(ANDROID)
return nullptr;
-#else
- return videocapturemodule::VideoCaptureImpl::Create(id, deviceUniqueIdUTF8);
-#endif
}
VideoCaptureModule* VideoCaptureFactory::Create(const int32_t id,
VideoCaptureExternal*& externalCapture) {
- return videocapturemodule::VideoCaptureImpl::Create(id, externalCapture);
+ return nullptr;
}
VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo(
const int32_t id) {
-#if defined(ANDROID)
return nullptr;
-#else
- return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(id);
-#endif
}
} // namespace webrtc
diff --git a/webrtc/modules/video_coding/codec_database.cc b/webrtc/modules/video_coding/codec_database.cc
index a5a7c1e..777184c 100644
--- a/webrtc/modules/video_coding/codec_database.cc
+++ b/webrtc/modules/video_coding/codec_database.cc
@@ -586,10 +586,7 @@ VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
case kVideoCodecI420:
return new VCMGenericDecoder(new I420Decoder());
case kVideoCodecH264:
- if (H264Decoder::IsSupported()) {
- return new VCMGenericDecoder(H264Decoder::Create());
- }
- break;
+ return nullptr;
default:
break;
}
diff --git a/webrtc/video/overuse_frame_detector.cc b/webrtc/video/overuse_frame_detector.cc
index 522a505..4f56137 100644
--- a/webrtc/video/overuse_frame_detector.cc
+++ b/webrtc/video/overuse_frame_detector.cc
@@ -56,7 +56,7 @@ CpuOveruseOptions::CpuOveruseOptions()
min_frame_samples(120),
min_process_count(3),
high_threshold_consecutive_count(2) {
-#if defined(WEBRTC_MAC)
+#if defined(WEBRTC_MAC_NOTHANKS)
// This is proof-of-concept code for letting the physical core count affect
// the interval into which we attempt to scale. For now, the code is Mac OS
// specific, since that's the platform were we saw most problems.
diff --git a/webrtc/video/video_decoder.cc b/webrtc/video/video_decoder.cc
index 8e0a503..5b9bedc 100644
--- a/webrtc/video/video_decoder.cc
+++ b/webrtc/video/video_decoder.cc
@@ -19,13 +19,11 @@
namespace webrtc {
VideoDecoder* VideoDecoder::Create(VideoDecoder::DecoderType codec_type) {
switch (codec_type) {
- case kH264:
- RTC_DCHECK(H264Decoder::IsSupported());
- return H264Decoder::Create();
case kVp8:
return VP8Decoder::Create();
case kVp9:
return VP9Decoder::Create();
+ case kH264:
case kUnsupportedCodec:
LOG(LS_ERROR) << "Creating NullVideoDecoder for unsupported codec.";
return new NullVideoDecoder();
diff --git a/webrtc/video/video_encoder.cc b/webrtc/video/video_encoder.cc
index e85e3d9..cf34141 100644
--- a/webrtc/video/video_encoder.cc
+++ b/webrtc/video/video_encoder.cc
@@ -19,13 +19,11 @@
namespace webrtc {
VideoEncoder* VideoEncoder::Create(VideoEncoder::EncoderType codec_type) {
switch (codec_type) {
- case kH264:
- RTC_DCHECK(H264Encoder::IsSupported());
- return H264Encoder::Create();
case kVp8:
return VP8Encoder::Create();
case kVp9:
return VP9Encoder::Create();
+ case kH264:
case kUnsupportedCodec:
RTC_NOTREACHED();
return nullptr;
listcontains() {
for word in $1; do
[[ $word = $2 ]] && return 0
done
return 1
}
ninjas=`find src/out_tvos -name '*.ninja'`
ninjas=$(echo "$ninjas" | tr ' ' '\n' | sort -u | tr '\n' ' ')
exclude_ninjas="src/out_tvos/Release-iphoneos/obj/webrtc/modules/webrtc_h264_video_toolbox.ninja src/out_tvos/Release-iphoneos/obj/webrtc/modules/webrtc_h264.ninja src/out_tvos/Release-iphoneos/obj/webrtc/modules/video_capture_module_internal_impl.ninja"
echo $ninjas
for ninja in $ninjas; do
if listcontains "$exclude_ninjas" "$ninja";
then
echo "ignoring $ninja"
else
sed -i -- "s/iPhoneOS.platform\/Developer\/SDKs\/iPhoneOS9.2.sdk/AppleTVOS.platform\/Developer\/SDKs\/AppleTVOS9.1.sdk/g" $ninja
sed -i -- "s/libwebrtc_h264_video_toolbox.a//g" $ninja
sed -i -- "s/libwebrtc_h264.a//g" $ninja
sed -i -- "s/-framework VideoToolbox//g" $ninja
sed -i -- "s/iphoneos-version/appletvos-version/g" $ninja
sed -i -- "s/libvideo_capture_module_internal_impl.a//g" $ninja
fi
done
sed -i -- "s/obj\/talk\/app\/webrtc\/objc\/libjingle_peerconnection_objc.avfoundationvideocapturer.o $/g" src/out_tvos/Release-iphoneos/obj/talk/app/webrtc/ibjingle_peerconnection_objc.ninja
sed -i -- "s/obj\/talk\/app\/webrtc\/objc\/libjingle_peerconnection_objc.RTCAVFoundationVideoSource.o $/g" src/out_tvos/Release-iphoneos/obj/talk/app/webrtc/ibjingle_peerconnection_objc.ninja
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment