@jevgenits
Last active May 27, 2020 15:28
#import "TSTextToSpeechUtils.h"
@interface TSTextToSpeechUtils ()
@property (strong, nonatomic) AVSpeechSynthesizer *synthesizer;
@property (strong, nonatomic) AVSpeechUtterance *lastPlayingUtterance;
@property (strong, nonatomic) AVAudioSession *audioSession;
@end
@implementation TSTextToSpeechUtils

- (instancetype)init
{
    if ((self = [super init])) {
        _synthesizer = [[AVSpeechSynthesizer alloc] init];
        _synthesizer.delegate = self;
    }
    return self;
}
- (void)synthesizeSpeechForText:(NSString *)text
{
    if ([text length] == 0) {
        return;
    }

    self.audioSession = [AVAudioSession sharedInstance];

    // Activate the audio session so the utterance can play, ducking any other audio.
    NSError *error = nil;
    [self.audioSession setCategory:AVAudioSessionCategoryPlayback
                       withOptions:AVAudioSessionCategoryOptionDuckOthers
                             error:&error];
    [self.audioSession setActive:YES error:&error];

    AVSpeechUtterance *utterance = [[AVSpeechUtterance alloc] initWithString:text];
    utterance.rate = AVSpeechUtteranceDefaultSpeechRate;
    utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:[self detectLanguageFromText:text]];
    utterance.volume = 0.7;

    [self.synthesizer speakUtterance:utterance];
    self.lastPlayingUtterance = utterance;
}
- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didFinishSpeechUtterance:(AVSpeechUtterance *)utterance
{
    if (synthesizer == self.synthesizer && self.lastPlayingUtterance == utterance) {
        // Once the last queued utterance has finished playing, deactivate the
        // audio session so other apps' audio returns to full volume.
        NSError *error = nil;
        [self.audioSession setActive:NO error:&error];
    }
}
- (NSString *)detectLanguageFromText:(NSString *)text
{
    NSLinguisticTagger *tagger = [[NSLinguisticTagger alloc] initWithTagSchemes:@[NSLinguisticTagSchemeLanguage] options:0];
    [tagger setString:text];
    // The tagger returns a bare language code such as "en" or "ru".
    NSString *textLanguage = [tagger tagAtIndex:0 scheme:NSLinguisticTagSchemeLanguage tokenRange:NULL sentenceRange:NULL];

    // Match the detected language code against the available speech voices,
    // whose identifiers use the "language-REGION" form (e.g. "en-US").
    NSString *detectedLanguage = nil;
    for (AVSpeechSynthesisVoice *voice in [AVSpeechSynthesisVoice speechVoices]) {
        NSArray *languageStringParts = [voice.language componentsSeparatedByString:@"-"];
        if (languageStringParts.count > 0 && [languageStringParts[0] isEqualToString:textLanguage]) {
            detectedLanguage = voice.language;
            break;
        }
    }

    if (detectedLanguage == nil) {
        // Fall back to US English when no matching voice is found.
        detectedLanguage = @"en-US";
    }
    return detectedLanguage;
}
@end
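
A minimal usage sketch follows, assuming TSTextToSpeechUtils.h exposes synthesizeSpeechForText: publicly (as the implementation above implies). The TSViewController class, the speakGreeting method, and the speechUtils property are illustrative names only, not part of the gist.

// TSViewController.m — illustrative caller, not part of the original gist.
#import <UIKit/UIKit.h>
#import "TSTextToSpeechUtils.h"

@interface TSViewController : UIViewController
// Keep a strong reference so the synthesizer is not deallocated mid-speech.
@property (strong, nonatomic) TSTextToSpeechUtils *speechUtils;
@end

@implementation TSViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    self.speechUtils = [[TSTextToSpeechUtils alloc] init];
}

- (void)speakGreeting
{
    // The language is detected from the text itself, so a matching voice
    // is chosen automatically (falling back to en-US).
    [self.speechUtils synthesizeSpeechForText:@"Hello, world"];
}

@end

Because the utility ducks other audio while speaking and deactivates the shared AVAudioSession in the didFinishSpeechUtterance: callback, keeping a single long-lived TSTextToSpeechUtils instance avoids repeatedly toggling the session for every utterance.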