#import <Foundation/Foundation.h>
#import <Speech/Speech.h>

/// Thin wrapper around SFSpeechRecognizer that transcribes audio sample
/// buffers pushed in via -giveBuffer: and reports recognized text through
/// haveRecognizerSpeakTextBlock.
@interface SpeechListener : NSObject
<SFSpeechRecognitionTaskDelegate,SFSpeechRecognizerDelegate>

// The underlying recognizer (created in -init with the zh-CN locale).
@property(nonatomic,strong)SFSpeechRecognizer *listener;
// The in-flight recognition task; nil when no recognition is running.
@property(nonatomic,strong)SFSpeechRecognitionTask *currentTask;
// Cached result of +[SFSpeechRecognizer requestAuthorization:].
// NOTE(review): set asynchronously — callers may read a stale value briefly.
@property(nonatomic,assign)SFSpeechRecognizerAuthorizationStatus authorizationStatus;
// The buffer-based request that -giveBuffer: appends audio to.
@property(nonatomic,strong)SFSpeechAudioBufferRecognitionRequest *currentRequest;
// YES while a recognition session is active (between -startRecognizer and -endRecognizer).
@property(nonatomic,assign)BOOL inRecognizer;
// Invoked from -endRecognizer with the recognized text, the CFAbsoluteTime at
// which listening started, and the elapsed listening duration in seconds.
@property(nonatomic,copy)void (^haveRecognizerSpeakTextBlock)(NSString *text,NSTimeInterval startLocation,NSTimeInterval length);

/// Begins a new recognition session (creates a request and task).
-(void)startRecognizer;
/// Appends one audio sample buffer to the current session, if any.
-(void)giveBuffer:(CMSampleBufferRef)buffer;
/// Ends the session, delivering any recognized text via the block above.
-(void)endRecognizer;

@end
#import "SpeechListener.h"

@interface SpeechListener()

// Latest (possibly partial) transcription for the current session.
@property(nonatomic,copy)NSString *currentText;
// CFAbsoluteTime at which the current session started; used to compute the
// reported duration and the 30-second session rotation in -giveBuffer:.
@property(nonatomic,assign)NSTimeInterval beginListenLocation;

@end

@implementation SpeechListener

- (instancetype)init
{
    self = [super init];
    if (self) {
        // Mandarin-Chinese recognizer. NOTE(review): this may be nil if the
        // locale is unsupported on the device — confirm callers tolerate that.
        // Ivar access (not accessors) inside init, per convention.
        _listener = [[SFSpeechRecognizer alloc] initWithLocale:[NSLocale localeWithLocaleIdentifier:@"zh-CN"]];
        _listener.delegate = self;
        [self requestAuthorizationStatus];
    }
    return self;
}

/// Asynchronously requests speech-recognition authorization and caches the
/// resulting status in self.authorizationStatus.
- (void)requestAuthorizationStatus
{
    __weak typeof(self) weakself = self;
    [SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus status) {
        __strong typeof(weakself) strongself = weakself;
        strongself.authorizationStatus = status;
    }];
}

/// Starts a new buffer-based recognition session with partial results enabled.
/// Audio is supplied afterwards via -giveBuffer:.
- (void)startRecognizer
{
    self.beginListenLocation = CFAbsoluteTimeGetCurrent();
    self.currentRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];
    self.currentRequest.shouldReportPartialResults = YES;

    __weak typeof(self) weakself = self;
    self.currentTask = [self.listener recognitionTaskWithRequest:self.currentRequest
                                                   resultHandler:^(SFSpeechRecognitionResult *result, NSError *error) {
        __strong typeof(weakself) strongself = weakself;
        // Ignore stale callbacks after the session has been torn down.
        if (strongself == nil || !strongself.inRecognizer) {
            return;
        }
        if (error != nil) {
            // Recognition failed: flush whatever partial text we have and
            // tear the session down. (The original code also carried an empty
            // `if (error) {}` block here — removed as dead code.)
            [strongself endRecognizer];
        } else if (result != nil) {
            // Keep the latest (possibly partial) transcription.
            strongself.currentText = result.bestTranscription.formattedString;
            if (result.isFinal) {
                [strongself endRecognizer];
            }
        }
    }];
    self.inRecognizer = YES;
}

/// Ends the current session: delivers accumulated text through the callback
/// block, then tears down the request and task and resets state.
- (void)endRecognizer
{
    if (self.currentText.length > 0 && self.haveRecognizerSpeakTextBlock) {
        self.haveRecognizerSpeakTextBlock(self.currentText, self.beginListenLocation, CFAbsoluteTimeGetCurrent() - self.beginListenLocation);
    }

    self.inRecognizer = NO;
    // End the audio stream before cancelling the task so the request is torn
    // down cleanly (the original cancelled first, then ended audio).
    if (self.currentRequest) {
        [self.currentRequest endAudio];
        self.currentRequest = nil;
    }
    if (self.currentTask) {
        [self.currentTask cancel];
        self.currentTask = nil;
    }
    self.currentText = @"";
}

#pragma mark - SFSpeechRecognizerDelegate

- (void)speechRecognizer:(SFSpeechRecognizer *)speechRecognizer availabilityDidChange:(BOOL)available
{
    // FIX: the original did `self.inRecognizer = !available;`, which flagged
    // the object as "recognizing" whenever the recognizer merely became
    // available — even with no task or request — causing -giveBuffer:'s
    // 30-second rotation to fire spuriously. Instead, only stop an active
    // session when the recognizer becomes unavailable.
    if (!available && self.inRecognizer) {
        [self endRecognizer];
    }
}

/// Feeds one audio sample buffer into the active recognition request.
/// Sessions are proactively rotated every 30 seconds because Apple limits the
/// duration of a single recognition request (about one minute).
- (void)giveBuffer:(CMSampleBufferRef)buffer
{
    if (self.currentRequest) {
        [self.currentRequest appendAudioSampleBuffer:buffer];
    }
    if (self.inRecognizer && (CFAbsoluteTimeGetCurrent() - self.beginListenLocation) > 30) {
        [self endRecognizer];
        [self startRecognizer];
    }
}

@end

// https://www.politepix.com/openears/   — OpenEars, a free speech-recognition toolkit (一款免费的识别工具)
//
// 版权声明: 本文为 yuxiaoyiyou 原创文章, 遵循 CC 4.0 BY-SA 版权协议, 转载请附上原文出处链接和本声明。
// 本文链接: https://www.cnblogs.com/yuxiaoyiyou/p/9570755.html