I have been working on instant messaging again recently. The audio files you record on iOS always come out as the same few types, but to exchange voice messages with Android you have to convert them to MP3 or AMR. AMR would be the ideal choice, since for the same duration an AMR file is the smallest, which suits data transfer on mobile devices well.
On iOS, AVAudioRecorder cannot record MP3 audio. You may have seen the format enumeration below, but in reality only a few of these types can actually be produced by the recorder, such as caf and wav (see the quick check after the list).
kAudioFormatLinearPCM = 'lpcm',
kAudioFormatAC3 = 'ac-3',
kAudioFormat60958AC3 = 'cac3',
kAudioFormatAppleIMA4 = 'ima4',
kAudioFormatMPEG4AAC = 'aac ',
kAudioFormatMPEG4CELP = 'celp',
kAudioFormatMPEG4HVXC = 'hvxc',
kAudioFormatMPEG4TwinVQ = 'twvq',
kAudioFormatMACE3 = 'MAC3',
kAudioFormatMACE6 = 'MAC6',
kAudioFormatULaw = 'ulaw',
kAudioFormatALaw = 'alaw',
kAudioFormatQDesign = 'QDMC',
kAudioFormatQDesign2 = 'QDM2',
kAudioFormatQUALCOMM = 'Qclp',
kAudioFormatMPEGLayer1 = '.mp1',
kAudioFormatMPEGLayer2 = '.mp2',
kAudioFormatMPEGLayer3 = '.mp3',
kAudioFormatTimeCode = 'time',
kAudioFormatMIDIStream = 'midi',
kAudioFormatParameterValueStream = 'apvs',
kAudioFormatAppleLossless = 'alac',
kAudioFormatMPEG4AAC_HE = 'aach',
kAudioFormatMPEG4AAC_LD = 'aacl',
kAudioFormatMPEG4AAC_ELD = 'aace',
kAudioFormatMPEG4AAC_ELD_SBR = 'aacf',
kAudioFormatMPEG4AAC_ELD_V2 = 'aacg',
kAudioFormatMPEG4AAC_HE_V2 = 'aacp',
kAudioFormatMPEG4AAC_Spatial = 'aacs',
kAudioFormatAMR = 'samr',
kAudioFormatAMR_WB = 'sawb',
kAudioFormatAudible = 'AUDB',
kAudioFormatiLBC = 'ilbc',
kAudioFormatDVIIntelIMA = 0x6D730011,
kAudioFormatMicrosoftGSM = 0x6D730031,
kAudioFormatAES3 = 'aes3',
kAudioFormatEnhancedAC3 = 'ec-3'
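If you want to confirm this yourself, a minimal sketch like the one below can be used (this assumes AVFoundation is imported; the test.mp3 file name and the 8 kHz/stereo settings are just placeholders). If the initializer reports an error or prepareToRecord returns NO, the requested format is not usable on that device, whereas the same code with kAudioFormatLinearPCM succeeds.
NSString *doc = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
NSURL *url = [NSURL fileURLWithPath:[doc stringByAppendingPathComponent:@"test.mp3"]];
NSDictionary *mp3Setting = @{AVFormatIDKey : @(kAudioFormatMPEGLayer3),
                             AVSampleRateKey : @(8000),
                             AVNumberOfChannelsKey : @(2)};
NSError *error = nil;
AVAudioRecorder *recorder = [[AVAudioRecorder alloc] initWithURL:url settings:mp3Setting error:&error];
if (error != nil || ![recorder prepareToRecord]) {
    NSLog(@"MP3 recording is not available: %@", error);
}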
Here I use LAME, the open-source MP3 encoding library, to do the conversion.
AudioToolbox.framework is a C-based framework. Playing sound effects with it essentially means registering a short audio clip with System Sound Services. System Sound Services is a simple, low-level playback service, but it comes with a few restrictions (see the sketch after this list):
playback cannot exceed 30 seconds
the data must be in PCM or IMA4 format
the audio must be packaged as .caf, .aif, or .wav (that is what the official documentation says; in practice some .mp3 files play as well)
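For reference, playing a short effect through System Sound Services looks roughly like this (a minimal sketch; tap.caf is a placeholder resource name in the app bundle):
#import <AudioToolbox/AudioToolbox.h>

- (void)playShortSound
{
    NSURL *url = [[NSBundle mainBundle] URLForResource:@"tap" withExtension:@"caf"];
    SystemSoundID soundID = 0;
    OSStatus status = AudioServicesCreateSystemSoundID((__bridge CFURLRef)url, &soundID);
    if (status == kAudioServicesNoError) {
        AudioServicesPlaySystemSound(soundID);
        //call AudioServicesDisposeSystemSoundID(soundID) once the sound is no longer needed
    }
}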
//Recorder (lazily created)
- (AVAudioRecorder *)audioRecorder
{
    if (!_audioRecorder) {
        //URL of the file the recording is saved to
        NSURL *url = [NSURL fileURLWithPath:[self getTheAudioSavePath]];
        //Recording settings
        NSDictionary *setting = [self getAudioSetting];
        //Create the recorder
        NSError *error = nil;
        _audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:setting error:&error];
        _audioRecorder.delegate = self;
        _audioRecorder.meteringEnabled = YES;    //must be YES if you want to monitor audio levels
        [_audioRecorder recordForDuration:60.0]; //cap a single recording at 60 seconds
        if (error) {
            NSLog(@"Error creating the recorder: %@", error.localizedDescription);
            [self chatKeyBoardDidCancelRecording:self.chatKeyBoard];
            return nil;
        }
    }
    return _audioRecorder;
}
//Path the recording is saved to
- (NSString *)getTheAudioSavePath
{
    NSString *urlStr = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    urlStr = [urlStr stringByAppendingPathComponent:@"myRecord.caf"];
    NSLog(@"file path:%@", urlStr);
    return urlStr;
}
//Recording settings dictionary
- (NSDictionary *)getAudioSetting
{
    NSMutableDictionary *dicM = [NSMutableDictionary dictionary];
    //Recording format: linear PCM, which LAME can encode directly
    [dicM setObject:@(kAudioFormatLinearPCM) forKey:AVFormatIDKey];
    //Sample rate: 8000 Hz is telephone quality, which is enough for ordinary voice messages
    [dicM setObject:@(8000) forKey:AVSampleRateKey];
    //Number of channels: 2 (stereo); this must match what the LAME code expects later
    [dicM setObject:@(2) forKey:AVNumberOfChannelsKey];
    //Encoder quality
    [dicM setObject:@(AVAudioQualityMin) forKey:AVEncoderAudioQualityKey];
    //Optional PCM settings: bit depth (8/16/24/32), float samples, endianness, encoder bit rate
    //[dicM setObject:@(8) forKey:AVLinearPCMBitDepthKey];
    //[dicM setObject:@(YES) forKey:AVLinearPCMIsFloatKey];
    //[dicM setObject:@(NO) forKey:AVLinearPCMIsBigEndianKey];
    //[dicM setObject:@12800 forKey:AVEncoderBitRateKey];
    return dicM;
}
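For completeness, starting and stopping the recorder looks roughly like this (a sketch; startRecord and stopRecord are hypothetical method names, and I assume the AVAudioSession category is not already configured elsewhere in your project):
- (void)startRecord
{
    //Recording requires the play-and-record session category
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [[AVAudioSession sharedInstance] setActive:YES error:nil];
    if (![self.audioRecorder isRecording]) {
        [self.audioRecorder record];
    }
}

- (void)stopRecord
{
    [self.audioRecorder stop]; //triggers audioRecorderDidFinishRecording:successfully:
}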
1. Add the lame library to the project
The lame build you add should ideally contain both 32-bit and 64-bit slices. Download: a lame build with arm64 support
2. Import the header: #import "lame.h"
3. Core code
//Convert the caf to mp3 asynchronously
- (void)conversionAction
{
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        //audio_PCMtoMP3 returns the path of the generated mp3 file
        NSString *audioMP3Path = [NSString stringWithFormat:@"file://%@", [self audio_PCMtoMP3]];
        NSLog(@"mp3 url:%@", audioMP3Path);
    });
}
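Because the conversion runs on a background queue, anything that touches the UI or hands the file to the messaging layer should hop back to the main queue first. A variant of conversionAction as a sketch (sendVoiceMessageWithPath: is a hypothetical hook in your own code):
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    NSString *mp3Path = [self audio_PCMtoMP3];
    dispatch_async(dispatch_get_main_queue(), ^{
        //hand the finished mp3 to the IM layer on the main queue
        [self sendVoiceMessageWithPath:mp3Path];
    });
});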
- (NSString *)audio_PCMtoMP3
{
    NSString *urlStr = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    NSString *cafFilePath = [urlStr stringByAppendingPathComponent:@"myRecord.caf"]; //source: the recorded caf file
    NSString *mp3FilePath = [urlStr stringByAppendingPathComponent:@"myRecord.mp3"]; //destination: the converted mp3 file
    @try {
        int read, write;
        FILE *pcm = fopen([cafFilePath cStringUsingEncoding:NSUTF8StringEncoding], "rb"); //the file to convert
        FILE *mp3 = fopen([mp3FilePath cStringUsingEncoding:NSUTF8StringEncoding], "wb"); //the generated mp3
        if (pcm == NULL || mp3 == NULL) {
            if (pcm) fclose(pcm);
            if (mp3) fclose(mp3);
            return nil;
        }
        fseek(pcm, 4 * 1024, SEEK_CUR); //skip the caf file header

        const int PCM_SIZE = 8192;
        const int MP3_SIZE = 8192;
        short int pcm_buffer[PCM_SIZE * 2];  //interleaved stereo samples
        unsigned char mp3_buffer[MP3_SIZE];

        lame_t lame = lame_init();
        lame_set_in_samplerate(lame, 8000);  //must match AVSampleRateKey above
        lame_set_num_channels(lame, 2);      //must match AVNumberOfChannelsKey above
        lame_set_VBR(lame, vbr_default);
        lame_init_params(lame);

        do {
            read = (int)fread(pcm_buffer, 2 * sizeof(short int), PCM_SIZE, pcm);
            if (read == 0)
                write = lame_encode_flush(lame, mp3_buffer, MP3_SIZE);
            else
                write = lame_encode_buffer_interleaved(lame, pcm_buffer, read, mp3_buffer, MP3_SIZE);
            fwrite(mp3_buffer, write, 1, mp3);
        } while (read != 0);

        lame_close(lame);
        fclose(mp3);
        fclose(pcm);
    }
    @catch (NSException *exception) {
        NSLog(@"%@", [exception description]);
    }
    @finally {
        //Compare the caf and mp3 sizes to confirm the conversion shrank the file
        NSData *data1 = [NSData dataWithContentsOfFile:cafFilePath];
        NSData *data2 = [NSData dataWithContentsOfFile:mp3FilePath];
        NSLog(@"caf: %lu bytes, mp3: %lu bytes", (unsigned long)data1.length, (unsigned long)data2.length);
    }
    return mp3FilePath;
}
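A natural place to kick off the conversion is the AVAudioRecorderDelegate callback, once recording has finished successfully (a sketch):
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag
{
    if (flag) {
        [self conversionAction]; //convert the freshly recorded caf to mp3
    }
}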
4. Notes