我需要把一段音频文件和录制的语音合并。例如录制的语音是 47 秒,那么就必须把 4 分钟长的音频歌曲剪切(修剪)为 47 秒,然后再与录音合并。
// Resolve which file to play: prefer the live recorder's file, otherwise
// fall back to the previously saved sound file.
var url: NSURL?
if self.audioRecorder != nil {
    url = self.audioRecorder!.url
} else {
    url = self.soundFileURL!
}
print("playing \(url)")

// BUGFIX: the original wrapped this in `do { try AVPlayer(URL:) … }` with no
// `catch` — that cannot compile, and AVPlayer(URL:) does not throw anyway.
self.newplayer = AVPlayer(URL: url!)
let avAsset = AVURLAsset(URL: url!, options: nil)
print("\(avAsset)")

// Break the asset duration (in seconds) into h/m/s for display.
// `%` on Double is the floating-point remainder in Swift 2.x.
let audioDuration = avAsset.duration
let totalSeconds = CMTimeGetSeconds(audioDuration)
let hours = floor(totalSeconds / 3600)
let minutes = floor(totalSeconds % 3600 / 60)   // `let`: never mutated
let seconds = floor(totalSeconds % 3600 % 60)   // `let`: never mutated
print("hours = \(hours),minutes = \(minutes),seconds = \(seconds)")
这是输出:// hours = 0.0,minutes = 0.0,seconds = 42.0
对于 trim(修剪)方法,我目前只试过下面这段代码;问题在于如何设置精确的持续时间、开始时间、结束时间,以及新的输出 URL:
/// Trims `asset` to the range [startSeconds, startSeconds + durationSeconds)
/// and exports it as an AppleM4A file named `fileName` in the app's
/// Documents directory.
///
/// - Parameters:
///   - asset: the source audio asset to trim.
///   - fileName: output file name, e.g. "trimmed.m4a".
///   - startSeconds: trim start in whole seconds (default 0 — matches the
///     original hard-coded behavior).
///   - durationSeconds: trim length in whole seconds (default 5 — matches the
///     original hard-coded behavior).
func exportAsset(asset: AVAsset, fileName: String, startSeconds: Int64 = 0, durationSeconds: Int64 = 5) {
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
    print("saving to \(trimmedSoundFileURL!.absoluteString)")

    // BUGFIX: fileExistsAtPath expects a filesystem path, not a URL string
    // (absoluteString includes the "file://" scheme and never matches).
    // Also, AVAssetExportSession fails if the output file already exists,
    // so a stale file must be removed — printing "sound exists" is not enough.
    let filemanager = NSFileManager.defaultManager()
    if let path = trimmedSoundFileURL!.path where filemanager.fileExistsAtPath(path) {
        print("sound exists, removing stale output")
        do {
            try filemanager.removeItemAtURL(trimmedSoundFileURL!)
        } catch let error as NSError {
            print("could not remove existing file: \(error.localizedDescription)")
        }
    }

    // The source must cover the whole requested range.
    let assetSeconds = CMTimeGetSeconds(asset.duration)
    if assetSeconds < Double(startSeconds + durationSeconds) {
        print("sound is not long enough")
        return
    }

    let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputFileType = AVFileTypeAppleM4A
    exporter!.outputURL = trimmedSoundFileURL

    // Timescale 1 => times are expressed in whole seconds.
    let startTime = CMTimeMake(startSeconds, 1)
    let stopTime = CMTimeMake(startSeconds + durationSeconds, 1)
    exporter!.timeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)

    // Export runs asynchronously; completion only reports status.
    exporter!.exportAsynchronouslyWithCompletionHandler({
        switch exporter!.status {
        case AVAssetExportSessionStatus.Failed:
            print("export failed \(exporter!.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("export cancelled \(exporter!.error)")
        default:
            print("export complete")
        }
    })
}
解决方法
最后我找到了这个问题的答案,并且运行正常。下面附上代码,其中已经加入了修剪(trim)音频的部分。对于想要合并并修剪音频的人(Swift 2.3)来说,它应该会很有用:
/// Mixes two sound files (`soundFileURL` = recorded voice, `soundFileURL1` =
/// backing track) into one composition, trims the result to the recorded
/// voice's length, exports it to Library/Fav.m4a, then uploads (base64) and
/// plays the exported file.
func mixAudio() {
    let currentTime = CFAbsoluteTimeGetCurrent()

    // --- Build a composition with one audio track per source asset. ---
    let composition = AVMutableComposition()
    let compositionAudioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack.preferredVolume = 0.8

    let avAsset = AVURLAsset.init(URL: soundFileURL, options: nil)
    print("\(avAsset)")
    let tracks = avAsset.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack = tracks[0]
    do {
        try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration), ofTrack: clipAudioTrack, atTime: kCMTimeZero)
    } catch let error as NSError {
        // Don't silently swallow: a failed insert produces an empty/partial mix.
        print("failed to insert first track: \(error.localizedDescription)")
    }

    let compositionAudioTrack1 = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    // BUGFIX: the original set compositionAudioTrack's volume a second time
    // here and left the second track at its default volume.
    compositionAudioTrack1.preferredVolume = 0.8

    let avAsset1 = AVURLAsset.init(URL: soundFileURL1)
    print(avAsset1)
    let tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack1 = tracks1[0]
    do {
        try compositionAudioTrack1.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset1.duration), ofTrack: clipAudioTrack1, atTime: kCMTimeZero)
    } catch let error as NSError {
        print("failed to insert second track: \(error.localizedDescription)")
    }

    // --- Output location: Library/Fav.m4a ---
    let paths = NSSearchPathForDirectoriesInDomains(.LibraryDirectory, .UserDomainMask, true)
    let CachesDirectory = paths[0]
    let strOutputFilePath = CachesDirectory.stringByAppendingString("/Fav")
    print(" strOutputFilePath is \n \(strOutputFilePath)")
    let requiredOutputPath = CachesDirectory.stringByAppendingString("/Fav.m4a")
    print(" requiredOutputPath is \n \(requiredOutputPath)")
    soundFile1 = NSURL.fileURLWithPath(requiredOutputPath)
    print(" OUtput path is \n \(soundFile1)")

    // --- Trim length: total whole seconds of the recorded voice. ---
    let audioDuration = avAsset.duration
    let totalSeconds = CMTimeGetSeconds(audioDuration)
    let hours = floor(totalSeconds / 3600)
    let minutes = floor(totalSeconds % 3600 / 60)
    let seconds = Int64(totalSeconds % 3600 % 60)
    print("hours = \(hours),minutes = \(minutes),seconds = \(seconds)")
    // BUGFIX: the original trimmed to only the seconds *component*, which is
    // wrong for any recording of 60 s or more; use the full duration instead.
    let trimSeconds = Int64(floor(totalSeconds))

    let recordSettings: [String: AnyObject] = [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 12000,
        AVNumberOfChannelsKey: 1,
        AVEncoderAudioQualityKey: AVAudioQuality.Low.rawValue
    ]
    do {
        audioRecorder = try AVAudioRecorder(URL: soundFile1, settings: recordSettings)
        audioRecorder!.delegate = self
        audioRecorder!.meteringEnabled = true
        // NOTE(review): preparing a recorder on the export output file and then
        // deleting that file below looks suspicious — confirm it is intentional.
        audioRecorder!.prepareToRecord()
    } catch let error as NSError {
        audioRecorder = nil
        print(error.localizedDescription)
    }

    // AVAssetExportSession fails if the output file already exists;
    // a failure here just means there was no stale file to remove.
    do {
        try NSFileManager.defaultManager().removeItemAtURL(soundFile1)
    } catch let error as NSError {
        print("no stale output to remove: \(error.localizedDescription)")
    }

    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputURL = soundFile1
    exporter!.outputFileType = AVFileTypeAppleM4A

    let duration = CMTimeGetSeconds(avAsset1.duration)
    print(duration)
    if duration < 5.0 {
        print("sound is not long enough")
        return
    }

    // BUGFIX: the original had `CMTimeMake(seconds,stopTime)` (self-referential,
    // missing the timescale) and never defined `exportTimeRange` — it could not
    // compile. Timescale 1 => whole seconds.
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(trimSeconds, 1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    print(exportTimeRange)
    exporter!.timeRange = exportTimeRange
    print(exporter!.timeRange)

    exporter!.exportAsynchronouslyWithCompletionHandler { () -> Void in
        print(" OUtput path is \n \(requiredOutputPath)")
        print("export complete: \(CFAbsoluteTimeGetCurrent() - currentTime)")
        var url: NSURL?
        if self.audioRecorder != nil {
            url = self.audioRecorder!.url
        } else {
            url = self.soundFile1!
            print(url)
        }
        print("playing \(url)")
        do {
            print(self.soundFile1)
            print(" OUtput path is \n \(requiredOutputPath)")
            self.setSessionPlayback()
            // Read the exported file, base64-encode it and upload it.
            self.optData = try NSData(contentsOfURL: self.soundFile1!, options: NSDataReadingOptions.DataReadingMappedIfSafe)
            print(self.optData)
            self.recordencryption = self.optData.base64EncodedStringWithOptions(NSDataBase64EncodingOptions())
            // print(self.recordencryption)
            self.myImageUploadRequest()
            // Play the exported mix back once.
            self.wasteplayer = try AVAudioPlayer(contentsOfURL: self.soundFile1)
            self.wasteplayer.numberOfLoops = 0
            self.wasteplayer.play()
        } catch let error as NSError {
            print("post-export upload/playback failed: \(error.localizedDescription)")
        }
    }
}
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。