Video recording, overall: the interface is laid out in a xib. On the recording screen there are four buttons across the top — exit, front/back camera switch, flash, and play. These four buttons sit on a single container view, and the container can be repositioned by adjusting that view's constraints. Below them is a progress bar, which is a custom view. At the bottom are two buttons: one starts recording, the other opens the photo library.
(Screenshot: the final result)
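The xib wiring itself isn't included in the post. As a minimal sketch of the "reposition the button bar by adjusting its constraints" idea described above, inside the recording view controller it might look like this (the outlet names here are made up, not from the project):

// Hypothetical outlets for the xib described above: a container view holding
// the four top buttons, with its top constraint exposed as an outlet.
@property (weak, nonatomic) IBOutlet UIView * topBarView;
@property (weak, nonatomic) IBOutlet NSLayoutConstraint * topBarTopConstraint;

// Move the button bar by changing its constraint rather than its frame
- (void)hideTopBar {
    self.topBarTopConstraint.constant = -self.topBarView.bounds.size.height;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded]; // animate the constraint change
    }];
}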
The approach: 1. A class that wraps encoding and writing of the recorded video
#import "MLRecordEncoder.h" @interface MLRecordEncoder () @property (nonatomic, strong) AVAssetWriter * writer; //媒體寫入對象 @property (nonatomic, strong) AVAssetWriterInput * videoInput; //視頻寫入 @property (nonatomic, strong) AVAssetWriterInput * audioInput; //音頻寫入 @property (nonatomic, strong) NSString * path; //寫入路徑 @end @implementation MLRecordEncoder - (void)dealloc{ _writer = nil; _videoInput = nil; _audioInput = nil; _path = nil; } //MLRecordEncoder遍歷構造器的 + (MLRecordEncoder *)encoderForPath:(NSString *)path height:(NSInteger)cy width:(NSInteger)cx channels:(int)ch samples:(Float64)rate{ MLRecordEncoder * enc = [MLRecordEncoder alloc]; return [enc initPath:path height:cy width:cx channels:ch samples:rate]; } //初始化方法 - (instancetype)initPath:(NSString *)path height:(NSInteger)cy width:(NSInteger)cx channels:(int)ch samples:(Float64)rate{ if (self = [super init]) { self.path = path; //先把路徑下的文件給刪除掉,保證錄製的文件是最新的 [[NSFileManager defaultManager] removeItemAtPath:self.path error:nil]; NSURL * url = [NSURL fileURLWithPath:self.path]; //初始化寫入媒體類型為Mp4類型 _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeMPEG4 error:nil]; //使其更適合在網路上播放 _writer.shouldOptimizeForNetworkUse = YES; //初始化視頻輸入 [self initVideoInputHeight:cy width:cx]; //確保採集到rate和ch if (rate != 0 && ch != 0) { //初始化音頻輸入 [self initAudioInputChannels:ch samples:rate]; } } return self; } //初始化視頻輸入 - (void)initVideoInputHeight:(NSInteger)cy width:(NSInteger)cx{ //錄製視頻的一些配置、解析度、編碼方式等等 NSDictionary * settings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInteger:cx], AVVideoWidthKey, [NSNumber numberWithInteger:cy], AVVideoHeightKey, nil]; //初始化視頻寫入類 _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings]; //表明輸入是否應該調整其處理為實時數據源的數據 _videoInput.expectsMediaDataInRealTime = YES; //將視頻輸入源加入 [_writer addInput:_videoInput]; } //初始化音頻輸入 - (void)initAudioInputChannels:(int)ch samples:(Float64)rate{ //音頻的一些配置包括音頻各種這裡為AAC,音頻通道、採樣率和音頻的比特率 NSDictionary * settings = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey, [NSNumber numberWithInt:ch], AVNumberOfChannelsKey, [NSNumber numberWithFloat:rate], AVSampleRateKey, [NSNumber numberWithInt:128000], AVEncoderBitRateKey, nil]; //初始化音頻寫入類 _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings]; //表明輸入是否應該調整其處理為實時數據源的數據 _audioInput.expectsMediaDataInRealTime = YES; //將音頻輸入源加入 [_writer addInput:_audioInput]; } //完成視頻錄製時調用 - (void)finishWithCompletionHandler:(void (^)(void))handler{ [_writer finishWritingWithCompletionHandler:handler]; } //通過這個方法寫入數據 - (BOOL)encodeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo{ //數據是否準備寫入 if (CMSampleBufferDataIsReady(sampleBuffer)) { //寫入狀態為未知,保證視頻先寫入 if (_writer.status == AVAssetWriterStatusUnknown && isVideo) { //獲取開始寫入的CMTime CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); //開始寫入 [_writer startWriting]; [_writer startSessionAtSourceTime:startTime]; } //寫入失敗 if (_writer.status == AVAssetWriterStatusFailed) { NSLog(@"writer error %@",_writer.error.localizedDescription); return NO; } //判斷是否是視頻 if (isVideo) { //視頻輸入是否準備接受更多的媒體數據 if (_videoInput.readyForMoreMediaData == YES) { //拼接數據 [_videoInput appendSampleBuffer:sampleBuffer]; return YES; } }else{ //音頻輸入是否準備接受更多的媒體數據 if (_audioInput.readyForMoreMediaData) { //拼接數據 [_audioInput appendSampleBuffer:sampleBuffer]; return YES; } } } return NO; } @end
2. A class that wraps the video capture itself
#import "MLRecordEngine.h" #import "MLRecordEncoder.h" #import <AVFoundation/AVFoundation.h> #import <Photos/Photos.h> @interface MLRecordEngine ()<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate> { CMTime _timeOffset; //錄製的偏移CMTime CMTime _lastVideo; //記錄上一次視頻數據文件的CMTime CMTime _lastAudio; //記錄上一次音頻數據文件的CMTime NSInteger _cx; //視頻分辨的寬 NSInteger _cy; //視頻分辨的高 int _channels; //音頻通道 Float64 _samplerate; //音頻採樣率 } @property (nonatomic, strong) MLRecordEncoder * recordEncoder; //錄製編碼 @property (nonatomic, strong) AVCaptureSession * recordSession; //捕獲視頻的會話 @property (nonatomic, strong) AVCaptureVideoPreviewLayer * previewLayer; //捕獲到的視頻呈現的layer @property (nonatomic, strong) AVCaptureDeviceInput * backCameraInput; //後置攝像頭輸入 @property (nonatomic, strong) AVCaptureDeviceInput * frontCameraInput; //前置攝像頭輸入 @property (nonatomic, strong) AVCaptureDeviceInput * audioMicInput; //麥克風輸入 @property (nonatomic, copy) dispatch_queue_t captureQueue; //錄製的隊列 @property (nonatomic, strong) AVCaptureConnection * audioConnection; //音頻錄製連接 @property (nonatomic, strong) AVCaptureConnection * videoConnection; //視頻錄製連接 @property (nonatomic, strong) AVCaptureVideoDataOutput * videoOutput; //視頻輸出 @property (nonatomic, strong) AVCaptureAudioDataOutput * audioOutput; //音頻輸出 @property (atomic, assign) BOOL isCapturing; //正在錄製 @property (atomic, assign) BOOL isPaused; //是否暫停 @property (atomic, assign) BOOL discont; //是否中斷 @property (atomic, assign) CMTime startTime; //開始錄製的時間 @property (atomic, assign) CGFloat currentRecordTime; //當前錄製時間 @end @implementation MLRecordEngine - (void)dealloc{ [_recordSession stopRunning]; _captureQueue = nil; _recordSession = nil; _previewLayer = nil; _backCameraInput = nil; _frontCameraInput = nil; _audioOutput = nil; _videoOutput = nil; _audioConnection = nil; _videoConnection = nil; _recordEncoder = nil; } - (instancetype)init{ if (self = [super init]) { self.maxRecordTime = 60.0f; } return self; } #pragma mark - 公開的方法 //啟用錄製功能 - (void)startUp{ self.startTime = CMTimeMake(0, 0); self.isCapturing = NO; self.isPaused = NO; self.discont = NO; [self.recordSession startRunning]; } //關閉錄製功能 - (void)shutdown{ _startTime = CMTimeMake(0, 0); if (_recordSession) { [_recordSession stopRunning]; } [_recordEncoder finishWithCompletionHandler:^{ NSLog(@"錄製完成"); }]; } //開始錄製 - (void)startCapture{ @synchronized(self) { if (!self.isCapturing) { NSLog(@"開始錄製"); self.recordEncoder = nil; self.isPaused = NO; self.discont = NO; _timeOffset = CMTimeMake(0, 0); self.isCapturing = YES; } } } //暫停錄製 - (void)pauseCapture{ @synchronized(self) { if (self.isCapturing) { self.isPaused = YES; self.discont = YES; } } } //繼續錄製 - (void)resumeCapture{ @synchronized(self) { if (self.isPaused) { self.isPaused = NO; } } } //停止錄製 - (void)stopCaptureHandler:(void (^)(UIImage *))handler{ @synchronized(self) { if (self.isCapturing) { NSString * path = self.recordEncoder.path; NSURL * url = [NSURL fileURLWithPath:path]; self.isCapturing = NO; dispatch_async(_captureQueue, ^{ [self.recordEncoder finishWithCompletionHandler:^{ self.isCapturing = NO; self.recordEncoder = nil; [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{ [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:url]; } completionHandler:^(BOOL success, NSError * _Nullable error) { NSLog(@"保存成功"); }]; [self movieToImageHandler:handler]; }]; }); } } } //獲取視頻第一幀的圖片 - (void)movieToImageHandler:(void (^) (UIImage * movieImage))handler{ NSURL * url = [NSURL fileURLWithPath:self.videoPath]; AVURLAsset * asset = 
[[AVURLAsset alloc] initWithURL:url options:nil]; AVAssetImageGenerator * generator = [[AVAssetImageGenerator alloc] initWithAsset:asset]; generator.appliesPreferredTrackTransform = TRUE; CMTime thumbTime = CMTimeMakeWithSeconds(0, 60); generator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels; AVAssetImageGeneratorCompletionHandler generatorHandler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * error){ if (result == AVAssetImageGeneratorSucceeded) { UIImage * thumbImg = [UIImage imageWithCGImage:im]; if (handler) { dispatch_async(dispatch_get_main_queue(), ^{ handler(thumbImg); }); } } }; [generator generateCGImagesAsynchronouslyForTimes:[NSArray arrayWithObject:[NSValue valueWithCMTime:thumbTime]] completionHandler:generatorHandler]; } #pragma mark - set、get方法 //捕獲視頻的會話 - (AVCaptureSession *)recordSession{ if (_recordSession == nil) { _recordSession = [[AVCaptureSession alloc] init]; //添加後置攝像頭的輸入 if ([_recordSession canAddInput:self.backCameraInput]) { [_recordSession addInput:self.backCameraInput]; } //添加後置麥克風的輸入 if ([_recordSession canAddInput:self.audioMicInput]) { [_recordSession addInput:self.audioMicInput]; } //添加視頻的輸出 if ([_recordSession canAddOutput:self.videoOutput]) { [_recordSession addOutput:self.videoOutput]; //設置視頻的解析度 _cx = 720; _cy = 1280; } //添加音頻的輸出 if ([_recordSession canAddOutput:self.audioOutput]) { [_recordSession addOutput:self.audioOutput]; } //設置視頻錄製的方向 self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait; } return _recordSession; } //後置攝像頭輸入 - (AVCaptureDeviceInput *)backCameraInput{ if (_backCameraInput == nil) { NSError * error; _backCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:&error]; if (error) { NSLog(@"獲取後置攝像頭失敗~"); } } return _backCameraInput; } //前置攝像頭輸入 - (AVCaptureDeviceInput *)frontCameraInput{ if (_frontCameraInput == nil) { NSError * error; _frontCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontCamera] error:&error]; if (error) { NSLog(@"獲取前置攝像頭失敗~"); } } return _frontCameraInput; } //麥克風輸入 - (AVCaptureDeviceInput *)audioMicInput{ if (_audioMicInput == nil) { AVCaptureDevice * mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; NSError * error; _audioMicInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:&error]; if (error) { NSLog(@"獲取麥克風失敗~"); } } return _audioMicInput; } //視頻輸出 - (AVCaptureVideoDataOutput *)videoOutput{ if (_videoOutput == nil) { _videoOutput = [[AVCaptureVideoDataOutput alloc] init]; [_videoOutput setSampleBufferDelegate:self queue:self.captureQueue]; NSDictionary * setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey, nil]; _videoOutput.videoSettings = setcapSettings; } return _videoOutput; } //音頻輸出 - (AVCaptureAudioDataOutput *)audioOutput{ if (_audioOutput == nil) { _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; [_audioOutput setSampleBufferDelegate:self queue:self.captureQueue]; } return _audioOutput; } //視頻連接 - (AVCaptureConnection *)videoConnection{ _videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo]; return _videoConnection; } //音頻連接 - (AVCaptureConnection *)audioConnection{ if (_audioConnection == nil) { _audioConnection = [self.audioOutput connectionWithMediaType:AVMediaTypeAudio]; } return _audioConnection; } //捕獲到的視頻呈現的layer - (AVCaptureVideoPreviewLayer *)previewLayer{ if (_previewLayer == 
nil) { //通過AVCaptureSession初始化 AVCaptureVideoPreviewLayer * preview = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.recordSession]; //設置比例為鋪滿全屏 preview.videoGravity = AVLayerVideoGravityResizeAspectFill; _previewLayer = preview; } return _previewLayer; } //錄製的隊列 - (dispatch_queue_t)captureQueue{ if (_captureQueue == nil) { _captureQueue = dispatch_queue_create("cn.qiuyouqun.im.wclrecordengine.capture", DISPATCH_QUEUE_SERIAL); } return _captureQueue; } #pragma mark - 切換動畫 - (void)changeCameraAnimation{ CATransition * changeAnimation = [CATransition animation]; changeAnimation.delegate = self; changeAnimation.duration = 0.45; changeAnimation.type = @"oglFlip"; changeAnimation.subtype = kCATransitionFromRight; changeAnimation.timingFunction = UIViewAnimationCurveEaseInOut; [self.previewLayer addAnimation:changeAnimation forKey:@"changeAnimation"]; } - (void)animationDidStart:(CAAnimation *)anim{ self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait; [self.recordSession startRunning]; } #pragma mark - 將mov文件轉為Mp4文件 - (void)changeMovToMp4:(NSURL *)mediaURL dataBlock:(void (^)(UIImage *))handler{ AVAsset * video = [AVAsset assetWithURL:mediaURL]; AVAssetExportSession * exportSession = [AVAssetExportSession exportSessionWithAsset:video presetName:AVAssetExportPreset1280x720]; exportSession.shouldOptimizeForNetworkUse = YES; exportSession.outputFileType = AVFileTypeMPEG4; NSString * basePath = [self getVideoCachePath]; self.videoPath = [basePath stringByAppendingPathComponent:[self getUploadFile_type:@"video" fileType:@"mp4"]]; exportSession.outputURL = [NSURL fileURLWithPath:self.videoPath]; [exportSession exportAsynchronouslyWithCompletionHandler:^{ [self movieToImageHandler:handler]; }]; } #pragma mark - 視頻相關 //返回前置攝像頭 - (AVCaptureDevice *)frontCamera{ return [self cameraWithPosition:AVCaptureDevicePositionFront]; } //返回後置攝像頭 - (AVCaptureDevice *)backCamera{ return [self cameraWithPosition:AVCaptureDevicePositionBack]; } //切換前後置攝像頭 - (void)changeCameraInputDeviceisFront:(BOOL)isFront{ if (isFront) { [self.recordSession stopRunning]; [self.recordSession removeInput:self.backCameraInput]; if ([self.recordSession canAddInput:self.frontCameraInput]) { [self changeCameraAnimation]; [self.recordSession addInput:self.frontCameraInput]; } }else{ [self.recordSession stopRunning]; [self.recordSession removeInput:self.frontCameraInput]; if ([self.recordSession canAddInput:self.backCameraInput]) { [self changeCameraAnimation]; [self.recordSession addInput:self.backCameraInput]; } } } //用來返回時前置攝像頭還是後置攝像頭 - (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{ //返回和視頻錄製相關的所有預設設備 NSArray * devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; //遍歷這些設備返回跟position相關的設備 for (AVCaptureDevice * device in devices) { if ([device position] == position) { return device; } } return nil; } //開啟閃光燈 - (void)openFlashLight{ AVCaptureDevice * backCamera = [self backCamera]; if (backCamera.torchMode == AVCaptureTorchModeOff) { [backCamera lockForConfiguration:nil]; backCamera.torchMode = AVCaptureTorchModeOn; backCamera.flashMode = AVCaptureTorchModeOn; [backCamera unlockForConfiguration]; } } //關閉閃光燈 - (void)closeFlashLight{ AVCaptureDevice * backCamera = [self backCamera]; if (backCamera.torchMode == AVCaptureTorchModeOn) { [backCamera lockForConfiguration:nil]; backCamera.torchMode = AVCaptureTorchModeOff; backCamera.flashMode = AVCaptureTorchModeOff; [backCamera unlockForConfiguration]; } } //獲得視頻存放地址 - (NSString *)getVideoCachePath{ NSString * videoCache = 
[NSTemporaryDirectory() stringByAppendingPathComponent:@"videos"]; BOOL isDir = NO; NSFileManager * fileManager = [NSFileManager defaultManager]; BOOL existed = [fileManager fileExistsAtPath:videoCache isDirectory:&isDir]; if (!(isDir == YES && existed == YES)) { [fileManager createDirectoryAtPath:videoCache withIntermediateDirectories:YES attributes:nil error:nil]; } return videoCache; } - (NSString *)getUploadFile_type:(NSString *)type fileType:(NSString *)fileType{ NSTimeInterval now = [[NSDate date] timeIntervalSince1970]; NSDateFormatter * formatter = [[NSDateFormatter alloc] init]; [formatter setDateFormat:@"HHmmss"]; NSDate * nowDate = [NSDate dateWithTimeIntervalSince1970:now]; NSString * timeStr = [formatter stringFromDate:nowDate]; NSString * fileName = [NSString stringWithFormat:@"%@_%@.%@",type, timeStr, fileType]; return fileName; } #pragma mark - 寫入數據 - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{ BOOL isVideo = YES; @synchronized(self) { if (!self.isCapturing || self.isPaused) { return; } if (captureOutput != self.videoOutput) { isVideo = NO; } //初始化編碼器,當有音頻和視頻參數時創建編碼器 if ((self.recordEncoder == nil) && !isVideo) { CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer); [self setAudioFormat:fmt]; NSString * videoName = [self getUploadFile_type:@"video" fileType:@"mp4"]; self.videoPath = [[self getVideoCachePath] stringByAppendingPathComponent:videoName]; self.recordEncoder = [MLRecordEncoder encoderForPath:self.videoPath height:_cy width:_cx channels:_channels samples:_samplerate]; } //判斷是否中斷錄製過 if (self.discont) { if (isVideo) { return; } self.discont = NO; //計算暫停的時間 CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); CMTime last = isVideo ? 
_lastVideo : _lastAudio; if (last.flags & kCMTimeFlags_Valid) { if (_timeOffset.flags & kCMTimeFlags_Valid) { pts = CMTimeSubtract(pts, _timeOffset); } CMTime offset = CMTimeSubtract(pts, last); if (_timeOffset.value == 0) { _timeOffset = offset; }else{ _timeOffset = CMTimeAdd(_timeOffset, offset); } } _lastVideo.flags = 0; _lastAudio.flags = 0; } //增加sampleBuffer的引用計時,這樣我們可以釋放這個或修改這個數據,防止在修改時被釋放 CFRetain(sampleBuffer); if (_timeOffset.value > 0) { CFRelease(sampleBuffer); //根據得到的timeOffset調整 sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset]; } //記錄暫停上一次錄製的時間 CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); CMTime dur = CMSampleBufferGetDuration(sampleBuffer); if (dur.value > 0) { pts = CMTimeAdd(pts, dur); } if (isVideo) { _lastVideo = pts; }else{ _lastAudio = pts; } } CMTime dur = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); if (self.startTime.value == 0) { self.startTime = dur; } CMTime sub = CMTimeSubtract(dur, self.startTime); self.currentRecordTime = CMTimeGetSeconds(sub); if (self.currentRecordTime > self.maxRecordTime) { if (self.currentRecordTime - self.maxRecordTime < 0.1) { if ([self.delegate respondsToSelector:@selector(recordProgress:)]) { dispatch_async(dispatch_get_main_queue(), ^{ [self.delegate recordProgress:self.currentRecordTime / self.maxRecordTime]; }); } } return; } if ([self.delegate respondsToSelector:@selector(recordProgress:)]) { dispatch_async(dispatch_get_main_queue(), ^{ [self.delegate recordProgress:self.currentRecordTime / self.maxRecordTime]; }); } //進行數據編碼 [self.recordEncoder encodeFrame:sampleBuffer isVideo:isVideo]; CFRelease(sampleBuffer); } //設置音頻格式 - (void)setAudioFormat:(CMFormatDescriptionRef)fmt{ const AudioStreamBasicDescription * asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt); _samplerate = asbd->mSampleRate; _channels = asbd->mChannelsPerFrame; } //調整媒體數據的時間 - (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset{ CMItemCount count; CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); CMSampleTimingInfo * pInfo = malloc(sizeof(CMSampleTimingInfo) * count); CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); for (CMItemCount i = 0; i < count; i++) { pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset); pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); } CMSampleBufferRef sout; CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); free(pInfo); return sout; } @end
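To tie the pieces together, here is a minimal sketch of how a recording view controller might drive MLRecordEngine. The engine's implementation only shows that its delegate responds to recordProgress:, so the protocol name MLRecordEngineDelegate, the controller name, and the outlets below are assumptions, not code from the repo:

// A hypothetical recording view controller; names are illustrative only.
#import "MLRecordEngine.h"
#import "MLRecordProgressView.h"

@interface MLRecordViewController : UIViewController <MLRecordEngineDelegate> // protocol name assumed
@property (nonatomic, strong) MLRecordEngine * recordEngine;
@property (weak, nonatomic) IBOutlet UIView * previewView;               // hosts the preview layer
@property (weak, nonatomic) IBOutlet MLRecordProgressView * progressView;
@end

@implementation MLRecordViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.recordEngine = [[MLRecordEngine alloc] init];
    self.recordEngine.delegate = self;
    [self.recordEngine startUp];
    // Insert the preview layer behind the xib's buttons
    self.recordEngine.previewLayer.frame = self.previewView.bounds;
    [self.previewView.layer insertSublayer:self.recordEngine.previewLayer atIndex:0];
}

- (IBAction)recordButtonPressed:(UIButton *)sender {
    [self.recordEngine startCapture];
}

- (IBAction)recordButtonReleased:(UIButton *)sender {
    [self.recordEngine stopCaptureHandler:^(UIImage * movieImage) {
        // movieImage is the first frame of the finished recording
    }];
}

// Called by the engine on the main queue as recording progresses (0.0–1.0)
- (void)recordProgress:(CGFloat)progress {
    self.progressView.progress = progress;
}

@end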
3. The progress bar, wrapped in a custom view
#import "MLRecordProgressView.h" @implementation MLRecordProgressView - (void)setProgress:(CGFloat)progress{ _progress = progress; [self setNeedsDisplay]; } - (void)setProgressBgColor:(UIColor *)progressBgColor{ _progressBgColor = progressBgColor; [self setNeedsDisplay]; } - (void)setLoadProgressColor:(UIColor *)loadProgressColor{ _loadProgressColor = loadProgressColor; [self setNeedsDisplay]; } - (void)setLoadProgress:(CGFloat)loadProgress{ _loadProgress = loadProgress; [self setNeedsDisplay]; } - (void)setProgressColor:(UIColor *)progressColor{ _progressColor = progressColor; [self setNeedsDisplay]; } - (void)drawRect:(CGRect)rect{ CGContextRef context = UIGraphicsGetCurrentContext(); CGContextAddRect(context, CGRectMake(0, 0, rect.size.width, rect.size.height)); [self.progressBgColor set]; CGContextSetAlpha(context, 0.5); CGContextDrawPath(context, kCGPathFill); CGContextAddRect(context, CGRectMake(0, 0, rect.size.width * self.self.loadProgress, rect.size.height)); [self.progressBgColor set]; CGContextSetAlpha(context, 1); CGContextDrawPath(context, kCGPathFill); CGContextAddRect(context, CGRectMake(0, 0, rect.size.width * self.progress, rect.size.height)); [self.progressColor set]; CGContextSetAlpha(context, 1); CGContextDrawPath(context, kCGPathFill); } @end
Source code: https://github.com/ChangYulong/MLRecordVideo