iOS使用AVFoundation展示视频

2025-05-29 0 89

本文实例为大家分享了iOS使用AVFoundation展示视频的具体代码,供大家参考,具体内容如下

(此处原为网页排版残留的代码行号 1–182,无实际内容,完整代码见下文。)
//

// Capter2ViewController.m

// IosTest

//

// Created by garin on 13-7-19.

// Copyright (c) 2013年 garin. All rights reserved.

//

#import "Capter2ViewController.h"

// Class extension for private API. The ivars this file uses (session,
// videoPreviewView, imgView) are declared in Capter2ViewController.h.
@interface Capter2ViewController ()

@end

@implementation Capter2ViewController

// MRC (pre-ARC) teardown: stop the capture session before releasing the
// owned ivar, then call through to super (required under MRC).
- (void)dealloc
{
    [session stopRunning];
    [session release];
    [super dealloc];
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Custom initialization
    }
    return self;
}

// Builds the preview host view and a stop button, then configures and
// starts the AVFoundation capture pipeline.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Host view for the AVCaptureVideoPreviewLayer. addSubview: retains
    // it, so the balancing release leaves the ivar as a non-owning
    // reference kept alive by the view hierarchy.
    videoPreviewView = [[UIView alloc] initWithFrame:CGRectMake(10, 10, 320, 200)];
    [self.view addSubview:videoPreviewView];
    [videoPreviewView release];

    // Configure AVFoundation and start streaming into the preview layer.
    [self setupCaptureSession];

    // Optional UIImageView for displaying processed frames — left
    // disabled, as in the original tutorial:
    // imgView = [[UIImageView alloc] initWithFrame:CGRectMake(10, 230, 320, 100)];
    // imgView.backgroundColor = [UIColor grayColor];
    // [self.view addSubview:imgView];
    // [imgView release];

    // Button that stops the running capture session.
    UIButton *closeButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    closeButton.frame = CGRectMake(10, 220, 300, 50);
    [closeButton setTitle:@"Press" forState:UIControlStateNormal];
    [closeButton addTarget:self
                    action:@selector(closeBtnClick:)
          forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:closeButton];
}

// Stops the capture session; the preview freezes on the last frame.
- (void)closeBtnClick:(id)sender
{
    [session stopRunning];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

// Creates and configures the AVCaptureSession: the default video device
// as input, a BGRA video-data output that delivers frames to this
// controller on a private serial queue, and a preview layer embedded in
// videoPreviewView.
- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session. The low preset keeps per-frame processing cheap.
    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetLow;

    // Find a suitable capture device (the default camera).
    AVCaptureDevice *device =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // FIX: the original fell through with an empty if-block and then
        // passed nil to -addInput:, which raises
        // NSInvalidArgumentException (e.g. on the simulator, or when
        // camera access is unavailable). Bail out instead.
        NSLog(@"Could not create video device input: %@", error);
        return;
    }
    [session addInput:input];

    // Create a video-data output and add it to the session.
    AVCaptureVideoDataOutput *output =
        [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];

    // Deliver sample buffers to this controller on a private serial
    // queue. dispatch_release balances dispatch_queue_create under MRC;
    // the output retains the queue for as long as it needs it.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Request BGRA frames so imageFromSampleBuffer: can render them
    // directly through a CGBitmapContext.
    output.videoSettings =
        [NSDictionary dictionaryWithObject:
                          [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // To cap the frame rate to a known value, such as 15 fps, set
    // minFrameDuration (deprecated API in the original tutorial):
    // output.minFrameDuration = CMTimeMake(1, 15);

    // Embed the live preview layer in the host view.
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.frame = videoPreviewView.bounds; // UIView that shows the video
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [videoPreviewView.layer addSublayer:previewLayer];

    // Start the flow of data once configuration is complete.
    // FIX: the original called -startRunning twice (mid-configuration and
    // again at the end); starting once, after setup, is sufficient.
    if (![session isRunning]) {
        [session startRunning];
    }
}

// AVCaptureVideoDataOutput delegate callback: invoked with each captured
// video frame on the private capture queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // FIX: the original had an unconditional `return;` here, leaving the
    // code below unreachable. Guard on imgView instead — this preserves
    // the original behavior (imgView is never created in viewDidLoad)
    // while keeping the display path live if it is re-enabled.
    if (!imgView) {
        return;
    }

    // Create a UIImage from the sample buffer data.
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];

    // FIX: UIKit must only be touched on the main thread; this callback
    // runs on the capture queue, so hop to main before updating the view.
    dispatch_async(dispatch_get_main_queue(), ^{
        imgView.image = image;
    });
}

// Creates an autoreleased UIImage from a 32BGRA sample buffer by
// rendering its pixel data through a CGBitmapContext.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get the sample buffer's Core Video image buffer for the media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer while reading it.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Geometry and raw bytes of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Bitmap layout matching kCVPixelFormatType_32BGRA: little-endian
    // 32-bit with premultiplied alpha first.
    CGContextRef context = CGBitmapContextCreate(
        baseAddress, width, height, 8, bytesPerRow, colorSpace,
        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Create a Quartz image from the pixel data in the bitmap context.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Done reading the pixel buffer.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space.
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Wrap the Quartz image; UIImage retains it, so release our +1 ref.
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);

    return image;
}

@end

以上就是本文的全部内容,希望对大家的学习有所帮助,也希望大家多多支持快网idc。

收藏 (0) 打赏

感谢您的支持,我会继续努力的!

打开微信/支付宝扫一扫,即可进行扫码打赏哦,分享从这里开始,精彩与您同在
点赞 (0)

声明:本站所有文章,如无特殊说明或标注,均为本站原创发布。任何个人或组织,在未征得本站同意时,禁止复制、盗用、采集、发布本站内容到任何网站、书籍等各类媒体平台。如若本站内容侵犯了原著者的合法权益,可联系我们进行处理。

快网idc优惠网 建站教程 iOS使用AVFoundation展示视频 https://www.kuaiidc.com/89009.html

相关文章

发表评论
暂无评论