The requirement: play H.264 video. The stream is decoded on the iOS side into CVPixelBuffers, and playback in Flutter goes through a texture.

Flutter side:
1. Create a MethodChannel, _channel = MethodChannel('com.ios.texture'), for talking to the iOS side; its main job is to fetch the _textureID from iOS. (The name must match the channel registered on the iOS side.)
2. Add Texture(textureId: _textureID) to the widget tree; this is what displays the video. Under the hood the engine fetches the iOS-side CVPixelBuffer and renders it onto the Flutter page.

The code is as follows:
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';

void main() => runApp(MyApp());

class MyApp extends StatelessWidget {
  // This widget is the root of your application.
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Flutter Demo',
      theme: ThemeData(
        primarySwatch: Colors.blue,
      ),
      home: MyHomePage(title: 'Flutter Demo Home Page'),
    );
  }
}

class MyHomePage extends StatefulWidget {
  MyHomePage({Key key, this.title}) : super(key: key);

  final String title;

  @override
  _MyHomePageState createState() => _MyHomePageState();
}

class _MyHomePageState extends State<MyHomePage> {
  final MethodChannel _channel = MethodChannel('com.ios.texture');
  bool _isTextureOK = false;
  int _textureID = -1;

  @override
  void initState() {
    super.initState();
  }

  void getTexture() async {
    _textureID = await _channel.invokeMethod('newTexture');
    setState(() {
      _isTextureOK = true;
    });
  }

  Widget getTextureWidget(BuildContext context) {
    return Container(
      // color: Colors.red,
      width: 300,
      height: 300,
      child: Texture(textureId: _textureID),
    );
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text(widget.title),
      ),
      body: Stack(
        children: [
          Positioned.fill(
            child: Center(
              // The Texture widget is mounted here once the ID is ready.
              child: _isTextureOK ? getTextureWidget(context) : Text('video'),
            ),
          ),
          Positioned(
            left: 0,
            bottom: 0,
            child: FlatButton(
              onPressed: () {
                getTexture();
              },
              child: Text('getTexture'),
            ),
          ),
          Positioned(
            right: 0,
            bottom: 0,
            child: FlatButton(
              onPressed: () {
                _channel.invokeMethod('open');
              },
              child: Text('open camera'),
            ),
          ),
        ],
      ),
    );
  }
}
iOS side:
- Define a TexturePlugin class that adopts the FlutterPlugin protocol; FlutterPlugin is Flutter's plugin protocol, and adopting it is how you define a custom plugin. Implement its class method:
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar
- Register the plugin in AppDelegate. AppDelegate subclasses FlutterAppDelegate, which itself inherits from UIResponder and adopts the UIApplicationDelegate, FlutterPluginRegistry, and FlutterAppLifeCycleProvider protocols:
#import "AppDelegate.h"
#import "TexturePlugin.h"
@implementation AppDelegate
- (BOOL)application:(UIApplication *)application
didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
[TexturePlugin registerWithRegistrar:[self registrarForPlugin:@"TexturePlugin"]];
return [super application:application didFinishLaunchingWithOptions:launchOptions];
}
@end
- Create a GLRender class that adopts FlutterTexture and implement its protocol method, returning the CVPixelBuffer held in the _target ivar:

- (CVPixelBufferRef)copyPixelBuffer {
    // FlutterTexture protocol method: each frame Flutter reads the pixelBuffer
    // we map the texture to. The engine releases the returned buffer, so hand
    // it a retained reference.
    CVPixelBufferRetain(_target);
    return _target;
}
TexturePlugin.h:
#import <Flutter/Flutter.h>
NS_ASSUME_NONNULL_BEGIN
@interface TexturePlugin : NSObject <FlutterPlugin>
@end
NS_ASSUME_NONNULL_END
TexturePlugin.m:
#import "TexturePlugin.h"
#import "GLRender.h"
#import "ViddeoController.h"
@interface TexturePlugin ()<ViddeoControllerDelegate>
{
ViddeoController *video ;
int64_t _textureId;//这个是创建纹理得到的ID
}
@property (nonatomic, strong) NSObject<FlutterTextureRegistry> *textures;
@property (nonatomic, strong) GLRender *glRender;
@end
@implementation TexturePlugin
- (instancetype) initWithTextures:(NSObject<FlutterTextureRegistry> *)textures {
if (self = [super init]) {
video = [[ViddeoController alloc] init];
video.delegate = self;
_textures = textures;
}
return self;
}
//协议方法
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
//创建一个FlutterMethodChannel,用来和flutter通信。
FlutterMethodChannel *channel = [FlutterMethodChannel methodChannelWithName:@"com.ios.texture" binaryMessenger:[registrar messenger]];
//创建这个插件对象,把实现了<FlutterPluginRegistrar>协议的对象传给TexturePlugin
TexturePlugin *instance = [[TexturePlugin alloc] initWithTextures:registrar.textures];
//把channel的代理设置给instance;
[registrar addMethodCallDelegate:instance channel:channel];
}
//FlutterMethodChannel代理,
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result
{
if ([call.method isEqualToString:@"newTexture"]) {
//收到flutter获取纹理的信号
_glRender = [[GLRender alloc] init];
//生成textureId
_textureId = [_textures registerTexture:_glRender];
//把textureId反馈给flutter
result(@(_textureId));
}else if ([call.method isEqualToString:@"open"]){
//开启手机摄像头,
[video cameraButtonAction:YES];
}
}
//把摄像头的视频封装成CVpixelBufferRef
- (void)video:(CVImageBufferRef)imageBuffer
{
[_glRender createCVBufferWith:imageBuffer];
//刷新frame,告诉flutter去读取新的CVpixelBufferRef
[self.textures textureFrameAvailable:_textureId];
// CVPixelBufferRelease(imageBuffer);
}
@end
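The ViddeoController class (the spelling follows the source) drives the camera and hands frames to the plugin, but its implementation is not shown in this post. Below is a minimal sketch of what it could look like, assuming an AVCaptureSession with an AVCaptureVideoDataOutput configured for BGRA; apart from cameraButtonAction: and the video: delegate method, all names and details are assumptions:

// ViddeoController.h -- hypothetical reconstruction, not from the source.
#import <AVFoundation/AVFoundation.h>

@protocol ViddeoControllerDelegate <NSObject>
- (void)video:(CVImageBufferRef)imageBuffer;
@end

@interface ViddeoController : NSObject
@property (nonatomic, weak) id<ViddeoControllerDelegate> delegate;
- (void)cameraButtonAction:(BOOL)open;
@end

// ViddeoController.m
@interface ViddeoController () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *session;
@end

@implementation ViddeoController

- (void)cameraButtonAction:(BOOL)open {
    if (!open) {
        [self.session stopRunning];
        return;
    }
    self.session = [[AVCaptureSession alloc] init];
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    if (input) {
        [self.session addInput:input];
    }
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    // Flutter's texture path expects BGRA pixel buffers.
    output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    [output setSampleBufferDelegate:self
                              queue:dispatch_queue_create("video.capture", DISPATCH_QUEUE_SERIAL)];
    [self.session addOutput:output];
    [self.session startRunning];
}

// Each captured frame lands here; unwrap the pixel buffer and hand it on.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    [self.delegate video:imageBuffer];
}

@end

Note that the capture session recycles its buffer pool as soon as the callback returns, which is why whoever keeps the buffer (GLRender here) must retain it.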
GLRender.h:
#import <Foundation/Foundation.h>
#import <Flutter/Flutter.h>
NS_ASSUME_NONNULL_BEGIN
@interface GLRender : NSObject <FlutterTexture>
- (instancetype)init;
- (void)createCVBufferWith:(CVPixelBufferRef)target;
@end
NS_ASSUME_NONNULL_END
GLRender.m:
#import "GLRender.h"

@implementation GLRender
{
    CVPixelBufferRef _target;
}

- (CVPixelBufferRef)copyPixelBuffer {
    // FlutterTexture protocol method: each frame Flutter reads the pixelBuffer
    // we map the texture to. The engine releases the returned buffer, so hand
    // it a retained reference.
    CVPixelBufferRetain(_target);
    return _target;
}

- (void)createCVBufferWith:(CVPixelBufferRef)target
{
    // Retain the incoming frame and release the one it replaces; the capture
    // pipeline reuses its buffers once the delegate callback returns.
    CVPixelBufferRetain(target);
    if (_target) {
        CVPixelBufferRelease(_target);
    }
    _target = target;
}

@end
Merged version, with the FlutterTexture object folded into TexturePlugin.m as a small GLTexture class:
//
// TexturePlugin.m
// Runner
//
// Created by jonasluo on 2019/12/11.
// Copyright © 2019 The Chromium Authors. All rights reserved.
//
#import "TexturePlugin.h"
#import "ViddeoController.h"
@interface GLTexture : NSObject <FlutterTexture>
@property (nonatomic) CVPixelBufferRef target;
@end

@implementation GLTexture
- (void)setTarget:(CVPixelBufferRef)target {
    // CF types are not managed by ARC: retain the new frame (the camera
    // recycles its buffers after the callback) and release the old one.
    CVPixelBufferRetain(target);
    if (_target) CVPixelBufferRelease(_target);
    _target = target;
}

- (CVPixelBufferRef)copyPixelBuffer {
    // FlutterTexture protocol method: the engine releases the buffer we
    // return, so hand it a retained reference.
    CVPixelBufferRetain(_target);
    return _target;
}
@end
@interface TexturePlugin ()<ViddeoControllerDelegate, FlutterPlugin>
{
    ViddeoController *video; // converts the camera feed into CVPixelBuffers
    int64_t _textureId;
    GLTexture *_glTexture;
}
@property (nonatomic, strong) NSObject<FlutterTextureRegistry> *textures; // in practice this is the FlutterEngine
@end
@implementation TexturePlugin

- (instancetype)initWithTextures:(NSObject<FlutterTextureRegistry> *)textures {
    if (self = [super init]) {
        video = [[ViddeoController alloc] init];
        video.delegate = self;
        _textures = textures;
    }
    return self;
}

+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
    FlutterMethodChannel *channel = [FlutterMethodChannel methodChannelWithName:@"com.ios.texture" binaryMessenger:[registrar messenger]];
    TexturePlugin *instance = [[TexturePlugin alloc] initWithTextures:registrar.textures];
    [registrar addMethodCallDelegate:instance channel:channel];
}

- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result
{
    if ([call.method isEqualToString:@"newTexture"]) {
        _glTexture = [[GLTexture alloc] init];
        _textureId = [_textures registerTexture:_glTexture];
        result(@(_textureId));
    } else if ([call.method isEqualToString:@"open"]) {
        [video cameraButtonAction:YES];
    }
}

- (void)video:(CVImageBufferRef)imageBuffer
{
    _glTexture.target = imageBuffer; // the setter retains the frame
    [self.textures textureFrameAvailable:_textureId];
}

@end
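The camera is only a stand-in frame source here; the stated requirement was H.264 playback. Decoded H.264 frames from VideoToolbox can be fed through exactly the same path. A sketch, meant to live inside TexturePlugin.m so the video: method is visible; didDecompress and CreateDecodeSession are hypothetical names, and building formatDesc from the stream's SPS/PPS (CMVideoFormatDescriptionCreateFromH264ParameterSets) plus submitting NAL units via VTDecompressionSessionDecodeFrame are omitted:

#import <VideoToolbox/VideoToolbox.h>

// Hypothetical decode callback: VideoToolbox hands back a CVImageBufferRef,
// which goes through the same path as a camera frame.
static void didDecompress(void *decompressionOutputRefCon,
                          void *sourceFrameRefCon,
                          OSStatus status,
                          VTDecodeInfoFlags infoFlags,
                          CVImageBufferRef imageBuffer,
                          CMTime presentationTimeStamp,
                          CMTime presentationDuration) {
    if (status != noErr || imageBuffer == NULL) {
        return;
    }
    TexturePlugin *plugin = (__bridge TexturePlugin *)decompressionOutputRefCon;
    [plugin video:imageBuffer]; // reuse the delegate method above
}

// Hypothetical session setup; formatDesc comes from the stream's SPS/PPS.
static VTDecompressionSessionRef CreateDecodeSession(CMVideoFormatDescriptionRef formatDesc,
                                                     TexturePlugin *plugin) {
    // Ask the decoder for BGRA output so the buffers match what the
    // Flutter texture expects.
    NSDictionary *attrs = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    VTDecompressionOutputCallbackRecord callback = {didDecompress, (__bridge void *)plugin};
    VTDecompressionSessionRef session = NULL;
    VTDecompressionSessionCreate(kCFAllocatorDefault, formatDesc, NULL,
                                 (__bridge CFDictionaryRef)attrs, &callback, &session);
    return session;
}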
Another variant, which shares the CVPixelBuffer with OpenGL ES through an IOSurface-backed buffer (GLRender.m):
#import "GLRender.h"
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#import <CoreVideo/CoreVideo.h>
#import <UIKit/UIKit.h>
@implementation GLRender
{
EAGLContext *_context;
CGSize _size;
CVOpenGLESTextureCacheRef _textureCache;
CVOpenGLESTextureRef _texture;
CVPixelBufferRef _target;
GLuint _program;
GLuint _frameBuffer;
}
- (CVPixelBufferRef)copyPixelBuffer {
// 实现FlutterTexture协议的接口,每次flutter是直接读取我们映射了纹理的pixelBuffer对象
return _target;
}
- (instancetype)init
{
    if (self = [super init]) {
        _size = CGSizeMake(1000, 1000);
        [self initGL];
        [self loadShaders];
    }
    return self;
}

- (void)initGL {
    _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    [EAGLContext setCurrentContext:_context];
    // Create the shared pixelBuffer and the texture mapped onto it (see below).
    [self createCVBufferWith:&_target withOutTexture:&_texture];
    // Create a framebuffer object...
    glGenFramebuffers(1, &_frameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
    // ...and attach the texture to it, so anything drawn lands in the pixelBuffer.
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(_texture), 0);
    glViewport(0, 0, _size.width, _size.height);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    }
}
- (void)createCVBufferWith:(CVPixelBufferRef *)target withOutTexture:(CVOpenGLESTextureRef *)texture {
    // Create the texture cache (not the interesting part).
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_textureCache);
    if (err) {
        return;
    }
    CFDictionaryRef empty;
    CFMutableDictionaryRef attrs;
    empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
    attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
    // The key attribute: kCVPixelBufferIOSurfacePropertiesKey must be set for
    // the buffer's memory to be shareable.
    CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty);
    // Allocate the pixelBuffer; note that Flutter expects BGRA.
    CVPixelBufferCreate(kCFAllocatorDefault, _size.width, _size.height, kCVPixelFormatType_32BGRA, attrs, target);
    // Map the pixelBuffer onto an OpenGL ES texture.
    CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, *target, NULL, GL_TEXTURE_2D, GL_RGBA, _size.width, _size.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, texture);
    CFRelease(empty);
    CFRelease(attrs);
}
- (void)dealloc {
    [self deinitGL];
}

- (void)deinitGL {
    glDeleteFramebuffers(1, &_frameBuffer);
    CFRelease(_target);
    CFRelease(_textureCache);
    CFRelease(_texture);
}

// Setter carried over from the simpler variant: swaps in an externally
// produced frame for copyPixelBuffer to return.
- (void)createCVBufferWith:(CVPixelBufferRef)target
{
    CVPixelBufferRetain(target);
    if (_target) {
        CVPixelBufferRelease(_target);
    }
    _target = target;
}
#pragma mark - shader compilation

- (BOOL)loadShaders
{
    GLuint vertShader, fragShader;
    NSString *vertShaderPathname, *fragShaderPathname;
    _program = glCreateProgram();

    vertShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
    if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
        NSLog(@"failed to compile vertex shader");
        return NO;
    }

    fragShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
    if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
        NSLog(@"failed to compile fragment shader");
        return NO;
    }

    glAttachShader(_program, vertShader);
    glAttachShader(_program, fragShader);

    if (![self linkProgram:_program]) {
        NSLog(@"failed to link program: %d", _program);
        if (vertShader) {
            glDeleteShader(vertShader);
            vertShader = 0;
        }
        if (fragShader) {
            glDeleteShader(fragShader);
            fragShader = 0;
        }
        if (_program) {
            glDeleteProgram(_program);
            _program = 0;
        }
        return NO;
    }

    if (vertShader) {
        glDetachShader(_program, vertShader);
        glDeleteShader(vertShader);
    }
    if (fragShader) {
        glDetachShader(_program, fragShader);
        glDeleteShader(fragShader);
    }

    NSLog(@"load shaders succ");
    return YES;
}

- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file
{
    GLint status;
    const GLchar *source;
    source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
    if (!source) {
        NSLog(@"failed to load shader. type: %i", type);
        return NO;
    }

    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &source, NULL);
    glCompileShader(*shader);

#if defined(DEBUG)
    GLint logLength;
    glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetShaderInfoLog(*shader, logLength, &logLength, log);
        NSLog(@"Shader compile log:\n%s", log);
        free(log);
    }
#endif

    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        glDeleteShader(*shader);
        return NO;
    }
    return YES;
}

- (BOOL)linkProgram:(GLuint)prog
{
    GLint status;
    glLinkProgram(prog);
    glGetProgramiv(prog, GL_LINK_STATUS, &status);
    if (status == 0) {
        return NO;
    }
    return YES;
}

- (BOOL)validateProgram:(GLuint)prog
{
    GLint logLength, status;
    glValidateProgram(prog);
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"program validate log : \n%s", log);
        free(log);
    }
    glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
    if (status == 0) {
        return NO;
    }
    return YES;
}

@end
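One step this file never shows is the draw call that actually fills the shared buffer: shaders are compiled and the FBO is set up, but nothing renders into it. Below is a hypothetical per-frame render method to be added inside the @implementation above; the full-screen-quad geometry and the assumption that Shader.vsh exposes its position attribute at location 0 are mine, not the source's:

// Hypothetical render step: draw into the FBO so the result lands in the
// IOSurface-backed pixelBuffer, then flush.
- (void)render
{
    [EAGLContext setCurrentContext:_context];
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
    glViewport(0, 0, _size.width, _size.height);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(_program);
    // Full-screen quad; assumes the position attribute is at location 0
    // (bind it with glBindAttribLocation before linking if not).
    static const GLfloat quad[] = {-1, -1, 1, -1, -1, 1, 1, 1};
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, quad);
    glEnableVertexAttribArray(0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    // glFlush makes the GPU writes visible in the shared buffer before
    // Flutter's raster thread calls copyPixelBuffer.
    glFlush();
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}

After drawing, the plugin would call [self.textures textureFrameAvailable:_textureId] so Flutter picks up the new frame through copyPixelBuffer.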