承接ARVR项目开发: QQ 2118590660
1. 先在Unity中建一个新的 script,用来把截屏/录屏后的文件地址发送到iOS
using UnityEngine;
using System.Runtime.InteropServices;

/// <summary>
/// Bridge that forwards screenshot / screen-recording file paths from Unity
/// to the native iOS side (implemented in ReceiveUnitySystemEvent in Xcode).
/// </summary>
public class UnitySendMessageToiOS : MonoBehaviour {

    // "__Internal" means the symbols are statically linked into the app binary,
    // so these functions only exist in an actual iOS build.
    [DllImport("__Internal")]
    private static extern void _TakeAPictureBtn(string path);

    [DllImport("__Internal")]
    private static extern void _RecordingBtn(string path);

    /// <summary>Sends the saved screenshot's file path to iOS so it can be written to the photo album.</summary>
    public static void SentPicturePathToiOS(string path){
        // Fix: the original only excluded OSXEditor; invoking a __Internal
        // DllImport on any other non-iOS platform (Windows editor, Android, ...)
        // would crash. Positively check for the iOS player instead.
        if (Application.platform == RuntimePlatform.IPhonePlayer) {
            _TakeAPictureBtn (path);
        }
    }

    /// <summary>Sends the recorded video's file path to iOS so it can be saved to the photo album.</summary>
    public static void SentVideoPathToiOS(string path){
        if (Application.platform == RuntimePlatform.IPhonePlayer) {
            _RecordingBtn (path);
        }
    }
}
2. 在 截屏 录屏结束 按钮点击事件 分别调用
截屏功能实现,并且发送地址到iOS(无UI)
/// <summary>
/// Screenshot button handler: captures the scene through CameraTrans into a PNG
/// (no UI included) and, once the file exists, sends its path to iOS.
/// </summary>
public void TakeAPictureBtnClick(){
    // Fix: the original called SentPicturePathToiOS here, immediately after
    // starting the coroutine — i.e. BEFORE the frame was rendered and the PNG
    // written, so iOS would try to load a missing/stale file. The path is now
    // sent from inside the coroutine, after WriteAllBytes completes.
    StartCoroutine (CaptureByCamera (CameraTrans, new Rect (0, 0, 1024, 768),
        m_OtherCameraPath));
}

/// <summary>
/// Renders mCamera into an offscreen RenderTexture, saves the mRect region as a
/// PNG at mFileName, then notifies iOS with the saved path.
/// </summary>
/// <param name="mCamera">Camera to capture from.</param>
/// <param name="mRect">Pixel region (and output texture size) to capture.</param>
/// <param name="mFileName">Absolute path the PNG is written to.</param>
private IEnumerator CaptureByCamera (Camera mCamera, Rect mRect, string mFileName)
{
    // Wait for the render thread so the frame is complete before reading pixels.
    yield return new WaitForEndOfFrame ();
    // Depth buffer must be 0, 16 or 24; raise it if the capture comes out incomplete.
    RenderTexture mRender = new RenderTexture ((int)mRect.width, (int)mRect.height, 16);
    // Point the camera at the offscreen target and render one frame.
    mCamera.targetTexture = mRender;
    mCamera.Render ();
    // Activate the render texture so ReadPixels reads from it.
    RenderTexture.active = mRender;
    Texture2D mTexture = new Texture2D ((int)mRect.width, (int)mRect.height, TextureFormat.RGB24, false);
    // Copy the rendered pixels into the texture and apply.
    mTexture.ReadPixels (mRect, 0, 0);
    mTexture.Apply ();
    // Detach the camera and destroy the render texture.
    mCamera.targetTexture = null;
    RenderTexture.active = null;
    GameObject.Destroy (mRender);
    // Encode to PNG and persist to disk.
    byte[] bytes = mTexture.EncodeToPNG ();
    System.IO.File.WriteAllBytes (mFileName, bytes);
    // Fix: destroy the temporary Texture2D too — the original leaked it on
    // every capture.
    GameObject.Destroy (mTexture);
    // Only now is the file guaranteed to exist — hand the path over to iOS.
    UnitySendMessageToiOS.SentPicturePathToiOS (mFileName);
}
录屏方法实现 录屏使用ShareREC (导入ShareREC-for-Unity3d 的包,AppKey Share网站注册)
using com.mob;
using System;
// Register the ShareREC SDK on startup with the AppKey obtained from the
// Mob/ShareSDK developer site.
void Start () {
    const string shareRecAppKey = "1dd5e013484a8";
    ShareREC.registerApp (shareRecAppKey);
}
ShareREC.startRecoring();
}
// Stop-recording button handler: ends the ShareREC capture, passing the
// finished-recording callback so the video path can be forwarded to iOS.
public void StopRecordingBtnClick(){
    ShareREC.stopRecording (new FinishedRecordEvent (recordFinishedHandler));
}
{
if (ex == null)
{
UnitySendMessageToiOS.SentVideoPathToiOS (ShareREC.lastRecordingPath());
//ShareREC.playLastRecording();
}
}
3.继续Xcode里 新建类用来接收 Unity里发来的地址 并且保存到 iOS相册
#import "ReceiveUnitySystemEvent.h"
#import <AssetsLibrary/AssetsLibrary.h>
@implementation ReceiveUnitySystemEvent

/// Completion callback for UIImageWriteToSavedPhotosAlbum.
/// error is nil on success.
- (void)imageSaved:(UIImage *)image
    didFinishSavingWithError:(NSError *)error
                 contextInfo:(void *)contextInfo
{
    if (error != nil)
    {
        NSLog(@"有错误");   // saving the screenshot to the album failed
    }
    else
    {
        NSLog(@"保存结束"); // saved successfully
    }
}

/// Called from Unity (via the __Internal DllImport in UnitySendMessageToiOS):
/// loads the screenshot Unity wrote at `path` and saves it to the photo album.
void _TakeAPictureBtn(char *path)
{
    // Fix: guard against a NULL path, matching what _RecordingBtn already did.
    if (path == NULL) {
        return;
    }
    NSString *strReadAddr = [NSString stringWithUTF8String:path];
    UIImage *img = [UIImage imageWithContentsOfFile:strReadAddr];
    // Fix: UIImageWriteToSavedPhotosAlbum raises if the image is nil — bail out
    // when the file is missing or not a readable image.
    if (img == nil) {
        NSLog(@"Could not load image at %@", strReadAddr);
        return;
    }
    NSLog(@"%@",[NSString stringWithFormat:@"w:%f, h:%f", img.size.width, img.size.height]);
    // The completion target is retained by the system until the callback fires,
    // so a locally-created instance is safe here.
    ReceiveUnitySystemEvent *instance = [[ReceiveUnitySystemEvent alloc] init];
    UIImageWriteToSavedPhotosAlbum(img, instance,
        @selector(imageSaved:didFinishSavingWithError:contextInfo:), nil);
}

/// Called from Unity: saves the recorded video at `path` into the photo album.
/// NOTE(review): ALAssetsLibrary has been deprecated since iOS 9 — consider
/// migrating to PHPhotoLibrary when the deployment target allows it.
void _RecordingBtn(char *path)
{
    NSLog(@"存入视频地址%s-------------",path);
    if (path == NULL) {
        return;
    }
    NSString *videoPath = [NSString stringWithUTF8String:path];
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:videoPath]
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Save video fail:%@",error);
        } else {
            NSLog(@"Save video succeed.");
        }
    }];
}

@end
拖的时候 Copy 和 Create groups 都勾选
libicucore.dylib
libz.1.2.5.dylib
libstdc++.dylib
删除包, 重新拖一遍
Build Phases - Compile Sources - JSONKit.m 的 Compiler Flags 改为 -fno-objc-arc
public void RecordingBtnClick(){
void recordFinishedHandler(Exception ex)
Unity端完工
#import "ReceiveUnitySystemEvent.h"
xcode 导入 ShareREC的包
必须添加的依赖库如下:
'ShareREC/ShareREC+Ext.h' file not found 报错
JSONKit 里 MRC 报错的话