cocos creator的版本更新,2.x和3.x很多接口都不一样了,现在将两种截图方式搬运上来
参考:
2.x:【工具类】CocosCreator 截屏(全屏,指定node区域) - gamedaybyday - 博客园
3.x:https://note.youdao.com/ynoteshare/index.html?id=6e341c297dec36391775de676ceaaa04&type=note&_time=1665574155946
2.x代码,返回一个node节点,如下:
/**
 * 2.x screenshot.
 * @param targetNode Node whose area to capture; pass null to capture the full screen.
 * @returns A new node whose sprite displays the captured image.
 */
screenShot(targetNode: cc.Node = null) {
    // Grab the current scene's camera; it will render into our texture.
    let camera = cc.director.getScene().getComponentInChildren(cc.Camera);
    // Create a render texture sized to the whole screen.
    let texture = new cc.RenderTexture();
    texture.initWithSize(cc.winSize.width, cc.winSize.height, (cc.game as any)._renderContext.STENCIL_INDEX8);
    // Wrap the texture in a sprite frame; crop it when a target node is given.
    let spriteFrame = new cc.SpriteFrame();
    if (targetNode == null) {
        spriteFrame.setTexture(texture);
    } else {
        // Convert the node's center-based position into a lower-left-origin
        // pixel rect inside the full-screen texture.
        // NOTE(review): assumes targetNode's x/y are relative to the screen
        // center (e.g. a direct child of Canvas) — confirm for nested nodes.
        let nodeX = cc.winSize.width / 2 + targetNode.x - targetNode.width / 2;
        let nodeY = cc.winSize.height / 2 + targetNode.y - targetNode.height / 2;
        // Show only the node's portion of the capture.
        spriteFrame.setTexture(texture, new cc.Rect(nodeX, nodeY, targetNode.width, targetNode.height));
    }
    // Build the node that displays the capture.
    let node = new cc.Node();
    let sprite = node.addComponent(cc.Sprite);
    sprite.spriteFrame = spriteFrame;
    // The captured image is upside down; negate scaleY to flip it upright.
    sprite.node.scaleY = - Math.abs(sprite.node.scaleY);
    // Render the camera into the texture manually.
    // Fix: the original overwrote cullingMask permanently, changing how the
    // camera renders afterwards — save and restore it around the capture.
    let prevCullingMask = camera.cullingMask;
    camera.cullingMask = 0xffffffff;
    camera.targetTexture = texture;
    camera.render();
    camera.targetTexture = null;
    camera.cullingMask = prevCullingMask;
    return node;
}
3.x代码如下,独立的一个类,返回资源SpriteFrame:
import * as cc from "cc";
/**
 * Rectangle region. All fields are optional; CaptureUtils.capture fills in
 * defaults derived from the captured node's size and anchor.
 */
interface IRect {
/** Center x of the region — the capture camera is anchored at its center. */
x?: number;
/** Center y of the region — the capture camera is anchored at its center. */
y?: number;
/** Region width in pixels (becomes the render-texture width). */
width?: number;
/** Region height in pixels (becomes the render-texture height). */
height?: number;
}
/**
 * Basic node metrics collected by CaptureUtils._getNodeInfo:
 * UITransform size/anchor plus the node's world scale.
 */
interface INodeInfo {
/** Content width (UITransform.width; visible-rect width for the scene). */
width?: number;
/** Content height (UITransform.height; visible-rect height for the scene). */
height?: number;
/** Anchor x in [0,1] (0 for the scene, 0.5 when no UITransform exists). */
anchorX?: number;
/** Anchor y in [0,1] (0 for the scene, 0.5 when no UITransform exists). */
anchorY?: number;
/** World-space x scale (forced to 1 for the scene). */
scaleX?: number;
/** World-space y scale (forced to 1 for the scene). */
scaleY?: number;
/** World-space z scale. */
scaleZ?: number;
}
/**
 * Screenshot utility for Cocos Creator 3.x.
 *
 * Renders a node/scene region into a RenderTexture using a private
 * orthographic camera, and provides helpers to read the pixels back
 * or convert them into an image URL / file path.
 * @author liwenin
 */
export default class CaptureUtils {
    /**
     * Shared capture camera (created lazily by getCamera).
     */
    private static _camera: cc.Camera;
    /**
     * Shared 2D canvas used to encode pixels into an image (created lazily by getCanvas).
     */
    private static _canvas: HTMLCanvasElement;
    /**
     * Scratch matrix, reused to avoid per-call allocations.
     */
    private static _tmpMat4: cc.Mat4 = cc.mat4();
    /**
     * Scratch vector, reused to avoid per-call allocations.
     */
    private static _tmpVec3: cc.Vec3 = cc.v3();
    /**
     * Scratch node-info record, reused to avoid per-call allocations.
     */
    private static _tmpInfo: INodeInfo = {};
    /**
     * Core capture. The camera's anchor is its center point, so take care
     * when setting the capture-region fields.
     * @param area Capture region owner: also the camera's temporary parent and the rendered node.
     * @param rect Inner region to capture; every field must already be filled in.
     */
    private static _capture(area: cc.Scene | cc.Node, rect: IRect): cc.RenderTexture {
        var camera = CaptureUtils.getCamera(), cNode = camera.node, texture = new cc.RenderTexture;
        texture.reset({ width: rect.width, height: rect.height });
        cNode.setPosition(rect.x, rect.y);
        area.addChild(cNode);
        camera.orthoHeight = rect.height / 2;
        camera.targetTexture = texture;
        // Render once. Rendering this camera alone throws, so run a full frame
        // instead (single-camera form: cc.director.root.pipeline.render([camera.camera]);)
        cc.director.root.frameMove(0);
        camera.targetTexture = null;
        cNode.parent = null;
        return texture;
    }
    /**
     * Collect size/anchor/world-scale info for a node or the scene.
     * The result is written into the shared _tmpInfo record, not a fresh object.
     * @param node Node or scene to measure.
     */
    private static _getNodeInfo(node: cc.Node | cc.Scene): INodeInfo {
        var tmpInfo = CaptureUtils._tmpInfo, scale = node.getWorldMatrix(CaptureUtils._tmpMat4).getScale(CaptureUtils._tmpVec3);
        tmpInfo.scaleX = scale.x;
        tmpInfo.scaleY = scale.y;
        tmpInfo.scaleZ = scale.z;
        if (node == cc.director.getScene()) {
            // The scene has no UITransform; use the visible rect instead.
            // cc.view.getVisibleSize() would also work, but it allocates a new Size on every call.
            let view = cc.view['_visibleRect'] as cc.Rect;
            tmpInfo.anchorX = tmpInfo.anchorY = 0;
            tmpInfo.width = view.width;
            tmpInfo.height = view.height;
            tmpInfo.scaleX = tmpInfo.scaleY = 1;
        }
        else {
            let ui = node.getComponent(cc.UITransform);
            if (ui) {
                tmpInfo.anchorX = ui.anchorX;
                tmpInfo.anchorY = ui.anchorY;
                tmpInfo.width = ui.width;
                tmpInfo.height = ui.height;
            }
            else {
                // No UITransform: fall back to a 1x1, center-anchored placeholder.
                tmpInfo.anchorX = tmpInfo.anchorY = 0.5;
                tmpInfo.width = tmpInfo.height = 1;
            }
        }
        return tmpInfo;
    }
    /**
     * Get (and lazily create) the capture camera.
     */
    protected static getCamera(): cc.Camera {
        var camera = CaptureUtils._camera;
        if (!camera) {
            let node = new cc.Node('CaptureUtils');
            camera = CaptureUtils._camera = node.addComponent(cc.Camera);
            // Orthographic, auto-fitted to the capture size (not full screen).
            camera.projection = cc.Camera.ProjectionType.ORTHO;
            camera.near = 0;/* default is 1; must be 0, otherwise the capture is black */
            camera.visibility = 41943040;/* show 2D and 3D; adjust for other groups: cc.Layers.Enum.UI_2D | cc.Layers.Enum.UI_3D */
        }
        return camera;
    }
    /**
     * Get (and lazily create) the encoding canvas — on platforms without
     * document-based creation, replace this with the platform's canvas factory.
     */
    protected static getCanvas(): HTMLCanvasElement {
        return CaptureUtils._canvas || (CaptureUtils._canvas = document.createElement('canvas'));
    }
    /**
     * Capture an inner region of a node.
     * @param area Node to capture; defaults to the current scene's Canvas. The Canvas is used because it has a size while the scene does not, which also lets rect be omitted.
     * @param rect Region to capture. Position defaults to the area's center, size to the area's size. Note: if the node's own size is 0 the capture breaks, so rect's width/height must then be passed explicitly.
     */
    public static capture(area?: cc.Scene | cc.Node, rect?: IRect): cc.SpriteFrame {
        var void0 = void 0, spf = new cc.SpriteFrame, info: INodeInfo;
        area === void0 && (area = cc.find('Canvas') || cc.director.getScene());
        rect === void0 && (rect = {});
        // Fill in missing rect fields from the node's metrics.
        info = CaptureUtils._getNodeInfo(area);
        if (rect.width === void0) {
            rect.width = info.width * info.scaleX;
        }
        if (rect.height === void0) {
            rect.height = info.height * info.scaleY;
        }
        if (rect.x === void0) {
            rect.x = (.5 - info.anchorX) * info.width;
        }
        if (rect.y === void0) {
            rect.y = (.5 - info.anchorY) * info.height;
        }
        spf.texture = CaptureUtils._capture(area, rect);
        // The render is vertically flipped; flip the UVs back here. Alternatively,
        // flip the pixel data itself as the official sample does (via toImgUrl).
        spf.flipUVY = true;
        return spf;
    }
    /**
     * Read a render texture's pixel data (RGBA, 4 bytes per pixel).
     * @param texture Texture to read.
     * @param flipY Whether to flip the rows vertically; defaults to true.
     */
    public static readPixels(texture: cc.__private._cocos_core_assets_texture_base__TextureBase, flipY?: boolean): Uint8Array {
        // Generic path that works across 3.x versions.
        var { width, height } = texture, gfxTexture = texture.getGFXTexture(), gfxDevice = texture['_getGFXDevice'](), bufferViews = [],
            region = new cc.gfx.BufferTextureCopy, buffer = new Uint8Array(width * height * 4);
        // region.texOffset.x = region.texOffset.y = 0;
        region.texExtent.width = width;
        region.texExtent.height = height;
        bufferViews.push(buffer);
        gfxDevice?.copyTextureToBuffers(gfxTexture, bufferViews, [region]);
        // Swap rows top-to-bottom, in place.
        if (flipY !== false) {
            let i = 0, len1 = height / 2, len2 = width * 4, j: number, idx0: number, idx1: number;
            while (i < len1) {
                j = 0;
                while (j < len2) {
                    idx0 = i * len2 + j;
                    idx1 = (height - i - 1) * len2 + j++;
                    [buffer[idx0], buffer[idx1]] = [buffer[idx1], buffer[idx0]];
                }
                i++;
            }
        }
        return buffer;
        // 3.4+ has a built-in, but it is unreliable:
        // return texture.readPixels();
    }
    /**
     * Convert a render texture into an image URL/path, depending on platform.
     * @param texture Render texture to convert.
     * @description
     * 1. Some minigame platforms only support the async toTempFilePath (not the sync toTempFilePathSync); switch to the Promise variant if needed.
     * 2. The returned url can be turned back into a texture via "cc.assetManager.loadRemote(url, { ext: '.png' }, (_, img) => { let texture = new cc.Texture2D;texture.image = img;" or, on web, via "var img = new Image; img.src = url; img.onload = function () { let texture = new cc.Texture2D; texture.reset({ width: img.width, height: img.height }); texture.uploadData(img); };".
     */
    public static toImgUrl(texture: cc.__private._cocos_core_assets_texture_base__TextureBase): string {
        var data = CaptureUtils.readPixels(texture), width = texture.width, height = texture.height, url: string;
        if (cc.sys.isNative) {
            // Requires injecting a jsb.saveImageData method; see https://gitee.com/zzf2019/engine-native/commit/1ddb6ec9627a8320cd3545d353d8861da33282a8
            let filePath = jsb.fileUtils.getWritablePath() + 'tmpImg.png';/* temp file name */
            jsb['saveImageData'](data, width, height, filePath);
            url = filePath;
        }
        else {
            // Generic path: only needs a 2D canvas to be creatable.
            let canvas = CaptureUtils.getCanvas(), ctx = canvas.getContext('2d'), toTempFilePathSync = canvas['toTempFilePathSync'],
                rowBytes = width * 4, row = 0;
            // Resize the canvas to match the texture.
            canvas.width = width;
            canvas.height = height;
            // Blit the pixel data one row at a time.
            while (row < height) {
                let imageData = ctx.createImageData(width, 1), start = row * width * 4;
                for (let i = 0; i < rowBytes; i++) {
                    imageData.data[i] = data[start + i];
                }
                ctx.putImageData(imageData, 0, row++);
            }
            if (typeof toTempFilePathSync === 'function') {
                // Async alternative:
                // return new Promise(function (resolve) { toTempFilePath.call(canvas, { success(res) { ctx.clearRect(0, 0, width, height),resolve(res.tempFilePath) } }) });
                // Defaults are the canvas's own size and png type, so an empty {} is enough. quality: 0~1, effective when fileType='jpg'.
                url = toTempFilePathSync.call(canvas, { /* x: 0, y: 0, width: width, height: height, destWidth: width, destHeight: height, fileType: 'png', quality: 1 */ });
            }
            else {
                url = canvas.toDataURL('image/png');
            }
            // Clear the canvas immediately after use.
            ctx.clearRect(0, 0, width, height);
        }
        // Promise variant:
        // return Promise.resolve(url);
        return url;
    }
}