
[Unity] GPU Vertex Animation

Tooling

Mesh Combining

Create a container object from the source model: clean up any previous combine result, then create a fresh container.
Note: DestroyImmediate is meant for Editor code; at runtime use Destroy instead.

    var targetObj = GameObject.Find("targetObj");
    if (targetObj != null)
    {
        GameObject.DestroyImmediate(targetObj);
    }
    targetObj = new GameObject("targetObj");
    targetObj.transform.localPosition = Vector3.zero;
    // ... reset the rest of the transform ...

Collect mesh and material information: gather every renderer that needs to be combined, covering both static and skinned meshes.

    MeshRenderer[] allMeshRender = sourceObj.GetComponentsInChildren<MeshRenderer>();
    SkinnedMeshRenderer[] allSkinnedRenderers = sourceObj.GetComponentsInChildren<SkinnedMeshRenderer>();
    MeshFilter[] allfilter = sourceObj.GetComponentsInChildren<MeshFilter>();

Fill the MeshInfo list.
Note: subMeshIndex must be recorded, because it is needed to combine multi-material meshes sub-mesh by sub-mesh.

    foreach (var filter in allfilter)
    {
        for (int subMeshIndex = 0; subMeshIndex < filter.sharedMesh.subMeshCount; subMeshIndex++)
        {
            MeshInfo meshInfo = new MeshInfo();
            meshInfo.mesh = filter.sharedMesh;
            meshInfo.subIndex = subMeshIndex;
            meshInfo.matrix4x4 = filter.transform.localToWorldMatrix;
            meshInfos.Add(meshInfo);
        }
    }

The SkinnedMeshRenderers need the same treatment.

Texture combining: create one large Texture2D and collect the _MainTex of every material (only _MainTex is handled here; normal maps and other textures could be handled the same way).
Note: accumulating COMBINE_TEXTURE_WIDTH like this is wrong and needs to be fixed later; the widths and heights should not simply be summed. PackTextures resizes the target texture itself and caps the result via its maximumAtlasSize parameter, so the initial size mostly acts as a wasteful upper bound.
Texture2D.PackTextures is used to pack everything into the atlas.

    for (int i = 0; i < materials.Count; i++)
    {
        Texture2D maintexture = materials[i].GetTexture("_MainTex") as Texture2D;
        COMBINE_TEXTURE_WIDTH += maintexture.width;
        COMBINE_TEXTURE_HEIGHT += maintexture.height;
        MainTexs.Add(maintexture);
    }
    Texture2D newMainTex = new Texture2D(COMBINE_TEXTURE_WIDTH, COMBINE_TEXTURE_HEIGHT, TextureFormat.RGBA32, true);
    Rect[] uvs = newMainTex.PackTextures(MainTexs.ToArray(), 0);

The UVs then have to be remapped into the atlas:

    List<Vector2[]> oldUV = new List<Vector2[]>();
    Vector2[] uva, uvb;
    for (int j = 0; j < combines.Count; j++)
    {
        uva = combines[j].mesh.uv;
        uvb = new Vector2[uva.Length];
        for (int k = 0; k < uva.Length; k++)
        {
            uvb[k] = new Vector2((uva[k].x * uvs[j].width) + uvs[j].x,
                                 (uva[k].y * uvs[j].height) + uvs[j].y);
        }
        oldUV.Add(uva);
        combines[j].mesh.uv = uvb;
    }

After saving, remember to restore the original UVs:

    // Re-assign the original UVs so other objects sharing these meshes are not affected
    for (int i = 0; i < combines.Count; i++)
    {
        combines[i].mesh.uv = oldUV[i];
    }

Finally assign the packed texture to a newly created material and save it:

    Material newMaterial = new Material(mainShader);
    newMaterial.SetTexture("_MainTex", newMainTex);

Combine-mesh setup. The per-mesh data is stored in CombineInstance structs:

    List<CombineInstance> combines = new List<CombineInstance>();
    foreach (var i in meshInfos)
    {
        var ci = new CombineInstance();
        ci.mesh = i.mesh;
        ci.subMeshIndex = i.subIndex;
        ci.transform = i.matrix4x4;
        combines.Add(ci);
    }

https://i-blog.csdnimg.cn/direct/4846e134dabf4f5a8bb58be8d430325d.png

Then CombineMeshes performs the actual merge:

    Mesh mesh = new Mesh();
    mesh.CombineMeshes(combines.ToArray(), true, false);

Finally save everything to the target path:

    AssetDatabase.CreateAsset(newMainTex, $"{MaterialTexturePath}CombineTexture.asset");
    AssetDatabase.CreateAsset(newMaterial, $"{MaterialTexturePath}CombineMaterial.mat");
    AssetDatabase.CreateAsset(mesh, $"{MaterialTexturePath}CombineMaterial.asset");

Complete code:

    using System.Collections.Generic;
    using UnityEditor;
    using UnityEngine;

    public struct MeshInfo
    {
        public Mesh mesh;
        public Matrix4x4 matrix4x4;
        public int subIndex;
    }

    public class CombineVertexTool
    {
        const string MaterialTexturePath = "Assets/Resources/Material/";

        public static void CombineFilter(GameObject sourceObj)
        {
            var targetObj = GameObject.Find("targetObj");
            if (targetObj != null)
            {
                GameObject.DestroyImmediate(targetObj);
            }
            targetObj = new GameObject("targetObj");
            targetObj.transform.localPosition = Vector3.zero;
            targetObj.transform.localRotation = Quaternion.identity;
            targetObj.transform.localScale = Vector3.one;

            MeshRenderer[] allMeshRender = sourceObj.GetComponentsInChildren<MeshRenderer>(); // static renderers (skinned renderers are collected separately below)
            SkinnedMeshRenderer[] allSkinnedRenderers = sourceObj.GetComponentsInChildren<SkinnedMeshRenderer>();
            MeshFilter[] allfilter = sourceObj.GetComponentsInChildren<MeshFilter>(); // meshes

            List<MeshInfo> meshInfos = new List<MeshInfo>();
            foreach (var filter in allfilter)
            {
                for (int subMeshIndex = 0; subMeshIndex < filter.sharedMesh.subMeshCount; subMeshIndex++)
                {
                    MeshInfo meshInfo = new MeshInfo();
                    meshInfo.mesh = filter.sharedMesh;
                    meshInfo.subIndex = subMeshIndex;
                    meshInfo.matrix4x4 = filter.transform.localToWorldMatrix;
                    meshInfos.Add(meshInfo);
                }
            }

            // Material list
            List<Material> materials = new List<Material>();
            foreach (var i in allMeshRender)
            {
                foreach (var j in i.sharedMaterials)
                {
                    materials.Add(j);
                }
            }
            foreach (SkinnedMeshRenderer i in allSkinnedRenderers)
            {
                foreach (var j in i.sharedMaterials)
                {
                    materials.Add(j);
                }
                for (int subMeshIndex = 0; subMeshIndex < i.sharedMesh.subMeshCount; subMeshIndex++)
                {
                    MeshInfo meshInfo = new MeshInfo();
                    meshInfo.mesh = i.sharedMesh;
                    meshInfo.subIndex = subMeshIndex;
                    meshInfo.matrix4x4 = i.transform.localToWorldMatrix;
                    meshInfos.Add(meshInfo);
                }
            }

            Shader mainShader = materials[0].shader; // every material is expected to use the same shader

            // Merge meshes: build one CombineInstance per (mesh, sub-mesh) collected above
            List<CombineInstance> combines = new List<CombineInstance>();
            foreach (var i in meshInfos)
            {
                var ci = new CombineInstance();
                ci.mesh = i.mesh;
                ci.subMeshIndex = i.subIndex;
                //ci.mesh.triangles = i.mesh.triangles; // would modify the original mesh, so it is left out
                ci.transform = i.matrix4x4;
                combines.Add(ci);
            }

            // Collect every material's main texture
            List<Texture2D> MainTexs = new List<Texture2D>();
            int COMBINE_TEXTURE_WIDTH = 0;
            int COMBINE_TEXTURE_HEIGHT = 0;
            for (int i = 0; i < materials.Count; i++)
            {
                Texture2D maintexture = materials[i].GetTexture("_MainTex") as Texture2D;
                COMBINE_TEXTURE_WIDTH += maintexture.width;
                COMBINE_TEXTURE_HEIGHT += maintexture.height;
                MainTexs.Add(maintexture);
            }

            // Pack all textures into one big atlas (newMainTex)
            Texture2D newMainTex = new Texture2D(COMBINE_TEXTURE_WIDTH, COMBINE_TEXTURE_HEIGHT, TextureFormat.RGBA32, true);
            Rect[] uvs = newMainTex.PackTextures(MainTexs.ToArray(), 0);

            List<Vector2[]> oldUV = new List<Vector2[]>();
            Vector2[] uva, uvb;
            // Walk every CombineInstance and remap its UVs into the atlas
            for (int j = 0; j < combines.Count; j++)
            {
                // Original UVs of the current mesh
                uva = combines[j].mesh.uv;
                // New UV array of the same length
                uvb = new Vector2[uva.Length];
                for (int k = 0; k < uva.Length; k++)
                {
                    uvb[k] = new Vector2((uva[k].x * uvs[j].width) + uvs[j].x,
                                         (uva[k].y * uvs[j].height) + uvs[j].y);
                }
                oldUV.Add(uva);
                combines[j].mesh.uv = uvb;
            }

            Material newMaterial = new Material(mainShader);
            newMaterial.SetTexture("_MainTex", newMainTex);

            Mesh mesh = new Mesh();
            mesh.CombineMeshes(combines.ToArray(), // combine instances
                true,   // mergeSubMeshes: merge everything into a single sub-mesh
                false   // useMatrices: whether to apply the transform matrices
            );

            AssetDatabase.CreateAsset(newMainTex, $"{MaterialTexturePath}CombineTexture.asset");
            AssetDatabase.CreateAsset(newMaterial, $"{MaterialTexturePath}CombineMaterial.mat");
            AssetDatabase.CreateAsset(mesh, $"{MaterialTexturePath}CombineMaterial.asset");

            MeshFilter meshFilter = targetObj.AddComponent<MeshFilter>();
            meshFilter.sharedMesh = mesh;
            MeshRenderer meshRenderer = targetObj.AddComponent<MeshRenderer>();
            meshRenderer.sharedMaterial = newMaterial;

            // Restore the original UVs so other objects sharing these meshes are not affected
            for (int i = 0; i < combines.Count; i++)
            {
                combines[i].mesh.uv = oldUV[i];
            }

            AssetDatabase.SaveAssets();
            AssetDatabase.Refresh();
        }
    }

Note: the textures and the model must have Read/Write enabled:
https://i-blog.csdnimg.cn/direct/403d76f602df44edb420a00626a42847.png
https://i-blog.csdnimg.cn/direct/0b944dd9d0964af2936f46b50283e1fe.png
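The combine step reads texture pixels (PackTextures) and mesh data, so Read/Write must be enabled on the source textures and the model as the screenshots show. The same setting can also be toggled from editor code; the sketch below is not part of the original tool, and the helper name and paths are my own assumptions.

    using UnityEditor;

    // Hypothetical helper: enables Read/Write on a texture and a model via their importers.
    public static class ReadWriteEnabler
    {
        public static void EnableReadWrite(string texturePath, string modelPath)
        {
            // Texture: isReadable must be true for PackTextures to read its pixels
            var texImporter = AssetImporter.GetAtPath(texturePath) as TextureImporter;
            if (texImporter != null && !texImporter.isReadable)
            {
                texImporter.isReadable = true;
                texImporter.SaveAndReimport();
            }

            // Model: isReadable must be true so the mesh data can be read and combined
            var modelImporter = AssetImporter.GetAtPath(modelPath) as ModelImporter;
            if (modelImporter != null && !modelImporter.isReadable)
            {
                modelImporter.isReadable = true;
                modelImporter.SaveAndReimport();
            }
        }
    }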

Generating the Animation Data

Compute the total frame count from the clip length and frame rate, and derive the texture size from it:

    AnimationClip clip = clips[0];
    //foreach (var clip in clips)
    //{
    // Total frame count from clip length and frame rate determines the texture size
    int animLength = Mathf.CeilToInt(clip.frameRate * clip.length);
    int texwidth = meshRenderer.sharedMesh.vertexCount;
    texwidth = Mathf.NextPowerOfTwo(texwidth); // round the vertex count up to the next power of two (GPU-friendly texture size)
    int texHeight = animLength;

Create a texture:

    Texture2D tex = new Texture2D(texwidth, texHeight, TextureFormat.RGBAHalf, false);

Write the texture frame by frame:

    // Step 3: process each frame
    for (int i = 0; i < animLength; i++)
    {
        float time = i / clip.frameRate;
        // clip.SampleAnimation(GameObject, time): samples the clip on the game object at the given time,
        // so the pose can be evaluated without entering Play mode, which speeds up baking
        clip.SampleAnimation(sourceObj, time);

        // B. Bake the current frame's vertex data
        Mesh bakedMesh = new Mesh();
        meshRenderer.BakeMesh(bakedMesh);
        Vector3[] vertices = bakedMesh.vertices;

        // Write one texture row (each frame's vertex positions are encoded as colors)
        for (int vert = 0; vert < meshRenderer.sharedMesh.vertexCount; vert++)
        {
            // Pixels beyond the original vertex count are filled with black
            Color pixel = Color.black;
            if (vert < vertices.Length)
            {
                Vector3 pos = vertices[vert];
                pixel = new Color(pos.x, pos.y, pos.z, 1);
            }
            // SetPixel(x, y, color): x in [0, width - 1], y in [0, height - 1]
            tex.SetPixel(vert, i, pixel);
        }
        tex.Apply(false); // optional: commit per frame (passing true would also recalculate the mipmaps)
    }

Complete code:

    /// <summary>
    /// Generate the animation data (vertex animation texture)
    /// </summary>
    public static void CreateVertexAnimaSettingData(GameObject sourceObj)
    {
        var animator = sourceObj.GetComponent<Animator>();
        var clips = animator.runtimeAnimatorController.animationClips; // every AnimationClip used by the controller
        animator.speed = 1;
        //MeshFilter meshFilter = sourceObj.GetComponentInChildren<MeshFilter>();
        SkinnedMeshRenderer meshRenderer = sourceObj.GetComponentInChildren<SkinnedMeshRenderer>();

        AnimationClip clip = clips[0];
        //foreach (var clip in clips)
        //{
        // Total frame count from clip length and frame rate determines the texture size
        int animLength = Mathf.CeilToInt(clip.frameRate * clip.length);
        int texwidth = meshRenderer.sharedMesh.vertexCount;
        texwidth = Mathf.NextPowerOfTwo(texwidth); // round the vertex count up to the next power of two (GPU-friendly)
        int texHeight = animLength;

        Texture2D tex = new Texture2D(texwidth, texHeight, TextureFormat.RGBAHalf, false);

        // Step 3: process each frame
        for (int i = 0; i < animLength; i++)
        {
            float time = i / clip.frameRate;
            // clip.SampleAnimation(GameObject, time): samples the pose at the given time without entering Play mode
            clip.SampleAnimation(sourceObj, time);

            // B. Bake the current frame's vertex data
            Mesh bakedMesh = new Mesh();
            meshRenderer.BakeMesh(bakedMesh);
            Vector3[] vertices = bakedMesh.vertices;

            // Write one texture row (each frame's vertex positions encoded as colors)
            for (int vert = 0; vert < meshRenderer.sharedMesh.vertexCount; vert++)
            {
                // Pixels beyond the original vertex count are filled with black
                Color pixel = Color.black;
                if (vert < vertices.Length)
                {
                    Vector3 pos = vertices[vert];
                    pixel = new Color(pos.x, pos.y, pos.z, 1);
                }
                // SetPixel(x, y, color): x in [0, width - 1], y in [0, height - 1]
                tex.SetPixel(vert, i, pixel);
            }
            tex.Apply(false); // optional: commit per frame (true would also recalculate the mipmaps)
        }

        tex.Apply(true); // final commit
        tex.name = sourceObj.name;
        //}

        AssetDatabase.CreateAsset(tex, $"{MaterialTexturePath}{sourceObj.name}_VerTex.asset");
        AssetDatabase.SaveAssets();
        AssetDatabase.Refresh();
    }

My animation comes out to 19 frames, so the texture height is 19 pixels:
https://i-blog.csdnimg.cn/direct/43335385b5a147a58e22e63466ca1305.png
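To confirm the baked texture really contains the pose data, it can help to read a few texels back and compare them against a freshly baked mesh. Here is a minimal sketch of such a check, assuming it lives alongside the methods above in CombineVertexTool; the helper name and logging are my own. Because the texture is RGBAHalf, expect small precision differences rather than exact matches.

    // Hypothetical helper, not part of the original tool: compares one frame's baked vertices
    // against the vertex-animation texture produced by CreateVertexAnimaSettingData.
    public static void VerifyBakedFrame(GameObject sourceObj, AnimationClip clip, Texture2D vertexTex, int frame)
    {
        var skinned = sourceObj.GetComponentInChildren<SkinnedMeshRenderer>();

        // Re-sample the same time the baker used for this frame
        clip.SampleAnimation(sourceObj, frame / clip.frameRate);
        Mesh baked = new Mesh();
        skinned.BakeMesh(baked);
        Vector3[] vertices = baked.vertices;

        // Compare a handful of vertices against the texels written by the baker
        for (int v = 0; v < Mathf.Min(vertices.Length, 8); v++)
        {
            Color c = vertexTex.GetPixel(v, frame);       // texel at (vertexIndex, frameIndex)
            Vector3 fromTex = new Vector3(c.r, c.g, c.b); // the position was stored in RGB
            Debug.Log($"v{v}: baked {vertices[v]} / texture {fromTex}");
        }
    }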

Invoking the Tools:

    using System.Collections;
    using System.Collections.Generic;
    using UnityEditor;
    using UnityEngine;

    public class BuildGpuAnimaEditorVertex : EditorWindow
    {
        [MenuItem("Tools/GPU Animation Tool (Vertex Animation)")]
        static void CreateSecetionData()
        {
            EditorWindow.GetWindow<BuildGpuAnimaEditorVertex>(true, "GPU Animation Tool (Vertex Animation)", true).Show();
        }

        private GameObject source;

        public void OnGUI()
        {
            EditorGUILayout.BeginHorizontal();
            EditorGUILayout.LabelField("Source:", GUILayout.Width(60));
            source = EditorGUILayout.ObjectField(source, typeof(GameObject), true) as GameObject;
            EditorGUILayout.EndHorizontal();

            EditorGUILayout.Space();
            EditorGUILayout.LabelField("Generate the animation data texture from the source object", GUILayout.Width(240));
            EditorGUILayout.Space();
            if (GUILayout.Button("Start", GUILayout.Height(20)))
            {
                CombineVertexTool.CreateVertexAnimaSettingData(source);
            }

            EditorGUILayout.Space();
            EditorGUILayout.LabelField("Generate the combined material, texture and mesh from the shader and model", GUILayout.Width(240));
            EditorGUILayout.Space();
            if (GUILayout.Button("Start", GUILayout.Height(20)))
            {
                CombineVertexTool.CombineFilter(source);
            }
        }
    }
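The window assumes the source object carries an Animator with a controller and at least one SkinnedMeshRenderer; if either is missing, the bake throws. A small guard could be run before the buttons call into CombineVertexTool. This is only a sketch under that assumption; the method name and messages are mine.

    // Hypothetical pre-check before invoking the bake/combine methods.
    private static bool IsValidSource(GameObject source)
    {
        if (source == null)
        {
            Debug.LogError("No source object assigned.");
            return false;
        }
        var animator = source.GetComponent<Animator>();
        if (animator == null || animator.runtimeAnimatorController == null)
        {
            Debug.LogError("Source needs an Animator with a RuntimeAnimatorController.");
            return false;
        }
        if (source.GetComponentInChildren<SkinnedMeshRenderer>() == null)
        {
            Debug.LogError("Source needs a SkinnedMeshRenderer in its hierarchy.");
            return false;
        }
        return true;
    }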

Shader Setup

#pragma multi_compile_instancing must be added to enable GPU instancing. The usual skeleton looks like this:

    Shader "XXX"
    {
        Properties { ... }
        SubShader
        {
            ...
            Pass
            {
                ...
                CGPROGRAM
                #pragma vertex vert
                #pragma fragment frag
                #pragma multi_compile_instancing // step 1
                ...
                struct a2v
                {
                    ...
                    UNITY_VERTEX_INPUT_INSTANCE_ID // step 2
                };
                struct v2f
                {
                    ...
                    UNITY_VERTEX_INPUT_INSTANCE_ID // step 2
                };
                v2f vert(a2v v)
                {
                    v2f o;
                    UNITY_SETUP_INSTANCE_ID(v);       // step 3
                    UNITY_TRANSFER_INSTANCE_ID(v, o); // step 3
                    ...
                    return o;
                }
                fixed4 frag(v2f i) : SV_Target
                {
                    UNITY_SETUP_INSTANCE_ID(i); // final step
                    ...
                }
                ENDCG
            }
        }
        FallBack "Diffuse"
    }

Complete code:

    Shader "Unlit/yu2"
    {
        Properties
        {
            _MainTex ("Texture", 2D) = "white" {}  // main texture, base color
            _AnimTex ("Texture", 2D) = "white" {}  // animation texture, stores vertex positions
            _AnimTime ("AnimTime", float) = 1      // animation playback time control
        }
        SubShader
        {
            Tags { "RenderType"="Opaque" } // opaque render type
            LOD 100

            Pass
            {
                CGPROGRAM
                #pragma target 3.5               // Shader Model 3.5
                #pragma vertex vert              // vertex shader
                #pragma fragment frag            // fragment shader
                #pragma multi_compile_instancing // enable GPU instancing

                #include "UnityCG.cginc"         // Unity's built-in include

                // vertex input/output structs
                struct appdata
                {
                    float4 vertex : POSITION;
                    float2 uv : TEXCOORD0;
                    uint vid : SV_VERTEXID;
                    UNITY_VERTEX_INPUT_INSTANCE_ID
                };

                struct v2f
                {
                    float2 uv : TEXCOORD0;
                    UNITY_FOG_COORDS(1)
                    float4 vertex : SV_POSITION;
                    // vid is used to locate this vertex's position in the animation texture
                    UNITY_VERTEX_INPUT_INSTANCE_ID
                };

                // CBUFFER_START: declaring the material properties in the UnityPerMaterial constant
                // buffer is what makes the shader compatible with the SRP Batcher and saves rendering cost
                CBUFFER_START(UnityPerMaterial)
                sampler2D _MainTex;
                sampler2D _AnimTex;
                //float4 _MainTex_ST;
                // Per-instance properties must be defined in a specially named constant buffer;
                // wrap every property that is unique per instance in this macro pair.
                UNITY_INSTANCING_BUFFER_START(Props)
                    // _TexelSize is a built-in variable carrying the texture's pixel size as a Vector4:
                    // _AnimTex_TexelSize.xyzw = (1/width, 1/height, width, height)
                    UNITY_DEFINE_INSTANCED_PROP(float4, _AnimTex_TexelSize)
                    // _MainTex_ST holds _MainTex's tiling and offset: .xy = tiling, .zw = offset
                    // (the Tiling/Offset fields shown on the Material)
                    UNITY_DEFINE_INSTANCED_PROP(float4, _MainTex_ST)
                    UNITY_DEFINE_INSTANCED_PROP(float, _AnimTime)
                UNITY_INSTANCING_BUFFER_END(Props)
                CBUFFER_END

                // UNITY_ACCESS_INSTANCED_PROP: reads an instance's property from the buffer;
                // the first argument must match the name used in UNITY_INSTANCING_BUFFER_START(name)
                v2f vert (appdata v)
                {
                    v2f o;
                    UNITY_SETUP_INSTANCE_ID(v);
                    UNITY_TRANSFER_INSTANCE_ID(v, o); // only needed when instanced properties are read in the fragment shader

                    // Why the + 0.5?
                    // It aligns the sample to the pixel centre so filtering does not bleed into neighbouring texels.
                    // U coordinate = (vertex id + 0.5) * (1.0 / texture width)
                    float4 uv_anim = float4(
                        (v.vid + 0.5) * UNITY_ACCESS_INSTANCED_PROP(Props, _AnimTex_TexelSize).x,
                        _Time.y / UNITY_ACCESS_INSTANCED_PROP(Props, _AnimTime),
                        0.0, 0.0);

                    // tex2Dlod reads the vertex position from the animation texture:
                    //   _AnimTex   - texture holding the baked vertex positions
                    //   uv_anim.xy - computed UV
                    //   uv_anim.zw - LOD level (0 here)
                    v.vertex = float4(tex2Dlod(_AnimTex, uv_anim).xyz, 1.0);
                    //v.vertex.y = v.vertex.y - 0.5;

                    o.vertex = UnityObjectToClipPos(v.vertex); // object space to clip space
                    // apply _MainTex's Tiling/Offset to the mesh UVs to get the UVs actually used for sampling
                    o.uv = v.uv * UNITY_ACCESS_INSTANCED_PROP(Props, _MainTex_ST).xy
                         + UNITY_ACCESS_INSTANCED_PROP(Props, _MainTex_ST).zw;
                    return o;
                }

                fixed4 frag (v2f i) : SV_Target
                {
                    UNITY_SETUP_INSTANCE_ID(i); // only needed when instanced properties are accessed in the fragment shader
                    fixed4 col = tex2D(_MainTex, i.uv); // sample the main texture
                    return col;
                }
                ENDCG
            }
        }
    }

Most of this is referenced and adapted from another author's article.
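The shader still needs the combined material to receive the baked vertex texture and the clip length at runtime; the article does not show that hookup explicitly, so here is a minimal sketch under my own assumptions (the component, field names, and the instance layout are mine). Note that _AnimTex_TexelSize is filled in automatically by Unity for a texture named _AnimTex, and that the shader divides _Time.y by _AnimTime without a frac(), so looping relies on the animation texture's wrap mode being Repeat.

    using UnityEngine;

    // Hypothetical runtime setup: feeds the baked vertex-animation texture into the
    // combined material and draws many instances with GPU instancing.
    public class VertexAnimInstancer : MonoBehaviour
    {
        public Mesh combinedMesh;          // the CombineMeshes result saved by the tool
        public Material combinedMaterial;  // material using the Unlit/yu2 shader
        public Texture2D vertexAnimTex;    // the *_VerTex.asset baked by the tool
        public float clipLength = 1f;      // length of the baked AnimationClip in seconds
        public int instanceCount = 100;

        private Matrix4x4[] matrices;

        void Start()
        {
            vertexAnimTex.wrapMode = TextureWrapMode.Repeat;       // lets _Time.y / _AnimTime loop past 1
            combinedMaterial.SetTexture("_AnimTex", vertexAnimTex);
            combinedMaterial.SetFloat("_AnimTime", clipLength);
            combinedMaterial.enableInstancing = true;

            // Scatter the instances on a simple grid
            matrices = new Matrix4x4[instanceCount];
            for (int i = 0; i < instanceCount; i++)
            {
                Vector3 pos = new Vector3(i % 10 * 2f, 0f, i / 10 * 2f);
                matrices[i] = Matrix4x4.TRS(pos, Quaternion.identity, Vector3.one);
            }
        }

        void Update()
        {
            // DrawMeshInstanced submits up to 1023 instances per call
            Graphics.DrawMeshInstanced(combinedMesh, 0, combinedMaterial, matrices, instanceCount);
        }
    }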