A while ago my company took on an AR coloring-book ("AR 涂涂乐") project. I had touched AR before and written a few small demos, but I had never built a complete AR project. After a week or so of study I have now learned all the techniques the project needed, and I want to salute the programmers out there who generously share what they know. While learning, I noticed that many blog posts contain only code and no explanation, so here I'll write up a more detailed tutorial on the AR coloring feature.
Among the AR products currently on the market, 涂涂乐 is one of the more successful ones; its vivid, novel presentation has made it very popular in the early-education industry. The principle behind AR coloring is actually quite simple: take the colors the user paints onto the recognition image (which doubles as the drawing sheet) and render them as a texture onto a blank 3D model.
I've roughly summarized the overall pipeline from 3D model to finished AR application.
Here I'll use the project I wrote recently as the example:
In this model only the flower is meant to be painted, so the recognition image is a blank flower.
Set up the Android development environment (I've covered this in detail in an earlier post).
Switch the build platform to Android (if you don't switch it ahead of time, the export may fail with errors).
Overall idea: the main job in the coding stage is to compute the texture information from the recognition image and assign it to the model. Because of the animation, a model may be split into many small sub-objects, and every one of them has to go through this computation. Here the flower's seven petals are independent objects, so the computation has to run seven times. The main script, ARRender.cs:
using UnityEngine;
using Vuforia;
using System.Collections;

public class ARRender : MonoBehaviour
{
    public GameObject Scene;
    private Animator flowerAnimator;

    // The seven petals of the seven-color flower
    public GameObject flower1;
    public GameObject flower2;
    public GameObject flower3;
    public GameObject flower4;
    public GameObject flower5;
    public GameObject flower6;
    public GameObject flower7;

    private Texture2D texture;   // Texture2D that stores the screen capture
    private int screenWidth;     // screen width
    private int screenHeight;    // screen height

    // World coordinates of the four corners of the real texture region
    Vector3 targetAnglePoint1;   // top-left corner
    Vector3 targetAnglePoint2;   // bottom-left corner
    Vector3 targetAnglePoint3;   // top-right corner
    Vector3 targetAnglePoint4;   // bottom-right corner

    public GameObject plane;     // the plane object that defines the texture region's size
    Vector2 halfSize;            // half of the plane's width and height

    void Start()
    {
        screenWidth = Screen.width;
        screenHeight = Screen.height;
        texture = new Texture2D(screenWidth, screenHeight, TextureFormat.RGB24, false); // allocate an empty texture
        flowerAnimator = this.GetComponent<Animator>();
    }

    // Screen-capture routine
    public void ScreenShot()
    {
        Scene.SetActive(true);
        flowerAnimator.SetTrigger("FlowerRainbow");

        texture.ReadPixels(new Rect(0, 0, screenWidth, screenHeight), 0, 0); // read the screen pixels
        texture.Apply();                                                     // store them as texture data

        // Get half of the plane's width and height
        halfSize = new Vector2(plane.GetComponent<MeshFilter>().mesh.bounds.size.x,
                               plane.GetComponent<MeshFilter>().mesh.bounds.size.z) * 50.0f * 0.5f;

        // Work out the world coordinates of the real texture's four corners
        targetAnglePoint1 = transform.parent.position + new Vector3(-halfSize.x, 0, halfSize.y);
        targetAnglePoint2 = transform.parent.position + new Vector3(-halfSize.x, 0, -halfSize.y);
        targetAnglePoint3 = transform.parent.position + new Vector3(halfSize.x, 0, halfSize.y);
        targetAnglePoint4 = transform.parent.position + new Vector3(halfSize.x, 0, -halfSize.y);

        // Build the VP (view-projection) matrix
        Matrix4x4 P = GL.GetGPUProjectionMatrix(Camera.main.projectionMatrix, false);
        Matrix4x4 V = Camera.main.worldToCameraMatrix;
        Matrix4x4 VP = P * V;

        // Pass the four corner world coordinates, the VP matrix, and the captured
        // texture to each petal's shader
        flower1.GetComponent<Renderer>().material.SetVector("_Uvpoint1", new Vector4(targetAnglePoint1.x, targetAnglePoint1.y, targetAnglePoint1.z, 1f));
        flower1.GetComponent<Renderer>().material.SetVector("_Uvpoint2", new Vector4(targetAnglePoint2.x, targetAnglePoint2.y, targetAnglePoint2.z, 1f));
        flower1.GetComponent<Renderer>().material.SetVector("_Uvpoint3", new Vector4(targetAnglePoint3.x, targetAnglePoint3.y, targetAnglePoint3.z, 1f));
        flower1.GetComponent<Renderer>().material.SetVector("_Uvpoint4", new Vector4(targetAnglePoint4.x, targetAnglePoint4.y, targetAnglePoint4.z, 1f));
        flower1.GetComponent<Renderer>().material.SetMatrix("_VP", VP);
        flower1.GetComponent<Renderer>().material.mainTexture = texture;

        flower2.GetComponent<Renderer>().material.SetVector("_Uvpoint1", new Vector4(targetAnglePoint1.x, targetAnglePoint1.y, targetAnglePoint1.z, 1f));
        flower2.GetComponent<Renderer>().material.SetVector("_Uvpoint2", new Vector4(targetAnglePoint2.x, targetAnglePoint2.y, targetAnglePoint2.z, 1f));
        flower2.GetComponent<Renderer>().material.SetVector("_Uvpoint3", new Vector4(targetAnglePoint3.x, targetAnglePoint3.y, targetAnglePoint3.z, 1f));
        flower2.GetComponent<Renderer>().material.SetVector("_Uvpoint4", new Vector4(targetAnglePoint4.x, targetAnglePoint4.y, targetAnglePoint4.z, 1f));
        flower2.GetComponent<Renderer>().material.SetMatrix("_VP", VP);
        flower2.GetComponent<Renderer>().material.mainTexture = texture;

        flower3.GetComponent<Renderer>().material.SetVector("_Uvpoint1", new Vector4(targetAnglePoint1.x, targetAnglePoint1.y, targetAnglePoint1.z, 1f));
        flower3.GetComponent<Renderer>().material.SetVector("_Uvpoint2", new Vector4(targetAnglePoint2.x, targetAnglePoint2.y, targetAnglePoint2.z, 1f));
        flower3.GetComponent<Renderer>().material.SetVector("_Uvpoint3", new Vector4(targetAnglePoint3.x, targetAnglePoint3.y, targetAnglePoint3.z, 1f));
        flower3.GetComponent<Renderer>().material.SetVector("_Uvpoint4", new Vector4(targetAnglePoint4.x, targetAnglePoint4.y, targetAnglePoint4.z, 1f));
        flower3.GetComponent<Renderer>().material.SetMatrix("_VP", VP);
        flower3.GetComponent<Renderer>().material.mainTexture = texture;

        flower4.GetComponent<Renderer>().material.SetVector("_Uvpoint1", new Vector4(targetAnglePoint1.x, targetAnglePoint1.y, targetAnglePoint1.z, 1f));
        flower4.GetComponent<Renderer>().material.SetVector("_Uvpoint2", new Vector4(targetAnglePoint2.x, targetAnglePoint2.y, targetAnglePoint2.z, 1f));
        flower4.GetComponent<Renderer>().material.SetVector("_Uvpoint3", new Vector4(targetAnglePoint3.x, targetAnglePoint3.y, targetAnglePoint3.z, 1f));
        flower4.GetComponent<Renderer>().material.SetVector("_Uvpoint4", new Vector4(targetAnglePoint4.x, targetAnglePoint4.y, targetAnglePoint4.z, 1f));
        flower4.GetComponent<Renderer>().material.SetMatrix("_VP", VP);
        flower4.GetComponent<Renderer>().material.mainTexture = texture;

        flower5.GetComponent<Renderer>().material.SetVector("_Uvpoint1", new Vector4(targetAnglePoint1.x, targetAnglePoint1.y, targetAnglePoint1.z, 1f));
        flower5.GetComponent<Renderer>().material.SetVector("_Uvpoint2", new Vector4(targetAnglePoint2.x, targetAnglePoint2.y, targetAnglePoint2.z, 1f));
        flower5.GetComponent<Renderer>().material.SetVector("_Uvpoint3", new Vector4(targetAnglePoint3.x, targetAnglePoint3.y, targetAnglePoint3.z, 1f));
        flower5.GetComponent<Renderer>().material.SetVector("_Uvpoint4", new Vector4(targetAnglePoint4.x, targetAnglePoint4.y, targetAnglePoint4.z, 1f));
        flower5.GetComponent<Renderer>().material.SetMatrix("_VP", VP);
        flower5.GetComponent<Renderer>().material.mainTexture = texture;

        flower6.GetComponent<Renderer>().material.SetVector("_Uvpoint1", new Vector4(targetAnglePoint1.x, targetAnglePoint1.y, targetAnglePoint1.z, 1f));
        flower6.GetComponent<Renderer>().material.SetVector("_Uvpoint2", new Vector4(targetAnglePoint2.x, targetAnglePoint2.y, targetAnglePoint2.z, 1f));
        flower6.GetComponent<Renderer>().material.SetVector("_Uvpoint3", new Vector4(targetAnglePoint3.x, targetAnglePoint3.y, targetAnglePoint3.z, 1f));
        flower6.GetComponent<Renderer>().material.SetVector("_Uvpoint4", new Vector4(targetAnglePoint4.x, targetAnglePoint4.y, targetAnglePoint4.z, 1f));
        flower6.GetComponent<Renderer>().material.SetMatrix("_VP", VP);
        flower6.GetComponent<Renderer>().material.mainTexture = texture;

        flower7.GetComponent<Renderer>().material.SetVector("_Uvpoint1", new Vector4(targetAnglePoint1.x, targetAnglePoint1.y, targetAnglePoint1.z, 1f));
        flower7.GetComponent<Renderer>().material.SetVector("_Uvpoint2", new Vector4(targetAnglePoint2.x, targetAnglePoint2.y, targetAnglePoint2.z, 1f));
        flower7.GetComponent<Renderer>().material.SetVector("_Uvpoint3", new Vector4(targetAnglePoint3.x, targetAnglePoint3.y, targetAnglePoint3.z, 1f));
        flower7.GetComponent<Renderer>().material.SetVector("_Uvpoint4", new Vector4(targetAnglePoint4.x, targetAnglePoint4.y, targetAnglePoint4.z, 1f));
        flower7.GetComponent<Renderer>().material.SetMatrix("_VP", VP);
        flower7.GetComponent<Renderer>().material.mainTexture = texture;
    }
}
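The post doesn't show how ScreenShot() gets triggered. A minimal sketch, assuming a uGUI Button in the scene (the CaptureButton class and its fields are hypothetical, not part of the original project):

using UnityEngine;
using UnityEngine.UI;

// Hypothetical glue code: call ARRender.ScreenShot() when a UI button is clicked.
public class CaptureButton : MonoBehaviour
{
    public Button captureButton; // the capture button, assigned in the Inspector
    public ARRender arRender;    // the ARRender component shown above

    void Start()
    {
        captureButton.onClick.AddListener(arRender.ScreenShot);
    }
}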
Shader "AR paint/ToMaterial" { Properties { _MainTex ("Base (RGB)", 2D) = "white" {} _Uvpoint1("point1", Vector) = (0 , 0 , 0 , 0) _Uvpoint2("point2", Vector) = (0 , 0 , 0 , 0) _Uvpoint3("point3", Vector) = (0 , 0 , 0 , 0) _Uvpoint4("point4", Vector) = (0 , 0 , 0 , 0) } SubShader { Tags { "Queue"="Transparent" "RenderType"="Transparent" } LOD 200 Pass{ Blend SrcAlpha OneMinusSrcAlpha CGPROGRAM #pragma vertex vert #pragma fragment frag #include "UnityCG.cginc" sampler2D _MainTex; float4 _MainTex_ST; float4 _Uvpoint1; float4 _Uvpoint2; float4 _Uvpoint3; float4 _Uvpoint4; float4x4 _VP; struct v2f { float4 pos : SV_POSITION; float2 uv : TEXCOORD0; float4 fixedPos : TEXCOORD2; } ; v2f vert (appdata_base v) { v2f o; o.pos = mul(UNITY_MATRIX_MVP,v.vertex); o.uv = TRANSFORM_TEX(v.texcoord,_MainTex); float4 top = lerp(_Uvpoint1, _Uvpoint3, o.uv.x); float4 bottom = lerp(_Uvpoint2, _Uvpoint4, o.uv.x); float4 fixedPos = lerp(bottom, top, o.uv.y); o.fixedPos = ComputeScreenPos(mul(UNITY_MATRIX_VP, fixedPos)); return o; } float4 frag (v2f i) : COLOR { float4 top = lerp(_Uvpoint1, _Uvpoint3, i.uv.x); float4 bottom = lerp(_Uvpoint2, _Uvpoint4, i.uv.x); float4 fixedPos = lerp(bottom, top, i.uv.y); fixedPos = ComputeScreenPos(mul(_VP, fixedPos)); return tex2D(_MainTex, fixedPos.xy / fixedPos.w); } ENDCG } } //FallBack "Diffuse" }
Create a new Animator Controller and drag the animation clips you cut earlier into the state machine. Right-click to create the Transitions, and add a Trigger in the Parameters tab; that way the animation can be driven from code (the SetTrigger("FlowerRainbow") call in ARRender.cs above).
Find the DefaultTrackableEventHandler script under ImageTarget, declare an AudioSource variable, and then add the play and pause calls in the OnTrackingFound() and OnTrackingLost() methods respectively.
using UnityEngine;

namespace Vuforia
{
    public class DefaultTrackableEventHandler : MonoBehaviour, ITrackableEventHandler
    {
        public AudioSource clothesAudioSource;

        #region PRIVATE_MEMBER_VARIABLES

        private TrackableBehaviour mTrackableBehaviour;

        #endregion // PRIVATE_MEMBER_VARIABLES

        #region UNTIY_MONOBEHAVIOUR_METHODS

        void Start()
        {
            mTrackableBehaviour = GetComponent<TrackableBehaviour>();
            if (mTrackableBehaviour)
            {
                mTrackableBehaviour.RegisterTrackableEventHandler(this);
            }
        }

        #endregion // UNTIY_MONOBEHAVIOUR_METHODS

        #region PUBLIC_METHODS

        /// <summary>
        /// Implementation of the ITrackableEventHandler function called when the
        /// tracking state changes.
        /// </summary>
        public void OnTrackableStateChanged(
            TrackableBehaviour.Status previousStatus,
            TrackableBehaviour.Status newStatus)
        {
            if (newStatus == TrackableBehaviour.Status.DETECTED ||
                newStatus == TrackableBehaviour.Status.TRACKED ||
                newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
            {
                OnTrackingFound();
            }
            else
            {
                OnTrackingLost();
            }
        }

        #endregion // PUBLIC_METHODS

        #region PRIVATE_METHODS

        private void OnTrackingFound()
        {
            Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
            Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);

            // Enable rendering:
            foreach (Renderer component in rendererComponents)
            {
                component.enabled = true;
            }

            // Enable colliders:
            foreach (Collider component in colliderComponents)
            {
                component.enabled = true;
            }

            // Start the audio when the marker is found
            if (!clothesAudioSource.isPlaying)
            {
                clothesAudioSource.Play();
            }

            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
        }

        private void OnTrackingLost()
        {
            Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
            Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);

            // Disable rendering:
            foreach (Renderer component in rendererComponents)
            {
                component.enabled = false;
            }

            // Disable colliders:
            foreach (Collider component in colliderComponents)
            {
                component.enabled = false;
            }

            // Pause the audio when the marker is lost
            clothesAudioSource.Pause();

            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
        }

        #endregion // PRIVATE_METHODS
    }
}
Vuforia does not autofocus by default. Create a new script, copy the code below into it, and drag the script onto the ARCamera.
using UnityEngine;
using System.Collections;

public class Duijiao : MonoBehaviour
{
    void Start()
    {
        // Switch the Vuforia camera to continuous autofocus on startup
        Vuforia.CameraDevice.Instance.SetFocusMode(
            Vuforia.CameraDevice.FocusMode.FOCUS_MODE_CONTINUOUSAUTO);
    }

    void Update()
    {
        // Re-apply every frame so the focus mode is never lost at runtime
        Vuforia.CameraDevice.Instance.SetFocusMode(
            Vuforia.CameraDevice.FocusMode.FOCUS_MODE_CONTINUOUSAUTO);
    }
}
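Calling SetFocusMode every frame works, but it is wasteful. A lighter variant (my own suggestion, not from the original post) re-applies the focus mode only on startup and when the app resumes, which is when some devices drop it:

using UnityEngine;

public class ContinuousAutoFocus : MonoBehaviour
{
    void Start()
    {
        SetAutoFocus();
    }

    // Some devices reset the camera focus mode when the app is paused and
    // resumed, so re-apply it on resume instead of every frame in Update()
    void OnApplicationPause(bool paused)
    {
        if (!paused)
        {
            SetAutoFocus();
        }
    }

    private void SetAutoFocus()
    {
        Vuforia.CameraDevice.Instance.SetFocusMode(
            Vuforia.CameraDevice.FocusMode.FOCUS_MODE_CONTINUOUSAUTO);
    }
}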
Since I haven't refactored yet, the code is rather ugly; the upside is that it is beginner-friendly and easy to follow.
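If you do want to clean it up later, the seven near-identical blocks in ScreenShot() collapse naturally into a loop. A sketch (my own refactor, with a hypothetical petals array standing in for flower1 through flower7):

using UnityEngine;

public class ARRenderRefactored : MonoBehaviour
{
    public GameObject[] petals; // assign flower1..flower7 here in the Inspector

    // Pushes the values computed in ScreenShot() to every petal's material
    void ApplyToPetals(Vector3 p1, Vector3 p2, Vector3 p3, Vector3 p4,
                       Matrix4x4 vp, Texture2D screenshot)
    {
        foreach (GameObject petal in petals)
        {
            Material m = petal.GetComponent<Renderer>().material;
            m.SetVector("_Uvpoint1", new Vector4(p1.x, p1.y, p1.z, 1f));
            m.SetVector("_Uvpoint2", new Vector4(p2.x, p2.y, p2.z, 1f));
            m.SetVector("_Uvpoint3", new Vector4(p3.x, p3.y, p3.z, 1f));
            m.SetVector("_Uvpoint4", new Vector4(p4.x, p4.y, p4.z, 1f));
            m.SetMatrix("_VP", vp);
            m.mainTexture = screenshot;
        }
    }
}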