Preparation: register an account on the Baidu AI website (Baidu AI Open Platform, Baidu's AI services portal).
On the open-capabilities page you can find an introduction to the feature you want. You then need to create an application to obtain an AK (API Key) and SK (Secret Key); the Baidu AI documentation explains this clearly, so it is not repeated here.
Development flow: open the camera -> grab a frame -> upload it to the Baidu AI face-merge service -> display the returned merge result.
First, we need a WebCamera class that wraps the camera functionality.
using System;
using System.IO;
using UnityEngine;

/// <summary>
/// Simple webcam singleton class; attach it to an object in the scene.
/// </summary>
public class WebCamera : MonoBehaviour
{
public static WebCamera Instance;
/// <summary>
/// Index of the current camera; used to switch between devices when more than one is present.
/// </summary>
private int curCamIndex = 0;
/// <summary>
/// All available camera devices.
/// </summary>
private WebCamDevice[] devices;
/// <summary>
/// Texture the camera renders into.
/// </summary>
private WebCamTexture webCamTex;
/// <summary>
/// Name of the current device.
/// </summary>
public string deviceName { get; private set; }
/// <summary>
/// Whether the camera is open.
/// </summary>
public bool CameraIsOpen { get; private set; }
/// <summary>
/// Final rendered picture.
/// </summary>
public Texture renderTex { get; private set; }
/// <summary>
/// Most recent screenshot.
/// </summary>
public Texture2D lastShotText { get; private set; }
/// <summary>
/// Width and height of the requested picture.
/// </summary>
private int width, height;
/// <summary>
/// Frame rate.
/// </summary>
private int fps;
void Awake()
{
Instance = this;
}
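/// <summary>
/// Set the resolution and frame rate to request when the camera is opened.
/// </summary>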
public void InitCamera(int width,int height,int fps=30)
{
this.width = width;
this.height = height;
this.fps = fps;
}
/// <summary>
/// Open the camera.
/// </summary>
public void OpenCamera()
{
//Check that the user has granted webcam permission
if (Application.HasUserAuthorization(UserAuthorization.WebCam))
{
//Enumerate the available devices and pick the one to open
devices = WebCamTexture.devices;
if (devices.Length <= 0)
{
Debug.LogError("没有检测到摄像头,检查设备是否正常"); return;
}
deviceName = devices[curCamIndex].name;
webCamTex = new WebCamTexture(deviceName, width,height,fps);
renderTex = webCamTex;
//Start the camera
webCamTex.Play();
CameraIsOpen = true;
}
}
/// <summary>
/// Close the camera.
/// </summary>
public void CloseCamera()
{
if (CameraIsOpen && webCamTex != null)
{
webCamTex.Stop();
CameraIsOpen=false;
}
}
/// <summary>
/// Switch to the next camera device.
/// </summary>
public void SwapCamera()
{
if (devices.Length > 0)
{
curCamIndex = (curCamIndex + 1) % devices.Length;
if (webCamTex!= null)
{
CloseCamera();
OpenCamera();
}
}
}
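/// <summary>
/// Capture the current camera frame and save it as a JPG file in the given directory.
/// </summary>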
public void SaveScreenShot(string path)
{
Texture2D shotTex = TextureToTexture2D(webCamTex);
lastShotText = shotTex;
byte[] textureBytes = shotTex.EncodeToJPG();
string fileName = string.Format("IMG_{0}{1}{2}_{3}{4}{5}.jpg",DateTime.Now.Year,DateTime.Now.Month,
DateTime.Now.Day,DateTime.Now.Hour,DateTime.Now.Minute,DateTime.Now.Second);
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
Debug.Log($"图片已保存:{path}/{fileName}");
File.WriteAllBytes($"{ path}/{fileName}", textureBytes);
if (File.Exists($"{path}/{fileName}"))
{
Debug.Log("找到照片");
}
else
{
Debug.Log("未找到");
}
}
/// <summary>
/// Convert a Texture (e.g. the WebCamTexture) into a readable Texture2D.
/// </summary>
/// <param name="texture">Source texture to copy.</param>
/// <returns>A readable Texture2D copy of the source.</returns>
private Texture2D TextureToTexture2D(Texture texture)
{
Texture2D texture2D = new Texture2D(texture.width, texture.height, TextureFormat.RGBA32, false);
RenderTexture currentRT = RenderTexture.active;
RenderTexture renderTexture = RenderTexture.GetTemporary(texture.width, texture.height, 32);
Graphics.Blit(texture, renderTexture);
RenderTexture.active = renderTexture;
texture2D.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
texture2D.Apply();
RenderTexture.active = currentRT;
RenderTexture.ReleaseTemporary(renderTexture);
return texture2D;
}
}
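OpenCamera above simply returns when webcam permission has not been granted, so on platforms that show a permission prompt (WebGL, for example) it is worth requesting authorization first. A minimal sketch, assuming the WebCamera singleton is already in the scene (the CameraStarter class name is just for illustration):

using System.Collections;
using UnityEngine;

public class CameraStarter : MonoBehaviour
{
    IEnumerator Start()
    {
        // Ask the user for webcam access; on desktop standalone this usually completes immediately.
        yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);
        if (Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            WebCamera.Instance.InitCamera(800, 600);
            WebCamera.Instance.OpenCamera();
        }
        else
        {
            Debug.LogWarning("Webcam permission denied; the camera cannot be opened.");
        }
    }
}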
Create an AccessToken script to obtain the access token. The official sample returns a token object that needs to be parsed again; clientAk and clientSk must be set to the AK and SK of the application you created.
using System;
using System.Collections.Generic;
using System.Net.Http;
using UnityEngine;

public static class AccessToken
{
//Cache the access_token returned by GetAssessToken() according to the expires_in value instead of requesting a new one every time
//API Key of the application created in the Baidu console (enable the extra services you need when creating the application)
private static string clientAk = "***********************";
//Secret Key of the same application
private static string clientSk = "****************************";
public static string GetAssessToken()
{
string authHost = "https://aip.baidubce.com/oauth/2.0/token";
HttpClient client = new HttpClient();
List<KeyValuePair<string, string>> paraList = new List<KeyValuePair<string, string>>();
paraList.Add(new KeyValuePair<string, string>("grant_type", "client_credentials"));
paraList.Add(new KeyValuePair<string, string>("client_id", clientAk));
paraList.Add(new KeyValuePair<string, string>("client_secret", clientSk));
HttpResponseMessage response = client.PostAsync(authHost, new FormUrlEncodedContent(paraList)).Result;
string result = response.Content.ReadAsStringAsync().Result;
TokenInfo tokenInfo = JsonUtility.FromJson<TokenInfo>(result);
return tokenInfo.access_token;
}
[Serializable]
public class TokenInfo
{
public string refresh_token;
public string access_token;
public long expires_in; //token lifetime in seconds, useful for caching
}
}
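The comment inside AccessToken recommends caching the token instead of requesting a new one on every call. A minimal caching sketch along those lines (the CachedAccessToken class and the 29-day window are illustrative assumptions; Baidu access tokens are currently valid for roughly 30 days, and the expires_in value returned with the token could be used to compute the exact expiry instead):

using System;

public static class CachedAccessToken
{
    private static string cachedToken;
    private static DateTime expireTime = DateTime.MinValue;

    public static string Get()
    {
        // Reuse the cached token until it is close to expiring.
        if (!string.IsNullOrEmpty(cachedToken) && DateTime.Now < expireTime)
        {
            return cachedToken;
        }
        cachedToken = AccessToken.GetAssessToken();
        // Assume roughly a 30-day lifetime and refresh a day early as a safety margin.
        expireTime = DateTime.Now.AddDays(29);
        return cachedToken;
    }
}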
Create a FaceMerge class to call the face-merge API; you can of course extend it with more features.
using System;
using System.Collections;
using System.Text;
using UnityEngine;
using UnityEngine.Events;
using UnityEngine.Networking;

public class FaceMerge : MonoBehaviour
{
public static FaceMerge Instance;
private void Awake()
{
Instance = this;
}
//Face merge: POST the request JSON to the Baidu endpoint and pass the raw response to the callbacks
public void PostFaceMerge(string json, UnityAction<string> sucessResponse, UnityAction<string> errorRes = null)
{
StartCoroutine(IPostFaceMerge(json, sucessResponse, errorRes));
}
private IEnumerator IPostFaceMerge(string json, UnityAction<string> sucessResponse, UnityAction<string> errorRes = null)
{
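// Note: GetAssessToken() blocks here until the HTTP call returns; caching the token (see the sketch above) keeps this cost occasional.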
string token = AccessToken.GetAssessToken();
string url = "https://aip.baidubce.com/rest/2.0/face/v1/merge?access_token=" + token;
using (UnityWebRequest request = new UnityWebRequest(url, "POST"))
{
//The request body is JSON, so encode it as UTF-8 and declare the content type
byte[] buffer = Encoding.UTF8.GetBytes(json);
request.uploadHandler = new UploadHandlerRaw(buffer);
request.downloadHandler = new DownloadHandlerBuffer();
request.SetRequestHeader("Content-Type", "application/json");
yield return request.SendWebRequest();
if (request.result == UnityWebRequest.Result.Success)
{
sucessResponse?.Invoke(request.downloadHandler.text);
}
else
{
errorRes?.Invoke(request.downloadHandler.text);
}
}
}
public Texture2D Base64ToTexture2D(int width,int height,string base64Str)
{
Texture2D pic = new Texture2D(width, height, TextureFormat.RGBA32, false);
byte[] data = System.Convert.FromBase64String(base64Str);
pic.LoadImage(data);
return pic;
}
public string Texture2DToBase64(Texture2D tex2d)
{
byte[] bytes = tex2d.EncodeToJPG();
string strBase64 = Convert.ToBase64String(bytes);
return strBase64;
}
}
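A short usage sketch for reference (the json variable is assumed to hold a request body built the same way as in the Test class below); wiring up the error callback keeps Baidu error responses from being silently dropped:

FaceMerge.Instance.PostFaceMerge(json,
    ok => Debug.Log("Merge response: " + ok),
    err => Debug.LogError("Merge request failed: " + err));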
We also need a Response class to deserialize the returned result.
public class Response
{
public int error_code;
public string error_msg;
public long log_id;
public long timestamp;
public int cached;
public Result result;
}
public class Result
{
public string merge_image;
}
Finally, create a Test class to try the whole flow. The JSON handling uses the JsonMapper class from the LitJson plugin, which you need to download and import yourself.
using System.Collections.Generic;
using LitJson;
using UnityEngine;
using UnityEngine.UI;

public class Test : MonoBehaviour
{
/// <summary>
/// Shows the live picture rendered by the camera.
/// </summary>
[SerializeField] RawImage rawImg;
/// <summary>
/// Template image for the face merge.
/// </summary>
[SerializeField] Texture2D targetFusionTex;
/// <summary>
/// Shows the merge result.
/// </summary>
[SerializeField] RawImage resultImg;
/// <summary>
/// Directory where screenshots are saved.
/// </summary>
private string path;
void Start()
{
path = Application.streamingAssetsPath; // writable in the Editor and on standalone builds; prefer Application.persistentDataPath on mobile
WebCamera.Instance.InitCamera(800,600);
WebCamera.Instance.OpenCamera();
rawImg.texture = WebCamera.Instance.renderTex;
}
private void FunsionFace()
{
WebCamera.Instance.SaveScreenShot(path);
var curTex = WebCamera.Instance.lastShotText;
//Serialize the request parameters from a dictionary into JSON and upload them to Baidu AI
Dictionary<string,object> dict = new Dictionary<string,object>();
dict.Add("version","4.0");
dict.Add("alpha",0);
ImageInfo imgTemplate = new ImageInfo();
imgTemplate.image = FaceMerge.Instance.Texture2DToBase64(targetFusionTex);
imgTemplate.image_type = "BASE64";
imgTemplate.quality_control = "NONE";
dict.Add("image_template", imgTemplate);
ImageInfo imgTarget = new ImageInfo();
imgTarget.image = FaceMerge.Instance.Texture2DToBase64(curTex);
imgTarget.image_type = "BASE64";
imgTarget.quality_control = "NONE";
dict.Add("image_target",imgTarget);
dict.Add("merge_degree", "COMPLETE");
string json = JsonMapper.ToJson(dict); // Serialization here uses LitJson; JsonUtility cannot serialize a Dictionary and would produce an empty string
FaceMerge.Instance.PostFaceMerge(json, OnFaceMerge);
}
private void OnFaceMerge(string info)
{
Debug.Log(info);
Response response = JsonMapper.ToObject<Response>(info);
if (response.error_code == 0) // 0 means the merge succeeded
{
Debug.Log(response.error_msg);
string ImgBase64 = response.result.merge_image;
resultImg.texture = FaceMerge.Instance.Base64ToTexture2D(targetFusionTex.width, targetFusionTex.height, ImgBase64);
}
}
void Update()
{
if (Input.GetKeyUp(KeyCode.W))
{
FunsionFace();
}
}
public class ImageInfo
{
public string image; //image data
public string image_type; //image type: BASE64, URL, or FACE_TOKEN
public string quality_control; //quality control: NONE, LOW, NORMAL, or HIGH
}
}
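If you also want to keep the merged picture on disk, the same approach as SaveScreenShot can be reused; the small sketch below (the MergedImageSaver class is illustrative, not part of the article's project) could be called at the end of OnFaceMerge once the merged texture exists:

using System.IO;
using UnityEngine;

public static class MergedImageSaver
{
    /// <summary>
    /// Encode a merged texture to PNG and write it into the given directory.
    /// </summary>
    public static void Save(Texture2D mergedTex, string path)
    {
        if (!Directory.Exists(path))
        {
            Directory.CreateDirectory(path);
        }
        byte[] bytes = mergedTex.EncodeToPNG();
        File.WriteAllBytes($"{path}/merged.png", bytes);
    }
}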
CSDN download link (no points needed): https://download.csdn.net/download/qq_40666661/87704717
Baidu Netdisk: https://pan.baidu.com/s/1LTqsc8bxf69RZAWWB9Nw0Q?pwd=heyo (extraction code: heyo)