﻿using UnityEngine;
using UnityEngine.UI;
using System;
using System.Collections;
using System.Xml;

using Augumenta;
using System.Collections.Generic;

/**
 * Example implementation of how to provide video frames to Augumenta AIP.
 *
 * This implementation uses the camera frame provided from Unity3D
 * WebCamTexture.
 */
public class WebCamHandler : MonoBehaviour
{
	private UnityAgapi agapi;
	private WebCamTexture webcam;
	// image to use to show the camera preview
	public RawImage webcam_image;
	// For Hololens, you should consider using 896x504
	[Header("Requested resolution")]
	[Tooltip("Preferred WebCam frame width in pixels")]
	public int webcamWidth = 640;
	[Tooltip("Preferred WebCam frame height in pixels")]
	public int webcamHeight = 480;
	// For Hololens, we calculated VFOV: 27.9 with HFOV: 47.7
	[Header("Known FOV")]
	[Tooltip("Horizontal field of view in degrees")]
	public float webcamFovHorizontal = 0;
	[Tooltip("Vertical field of view in degrees")]
	public float webcamFovVertical = 0;
	// lens intrinsic camera calibration (fx, fy, cx, cy)
	private double[] webcamLICC = new double[4];
	// non-linear intrinsic camera calibration (distortion) coefficients
	private double[] webcamNICC = new double[16];
	// frame-origin transformation taken from the HW profile, XOR-ed into frames
	private byte webcamImageTransformation = 0;
	[Header("Advanced options")]
	[Tooltip("Camera preview calibration")]
	public bool autoCam2Display = true;
	[Tooltip("Webcam start index")]
	public int webcamStartIdx = 0;
	[Tooltip("Debug camera heading")]
	public bool debugHeading = false;
	[Tooltip("Prevent screen to go dim")]
	public bool preventScreenToSleep = true;
	// image data cache, allocated once the webcam resolution is known
	private Color32[] imageData = null;
	// cached Camera.main (Camera.main is a scene lookup on every access)
	Camera mCamera;
	private bool cameraWasStarted = false;
	// components used every frame by the preview calibration;
	// cached in Start() instead of calling GetComponent() per frame
	private Canvas mCanvas;
	private RectTransform mRectTransform;

	// Reset allows us to set some defaults based on BuildTarget
	private void Reset()
	{
#if UNITY_EDITOR
		// Resetting to Hololens best defaults if BuildTarget is WSAPlayer
		if(UnityEditor.EditorUserBuildSettings.activeBuildTarget == UnityEditor.BuildTarget.WSAPlayer) {
			webcamWidth = 896;
			webcamHeight = 504;
			webcamFovHorizontal = 46.54f;
			webcamFovVertical = 27.28f;
			webcamLICC = new double[] {1041.814596f, 1038.646814f, 401.991348f, 221.598958f};
			webcamNICC = new double[] {0.103051f, 0.904975f, -5.675912f, 9.667461f, -9.921758f, -0.852060f, 10.781939f, 2.577127f, -0.097380f, -1.160038f, 7.126730f, -12.582519f, 10.215430f, 0.900961f, -7.343695f, -2.571147f};
		}
		// Reset the Preview Image for webcam_image
		webcam_image = this.GetComponentInChildren<RawImage>();
#endif
	}

	/// <summary>
	/// Caches scene references, opens the webcam and loads the matching
	/// calibration from the device HW profile.
	/// </summary>
	void Start()
	{
		agapi = UnityAgapi.Instance;
		mCamera = Camera.main; // cache the Camera.main

		if(mCamera == null) {
			Debug.LogError("Camera.main is null. Check in the Editor that it is properly tagged.");
		}

		// cache the components the per-frame preview calibration needs
		mCanvas = gameObject.GetComponent<Canvas>();
		mRectTransform = GetComponent<RectTransform>();

		// set screen to never sleep
		if(preventScreenToSleep) {
			Debug.Log("Screen set to never sleep");
			Screen.sleepTimeout = SleepTimeout.NeverSleep;
		}

		if(!OpenWebcam()) {
			return;
		}

		// allocate the webcam frame buffer
		imageData = new Color32[webcam.width * webcam.height];
		ApplyProductProfile();
	}

	/// <summary>
	/// Opens the first working webcam, starting from webcamStartIdx.
	/// Returns true when a camera preview was successfully started.
	/// </summary>
	private bool OpenWebcam()
	{
		WebCamDevice[] webcams = WebCamTexture.devices;
		if(webcams.Length <= 0) {
			Debug.LogError("Found no WebCams!");
			return false;
		}
		// clamp the requested start index to the available devices
		// (out-of-range index falls back to the last camera, negative to the first)
		int i = Mathf.Clamp(webcamStartIdx, 0, webcams.Length - 1);
		for(; i < webcams.Length; i++) {
			try {
				Debug.Log("Opening camera [" + i + "]: " + webcams[i].name);
				// initialize webcam and start preview
				webcam = new WebCamTexture(webcams[i].name, webcamWidth, webcamHeight);
				if(webcam_image) {
					webcam_image.texture = webcam;
				}
				// try playing the webcam
				webcam.Play();
				cameraWasStarted = true;
#if !WINDOWS_UWP // Hololens doesn't report correctly webcam.isPlaying
				if(webcam.isPlaying) {
					break;
				}
#endif
			} catch(Exception e) {
				Debug.Log("WebCam error: " + e.Message);
			}
		}
#if !WINDOWS_UWP // Hololens doesn't report correctly webcam.isPlaying
		if(!webcam || !webcam.isPlaying) {
			Debug.Log("Failed to open webcam");
			return false;
		}
#endif
		// on UWP isPlaying is unreliable; still bail out if no texture was created
		return webcam != null;
	}

	/// <summary>
	/// Looks up a FRONT camera entry matching the actual webcam resolution in
	/// the HW product profile and copies its calibration (LICC/NICC/FOV/origin).
	/// </summary>
	private void ApplyProductProfile()
	{
		AgapiProductProfiles.Product product = Agapi.CurrentProduct;
		if(product == null || product.cameras == null) {
			return;
		}
		foreach(AgapiProductProfiles.Product.Camera camera in product.cameras) {
			if(camera.id != "FRONT" || camera.resolutions == null) {
				continue;
			}
			foreach(AgapiProductProfiles.Product.Camera.Resolution resolution in camera.resolutions) {
				if(resolution.height != webcam.height || resolution.width != webcam.width) {
					continue;
				}
				Debug.Log("Found camera resolution in HW profile");
				webcamLICC = resolution.GetLICC();
				webcamNICC = resolution.GetNICC();
				webcamFovHorizontal = resolution.horizontalFOV;
				webcamFovVertical = resolution.verticalFOV;
				webcamImageTransformation = resolution.imageTransformation.GetFrameOrigin();
				Debug.LogFormat("Using camera settings: {0}", resolution.ToString());
			}
		}
	}

	// Update is called once per frame
	void Update()
	{
		// guard against a failed webcam open: the original code dereferenced
		// webcam unconditionally and threw every frame in that case
		if(webcam != null) {
			if(cameraWasStarted && !webcam.isPlaying) {
				// restart Play if the camera was started already
				webcam.Play();
			}
			if(autoCam2Display) {
				AlignPreviewWithMainCamera();
			}
		}
		// check if ready for new detection, and trigger last detections
		if(agapi.Update()) {
			// check if there is a new frame available for detection
			if(webcam && webcam.didUpdateThisFrame && imageData != null) {
				DetectFromCurrentFrame();
			}
		}
		if(debugHeading) {
			Debug.DrawRay(this.transform.position, this.transform.forward, Color.blue);
		}
	}

	/// <summary>
	/// Keeps the Main Camera FOV in sync with the webcam FOV and places the
	/// preview canvas in world space at the distance where one webcam pixel
	/// covers one canvas unit of the camera frustum.
	/// </summary>
	private void AlignPreviewWithMainCamera()
	{
		if(mCamera == null) {
			// nothing to calibrate against (already reported in Start)
			return;
		}
		// use a tolerance instead of exact float equality
		if(!Mathf.Approximately(mCamera.fieldOfView, webcamFovVertical)) {
			Debug.LogWarning("Camera FOV (=" + webcamFovVertical +
							 ") is not equal with the Main Camera FOV (" +
							 mCamera.fieldOfView + ")!");
			mCamera.fieldOfView = webcamFovVertical;
		}
		// distance at which a webcam.height-tall canvas exactly fills the frustum
		float distance = webcam.height * 0.5f / Mathf.Tan(mCamera.fieldOfView * 0.5f * Mathf.Deg2Rad);
		if(mCamera.farClipPlane < distance) {
			Debug.LogWarning("Make sure your Main Camera far clipping plane is bigger than " + distance);
			mCamera.farClipPlane = distance + 1;
		}
		// We put the Canvas to work in WorldSpace first
		if(mCanvas != null) {
			mCanvas.renderMode = RenderMode.WorldSpace;
		}
		if(mRectTransform != null) {
			mRectTransform.sizeDelta = new Vector2(webcam.width, webcam.height);
			mRectTransform.pivot = new Vector2(0.5f, 0.5f);
			mRectTransform.localPosition = new Vector3(0, 0, distance);
		}
	}

	/// <summary>
	/// Grabs the current webcam frame, converts it to luminance and sends it
	/// to Augumenta AIP for detection.
	/// </summary>
	private void DetectFromCurrentFrame()
	{
		webcam.GetPixels32(imageData);

		byte[] data = UnityAgapi.Color32ToY(imageData);
		Agapi.Frame frame = new Agapi.Frame(data, webcam.width, webcam.height);
		// apply the HW-profile transformation, then flip vertically because the
		// webcam image is upside down (replaces frame.flipped=true on Unity)
		frame.origin ^= webcamImageTransformation;
		frame.origin ^= 1;

		// set the specified field-of-view values in radians
		frame.FOV[0] = webcamFovHorizontal * Mathf.Deg2Rad;
		frame.FOV[1] = webcamFovVertical * Mathf.Deg2Rad;
		frame.LICC = webcamLICC;
		frame.NICC = webcamNICC;

		// send the image for processing
		agapi.Detect(frame);
	}

	void OnDestroy()
	{
		if(webcam != null) {
			Debug.Log("Stopping WebCam");
			webcam.Stop();
		}
		if(agapi != null) {
			agapi.Abort();
			agapi = null;
		}
		cameraWasStarted = false;
		imageData = null;
	}
}
