Play video with ARKit
I'm trying to place a Quad in 3D space and play a video on it.
I have set up my AR scene using the ARKit sample scene.
The Quad is placed when I tap the screen and I can move it about.
I don't want the video to Play on Awake, as the User may not have placed it yet, so I want it to start playing once they tap the screen to place the Quad.
The video is a 26sec 720p video playing from a URL that is my Amazon Server.
With the script I'm currently using, my Debug log will continually say "Preparing Video", whereas if I have it Play On Awake, it plays almost instantly.
using System;
using System.Collections.Generic;
using System.Collections;
using UnityEngine;
using UnityEngine.Networking;
using UnityEngine.Video;
namespace UnityEngine.XR.iOS
{
public class UnityARHitTestExample : MonoBehaviour
{
public Transform m_HitTransform;
public GameObject video;
bool HitTestWithResultType (ARPoint point, ARHitTestResultType resultTypes)
{
List<ARHitTestResult> hitResults = UnityARSessionNativeInterface.GetARSessionNativeInterface ().HitTest (point, resultTypes);
if (hitResults.Count > 0) {
foreach (var hitResult in hitResults) {
Debug.Log ("Got hit!");
m_HitTransform.position = UnityARMatrixOps.GetPosition (hitResult.worldTransform);
m_HitTransform.rotation = UnityARMatrixOps.GetRotation (hitResult.worldTransform);
Debug.Log (string.Format ("x:{0:0.######} y:{1:0.######} z:{2:0.######}", m_HitTransform.position.x, m_HitTransform.position.y, m_HitTransform.position.z));
return true;
}
}
return false;
}
void Start(){
video.SetActive(false);
}
// Update is called once per frame
void Update () {
if (Input.touchCount > 0 && m_HitTransform != null)
{
video.SetActive(true);
StartCoroutine(StartVid());
var touch = Input.GetTouch(0);
if (touch.phase == TouchPhase.Began || touch.phase == TouchPhase.Moved)
{
var screenPosition = Camera.main.ScreenToViewportPoint(touch.position);
ARPoint point = new ARPoint {
x = screenPosition.x,
y = screenPosition.y
};
// prioritize reults types
ARHitTestResultType[] resultTypes = {
ARHitTestResultType.ARHitTestResultTypeExistingPlaneUsingExtent,
// if you want to use infinite planes use this:
//ARHitTestResultType.ARHitTestResultTypeExistingPlane,
ARHitTestResultType.ARHitTestResultTypeHorizontalPlane,
ARHitTestResultType.ARHitTestResultTypeFeaturePoint
};
foreach (ARHitTestResultType resultType in resultTypes)
{
if (HitTestWithResultType (point, resultType))
{
return;
}
}
}
}
}
IEnumerator StartVid(){
video.GetComponent<VideoPlayer>().Prepare ();
//Wait until video is prepared
while (!video.GetComponent<VideoPlayer>().isPrepared) {
Debug.Log ("Preparing Video");
yield return null;
}
Debug.Log ("Done Preparing Video");
video.GetComponent<VideoPlayer>().Play();
Debug.Log("PLAYING VIDEO");
}
}
}
Im hoping I've just missed something simple!
Can anyone help me out please?
Your answer
Follow this Question
Related Questions
Playing a VR video on Android - Google Cardboard 0 Answers
Unity WebGL video player on mobile 0 Answers
(Videoplayer component) What file format and resolution do I need for iOS? 0 Answers
[iOS] WWW class GetAudioClip unsupported URL 1 Answer
Error message “Cannot Play a disabled VideoPlayer" in iOS 0 Answers