
I'm new here and new to Augmented Reality.

Currently I'm working on my final college project: an Android app that combines a 3D object and a video in one app using cloud-based AR (Vuforia Cloud Recognition).

I'm stuck and really confused about how to add video playback to cloud recognition Augmented Reality. What should I do?

I'm using a cloud handler named "SimpleCloudHandler.cs" and attached it to the Cloud Recognition object.

Here is the script:

using System;
using UnityEngine;
using Vuforia;
using UnityEngine.UI;
/// <summary>
/// This MonoBehaviour implements the Cloud Reco Event handling for this sample.
/// It registers itself at the CloudRecoBehaviour and is notified of new search results.
/// </summary>
public class SimpleCloudHandler : MonoBehaviour, ICloudRecoEventHandler
{
    #region PRIVATE_MEMBER_VARIABLES
    // CloudRecoBehaviour reference to avoid lookups
    private CloudRecoBehaviour mCloudRecoBehaviour;
    // ImageTracker reference to avoid lookups
    private ObjectTracker mImageTracker;
    private bool mIsScanning = false;
    private string mTargetMetadata = "";
    #endregion // PRIVATE_MEMBER_VARIABLES
    public AudioSource dubbing1;
    #region EXPOSED_PUBLIC_VARIABLES
    /// <summary>
    /// can be set in the Unity inspector to reference a ImageTargetBehaviour that is used for augmentations of new cloud reco results.
    /// </summary>
    public ImageTargetBehaviour ImageTargetTemplate;
    #endregion
    #region UNITY_MONOBEHAVIOUR_METHODS
    /// <summary>
    /// register for events at the CloudRecoBehaviour
    /// </summary>
    void Start()
    {
        // register this event handler at the cloud reco behaviour
        CloudRecoBehaviour cloudRecoBehaviour = GetComponent<CloudRecoBehaviour>();
        if (cloudRecoBehaviour)
        {
            cloudRecoBehaviour.RegisterEventHandler(this);
        }
        // remember cloudRecoBehaviour for later
        mCloudRecoBehaviour = cloudRecoBehaviour;
    }
    #endregion // UNITY_MONOBEHAVIOUR_METHODS
    #region ICloudRecoEventHandler_IMPLEMENTATION
    /// <summary>
    /// called when TargetFinder has been initialized successfully
    /// </summary>
    public void OnInitialized()
    {
        // get a reference to the Image Tracker, remember it
        mImageTracker = TrackerManager.Instance.GetTracker<ObjectTracker>();
    }
    /// <summary>
    /// visualize initialization errors
    /// </summary>
    public void OnInitError(TargetFinder.InitState initError)
    {
        Debug.LogError("Cloud Reco init error: " + initError);
    }
    /// <summary>
    /// visualize update errors
    /// </summary>
    public void OnUpdateError(TargetFinder.UpdateState updateError)
    {
        Debug.LogError("Cloud Reco update error: " + updateError);
    }

    /// <summary>
    /// when we start scanning, unregister Trackable from the ImageTargetTemplate, then delete all trackables
    /// </summary>
    public void OnStateChanged(bool scanning) {
        mIsScanning = scanning;
        if (scanning) {
            // clear all known trackables
            ObjectTracker tracker = TrackerManager.Instance.GetTracker<ObjectTracker> ();
            tracker.TargetFinder.ClearTrackables (false);
        }
    }

    /// <summary>
    /// Handles new search results
    /// </summary>
    /// <param name="targetSearchResult"></param>
    public void OnNewSearchResult(TargetFinder.TargetSearchResult targetSearchResult)
    {
        // duplicate the referenced image target
        GameObject newImageTarget = Instantiate(ImageTargetTemplate.gameObject) as GameObject;

        GameObject augmentation = null;

        string model_name = targetSearchResult.MetaData;


        if( augmentation != null )
            augmentation.transform.parent = newImageTarget.transform;

        // enable the new result with the same ImageTargetBehaviour:
        ImageTargetAbstractBehaviour imageTargetBehaviour = mImageTracker.TargetFinder.EnableTracking(targetSearchResult, newImageTarget);

        Debug.Log("Metadata value is " + model_name );
        mTargetMetadata = model_name;


        // keep only the augmentation that matches the cloud metadata,
        // destroy the other child of the duplicated target
        switch (model_name)
        {
            case "Cube":
                Destroy(imageTargetBehaviour.gameObject.transform.Find("Capsule").gameObject);
                break;

            case "Capsule":
                Destroy(imageTargetBehaviour.gameObject.transform.Find("Cube").gameObject);
                break;
        }

        if (!mIsScanning)
        {
            // re-enable cloud reco so scanning continues after this result
            mCloudRecoBehaviour.CloudRecoEnabled = true;
        }
    }


    #endregion // ICloudRecoEventHandler_IMPLEMENTATION

    void OnGUI() {
        GUI.Box (new Rect(100,200,200,50), "Metadata: " + mTargetMetadata);
    }



}
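
For the video marker I also tried adding a third case to the switch in OnNewSearchResult, but hard-coding a Destroy call per metadata value gets messy, so what I am experimenting with instead is keeping only the child whose name matches the metadata. Treat this as a sketch of that idea: the "Video" metadata value and the matching child object are assumptions from my own cloud database and scene, only "Cube" and "Capsule" come from the script above.

using UnityEngine;

// Sketch: after EnableTracking() has returned the duplicated target,
// keep only the augmentation child whose name equals the cloud metadata
// string and destroy every other child. Meant to replace the switch above.
public static class CloudAugmentationPruner
{
    public static void KeepOnly(Transform targetRoot, string metadata)
    {
        foreach (Transform child in targetRoot)
        {
            if (child.name != metadata)
            {
                // Destroy is deferred to the end of the frame, so it is
                // safe to call while iterating over the children
                Object.Destroy(child.gameObject);
            }
        }
    }
}

In OnNewSearchResult this would be called as CloudAugmentationPruner.KeepOnly(imageTargetBehaviour.transform, model_name) right after EnableTracking, so a marker with metadata "Video" would keep only its video child.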

And here is the script "DefaultTrackableEventHandler.cs"; this script is used to play the audio for each object:

using UnityEngine;

namespace Vuforia
{
    /// <summary>
    /// A custom handler that implements the ITrackableEventHandler interface.
    /// </summary>
    public class DefaultTrackableEventHandler : MonoBehaviour,
                                                ITrackableEventHandler
    {
        //------------Begin Sound----------
        public AudioSource soundTarget;
        public AudioClip clipTarget; 
        private AudioSource[] allAudioSources;

        //function to stop all sounds
        void StopAllAudio()
        {
            allAudioSources = FindObjectsOfType(typeof(AudioSource)) as AudioSource[];
            foreach (AudioSource audioS in allAudioSources)
            {
                audioS.Stop();
            }
        }

        //function to play sound
        void playSound(string ss)
        {
            clipTarget = (AudioClip)Resources.Load(ss);
            soundTarget.clip = clipTarget;
            soundTarget.loop = false;
            soundTarget.playOnAwake = false;
            soundTarget.Play();
        }

        //-----------End Sound------------




        #region PRIVATE_MEMBER_VARIABLES

        private TrackableBehaviour mTrackableBehaviour;    
        #endregion // PRIVATE_MEMBER_VARIABLES


        #region UNITY_MONOBEHAVIOUR_METHODS

        void Start()
        {
            mTrackableBehaviour = GetComponent<TrackableBehaviour>();
            if (mTrackableBehaviour)
            {
                mTrackableBehaviour.RegisterTrackableEventHandler(this);
            }

            //Register / add the AudioSource as object
            soundTarget = (AudioSource)gameObject.AddComponent<AudioSource>();
        }

        #endregion // UNITY_MONOBEHAVIOUR_METHODS



        #region PUBLIC_METHODS

        /// <summary>
        /// Implementation of the ITrackableEventHandler function called when the
        /// tracking state changes.
        /// </summary>
        public void OnTrackableStateChanged(
                                        TrackableBehaviour.Status previousStatus,
                                        TrackableBehaviour.Status newStatus)
        {
            if (newStatus == TrackableBehaviour.Status.DETECTED ||
                newStatus == TrackableBehaviour.Status.TRACKED ||
                newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
            {
                OnTrackingFound();
            }
            else
            {
                OnTrackingLost();
            }
        }

        #endregion // PUBLIC_METHODS



        #region PRIVATE_METHODS


        private void OnTrackingFound()
        {
            Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
            Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);

            // Enable rendering:
            foreach (Renderer component in rendererComponents)
            {
                component.enabled = true;
            }

            // Enable colliders:
            foreach (Collider component in colliderComponents)
            {
                component.enabled = true;
            }

            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");

            if (mTrackableBehaviour.TrackableName == "Cube") 
            {
                playSound ("sounds/efek1");
            }
            if (mTrackableBehaviour.TrackableName == "Capsule") 
            {
                playSound ("sounds/efek2");
            }
        }

        private void OnTrackingLost()
        {
            Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
            Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);

            // Disable rendering:
            foreach (Renderer component in rendererComponents)
            {
                component.enabled = false;
            }

            // Disable colliders:
            foreach (Collider component in colliderComponents)
            {
                component.enabled = false;
            }

            // stop whatever audio is still playing once the target is lost
            StopAllAudio();

            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
        }

        #endregion // PRIVATE_METHODS
    }
}

The scripts above work perfectly when the content is a 3D object, but they don't work when the content is a video. I'm really confused about how to combine video playback with cloud AR.
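
What I have in mind for the video marker is a separate trackable handler on that Image Target which starts and stops the video together with tracking. The sketch below uses Unity's built-in UnityEngine.Video.VideoPlayer (Unity 5.6+) on a child quad instead of the Vuforia VideoPlayback sample, so the class name, the child object and the whole approach are my assumptions, not something from the samples:

using UnityEngine;
using UnityEngine.Video;
using Vuforia;

// Sketch: start/stop a Unity VideoPlayer that lives on a child of the
// Image Target whenever that target is found or lost. The video child
// and its VideoPlayer component are assumptions from my own scene setup.
public class VideoTrackableHandler : MonoBehaviour, ITrackableEventHandler
{
    private TrackableBehaviour mTrackableBehaviour;
    private VideoPlayer mVideoPlayer;

    void Start()
    {
        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
        }

        // the VideoPlayer is expected on a child quad of this target
        mVideoPlayer = GetComponentInChildren<VideoPlayer>();
    }

    public void OnTrackableStateChanged(TrackableBehaviour.Status previousStatus,
                                        TrackableBehaviour.Status newStatus)
    {
        bool found = newStatus == TrackableBehaviour.Status.DETECTED ||
                     newStatus == TrackableBehaviour.Status.TRACKED ||
                     newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED;

        // 3D-only targets have no VideoPlayer child, so do nothing for them
        if (mVideoPlayer == null)
            return;

        if (found)
            mVideoPlayer.Play();
        else
            mVideoPlayer.Stop();
    }
}

The null check matters because only the video target would have a VideoPlayer child; the 3D-only targets simply skip it.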

  • If I understand correctly: you want to use cloud recognition to recognize an image target and then display a video at that location, and the problem is that you can't display the video? – Draco18s no longer trusts SE Jul 11 '17 at 13:53
  • Thank you so much for your response. Yes, I'm trying to use cloud AR to display video as well as 3D content, based on each marker in the cloud. For the 3D content it works, but the video case doesn't. I followed the same steps for the video as for the 3D content: I added the video playback into the Image Target and set the video location on "VideoPlaybackBehaviour.cs", then in "SimpleCloudHandler.cs" I added a switch case and named the metadata after the video playback object. – Amir Hamri Jul 12 '17 at 07:22
  • But when I run the project in Unity, the marker shows the video texture and then throws "NullReferenceException: Object reference not set to an instance of an object, TrackableEventHandler.OnTrackingFound() (at Assets/Script/TrackableEventHandler.cs:117)". I'm really confused about how to modify "TrackableEventHandler.cs" or "SimpleCloudHandler.cs" so that each marker can either display its 3D content or play its video. Can you please help me out? Sorry for my bad English. – Amir Hamri Jul 12 '17 at 07:28
