        // Entry point: clusters the speakers found in test.wav and prints their time intervals.
        public static void Main(string[] args)
        {
            SpeakerIdentification speakerIdentification = new SpeakerIdentification();
            URL resource = ClassLiteral<SpeakerIdentificationDemo>.Value.getResource("test.wav");

            // Group the audio stream into per-speaker clusters.
            ArrayList speakers = speakerIdentification.cluster(resource.openStream());

            SpeakerIdentificationDemo.printSpeakerIntervals(speakers, resource.getPath());
            SpeakerIdentificationDemo.speakerAdaptiveDecoding(speakers, resource);
        }
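        // A minimal sketch of how the clustered result above might be consumed, assuming each
        // cluster exposes its time intervals. The SpeakerCluster and Segment type names and
        // their members (SpeakerIntervals, StartTime, Length) are assumptions for illustration,
        // not confirmed members of this codebase.
        private static void PrintSpeakerIntervalsSketch(ArrayList speakers, string fileName)
        {
            int index = 0;
            foreach (SpeakerCluster cluster in speakers)
            {
                foreach (Segment segment in cluster.SpeakerIntervals)
                {
                    // One line per interval: which file, which speaker, start and length in ms.
                    Console.WriteLine($"{fileName} speaker {index}: starts at {segment.StartTime} ms, lasts {segment.Length} ms");
                }
                index++;
            }
        }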
        /// <summary>
        /// HomeViewModel constructor. Assigns the API service clients and creates the <see cref="FrameGrabber{AnalysisResultType}"/> and <see cref="Recording"/> objects.
        /// </summary>
        /// <param name="faceServiceClient"><see cref="FaceServiceClient"/> object</param>
        /// <param name="speakerIdentification"><see cref="ISpeakerIdentificationServiceClient"/> object</param>
        public HomeViewModel(FaceServiceClient faceServiceClient, ISpeakerIdentificationServiceClient speakerIdentification)
        {
            _faceServiceClient     = faceServiceClient;
            _speakerIdentification = new SpeakerIdentification(speakerIdentification);

            _frameGrabber = new FrameGrabber<CameraResult>();
            _recording    = new Recording();

            Initialize();
        }
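        // A minimal usage sketch, assuming the Microsoft.ProjectOxford Face and Speaker
        // Recognition client libraries: the subscription-key constructor arguments and the
        // SpeakerIdentificationServiceClient type are assumptions based on those SDKs,
        // not taken from this file.
        private static HomeViewModel CreateHomeViewModelSketch()
        {
            var faceClient    = new FaceServiceClient("<face-api-subscription-key>");
            var speakerClient = new SpeakerIdentificationServiceClient("<speaker-recognition-subscription-key>");

            // The constructor wires up the frame grabber and recording, so the view model
            // is ready to use once the service clients are supplied.
            return new HomeViewModel(faceClient, speakerClient);
        }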
        /// <summary>
        /// AdministrationViewModel constructor. Assigns the API service clients and subscribes to the speaker identification error and status events.
        /// </summary>
        /// <param name="faceServiceClient"><see cref="FaceServiceClient"/> object</param>
        /// <param name="speakerIdentification"><see cref="ISpeakerIdentificationServiceClient"/> object</param>
        public AdministrationViewModel(FaceServiceClient faceServiceClient, ISpeakerIdentificationServiceClient speakerIdentification)
        {
            _speakerIdentification = new SpeakerIdentification(speakerIdentification);
            _speakerIdentification.OnSpeakerIdentificationError         += OnSpeakerIdentificationError;
            _speakerIdentification.OnSpeakerIdentificationStatusUpdated += OnSpeakerIdentificationStatusUpdated;

            _faceServiceClient = faceServiceClient;

            Initialize();
        }
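        // A minimal sketch of the two handlers subscribed above, assuming both events deliver
        // a plain string message; the string-based signatures are an assumption, not the actual
        // delegate types defined by the SpeakerIdentification wrapper.
        private void OnSpeakerIdentificationError(string errorMessage)
        {
            // Surface identification failures, e.g. to a status bar or log.
            Debug.WriteLine($"Speaker identification error: {errorMessage}");
        }

        private void OnSpeakerIdentificationStatusUpdated(string statusMessage)
        {
            // Report progress of enrollment and identification operations.
            Debug.WriteLine($"Speaker identification status: {statusMessage}");
        }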