Example #1
 public Vcr()
 {
     StoppedState = new StoppedState(this);
     PlayingState = new PlayingState(this);
     RecordingState = new RecordingState(this);
     State = StoppedState;
 }
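Example #1 is the classic State pattern: the Vcr owns one instance of each state object and delegates to whichever is current. The state classes themselves are not part of this listing; the following is only a minimal, self-contained sketch of what they might look like, and every member other than StoppedState, PlayingState, RecordingState and State is an assumption for illustration.

 // Minimal sketch of the State pattern implied by the constructor above (assumed, not the original project's code).
 public class Vcr
 {
     public VcrState StoppedState   { get; }
     public VcrState PlayingState   { get; }
     public VcrState RecordingState { get; }
     public VcrState State          { get; set; }

     public Vcr()
     {
         StoppedState   = new StoppedState(this);
         PlayingState   = new PlayingState(this);
         RecordingState = new RecordingState(this);
         State          = StoppedState;
     }

     // The Vcr delegates every operation to the current state object.
     public void Play()   => State.Play();
     public void Record() => State.Record();
     public void Stop()   => State.Stop();
 }

 public abstract class VcrState
 {
     protected readonly Vcr Vcr;
     protected VcrState(Vcr vcr) => Vcr = vcr;

     public virtual void Play()   { }
     public virtual void Record() { }
     public virtual void Stop()   { }
 }

 public class StoppedState : VcrState
 {
     public StoppedState(Vcr vcr) : base(vcr) { }
     public override void Play()   => Vcr.State = Vcr.PlayingState;
     public override void Record() => Vcr.State = Vcr.RecordingState;
 }

 public class PlayingState : VcrState
 {
     public PlayingState(Vcr vcr) : base(vcr) { }
     public override void Stop() => Vcr.State = Vcr.StoppedState;
 }

 public class RecordingState : VcrState
 {
     public RecordingState(Vcr vcr) : base(vcr) { }
     public override void Stop() => Vcr.State = Vcr.StoppedState;
 }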
Example #2
 public void Stop()
 {
     if (recordingState == RecordingState.Recording)
     {
         recordingState = RecordingState.RequestedStop;
         waveIn.StopRecording();
     }
 }
Example #3
 public void BeginRecording(string waveFileName)
 {
     if (recordingState != RecordingState.Monitoring)
     {
         throw new InvalidOperationException("Can't begin recording while we are in this state: " + recordingState.ToString());
     }
     writer = new WaveFileWriter(waveFileName, recordingFormat);
     recordingState = RecordingState.Recording;
 }
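The NAudio-based recorder snippets in this listing (Examples #2 through #5, and later #22, #42, #46 and #54) all drive the same RecordingState enum, whose declaration is not shown here. Judging from the values they reference, it presumably looks roughly like the following; the declaration itself is an assumption:

 // Assumed shape of the RecordingState enum used by the NAudio recorder examples.
 public enum RecordingState
 {
     Stopped,        // no capture device open yet
     Monitoring,     // device open and levels displayed, nothing written to disk
     Recording,      // samples are written to the WaveFileWriter
     RequestedStop,  // stop requested; the DataAvailable callback finalizes the writer
     Paused          // recording temporarily suspended (see Example #54)
 }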
Example #4
        public Recorder()
        {
            recordingState = RecordingState.Monitoring;

            waveIn = new WaveIn();
            waveIn.DataAvailable += waveIn_DataAvailable;
            waveIn.RecordingStopped += waveIn_RecordingStopped;
            waveIn.BufferMilliseconds = 15;
            waveIn.WaveFormat = new WaveFormat(44100, 16, 2);

            if (!(bool)(DesignerProperties.IsInDesignModeProperty.GetMetadata(typeof(DependencyObject)).DefaultValue))
                waveIn.StartRecording();
        }
Example #5
 public void BeginMonitoring(int recordingDevice)
 {
     if(recordingState != RecordingState.Stopped)
     {
         throw new InvalidOperationException("Can't begin monitoring while we are in this state: " + recordingState.ToString());
     }
     waveIn = new WaveIn();
     waveIn.DeviceNumber = recordingDevice;
     waveIn.DataAvailable += waveIn_DataAvailable;
     waveIn.RecordingStopped += new EventHandler(waveIn_RecordingStopped);
     waveIn.WaveFormat = recordingFormat;
     waveIn.StartRecording();
     TryGetVolumeControl();
     recordingState = RecordingState.Monitoring;
 }
Example #6
                public override void Prepare(ref RecordingState state)
                {
                    if (IsShutdown)
                    {
                        state = null;
                        return;
                    }

                    using (_txOpMerger._parent.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
                        using (var writer = new BlittableJsonTextWriter(context, _txOpMerger._recording.Stream))
                        {
                            writer.WriteStartArray();

                            var commandDetails       = new StartRecordingDetails();
                            var commandDetailsReader = SerializeRecordingCommandDetails(context, commandDetails);

                            context.Write(writer, commandDetailsReader);
                        }

                    state = new EnabledRecordingState(_txOpMerger);
                }
Example #7
        /// <summary>
        /// Stop recording.
        /// </summary>
        public void Stop()
        {
            CheckForInvalidThreadCall();
            lock (_lockObj)
            {
                CheckForDisposed();
                //don't check for initialized since disposing without init would cause an exception

                if (RecordingState != RecordingState.Stopped)
                {
                    _recordingState = RecordingState.Stopped;
                    _recordThread.WaitForExit(); //possible deadlock
                    _recordThread = null;
                }
                else if (RecordingState == RecordingState.Stopped && _recordThread != null)
                {
                    _recordThread.WaitForExit();
                    _recordThread = null;
                }
            }
        }
Example #8
        private void SetButtonState(RecordingState state)
        {
            switch (state)
            {
            case RecordingState.Stopped:
            {
                this._buttonState = RecordingState.Stopped;
                this.buttonAnimationTimer.Enabled = false;
                this.AnimatePauseButton();         // Ensure the paused button is returned to its normal color

                this.recordButton.Enabled = true;
                this.stopButton.Enabled   = false;
                this.pauseButton.Enabled  = false;
                break;
            }

            case RecordingState.Paused:
            {
                this._buttonState = RecordingState.Paused;
                this.buttonAnimationTimer.Enabled = true;

                this.recordButton.Enabled = true;
                this.stopButton.Enabled   = true;
                this.pauseButton.Enabled  = false;
                break;
            }

            case RecordingState.Recording:
            {
                this._buttonState = RecordingState.Recording;
                this.buttonAnimationTimer.Enabled = false;
                this.AnimatePauseButton();         // Ensure the paused button is returned to its normal color

                this.recordButton.Enabled = false;
                this.stopButton.Enabled   = true;
                this.pauseButton.Enabled  = true;
                break;
            }
            }
        }
Example #9
        private bool Close()
        {
            RecordingState state  = this.recorder.State;
            bool           result = false;

            switch (state)
            {
            case RecordingState.Idle:
                result = true;
                break;

            case RecordingState.Preparing:
                break;

            case RecordingState.Paused:
            case RecordingState.Recording:
                result = this.view.ShowStopMessage();
                if (result)
                {
                    this.recorder.Stop();
                }
                break;
            }
            if (result)
            {
                this.view.AllowUpdate = false;
                if (this.hotKeyManager != null)
                {
                    this.hotKeyManager.Dispose();
                    this.hotKeyManager = null;
                }
                try {
                    this.configuration.Save();
                }
                catch (ConfigurationException ce) {
                    Trace.TraceError(ce.ToString());
                }
            }
            return(result);
        }
Example #10
    void Update()
    {
        currentFrameAverage = 0f;

        if (_state != RecordingState.Stop)
        {
            currentFrameAverage = GetCurrentFrameAverage();

            if (_state == RecordingState.Sleep)
            {
                //Debug.Log(currentFrameAverage);
                if (currentFrameAverage > soundThreshold) //if player voice value exceeds a certain value
                {
                    _state = RecordingState.Record;
                }
            }
            if (_state == RecordingState.Record)
            {
                if (SpawnedBubble.GetComponent <BubbleSizeBehavior>().Detached)
                {
                    if (NearLips)
                    {
                        SpawnedBubble = Instantiate(BubblePrefabRef, gameObject.transform.position, gameObject.transform.rotation); //spawns bubble at location
                        SpawnedBubble.GetComponent <BubbleSizeBehavior>().BlowStick = gameObject;
                        //stop recording immediately after 1 bubble spawn
                    }
                }
                if (NearLips)
                {
                    if (!SpawnedBubble.GetComponent <BubbleSizeBehavior>().Detached)
                    {
                        SpawnedBubble.GetComponent <BubbleSizeBehavior>().IncreaseSizeOnAction();
                    }
                }

                StopRecording();
            }
        }
    }
Example #11
    // Use this for initialization
    void Start()
    {
        OutputDirectoryBasePath = MeshSaver.MeshFolderName;

        m_ToggleRecordingButtonText = ToggleRecordingButton.GetComponentInChildren <Text>();
        Button btn = ToggleRecordingButton.GetComponent <Button>();

        btn.onClick.AddListener(ToggleButtonOnClick);

        m_CurrentRecordingState = RecordingState.NotRecording;
        UpdateRecordingUI();

        //Fetch a pointer to Unity's spatial coordinate system if you need pixel mapping
        _spatialCoordinateSystemPtr = WorldManager.GetNativeISpatialCoordinateSystemPtr();

        //Call this in Start() to ensure that the CameraStreamHelper is already "Awake".
        CameraStreamHelper.Instance.GetVideoCaptureAsync(OnVideoCaptureCreated);
        //You could also do this "shortcut":
        //CameraStreamManager.Instance.GetVideoCaptureAsync(v => videoCapture = v);

        _videoPanelUI = GameObject.FindObjectOfType <VideoPanel>();
    }
Example #12
        private void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            if (CurrentState != RecordingState.Recording &&
                CurrentState != RecordingState.RecordNow)
            {
                Graphics.Blit(source, destination);
                return;
            }

            _elapsedTime += Time.unscaledDeltaTime;

            if (_elapsedTime >= 1.0f / CaptureFrameRate)
            {
                // Frame data
                var rt = _recycledRenderTexture;
                _recycledRenderTexture = null;

                if (rt == null)
                {
                    rt            = new RenderTexture(_width, _height, 0, RenderTextureFormat.ARGB32);
                    rt.wrapMode   = TextureWrapMode.Clamp;
                    rt.filterMode = FilterMode.Bilinear;
                    rt.anisoLevel = 0;
                }

                Graphics.Blit(source, rt);

                _elapsedTime = 0;

                StartCoroutine("StoreCaptureFrame", rt);

                if (CurrentState == RecordingState.RecordNow)
                {
                    CurrentState = RecordingState.OnHold;
                }
            }

            Graphics.Blit(source, destination);
        }
Example #13
        private void HandlePlayingState()
        {
            if (playersAlive.Value == 0)
            {
                currentRecordingTime.Value = recordingLength;
                foreach (var inputRecorder in inputRecorders)
                {
                    inputRecorder.ClearInputQueue();
                }
            }

            if (currentRecordingTime.Value >= recordingLength)
            {
                recordingState = RecordingState.BREAK;
                audioManager.StopAll();
                audioManager.PlayClip(1);
                recordingStateAfterBreak   = RecordingState.RECORDING;
                currentRecordingTime.Value = 0f;
                playersAlive.Value         = FindObjectsOfType <PlayerMovement>().Length;
                resetLevel.RaiseGameEvent();
            }
        }
Example #14
        /// <summary>
        /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
        /// </summary>
        /// <param name="eventSync">True to use event synchronization instead of a simple loop-and-sleep behavior. Don't use this in combination with exclusive mode.</param>
        /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
        /// <param name="latency">Latency of the capture specified in milliseconds.</param>
        /// <param name="captureThreadPriority">ThreadPriority of the capture thread which runs in the background and provides the audio capture itself.</param>
        /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
        /// <param name="synchronizationContext">The <see cref="SynchronizationContext"/> to use to fire events on.</param>
        /// <exception cref="PlatformNotSupportedException">The current platform does not support Wasapi. For more details see: <see cref="IsSupportedOnCurrentPlatform"/>.</exception>
        /// <exception cref="ArgumentException">The <paramref name="eventSync"/> parameter is set to true while the <paramref name="shareMode"/> is set to <see cref="AudioClientShareMode.Exclusive"/>.</exception>
        public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat,
                             ThreadPriority captureThreadPriority, SynchronizationContext synchronizationContext)
        {
            if (!IsSupportedOnCurrentPlatform)
            {
                throw new PlatformNotSupportedException("Wasapi is only supported on Windows Vista and above.");
            }
            if (eventSync && shareMode == AudioClientShareMode.Exclusive)
            {
                throw new ArgumentException("Don't use eventSync in combination with exclusive mode.");
            }

            _eventSync  = eventSync;
            _shareMode  = shareMode;
            _waveFormat = defaultFormat;

            _latency = latency;
            _captureThreadPriority  = captureThreadPriority;
            _synchronizationContext = synchronizationContext;

            _recordingState = RecordingState.Stopped;
        }
Example #15
    void OnVideoModeStarted(VideoCaptureResult result)
    {
        if (result.success == false)
        {
            Debug.LogWarning("Could not start video mode.");
            return;
        }

        m_NumFrames          = 0;
        m_RecordingStartTime = Time.time;

        m_CurrentRecordingState = RecordingState.Recording;
        UpdateRecordingUI();

        Debug.Log(string.Format("Started video recording for session {0}", m_CurrentRecordingLabel));

        Debug.Log("Video capture started.");

        if (m_CurrentRecordingState == RecordingState.Recording)
        {
            this._videoCapture.RequestNextFrameSample(OnFrameSampleAcquired);
        }
    }
Example #16
        private async Task DisposeMediaCaptureAsync()
        {
            try
            {
                ShowBusyIndicator("Freeing up resources...");

                if (currentState == RecordingState.Recording && mediaCapture != null)
                {
                    ShowBusyIndicator("recording stopped...");
                    await mediaCapture.StopRecordAsync();
                }
                else if (currentState == RecordingState.Previewing && mediaCapture != null)
                {
                    ShowBusyIndicator("video preview stopped...");
                    await mediaCapture.StopPreviewAsync();
                }

                currentState = RecordingState.Stopped;
            }
            catch (Exception ex)
            {
                Debug.WriteLine($"DisposeAll Error: {ex.Message}");
                await new MessageDialog($"Error disposing MediaCapture: {ex.Message}").ShowAsync();
            }
            finally
            {
                if (mediaCapture != null)
                {
                    mediaCapture.Failed -= Failed;
                    mediaCapture.Dispose();
                    mediaCapture = null;
                }

                PreviewMediaElement.Source = null;
                HideBusyIndicator();
            }
        }
Example #17
        private void ShowRecordButton()
        {
            if (m_recordingState != RecordingState.Recording)
            {
                if (GUILayout.Button("Record Session"))
                {
                    m_recordedEvents.Clear();
                    startedRecordingTime = Time.realtimeSinceStartup;
                    m_recordingState     = RecordingState.Recording;
                }
            }
            else if (m_recordingState == RecordingState.Recording)
            {
                if (GUILayout.Button("Stop Recording"))
                {
                    m_recordingState = RecordingState.Stopped;

                    if (EditorUtility.DisplayDialog("Recording session stopped. Do you want to save recording ?", "Save Recording", "Discard"))
                    {
                        SaveRecording();
                    }
                }
            }
        }
Example #18
    //update runs unbounded
    //25-60 fps
    //lerp to each frame in the list
    //add a little bit of easing to the movements;
    //try DOTween and record all positions as places to go; moveTo, RotateBy,
    //if fixed timestep is not set correctly, might look weird. Fixed timestep should be set so you're running at 60 fps.



    private void FixedUpdate()
    {
        if (recordingState == RecordingState.RECORDING)
        {
            if (recordTime >= 0)
            {
                RecordMovement(transform.position);
                RecordRotation(transform.eulerAngles);
                RecordWeaponActivity(isAttacking);
            }
            else
            {
                Debug.Log(this.gameObject + "is not recording!");
                recordingState = RecordingState.NOT_RECORDING;
            }
        }

        if (recordingState == RecordingState.PLAYBACK)
        {
            MoveBasedOnRecording();
            RotateBasedOnRecording();
            AttackBasedOnRecording();
        }
    }
Example #19
        public void AddKey(PropertyModification[] modifications)
        {
            var undoModifications = new UndoPropertyModification[modifications.Length];

            for (int i = 0; i < modifications.Length; ++i)
            {
                var modification = modifications[i];
                undoModifications[i].previousValue = modification;
                undoModifications[i].currentValue  = modification;
            }

            BeginKeyModification();

            var recordingState = new RecordingState(state, RecordingStateMode.ManualKey);

            AnimationRecording.Process(recordingState, undoModifications);

            EndKeyModification();

            RemoveFromCandidates(modifications);

            ResampleAnimation();
            state.Repaint();
        }
Example #20
    public void StopRecording()
    {
        //buttonMng.GetComponent<buttonmanager> ().startLoading ();
        if (Microphone.devices.Length < 1)
        {
            return;
        }

        _state = RecordingState.Stop;
        _time  = 0f;

        int samplesCount = _sourceRecording.timeSamples - _frequency - _startSample;

        if (samplesCount > 0)
        {
            float[] samples = new float[samplesCount];
            _sourceRecording.clip.GetData(samples, _startSample);
            AudioClip clip = AudioClip.Create("audio", samplesCount, _sourceRecording.clip.channels, _frequency, false);
            clip.SetData(samples, 0);
            if (OnRecordingEnd != null)
            {
                OnRecordingEnd.Invoke(clip);
            }
            Debug.Log("test");
            RequestMaker.GetComponent <CallRequest> ().CheckInput(clip);
        }


        Microphone.End(_micName);

        _sourceRecording.Stop();
        if (_sourceRecording.clip != null)
        {
            Destroy(_sourceRecording.clip);
        }
    }
Example #21
        public void Open()
        {
            if (this.state != RecordingState.Closed)
            {
                throw new InvalidOperationException();
            }
            int hr = WSSoundInterop.WSOpen(this.pws);

            if (hr != 0)
            {
                throw new SoundException("WSOpen", hr);
            }
            hr = WSSoundInterop.WSGetBufferLength(this.pws, out this.bufferLength);
            if (hr != 0)
            {
                throw new SoundException("WSGetBufferLength", hr);
            }
            hr = WSSoundInterop.WSGetPacketLength(this.pws, ref this.packetLength);
            if (hr != 0)
            {
                throw new SoundException("WSGetPacketLength", hr);
            }
            this.state = RecordingState.Opened;
        }
Example #22
 public void StopRecording()
 {
     recordingState = RecordingState.RequestedStop;
     if (Markers != null)
         Markers.Clear();
 }
Example #23
    /// <summary>
    /// Begin capturing the motion from the user.
    /// </summary>
    private void BeginRecording()
    {
        captureState = RecordingState.Recording;
        captureData = new NUIHumanoidAnimation();

        stopwatch.Stop();
        stopwatch.Reset();
        stopwatch.Start();
    }
Example #24
 public Recorder(WaveFormat FORMAT, int BUFFER_SIZE)
 {
     format = FORMAT;
     buffer = new Buffer(BUFFER_SIZE);
     state  = RecordingState.Stopped;
 }
Example #25
        public void FixedUpdate()
        {
            if (Input.GetKeyDown(RecordKey))
            {
                if (State == RecordingState.Idle)
                    State = RecordingState.Record;
                else if (State == RecordingState.Record)
                    State = RecordingState.Idle;
            }

            if (Input.GetKeyDown(PlayKey))
            {
                if (State == RecordingState.Idle)
                    State = RecordingState.Play;
                else if (State == RecordingState.Play)
                    State = RecordingState.Idle;
            }
        }
Example #26
        private void RecordPrivate(DisplayProvider displayProvider, SoundProvider soundProvider)
        {
            bool recordDisplay = displayProvider != null;
             bool recordSound = soundProvider != null;
             AviFile aviFile = null;
             AcmEncoder audioEncoder = null;

             this.duration = TimeSpan.Zero;
             try {
            DisplayFormat videoFormat = null;
            SoundFormat audioFormat = null;

            int soundReadInterval = 0;
            if (recordDisplay) {
               displayProvider.Open();
               videoFormat = displayProvider.Format;
            }
            if (recordSound) {
               soundProvider.Open();
               soundReadInterval = (int)Math.Ceiling(soundProvider.BufferLength / 2.0); // ms
               audioFormat = soundProvider.Format;
               audioEncoder = soundProvider.GetEncoder();
            }
            // Open AVI file
            aviFile = new AviFile();
            aviFile.Open(fileName, videoFormat, fps, this.compressor, audioFormat, audioEncoder);

            // Initialize helper variables
            int frameIndex = 0;
            int frameDuration = recordDisplay ? (int)(msPerSecond / this.fps) : 0;
            int frameBufferLength = recordDisplay ? displayProvider.BitmapBytes : 0;
            int startingFrameIndex = 0;
            int soundSampleIndex = 0;
            long startTime = DateTime.Now.Ticks;
            long lastSoundRead = DateTime.Now.Ticks;
            TimeSpan prevDuration = TimeSpan.Zero;
            TimeSpan currentDuration = TimeSpan.Zero;

            // Update state
            lock (syncRoot) {
               this.state = RecordingState.Recording;
            }
            if (recordSound) {
               // Start sound recording
               soundProvider.Start();
            }
            // Recording loop; this is a long one huh?!
            do {
               // Check if paused
               if (this.state == RecordingState.Paused) {
                  prevDuration = prevDuration.Add(currentDuration);
                  if (recordSound) {
                     // Read remaining sound data and stop sound recording
                     byte[] soundData = soundProvider.Read(true);
                     soundSampleIndex += aviFile.AddSound(soundSampleIndex, soundData, true);
                     soundProvider.Stop();
                  }
                  // Let the thread executing Pause() know that pause is done
                  this.stateTransition.Set();
                  while (this.state == RecordingState.Paused) {
                     Thread.Sleep(pauseDelay);
                  }

                  // State is changed, check new state
                  if (this.state == RecordingState.Idle) {
                     return;
                  }

                  // Resume() is called
                  if (recordSound) {
                     soundProvider.Start();
                     lastSoundRead = DateTime.Now.Ticks;
                  }
                  if (recordDisplay) {
                     startingFrameIndex = frameIndex;
                  }

                  // Reset duration variables
                  startTime = DateTime.Now.Ticks;
                  currentDuration = TimeSpan.Zero;

                  // Let the executing thread know that resume is done
                  this.stateTransition.Set();
               }

               // Add a video frame
               if (recordDisplay) {
                  // Render display and add rendered bitmap to the avi file
                  displayProvider.Render();
                  IntPtr pFrameData = displayProvider.Lock();
                  try {
                     aviFile.AddFrame(pFrameData, frameIndex, 1, frameBufferLength);
                  }
                  finally {
                     displayProvider.Unlock();
                  }
                  frameIndex++;
               }

               // Add sound
               if (recordSound) {
                  // Read recorded sound if it's time to do so
                  if ((DateTime.Now.Ticks - lastSoundRead) / ticksPerMs >= soundReadInterval) {
                     // Read sound data
                     SoundFormat sourceFormat = soundProvider.SourceFormat;
                     byte[] soundData = soundProvider.Read();
                     int samplesRead = (int)(soundData.Length / sourceFormat.BlockAlign);

                     // Get number of out of sync samples
                     TimeSpan durationByNow = prevDuration + new TimeSpan(DateTime.Now.Ticks - startTime);
                     int nOutOfSyncSamples = GetOutOfSyncSamples(soundProvider, soundSampleIndex , durationByNow,
                                                                 samplesRead);
                     if (nOutOfSyncSamples > 0) {
                        // Add silence samples if we have less than expected samples
                        soundSampleIndex += aviFile.AddSilence(soundSampleIndex, nOutOfSyncSamples);
                     }
                     else if (nOutOfSyncSamples < 0) {
                        // Drop read samples as much as possible if we have more than expected samples
                        int nSamplesToKeep = Math.Max(0, samplesRead + nOutOfSyncSamples);
                        if (nSamplesToKeep > 0) {
                           int nBytesToKeep = nSamplesToKeep * sourceFormat.BlockAlign;
                           int nBytesToDrop = soundData.Length - nBytesToKeep;
                           byte[] droppedSoundData = new byte[nBytesToKeep];
                           Array.Copy(soundData, nBytesToDrop, droppedSoundData, 0, nBytesToKeep);
                           soundData = droppedSoundData;
                        }
                        samplesRead = nSamplesToKeep;
                     }
                     // Add sound data to the avi file
                     if (samplesRead > 0) {
                        soundSampleIndex += aviFile.AddSound(soundSampleIndex, soundData, false);
                     }
                     lastSoundRead = DateTime.Now.Ticks;
                  }
               }

               // Synchronize display
               if (recordDisplay) {
                  long delay = (DateTime.Now.Ticks - startTime) / ticksPerMs -
                                frameDuration * ((frameIndex - startingFrameIndex) - 1);
                  if (delay < frameDuration) {
                     // Extra delay to synchronize with fps
                     Thread.Sleep((int)(frameDuration - delay));
                  }
                  else {
                     // Calculate how many frames are lost
                     int lostFrames = (int)Math.Floor((decimal)delay / frameDuration);
                     frameIndex += lostFrames;
                     // Extra delay to synchronize with fps
                     Thread.Sleep((int)(frameDuration - delay % frameDuration));
                  }
               }
               else { /* No display recording, just sleep for a while so that sound buffers get filled  */
                  Thread.Sleep(1);
               }

               // Update duration
               currentDuration = new TimeSpan(DateTime.Now.Ticks - startTime);
               this.duration = prevDuration + currentDuration;

            } while (this.state != RecordingState.Idle);

            // Read remaining sound data and stop sound recording
            if (recordSound) {
               byte[] soundData = soundProvider.Read(true);
               soundSampleIndex += aviFile.AddSound(soundSampleIndex, soundData, true);
               soundProvider.Stop();
            }
             }
             finally {
            if (recordSound) {
               soundProvider.Close();
               if (audioEncoder != null) {
                  audioEncoder.Dispose();
               }
            }
            if (recordDisplay) {
               displayProvider.Close();
            }
            if (aviFile != null) {
               aviFile.Dispose();
            }
             }
        }
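The recording loop above relies on a GetOutOfSyncSamples helper that is not included in this listing. The sketch below is only a guess at what such a helper computes, namely the expected sample count for the elapsed duration minus the samples already written; the SamplesPerSecond property is likewise an assumption.

         // Illustrative sketch only; the real GetOutOfSyncSamples is not shown in this listing.
         // A positive result means fewer samples were captured than the elapsed time requires
         // (add silence); a negative result means too many were captured (drop the excess).
         private static int GetOutOfSyncSamples(SoundProvider soundProvider, int soundSampleIndex,
                                                TimeSpan durationByNow, int samplesRead)
         {
             int samplesPerSecond = soundProvider.SourceFormat.SamplesPerSecond; // assumed property
             long expectedSamples = (long)(durationByNow.TotalSeconds * samplesPerSecond);
             long writtenSamples  = (long)soundSampleIndex + samplesRead;
             return (int)(expectedSamples - writtenSamples);
         }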
Example #27
 public void Stop()
 {
     // Check state
     lock (syncRoot) {
         if (state == RecordingState.Idle) {
             throw new InvalidOperationException("Invalid state.");
         }
         state = RecordingState.Idle;
         this.stateTransition.WaitOne(); // Wait for recording thread to signal
     }
 }
Example #28
        internal void Start()
        {
            this.videoProvider = VideoProviderFactory.Create(this.region);
            this.actions       = this.task.Actions.Select(a => {
                try {
                    // set options if needed
                    if (a.Options != null &&
                        Application.PluginManager.Actions.First(a2 => a2.Type.Name == a.ActionType).Configurable)
                    {
                        // create uninitialized instance
                        var action = FormatterServices.GetSafeUninitializedObject(
                            Type.GetType(a.ActionType, true, true) ??
                            throw new InvalidOperationException(Resources.TaskHelper_NoSuchActionMessage)) as Action;

                        // set options property
                        a.GetType().GetProperty("Options")?.SetValue(action, a.Options);

                        // call parameterless constructor
                        action?.GetType()
                        .GetConstructor(
                            BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic,
                            null,
                            Type.EmptyTypes,
                            null)
                        ?.Invoke(action, new object[] { this.codec });
                    }

                    return(Activator.CreateInstance(Type.GetType(a.ActionType) ??
                                                    throw new InvalidOperationException(
                                                        Resources.TaskHelper_NoSuchActionMessage),
                                                    this.codec) as Action);
                } catch (Exception exception) {
                    Log.WriteLine(LogLevel.Warning, $"error initializing action {a.ActionType}: {exception}");

                    // create dummy action for displaying error
                    var action = new Action(this.codec);
                    action.SetStatus(ActionStatus.Failed,
                                     new Exception(Resources.TaskHelper_ActionInitializationFailedCaption, exception));
                    return(action);
                }
            })
                                 .Where(a => a != null)
                                 .ToList();

            this.stream = new MultiStream();
            this.actions.ForEach(a => {
                this.stream.Add(a);
                Application.ActionManager.AddAction(a);
            });

            if (this.codec is ID3D11Codec acceleratedCodec &&
                this.videoProvider is ID3D11VideoProvider acceleratedVideoProvider &&
                acceleratedVideoProvider.SurfacePointer != IntPtr.Zero)
            {
                acceleratedCodec.SurfacePointer = acceleratedVideoProvider.SurfacePointer;
                this.isAcceleratedEncoding      = true;
                Log.WriteLine(LogLevel.Informational, "performing hardware-assisted encoding");
            }

            this.codec?.Initialize(this.videoProvider.CaptureBounds.Size, this.stream);

            this.recordingThread = new Thread(Record)
            {
                Priority = ThreadPriority.Highest
            };
            this.recordingThread.SetApartmentState(ApartmentState.MTA);
            this.recordingThread.Start();

            State = RecordingState.Recording;
            Application.TrayIcon.AnimateIndicator(IndicatorStatus.Recording, 500);
        }
Example #29
 public void Stop()
 {
     if (recordingState == RecordingState.Recording)
     {
         recordingState = RecordingState.RequestedStop;
     }
 }
Example #30
        /// <summary>
        /// Analyze speech input
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnAnalyzeSpeech_Click(object sender, RoutedEventArgs e)
        {
            ignoreNextString = false;
            AudioControlsGrid.Visibility = Visibility.Visible;

            if (ButtonState == "Record")
            {
                this.fullText = null;
                recording = true;
                BrushConverter bc = new BrushConverter();
                mySentiment.Sentiment = 0.5;
                userInput.Text = "";

                recordGrid.Visibility = System.Windows.Visibility.Hidden;
                recordingdGrid.Visibility = System.Windows.Visibility.Visible;

                recordingState = RecordingState.Recording;

                //string speechAPIKey = confCollection["SpeechRecognitionAPIKey"].Value;
                
                string speechAPIKey = Properties.Settings.Default.SpeechRecognitionAPIKey;

                MicrophoneRecognitionClient intentMicClient =
                                SpeechRecognitionServiceFactory.CreateMicrophoneClient(SpeechRecognitionMode.LongDictation,
                                                                                       "en-us",
                                                                                       speechAPIKey);

                m_micClient = intentMicClient;

                // Event handlers for speech recognition results
                m_micClient.OnResponseReceived += this.OnResponseReceivedHandler;
                m_micClient.OnPartialResponseReceived += this.OnPartialResponseReceivedHandler;
                //m_micClient.OnConversationError += OnConversationErrorHandler;

                // First send of audio data to service
                m_micClient.StartMicAndRecognition();

                ButtonState = "Finish";
            }
            // Finish the recording
            else if (ButtonState == "Finish")
            {
                Thread.Sleep(1000);
                recording = false;
                m_micClient.EndMicAndRecognition();
                recordGrid.Visibility = System.Windows.Visibility.Visible;
                recordingdGrid.Visibility = System.Windows.Visibility.Hidden;

                ButtonState = "Record";

                DisplayAnalysis();

                // Stop recording.
                Stop();
            }
        }
Example #31
File: WasapiCapture.cs Project: EQ4/cscore
        //based on http://msdn.microsoft.com/en-us/library/windows/desktop/dd370800(v=vs.85).aspx
        private void CaptureProc(object param)
        {
            var playbackStartedEventWaitHandle = param as EventWaitHandle;

            Exception exception = null;
            try
            {
                int bufferSize = _audioClient.BufferSize;
                int frameSize = WaveFormat.Channels * WaveFormat.BytesPerSample;

                long actualDuration = (long) ((double) ReftimesPerSecond * bufferSize / WaveFormat.SampleRate);
                int actualLatency = (int) (actualDuration / ReftimesPerMillisecond);
                int sleepDuration = actualLatency / 8;

                byte[] buffer = new byte[bufferSize * frameSize];

                WaitHandle[] eventWaitHandleArray = {_eventWaitHandle};

                _audioClient.Start();
                _recordingState = RecordingState.Recording;

                if (playbackStartedEventWaitHandle != null)
                {
                    playbackStartedEventWaitHandle.Set();
                    playbackStartedEventWaitHandle = null;
                }

                while (RecordingState != RecordingState.Stopped)
                {
                    if (_eventSync)
                    {
                        int eventWaitHandleIndex = WaitHandle.WaitAny(eventWaitHandleArray, actualLatency, false);
                        if (eventWaitHandleIndex == WaitHandle.WaitTimeout)
                            continue;
                    }
                    else
                    {
                        Thread.Sleep(sleepDuration);
                    }

                    if (RecordingState == RecordingState.Recording)
                    {
                        ReadData(buffer, _audioCaptureClient, (uint) frameSize);
                    }
                }

                Thread.Sleep(actualLatency / 2);

                _audioClient.Stop();
                _audioClient.Reset();

            }
            catch (Exception ex)
            {
                exception = ex;
            }
            finally
            {
                if (playbackStartedEventWaitHandle != null)
                    playbackStartedEventWaitHandle.Set();
                RaiseStopped(exception);
            }
        }
Example #32
File: WasapiCapture.cs Project: EQ4/cscore
        /// <summary>
        /// Stop recording.
        /// </summary>
        public void Stop()
        {
            CheckForInvalidThreadCall();
            lock (_lockObj)
            {
                CheckForDisposed();
                //don't check for initialized since disposing without init would cause an exception

                if (RecordingState != RecordingState.Stopped)
                {
                    _recordingState = RecordingState.Stopped;
                    _recordThread.WaitForExit(); //possible deadlock
                    _recordThread = null;
                }
                else if (RecordingState == RecordingState.Stopped && _recordThread != null)
                {
                    _recordThread.WaitForExit();
                    _recordThread = null;
                }
            }
        }
Example #33
File: WasapiCapture.cs Project: EQ4/cscore
        /// <summary>
        /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
        /// </summary>
        /// <param name="eventSync">True to use event synchronization instead of a simple loop-and-sleep behavior. Don't use this in combination with exclusive mode.</param>
        /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
        /// <param name="latency">Latency of the capture specified in milliseconds.</param>
        /// <param name="captureThreadPriority">ThreadPriority of the capture thread which runs in the background and provides the audio capture itself.</param>
        /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
        /// <param name="synchronizationContext">The <see cref="SynchronizationContext"/> to use to fire events on.</param>
        /// <exception cref="PlatformNotSupportedException">The current platform does not support Wasapi. For more details see: <see cref="IsSupportedOnCurrentPlatform"/>.</exception>
        /// <exception cref="ArgumentException">The <paramref name="eventSync"/> parameter is set to true while the <paramref name="shareMode"/> is set to <see cref="AudioClientShareMode.Exclusive"/>.</exception>
        public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat,
            ThreadPriority captureThreadPriority, SynchronizationContext synchronizationContext)
        {
            if (!IsSupportedOnCurrentPlatform)
                throw new PlatformNotSupportedException("Wasapi is only supported on Windows Vista and above.");
            if (eventSync && shareMode == AudioClientShareMode.Exclusive)
                throw new ArgumentException("Don't use eventSync in combination with exclusive mode.");

            _eventSync = eventSync;
            _shareMode = shareMode;
            _waveFormat = defaultFormat;

            _latency = latency;
            _captureThreadPriority = captureThreadPriority;
            _synchronizationContext = synchronizationContext;

            _recordingState = RecordingState.Stopped;
        }
Example #34
 public void Stop()
 {
     waveIn.StopRecording();
     state = RecordingState.Stopping;
 }
Example #35
        private async Task InitializeVideoAsync()
        {
            ReloadVideoStreamButton.Visibility = Visibility.Collapsed;
            ShowBusyIndicator("Initializing...");

            try
            {
                currentState = RecordingState.NotInitialized;

                PreviewMediaElement.Source = null;

                ShowBusyIndicator("starting video device...");

                mediaCapture = new MediaCapture();

                // put reference in App so that it can be disposed if app is suspended
                App.MediaCaptureManager = mediaCapture;

                selectedCamera = await CameraUtilities.FindBestCameraAsync();

                if (selectedCamera == null)
                {
                    await new MessageDialog("There are no cameras connected, please connect a camera and try again.").ShowAsync();
                    await DisposeMediaCaptureAsync();

                    HideBusyIndicator();
                    return;
                }

                await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings { VideoDeviceId = selectedCamera.Id });

                if (mediaCapture.MediaCaptureSettings.VideoDeviceId != "" && mediaCapture.MediaCaptureSettings.AudioDeviceId != "")
                {
                    ShowBusyIndicator("camera initialized..");

                    mediaCapture.Failed += Failed;
                }
                else
                {
                    ShowBusyIndicator("camera error!");
                }

                //------starting preview----------//

                ShowBusyIndicator("starting preview...");

                PreviewMediaElement.Source = mediaCapture;
                await mediaCapture.StartPreviewAsync();

                currentState = RecordingState.Previewing;
            }
            catch (UnauthorizedAccessException ex)
            {
                Debug.WriteLine($"InitializeVideo UnauthorizedAccessException\r\n {ex}");

                ShowBusyIndicator("Unauthorized Access Error");

                await new MessageDialog("-----Unauthorized Access Error!-----\r\n\n" +
                                        "This can happen for a couple reasons:\r\n" +
                                        "-You have disabled Camera access to the app\r\n" +
                                        "-You have disabled Microphone access to the app\r\n\n" +
                                        "To fix this, go to Settings > Privacy > Camera (or Microphone) and reenable it.").ShowAsync();

                await DisposeMediaCaptureAsync();
            }
            catch (Exception ex)
            {
                ShowBusyIndicator("Initialize Video Error");
                await new MessageDialog("InitializeVideoAsync() Exception\r\n\nError Message: " + ex.Message).ShowAsync();

                currentState = RecordingState.NotInitialized;
                PreviewMediaElement.Source = null;
            }
            finally
            {
                HideBusyIndicator();
            }
        }
Example #36
 public void StopPlayback()
 {
     recordingState = RecordingState.NOT_RECORDING;
 }
Example #37
 public void Resume()
 {
     // Check state
     lock (syncRoot) {
         if (state != RecordingState.Paused) {
             throw new InvalidOperationException("Invalid state.");
         }
         state = RecordingState.Recording;
         this.stateTransition.WaitOne(); // Wait for recording thread to signal
     }
 }
Example #38
    //Instantiates the player's bullets
    public virtual void AttackBasedOnRecording()
    {
        if (attackIndex < attacks.Count - 1)
        {
            attackIndex++;
            isAttacking = attacks[attackIndex];
        }
        else if (attackIndex == attacks.Count - 1)
        {
            recordingState = RecordingState.NOT_RECORDING;
            attackIndex    = 0;
            isAttacking    = false;
        }

        if (isAttacking)
        {
            if (gameObject.name == "Laser")
            {
                Ray        ray = new Ray(thisCamera.transform.position, thisCamera.transform.forward);
                Transform  laserTarget;
                RaycastHit rayHit = new RaycastHit();

                if (Physics.Raycast(ray, out rayHit, Mathf.Infinity, playerLayer))
                {
                    if (rayHit.transform.name != "Laser")
                    {
                        laserTarget = rayHit.transform;
                        GameObject hitEffect;
                        hitEffect = Instantiate(Resources.Load("Prefabs/Effects/LaserHit") as GameObject);
                        hitEffect.transform.position = rayHit.point;
                        // hitEffect = Instantiate(Services.Prefabs.LaserHit, rayHit.point, Quaternion.identity);
                        // hitEffect = Instantiate(Resources.Load("Prefabs/Effects/LaserHit") as GameObject);

                        if (rayHit.transform.tag == "Enemies")
                        {
                            var enemy = rayHit.transform;
                            // Debug.Log("PLAYBACK RAY sent!");
                            enemy.SendMessage("DeductHealth", 20f);
                        }
                    }
                }
                // GameObject bullet = Instantiate(Resources.Load("Prefabs/Weapons/LaserBullet")) as GameObject;
                //  bullet.transform.position = thisCamera.transform.position;
                //  bullet.transform.rotation = thisCamera.transform.rotation;
            }

            if (gameObject.name == "MineMaster")
            {
                GameObject bullet = Instantiate(Resources.Load("Prefabs/Weapons/Mine")) as GameObject;
                bullet.transform.position = thisCamera.transform.position;
                bullet.transform.rotation = thisCamera.transform.rotation;
            }

            if (gameObject.name == "Tank")
            {
                GameObject bullet = Instantiate(Resources.Load("Prefabs/Weapons/MortarBullet")) as GameObject;
                bullet.transform.position = thisCamera.transform.position;
                bullet.transform.rotation = thisCamera.transform.rotation;
            }
        }
    }
Example #39
 private void RecordCallBack(IAsyncResult result)
 {
     if (result.IsCompleted) {
         try {
             this.record.EndInvoke(result);
             this.stateTransition.Set(); // Let the executing thread of Stop() know that recording is stopped
         }
         catch (SRException e) {
             this.state = RecordingState.Idle;
             this.stateTransition.Set(); // Let the executing thread of Stop() know that recording is stopped
             this.RaiseError(e);
         }
     }
 }
Example #40
        public void Record(string filename)
        {
            if (string.IsNullOrWhiteSpace(filename))
            {
                return;
            }

            cachedPosition = TimeSpan.Zero;
            position       = TimeSpan.Zero;
            sampleLength   = 0;
            recordedData   = new List <float>();

            if (InputDevice == null)
            {
                return;
            }

            if (recordingState == RecordingState.Recording)
            {
                return;
            }

            recordingState = RecordingState.Recording;

            if (inputDevice.Type == DeviceType.Capture)
            {
                _capture = new WasapiCapture();
            }
            else
            {
                _capture = new WasapiLoopbackCapture();
            }

            _capture.Device = inputDevice.ActualDevice;
            _capture.Initialize();

            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            _soundInSource.DataAvailable += _soundInSource_DataAvailable;

            _waveSource = _soundInSource
                          .ChangeSampleRate(SampleRate)
                          .ToSampleSource()
                          .ToWaveSource(BitResolution)
                          .ToMono();

            spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels,
                                                         _waveSource.WaveFormat.SampleRate,
                                                         CSCore.DSP.FftSize.Fft4096);

            _waveWriter = new WaveWriter(filename, _waveSource.WaveFormat);

            //the SingleBlockNotificationStream is used to intercept the played samples
            _notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());
            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            _notificationSource.SingleBlockRead += _notificationSource_SingleBlockRead;
            _waveSource = _notificationSource.ToWaveSource(16);

            RaiseSourceEvent(SourceEventType.Loaded);
            _capture.Start();
            RaiseSourcePropertyChangedEvent(SourceProperty.RecordingState, _capture.RecordingState);
        }
Example #41
 public RecordingStateEventArgs(RecordingState state)
 {
     State = state;
 }
Example #42
        private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            if (recordingState == RecordingState.Recording)
            {
                Task.Run(async () =>
                    {
                        await writer.WriteAsync(e.Buffer, 0, e.BytesRecorded);
                    });
            }
            else if (recordingState == RecordingState.RequestedStop)
            {
                recordingState = RecordingState.Monitoring;
                if (writer != null)
                {
                    writer.Dispose();
                    writer = null;
                }
            }

            float maxL = 0;
            float minL = 0;
            float maxR = 0;
            float minR = 0;

            for (int index = 0; index < e.BytesRecorded; index += 4)
            {
                var sample = (short)((e.Buffer[index + 1] << 8) | (e.Buffer[index]));
                var sample32 = sample / 32768f;

                maxL = Math.Max(sample32, maxL);
                minL = Math.Min(sample32, minL);

                sample = (short)((e.Buffer[index + 3] << 8) | (e.Buffer[index + 2]));
                sample32 = sample / 32768f;

                maxR = Math.Max(sample32, maxR);
                minR = Math.Min(sample32, minR);
            }

            if (NewSample != null)
                NewSample(minL, maxL, minR, maxR);

            RaisePropertyChanged(() => TenthOfSecondsRecorded);
        }
Example #43
 public EventStateChangeArgs(RecordingState oldSt, RecordingState newSt)
 {
     oldState = oldSt;
     newState = newSt;
 }
Example #44
 public abstract void Prepare(ref RecordingState state);
Example #45
    /// <summary>
    /// Initialize the phase before recording begins. Allowing for countdowns, prep, etc.
    /// </summary>
    private void BeginPreRecording()
    {
        captureState = RecordingState.PreRecording;

        delaySeconds = int.Parse(delays[delaySelection].text.Split(' ')[0]);
        stopwatch.Reset();
        stopwatch.Start();
    }
Example #46
 void OnRecordingStopped(object sender, StoppedEventArgs e)
 {
     recordingState = RecordingState.Stopped;
     writer.Dispose();
     Stopped(this, EventArgs.Empty);
 }
Example #47
    /// <summary>
    /// Once capturing is complete, write out the animation file.
    /// </summary>
    private void StopRecording()
    {
        // Change to stopped state
        stopwatch.Stop();
        captureState = RecordingState.NotRecording;

        // Check if there is capture data
        if (captureData == null)
        {
            UnityEngine.Debug.LogWarning("No capture data was found.");
            return;
        }

        // Reload the rig data and mapper if necessary
        if (inputMapper == null)
        {
            rigData = ColladaUtility.ReadRigData(SOURCE_FILE);
            inputMapper = new NUIInputToRigMapper(rigData);
        }

        // Map captured data to Collada data
        ColladaAnimationData data = inputMapper.GetColladaAnimation(captureData);

        // Check filename
        string appendedFileName = string.Format("MoCapHumanoid@{0}", fileName);
        string newFileName = appendedFileName;
        if (System.IO.File.Exists(string.Format(FILE_DESTINATION, appendedFileName)))
        {
            newFileName = getNewFilename(appendedFileName);
            UnityEngine.Debug.LogWarning(string.Format(NAME_DUPLICATE_ERROR_MSG, appendedFileName, newFileName));
        }

        // Save
        if (transformationType == TransformationType.Matrix)
        {
            ColladaUtility.SaveAnimationData(data, SOURCE_FILE_MATRIX, string.Format(FILE_DESTINATION, newFileName), true);
        }
        else
        {
            ColladaUtility.SaveAnimationData(data, SOURCE_FILE, string.Format(FILE_DESTINATION, newFileName), false);
        }
        
        AssetDatabase.Refresh();
    }
Example #48
File: Recorder.cs Project: tamerlan/Mimimi
        public void StopPlayback()
        {
            if(_player == null)
            {
                return;
            }

            _player.Stop();
            State = RecordingState.Stopped;
        }
Example #49
		/// <summary>
		/// Stops the recording and saves it with the file name originally specified.
		/// </summary>
		public void stopAndSaveRecording(){

			if (currentState != RecordingState.Paused && currentState != RecordingState.Recording) {
				Debug.LogError ("Can't stop a recording because we're not currently making one!");
				return;
			}

			currentState = RecordingState.Stopped;
			saveRecording ();

		}
Example #50
File: Recorder.cs Project: tamerlan/Mimimi
        public void StartPlayback()
        {
            if (State != RecordingState.Stopped || String.IsNullOrEmpty(TempWavFileName) )
            {
                return;
            }

            //waveOutDevice = new NAudio.Wave.DirectSoundOut();
            //IWaveProvider reductionStream = CreateStream(TempWavFileName);
            //waveOutDevice.Init(reductionStream);

            _player = new SoundPlayer(TempWavFileName);

            State = RecordingState.Playback;

            _player.PlayLooping();
        }
Example #51
        //based on http://msdn.microsoft.com/en-us/library/windows/desktop/dd370800(v=vs.85).aspx
        private void CaptureProc(object param)
        {
            var playbackStartedEventWaitHandle = param as EventWaitHandle;

            Exception exception = null;

            try
            {
                int bufferSize = _audioClient.BufferSize;
                int frameSize  = WaveFormat.Channels * WaveFormat.BytesPerSample;

                long actualDuration = (long)((double)ReftimesPerSecond * bufferSize / WaveFormat.SampleRate);
                int  actualLatency  = (int)(actualDuration / ReftimesPerMillisecond);
                int  sleepDuration  = actualLatency / 8;

                byte[] buffer = new byte[bufferSize * frameSize];

                WaitHandle[] eventWaitHandleArray = { _eventWaitHandle };

                _audioClient.Start();
                _recordingState = RecordingState.Recording;

                if (playbackStartedEventWaitHandle != null)
                {
                    playbackStartedEventWaitHandle.Set();
                    playbackStartedEventWaitHandle = null;
                }

                while (RecordingState != RecordingState.Stopped)
                {
                    if (_eventSync)
                    {
                        int eventWaitHandleIndex = WaitHandle.WaitAny(eventWaitHandleArray, actualLatency, false);
                        if (eventWaitHandleIndex == WaitHandle.WaitTimeout)
                        {
                            continue;
                        }
                    }
                    else
                    {
                        Thread.Sleep(sleepDuration);
                    }

                    if (RecordingState == RecordingState.Recording)
                    {
                        ReadData(buffer, _audioCaptureClient, (uint)frameSize);
                    }
                }

                Thread.Sleep(actualLatency / 2);

                _audioClient.Stop();
                _audioClient.Reset();
            }
            catch (Exception ex)
            {
                exception = ex;
            }
            finally
            {
                if (playbackStartedEventWaitHandle != null)
                {
                    playbackStartedEventWaitHandle.Set();
                }
                RaiseStopped(exception);
            }
        }
Example #52
		/// <summary>
		/// Begins recording the scene if you pass in a name
		/// </summary>
		public void startRecording(string nameOfRecording){

			// Reject a missing or empty recording name
			if (string.IsNullOrEmpty (nameOfRecording)) {
				Debug.Log ("You can't create a recording with no name!");
				return;
			}

			// Can't start a new recording while currently recording
			if (currentState == RecordingState.Recording || currentState == RecordingState.Paused) {
				Debug.LogError ("Can't record");
				return;
			}

			// Change state to recording
			currentState = RecordingState.Recording;

			// Grab all actors in the scene
			actorsInScene = new List<ActorBehavior>(GameObject.FindObjectsOfType<ActorBehavior> ());

			// Grabs all actor unique instance ids
			actorIdsBeingRecorded = new int[actorsInScene.Count];
			for (int i = 0; i < actorIdsBeingRecorded.Length; i++) {
				actorIdsBeingRecorded[i] = actorsInScene [i].gameObject.GetInstanceID ();
			}

			// Grab actor names
			string[] actorNames = new string[actorsInScene.Count];
			for (int i = 0; i < actorsInScene.Count; i++) {
				actorNames[i] = actorsInScene [i].getNameForRecording ();
			}

			// Grab actors' preferred playback representation
			string[] actorplaybackRep = new string[actorsInScene.Count];
			for (int i = 0; i < actorsInScene.Count; i++) {
				actorplaybackRep[i] = actorsInScene [i].getObjToRepresentActor ();
			}

			// Create our recording object that we'll add frame data to.
			currentRecordingBeingBuilt = new Recording (nameOfRecording, getFPS(), actorIdsBeingRecorded, actorNames, actorplaybackRep);

			// Capture our first frame
			captureFrame ();
		}
Example #53
            public virtual long Execute(DocumentsOperationContext context, RecordingState recordingState)
            {
                recordingState?.Record(context, this);

                return(ExecuteCmd(context));
            }
Example #54
        public void StartRecording()
        {
            if (writer == null)
            {
                outputFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "RecordToMP3");
                Directory.CreateDirectory(outputFolder);
                outputFilenameBase = String.Format(Properties.Settings.Default.RECORDER_Filename, DateTime.Now);
                writer = new WaveFileWriter(Path.Combine(outputFolder, outputFilenameBase) + ".wav", waveIn.WaveFormat);

                Properties.Settings.Default.RECORDER_LastFile = writer.Filename;
                Properties.Settings.Default.Save();

                if (Markers == null)
                    Markers = new ObservableCollection<int>();

                Markers.Clear();
            }

            switch (recordingState)
            {
                case RecordingState.Monitoring:
                    recordingState = RecordingState.Recording;
                    break;
                case RecordingState.Recording:
                    recordingState = RecordingState.Paused;
                    break;
                case RecordingState.Paused:
                    recordingState = RecordingState.Recording;
                    break;
            }
        }
Example #55
File: Recorder.cs Project: tamerlan/Mimimi
 public void StartRecording()
 {
     State = RecordingState.Recording;
     waveIn.StartRecording();
 }
Example #56
 private void StopFromCallback(Exception exception)
 {
     RecordingState = RecordingState.Stopped;
     RaiseStopped(exception);
 }
Example #57
File: Recorder.cs Project: tamerlan/Mimimi
 void waveIn_RecordingStopped(object sender, StoppedEventArgs e)
 {
     State = RecordingState.Stopped;
     writer.Dispose();
 }
Example #58
 public void Record(DisplayProvider displayProvider, SoundProvider soundProvider)
 {
     if (this.fileName == null) {
         throw new InvalidOperationException("FileName is not specified");
     }
     // Check state
     lock (syncRoot) {
         if (state != RecordingState.Idle) {
             throw new InvalidOperationException("Invalid state.");
         }
         state = RecordingState.Preparing;
     }
     record = new RecordDelegate(this.RecordPrivate);
     AsyncCallback callback = new AsyncCallback(this.RecordCallBack);
     record.BeginInvoke(displayProvider, soundProvider, callback, null); // Start a new thread for recording
 }
Example #59
 private void SetControlStates(bool isRecording)
 {
     Status = isRecording ? RecordingState.Recording : RecordingState.Stopped;
 }
Example #60
		/// <summary>
		/// Stops and trashes the current recording being made.
		/// </summary>
		public void stopAndTrashRecording(){
			
			if (currentState != RecordingState.Paused && currentState != RecordingState.Recording) {
				Debug.LogError ("Can't stop a recording because we're not currently making one!");
				return;
			}

			currentState = RecordingState.Stopped;
			currentRecordingBeingBuilt = null;

		}