/// <summary>
/// Rebuilds a playable (non-streaming) <see cref="AudioClip"/> from previously
/// captured <see cref="RecordingData"/>: recreates the clip with the stored name,
/// sample count, channel count and frequency, then copies the raw samples back in.
/// </summary>
/// <param name="data">The serialized recording to restore.</param>
/// <returns>A new clip populated with the recording's samples.</returns>
public AudioClip ConvertDataToAudioClip(RecordingData data)
{
    // NOTE(review): AudioClip.Create's length parameter is documented as sample
    // frames; passing the flat AudioSamples.Length for multi-channel data may
    // over-allocate — confirm against ConvertAudioClipToData's capture side.
    AudioClip clip = AudioClip.Create(
        data.RecordingName,
        data.AudioSamples.Length,
        data.Channels,
        data.Frequency,
        false);

    clip.SetData(data.AudioSamples, 0);
    return clip;
}
/// <summary>
/// Invoked when the application is launched normally by the end user. Other entry points
/// will be used such as when the application is launched to open a specific file.
/// When launched from a toast/tile (non-empty <c>e.Arguments</c>) the payload is
/// deserialized into a <see cref="RecordingData"/>, appended to the local recording
/// history, and the video page is opened directly instead of the overview page.
/// </summary>
/// <param name="e">Details about the launch request and process.</param>
// NOTE(review): async void is required here because OnLaunched's override signature is
// void; exceptions thrown after the first await are unobservable by callers.
protected override async void OnLaunched(LaunchActivatedEventArgs e)
{
    Frame rootFrame = Window.Current.Content as Frame;

    // Do not repeat app initialization when the Window already has content,
    // just ensure that the window is active
    if (rootFrame == null)
    {
        // Create a Frame to act as the navigation context and navigate to the first page
        rootFrame = new Frame();

        // Set the default language
        rootFrame.Language = Windows.Globalization.ApplicationLanguages.Languages[0];
        rootFrame.NavigationFailed += OnNavigationFailed;

        // Place the frame in the current Window
        Window.Current.Content = rootFrame;
    }

    // Switching decision between pages: empty arguments => normal launch,
    // otherwise the app was activated from a notification carrying recording JSON.
    if (string.IsNullOrEmpty(e.Arguments))
    {
        // Register for notifications.
        // NOTE(review): the Service Bus connection string (SharedAccessKey) is
        // hard-coded in source — this credential should be rotated and moved to
        // configuration/secret storage, not committed to the repository.
        await RegisterPushNotifications("kinect-VOD-tutorial",
            "Endpoint=sb://kinect-demo.servicebus.windows.net/;SharedAccessKeyName=ListenPolicy;SharedAccessKey=KWaGKpa38A7ymRLDk/7qpTpZmla3nfmWN1AWto2R3zY=",
            "new-video-template",
            string.Format("$({0})", "RecordingData"),
            "New recorded video",
            string.Format("$({0})", "Caption"),
            "http://www.kinectingforwindows.com/images/notification_logo.png");

        // Navigate to the overview page
        rootFrame.Navigate(typeof(MainPage));
    }
    else
    {
        // Deserialize to RD — e.Arguments carries the JSON placed in the
        // notification's launch property (see SendNotification's "RecordingData" entry).
        RecordingData data = e.Arguments.DeserializeFromJson<RecordingData>();

        // Load recording history on first run; fall back to an empty list when
        // no history file exists yet.
        if (_recordingHistory == null)
        {
            _recordingHistory = await LocalStorageHelper.LoadFileContentAsync<ObservableCollection<RecordingData>>(RecordingFileName)
                ?? new ObservableCollection<RecordingData>();
        }

        // Add to the list
        _recordingHistory.Add(data);

        // Save the new list locally
        await LocalStorageHelper.SaveFileContentAsync(RecordingFileName, _recordingHistory);

        // Navigate to the video page
        rootFrame.Navigate(typeof(VideoPage), data);
    }

    // Ensure the current window is active
    Window.Current.Activate();
}
/// <summary>
/// Advances playback of the recorded key-event chain: once the timer passes the
/// current segment's end time, moves to the next <see cref="RecordingData"/> node.
/// When the chain is exhausted, playback stops and the cursor rewinds to the start.
/// </summary>
private void UpdatePlayback()
{
    // Fix: guard against a null cursor before reading crs.endtime. The original
    // dereferenced crs unconditionally and would NRE if playback state was entered
    // with no recording; treat that the same as reaching the end of the chain.
    if (crs == null)
    {
        StopPlayback();
        crs = recordingstart;
        return;
    }

    if (timer.ElapsedMilliseconds >= crs.endtime)
    {
        crs = crs.next;
        if (crs == null)
        {
            // End of the recorded chain: stop and rewind for the next playback.
            StopPlayback();
            crs = recordingstart;
            return;
        }
    }
}
/// <summary>
/// Captures an <see cref="AudioClip"/> into a serializable <see cref="RecordingData"/>:
/// copies out the raw PCM samples and records the clip's name, frequency and channels
/// so the clip can later be rebuilt by <c>ConvertDataToAudioClip</c>.
/// </summary>
/// <param name="audioClip">The clip to snapshot.</param>
/// <returns>A data object holding the clip's samples and settings.</returns>
public RecordingData ConvertAudioClipToData(AudioClip audioClip)
{
    // Extract the raw sample buffer from the clip.
    float[] samples = new float[audioClip.samples];
    audioClip.GetData(samples, 0);

    RecordingData data = new RecordingData
    {
        AudioSamples = samples,
        RecordingName = audioClip.name,
        Frequency = audioClip.frequency,
        Channels = audioClip.channels
    };

    return data;
}
/// <summary>
/// Switches the app into recording mode: stops any active playback, snapshots the
/// current keys-per-second statistics, resets the recording chain and restarts the
/// timer, and recolors the UI panels as a visual "recording" indicator.
/// </summary>
private void StartRecording()
{
    // Recording and playback are mutually exclusive states.
    if (recordingstate == RS_PLAYBACK)
    {
        StopPlayback();
    }

    // Snapshot the live KPS stats so they can be restored after playback
    // (StartPlayback re-applies infobeforerecord).
    infobeforerecord.total = kpsHandler.total;
    infobeforerecord.max = kpsHandler.max;

    recordingstate = RS_RECORDING;
    cmsStartStopRecording.Text = "Stop recording";

    // Rewind the cursor to the head node and truncate any previous recording.
    crs = recordingstart;
    crs.next = null;

    timer.Reset();
    timer.Start();

    // Maroon panels signal "recording in progress".
    pnlInfo.BackColor = pnlKeys.BackColor = Color.Maroon;
}
/// <summary>
/// Persists the current audio recording to disk: lazily creates the storage and
/// converter collaborators, snapshots the recorder's clip into a
/// <see cref="RecordingData"/>, and writes it under <c>recordingFileName</c>.
/// </summary>
public void SaveRecording()
{
    // Lazily construct collaborators on first use.
    if (dataStorageHandler == null)
    {
        dataStorageHandler = new DataStorageHandler<RecordingData>();
    }
    if (audioClipConverter == null)
    {
        audioClipConverter = new AudioClipConverter();
    }

    // Convert the recorded clip into its serializable form, then persist it.
    RecordingData recording = audioClipConverter.ConvertAudioClipToData(audioRecorder.GetRecording());
    dataStorageHandler.SaveData(recording, recordingFileName);
}
/// <summary>
/// Restores a previously saved recording: lazily creates the storage and converter
/// collaborators, loads the <see cref="RecordingData"/> from <c>recordingFileName</c>,
/// rebuilds an <see cref="AudioClip"/> from it, and hands it to the recorder.
/// </summary>
public void LoadRecording()
{
    // Lazily construct collaborators on first use.
    if (dataStorageHandler == null)
    {
        dataStorageHandler = new DataStorageHandler<RecordingData>();
    }
    if (audioClipConverter == null)
    {
        audioClipConverter = new AudioClipConverter();
    }

    // Load the persisted data and turn it back into a playable clip.
    RecordingData loaded = dataStorageHandler.LoadData(recordingFileName);
    audioRecorder.SetRecording(audioClipConverter.ConvertDataToAudioClip(loaded));
}
/// <summary>
/// Sends the streaming URL &amp; caption to the clients via a template notification.
/// The recording metadata is flattened into named template properties, including a
/// full JSON serialization under "RecordingData" used as the tile launch payload.
/// </summary>
/// <param name="streamUrl">Url of the stream.</param>
/// <param name="stamp">Timestamp associated with the recording.</param>
private async Task SendNotification(string streamUrl, DateTime stamp)
{
    // Create metadata for the client (will be used in the launch-property of the tile).
    RecordingData recordingData = new RecordingData(VideoCaption.Text, streamUrl, _recordingID, stamp);

    // Assign properties for the notification template.
    Dictionary<string, string> properties = new Dictionary<string, string>();
    properties.Add("Caption", recordingData.Caption);
    properties.Add("SmoothStreamUrl", recordingData.SmoothStreamUrl);
    properties.Add("RecordingId", recordingData.RecordingId);
    properties.Add("RecordingStamp", recordingData.RecordingStamp.ToString());
    properties.Add("RecordingData", recordingData.SerializeToJson());

    // Send the notification.
    await _notificationAgent.SendTemplateNotificationAsync(properties);
}
/// <summary>
/// Constructs the provider: wires up injected collaborators, then loads the TTTAS
/// recording data from its JSON config file — creating and persisting a default
/// file when none exists yet.
/// </summary>
/// <param name="communication">Messaging/logging channel used during data verification.</param>
/// <param name="microphoneHandler">Microphone abstraction used by the provider.</param>
/// <param name="monitorHubContext">SignalR hub context for the TTTAS monitor.</param>
public TTTASProvider(
    ICommunication communication,
    IMicrophoneHandler microphoneHandler,
    IHubContext<Web.Hubs.TTTASHub> monitorHubContext)
{
    this.communication = communication;
    this.microphoneHandler = microphoneHandler;
    this.monitorHubContext = monitorHubContext;

    dataFilePath = BGC.IO.DataManagement.PathForDataFile("Config", "TTTAS", "TTTASData.json");

    if (File.Exists(dataFilePath))
    {
        // Fix: JsonSerializer.Deserialize returns null when the file contains the
        // JSON literal "null"; the original then NRE'd on VerifyAndPopulate.
        // Fall back to a fresh default instance in that case.
        recordingData = JsonSerializer.Deserialize<RecordingData>(File.ReadAllText(dataFilePath)) ?? new RecordingData();
        recordingData.VerifyAndPopulate(communication);
    }
    else
    {
        // First run: create defaults and persist them so the file exists next time.
        recordingData = new RecordingData();
        File.WriteAllText(dataFilePath, JsonSerializer.Serialize(recordingData));
    }
}
/// <summary>
/// Per-tick driver for the record/playback state machine. Polls the configured
/// hotkey to toggle recording on a key-down edge, delegates to
/// <see cref="UpdatePlayback"/> while playing back, and — while recording —
/// appends a new chain node whenever the pressed-button bitmask changes.
/// </summary>
/// <param name="eventmask">Bitmask of the buttons currently pressed this tick.</param>
private void UpdateRecord(uint eventmask)
{
    // 0x8000 is GetAsyncKeyState's "key is currently down" high bit; keystate
    // turns the level signal into a single toggle per physical press (edge detect).
    if (reckey != 0 && ((GetAsyncKeyState(reckey) & 0x8000) == 0x8000))
    {
        if (!keystate)
        {
            keystate = true;
            if (recordingstate == RS_RECORDING)
            {
                StopRecording();
                return;
            }
            StartRecording();
        }
    }
    else
    {
        keystate = false;
    }

    switch (recordingstate)
    {
        case RS_RECORDING:
            // Fall through below to capture mask changes.
            break;
        case RS_PLAYBACK:
            UpdatePlayback();
            return;
        default:
            // Idle: nothing to do.
            return;
    }

    // Run-length encoding: only allocate a new node when the mask changes;
    // identical consecutive masks extend the current segment implicitly.
    if (crs.mask == eventmask)
    {
        return;
    }

    // Close the current segment at the present time and start a new one
    // holding the new button combination.
    crs.endtime = timer.ElapsedMilliseconds;
    crs.next = new RecordingData();
    crs = crs.next;
    crs.mask = eventmask;
}
/// <summary>
/// During playback, flashes the beat sprite and pulses the beat circle whenever the
/// song crosses a beat boundary between the previous frame and this one.
/// </summary>
private void PlaybackBeatActivity()
{
    if (mBeatSprite != null && IsPlayingBack)
    {
        // NOTE(review): SelectedObject is assigned without a cast — presumably this
        // is a typed/custom property grid; verify it isn't WinForms' object-typed one.
        RecordingData recordingData = mRecordingDataPropertyGrid.SelectedObject;

        double timeIntoSongThisFrame = TimeManager.CurrentTime - mMusic.TimeStarted;

        // Give the second difference a little extra to resolve any floating point issues.
        double timeIntoSongLastFrame = TimeManager.CurrentTime - mMusic.TimeStarted - TimeManager.SecondDifference * 1.05;

        // A change in the beat count between the two sampled times means a beat
        // boundary was crossed this frame.
        if (recordingData.GetNumberOfBeatsIntoSong(timeIntoSongLastFrame) !=
            recordingData.GetNumberOfBeatsIntoSong(timeIntoSongThisFrame))
        {
            // Flash: full alpha now, fading out at -400/sec.
            mBeatSprite.Alpha = 255;
            mBeatSprite.AlphaRate = -400;

            // Show the circle briefly; the queued instruction hides it ~50ms later.
            mBeatCircle.Visible = true;
            mBeatCircle.Instructions.Add(new Instruction<Circle, bool>(mBeatCircle, "Visible", false, TimeManager.CurrentTime + .05));
        }
        // Test the beats to see if we have a new one. If so, make the Sprite flash.
    }
}
/// <summary>
/// Switches the app into playback mode: stops any active recording, saves the live
/// KPS stats and restores the pre-recording snapshot, swaps every button's key
/// handler for a playback handler keyed by a unique bitmask, rewinds the recording
/// cursor and restarts the timer.
/// </summary>
private void StartPlayback()
{
    // Recording and playback are mutually exclusive states.
    if (recordingstate == RS_RECORDING)
    {
        StopRecording();
    }

    // Save the post-recording stats, then show the stats as they were when
    // recording started so playback reproduces the original display.
    infoafterrecord.total = kpsHandler.total;
    infoafterrecord.max = kpsHandler.max;
    kpsHandler.SetMax(infobeforerecord.max);
    kpsHandler.SetTotal(infobeforerecord.total);

    // Walk the buttons from the highest index down, stashing each real handler
    // and installing a PlaybackKeyHandler with its own one-hot bitmask.
    // NOTE: `--i` decrements before both array accesses, so savedkeyhandlers[i]
    // and btns[i] refer to the same slot. keymask starts at bit 0 for the
    // highest-index button — presumably matching the mask order used when
    // recording; confirm against UpdateRecord's eventmask producer.
    uint keymask = 1;
    for (int i = MAX_BUTTONS; i > 0;)
    {
        savedkeyhandlers[--i] = btns[i].keyhandler;
        btns[i].keyhandler = new PlaybackKeyHandler(keymask);
        keymask <<= 1;
    }

    // Rewind to the head of the recorded chain and restart timing.
    crs = recordingstart;
    timer.Reset();
    timer.Start();

    recordingstate = RS_PLAYBACK;
    cmsPlaybackRecording.Text = "Stop playbacking";
}
/// <summary>
/// If the active screen is confirmationScreen it just calls the RefreshScreen()
/// function of the confirmationScreen. Otherwise it hides the active screen,
/// switches it to the confirmationScreen and passes it the appropriate parameters,
/// shows it and calls the RefreshScreen() function.
/// </summary>
/// <param name="selectedRecording">A recording selected in the previous screen. Alternatively we can just pass a int id.</param>
/// <exception cref="NotImplementedException">Always; the screen switch is not implemented yet.</exception>
public void ShowConfirmationScreen(RecordingData selectedRecording)
{
    // Fix: use the dedicated NotImplementedException (a subclass of Exception,
    // so existing catch blocks still work) instead of throwing bare Exception.
    throw new NotImplementedException();
}
/// <summary>
/// If the active screen is recordingEditScreen it just calls the RefreshScreen()
/// function of the recordingEditScreen. Otherwise it hides the active screen,
/// switches it to the recordingEditScreen, shows it and calls the RefreshScreen()
/// function.
/// </summary>
/// <param name="recordData">The data of the recording to be edited.</param>
/// <exception cref="NotImplementedException">Always; the screen switch is not implemented yet.</exception>
public void ShowRecordingEditScreen(RecordingData recordData)
{
    // Fix: use the dedicated NotImplementedException (a subclass of Exception,
    // so existing catch blocks still work) instead of throwing bare Exception.
    throw new NotImplementedException();
}
/// <summary>
/// If the active screen is resultsScreen it just calls the RefreshScreen() function
/// of the resultsScreen. Otherwise it hides the active screen, switches it to the
/// resultsScreen, shows it and calls the RefreshScreen() function.
/// </summary>
/// <param name="resultParameters">The calculated parameters to be displayed in a nice way on the screen.</param>
/// <param name="processedRecording">The data of the recording to be displayed (if the user has proper authorization) on the results screen.</param>
/// <exception cref="NotImplementedException">Always; the screen switch is not implemented yet.</exception>
public void ShowResultsScreen(double[] resultParameters, RecordingData processedRecording)
{
    // Fix: use the dedicated NotImplementedException (a subclass of Exception,
    // so existing catch blocks still work) instead of throwing bare Exception.
    throw new NotImplementedException();
}
// Use this for initialization.
// Unity lifecycle entry point: configures the recorder/player depending on `mode`.
//   Record:   picks a unique .unityrec output path, allocates the recording buffer,
//             indexes all child Recorder components, and disables custom playback objects.
//   Playback: deserializes the named recording, indexes Recorders (freezing their
//             physics), maps CustomPlayback components, and reveals the Arm layer.
//   Off:      just deactivates any custom playback objects.
void Start()
{
    _ticCount = 0;
    _fixedFrameInterval = frameInterval;

    if (mode == Mode.Record)
    {
        if (recordingFile == "")
        {
            // No name given: timestamp-based default.
            _recordingFilePath = string.Format("{0}Recording_{1}.unityrec", rootFolder, GetTimestamp(DateTime.Now));
        }
        else
        {
            // Uniquely names the file: first try the bare name, then _0, _1, ...
            int n = 0;
            string suffix = "";
            do
            {
                _recordingFilePath = string.Format("{0}{1}{2}.unityrec", rootFolder, recordingFile, suffix);
                suffix = string.Format("_{0}", n++);
            } while (File.Exists(_recordingFilePath));
        }

        _recording = new RecordingData();
        _recording.interval = frameInterval;
        // Presize the frame list to avoid repeated growth during capture.
        _recording.frames.Capacity = 32000;

        // Fill out the object mapping dictionary
        _objects = transform.root.GetComponentsInChildren<Recorder>(true);
        foreach (Recorder rec in _objects)
        {
            if (_objectMapping.ContainsKey(rec.uniqueID))
            {
                Debug.LogErrorFormat("Recorder ID {0} already used by {1}", rec.uniqueID, _objectMapping[rec.uniqueID]);
            }
            // NOTE(review): on a duplicate ID this Add still executes and will throw
            // ArgumentException right after logging — the error branch should
            // probably `continue`.
            _objectMapping.Add(rec.uniqueID, rec);
            Debug.Log("Recording " + rec.uniqueID);
        }

        // Disable custom playback objects
        _playbackObjects = transform.root.GetComponentsInChildren<CustomPlayback>(true);
        foreach (CustomPlayback player in _playbackObjects)
        {
            player.gameObject.SetActive(false);
        }
    }
    else if (mode == Mode.Playback)
    {
        // Deserialize the specified recording
        _recording = null;
        _recordingFilePath = string.Format("{0}{1}.unityrec", rootFolder, playbackFile);
        FileStream fs = null;
        try
        {
            fs = new FileStream(_recordingFilePath, FileMode.Open);
            // NOTE(review): BinaryFormatter is insecure on untrusted data and removed
            // in .NET 9 — consider migrating the .unityrec format to a safe serializer.
            BinaryFormatter formatter = new BinaryFormatter();
            _recording = (RecordingData)formatter.Deserialize(fs);
        }
        catch (Exception e)
        {
            // Any failure (missing file, bad data) downgrades the mode to Off.
            Debug.Log("Failed to deserialize: " + e.Message);
            mode = Mode.Off;
            return;
        }
        finally
        {
            if (fs != null)
            {
                fs.Close();
            }
        }
        Debug.Log(_recording.metadata);

        // Fill out the object mapping dictionary
        _objects = transform.root.GetComponentsInChildren<Recorder>(true);
        foreach (Recorder rec in _objects)
        {
            if (_objectMapping.ContainsKey(rec.uniqueID))
            {
                Debug.LogErrorFormat("Recorder ID {0} already used by {1}",
                    rec.uniqueID, _objectMapping[rec.uniqueID]);
            }
            // NOTE(review): same duplicate-ID Add-after-log issue as the Record branch.
            _objectMapping.Add(rec.uniqueID, rec);

            Rigidbody body = rec.GetComponent<Rigidbody>();
            if (body != null)
            {
                // completely disable physics on playback objects
                body.isKinematic = true;
                body.detectCollisions = false;
            }
        }

        // Fill out playback mapping dictionary
        _playbackObjects = transform.root.GetComponentsInChildren<CustomPlayback>(true);
        foreach (CustomPlayback player in _playbackObjects)
        {
            _playbackMapping.Add(player.uniqueID, player);
        }

        // Display the arm in the main view
        mainCamera.cullingMask |= 1 << LayerMask.NameToLayer("Arm");
    }
    else if (mode == Mode.Off)
    {
        // Deactivate playback objects
        _playbackObjects = transform.root.GetComponentsInChildren<CustomPlayback>(true);
        foreach (CustomPlayback player in _playbackObjects)
        {
            player.gameObject.SetActive(false);
        }
    }
}