/// <summary>
/// Open the file and read it into memory.
/// Header lines (containing "%" or "Sample Index") are routed to ParseHeaderLine;
/// all other lines are parsed into samples until the first unparseable line.
/// </summary>
/// <param name="fileName">Full path of the file to read.</param>
/// <returns>True when the file contained valid information (IsValidFile).</returns>
public async Task<bool> ReadFileAsync(string fileName)
{
    _Samples = new List<IBFSample>();
    using (var fileReader = await FileSystemExtensionMethods.WaitForFileAsync(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var reader = new StreamReader(fileReader))
    {
        // Read asynchronously so we do not block a thread on file I/O
        // inside an async method (the original used synchronous ReadLine()).
        var nextLine = await reader.ReadLineAsync();
        while (nextLine != null)
        {
            if (nextLine.Contains("%") || nextLine.Contains("Sample Index"))
            {
                ParseHeaderLine(nextLine);
            }
            else
            {
                var newSample = CreateSample(nextLine);
                if (newSample == null)
                {
                    break; // unparseable data line: stop reading
                }
                _Samples.Add(newSample);
            }
            nextLine = await reader.ReadLineAsync();
        }
    }
    return IsValidFile;
}
/// <summary>
/// Open the file and read the data into memory.
/// Waits for the file to become available, then hands it to the native EDF
/// library, reads every data record, and converts each record into samples.
/// </summary>
/// <param name="fileName">Full path of the EDF file to read.</param>
/// <returns>True when the file contained valid information (IsValidFile).</returns>
/// <exception cref="InvalidOperationException">The EDF library could not open the file.</exception>
public async Task<bool> ReadFileAsync(string fileName)
{
    // Wait until the file is readable, then release it immediately so the
    // native EDF library can open it with its own handle.
    using (var fileReader = await FileSystemExtensionMethods.WaitForFileAsync(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        fileReader.Close();
    }

    _Samples = new List<IBFSample>();
    int fileHandle = -1;
    try
    {
        fileHandle = edfOpenFileReadOnly(fileName);
        if (fileHandle < 0)
        {
            // More specific than bare Exception; still caught by callers
            // that catch Exception, so the contract is backward compatible.
            throw new InvalidOperationException($"Unable to open file. Error {fileHandle}.");
        }

        // get the header json and convert to header object
        var header = JsonConvert.DeserializeObject<EdfHeaderStruct>(edfGetHeaderAsJson(fileHandle));
        SetFilePropertiesFromHeader(header);

        // make the samples: one signalCount x samplesPerDataRecord matrix per data record
        var signalCount = header.edfsignals;
        var samplesPerDataRecord = header.signalparam[0].smp_in_datarecord;
        ReadDataRecordsCount = 0;
        for (ulong i = 0; i < header.datarecords_in_file; i++)
        {
            // Fresh buffer for each record (it is handed to CreateSamples).
            var chunk = new double[signalCount, samplesPerDataRecord];
            for (int j = 0; j < signalCount; j++)
            {
                var thisSignal = edfReadPhysicalSamples(fileHandle, j, header.signalparam[j].smp_in_datarecord);
                for (int k = 0; k < samplesPerDataRecord; k++)
                {
                    chunk[j, k] = thisSignal[k];
                }
            }
            CreateSamples(chunk);
            ReadDataRecordsCount++;
        }
    }
    finally
    {
        // Always release the native handle, even on failure.
        if (fileHandle >= 0)
        {
            edfCloseFile(fileHandle);
        }
    }
    return IsValidFile;
}
/// <summary>
/// Open the file and read the header, first record and last record (to calculate duration).
/// Does not save any other samples from the file.
/// Returns true if the file has valid information.
/// </summary>
/// <param name="fileName">Full path of the file to read.</param>
/// <returns>True when the header information is valid (IsValidFile).</returns>
public async Task<bool> ReadFileForHeaderAsync(string fileName)
{
    _Samples = new List<IBFSample>();
    using (var fileReader = await FileSystemExtensionMethods.WaitForFileAsync(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var reader = new StreamReader(fileReader))
    {
        int lineCount = 0;
        // Read asynchronously so we do not block a thread on file I/O
        // inside an async method (the original used synchronous ReadLine()).
        var nextLine = await reader.ReadLineAsync();
        while (nextLine != null)
        {
            if (nextLine.Contains("%") || nextLine.Contains("Sample Index"))
            {
                ParseHeaderLine(nextLine);
            }
            else
            {
                if (_Samples.Count == 0)
                {
                    var firstSample = CreateSample(nextLine);
                    if (firstSample != null)
                    {
                        _Samples.Add(firstSample); // save the first sample
                    }
                }
                lineCount++;
            }
            nextLine = await reader.ReadLineAsync();
        }
        if (_Samples.Count > 0)
        {
            // Duration is derived from the data-line count and the sample rate,
            // anchored at the first sample's timestamp.
            EndTime = _Samples.First().TimeStamp + lineCount / SampleRate;
        }
    }
    return IsValidFile;
}
/// <summary>
/// Open the file and read the header only.
/// Does not save any samples from the file.
/// Returns true if the file has a valid header.
/// </summary>
/// <param name="fileName">Full path of the EDF file to inspect.</param>
/// <returns>True when the header is valid (IsValidFile); false on any failure.</returns>
public async Task<bool> ReadFileForHeaderAsync(string fileName)
{
    // Wait for the file to become readable, then release it so the
    // native EDF library can open it with its own handle.
    using (var fileReader = await FileSystemExtensionMethods.WaitForFileAsync(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        fileReader.Close();
    }

    _Samples = new List<IBFSample>();
    var handle = -1;
    try
    {
        handle = edfOpenFileReadOnly(fileName);
        if (handle < 0)
        {
            return false; // library refused the file
        }
        // Pull the header out of the library as JSON and map it onto
        // this object's file properties.
        var headerJson = edfGetHeaderAsJson(handle);
        var header = JsonConvert.DeserializeObject<EdfHeaderStruct>(headerJson);
        SetFilePropertiesFromHeader(header);
    }
    catch (Exception)
    {
        // Best-effort contract: any failure reading/parsing the header
        // simply means "not a valid header".
        return false;
    }
    finally
    {
        if (handle >= 0)
        {
            edfCloseFile(handle);
        }
    }
    return IsValidFile;
}
/// <summary>
/// Run function: writes queued samples to the recording file until cancelled.
/// Waits on NotifyAddedData, drains the Data queue, writing the sample header
/// on the first sample seen. All failures are reported through Log.
/// </summary>
/// <param name="cancelToken">Token that stops the writer loop.</param>
async Task RunFileWriter(CancellationToken cancelToken)
{
    try
    {
        // Ensure the target directory exists. GetDirectoryName can return
        // null/empty (e.g. a bare file name) — guard so CreateDirectory
        // is never called with an invalid argument.
        var directory = Path.GetDirectoryName(FileName);
        if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using (var fileStream = await FileSystemExtensionMethods.WaitForFileAsync(FileName, FileMode.Create, FileAccess.Write, FileShare.Read))
        using (System.IO.StreamWriter file = new System.IO.StreamWriter(fileStream))
        {
            Log?.Invoke(this, new LogEventArgs(this, "RunFileWriter", $"Started recording file {FileName}.", LogLevel.INFO));
            WriteFileHeader(file);
            bool writeHeader = false;
            try
            {
                FileTimer.Restart();
                while (!cancelToken.IsCancellationRequested)
                {
                    await NotifyAddedData.WaitAsync(cancelToken);
                    try
                    {
                        // Drain the queue using TryDequeue's return value —
                        // checking Count and then dequeuing blindly is racy
                        // when another consumer can empty the queue in between.
                        while (Data.TryDequeue(out var nextReading))
                        {
                            if (nextReading == null)
                            {
                                Log?.Invoke(this, new LogEventArgs(this, "RunFileWriter", $"Null sample.", LogLevel.WARN));
                                continue;
                            }
                            if (!writeHeader)
                            {
                                // Sample header depends on the first sample's shape.
                                WriteSampleHeaderOnFirstSample(file, nextReading);
                                writeHeader = true;
                            }
                            WriteToFile(file, nextReading);
                        }
                    }
                    catch (Exception ex)
                    {
                        // A bad sample must not kill the writer loop.
                        Log?.Invoke(this, new LogEventArgs(this, "RunFileWriter", ex, LogLevel.ERROR));
                    }
                }
            }
            catch (OperationCanceledException)
            {
                // Normal shutdown path: WaitAsync observed the cancel token.
            }
            catch (Exception e)
            {
                Log?.Invoke(this, new LogEventArgs(this, "RunFileWriter", e, LogLevel.FATAL));
            }
            finally
            {
                file.Close();
                FileTimer.Stop();
                Log?.Invoke(this, new LogEventArgs(this, "RunFileWriter", $"Closed recording file {FileName}.", LogLevel.INFO));
            }
        }
    }
    catch (Exception e)
    {
        Log?.Invoke(this, new LogEventArgs(this, "RunFileWriter", e, LogLevel.FATAL));
    }
}