/// <summary>
/// Verifies that Over(...) wraps the stream produced by Using(...): content
/// written through the writer is GZip-compressed, and decompressing the
/// resulting stream yields the original words.
/// </summary>
public void Wrap_Original_Stream_On_Over()
{
    var builder = SequenceStream.Using(
        stream => new StreamWriter(stream, Encoding.UTF8, leaveOpen: false, bufferSize: 128)
    );
    var compressed = builder.Over(
        stream => new GZipStream(stream, leaveOpen: false, compressionLevel: CompressionLevel.Fastest)
    );

    using var wordsStream = compressed
        .On(new[] { "Read", "the", "content" })
        .Writing((output, word) => output.Write($"{word} "));

    using var reader = new StreamReader(
        new GZipStream(wordsStream, CompressionMode.Decompress),
        Encoding.UTF8
    );

    var decompressed = reader.ReadToEnd();

    decompressed.Should().Be("Read the content ");
}
/// <summary>
/// Builds a pooled sequence stream: each item of <paramref name="sequence"/>
/// (enumerated asynchronously) is written through a context created per stream
/// by <paramref name="contextFactory"/> and released via <paramref name="dispose"/>.
/// </summary>
public Stream Create<TItem, TContext>(
    Func<Stream, TContext> contextFactory,
    Action<TContext> dispose,
    IEnumerable<TItem> sequence,
    ArrayPool<byte> pool,
    Action<TContext, TItem> write)
{
    var asyncSequence = sequence.ToAsyncEnumerable();

    return SequenceStream
        .Using(contextFactory, dispose)
        .On(asyncSequence)
        .Pooling(pool)
        .Writing(write);
}
/// <summary>
/// Produces a stream whose content is each person's name followed by age,
/// written with a UTF-8 <see cref="BinaryWriter"/>.
/// </summary>
public static Stream BinaryContext(IEnumerable<Person> persons)
{
    return SequenceStream
        .Using(stream => new BinaryWriter(stream, Encoding.UTF8))
        .On(persons)
        .Writing(
            (writer, person) =>
            {
                writer.Write(person.Name);
                writer.Write(person.Age);
            }
        );
}
/// <summary>
/// Reads asynchronously across a concatenation of two in-memory streams and
/// checks the first five bytes arrive in order across the stream boundary.
/// </summary>
public async Task Read_Async()
{
    var parts = new Stream[]
    {
        new MemoryStream(new byte[] { 1, 2, 3 }),
        new MemoryStream(new byte[] { 7, 40, 80, 78 })
    };
    using var concatenation = SequenceStream.Concatenation(
        parts.ToAsyncEnumerable(),
        ArrayPool<byte>.Shared,
        2
    );
    var buffer = new byte[5];

    await concatenation.ReadAsync(buffer, 0, 5);

    buffer.Should().Equal(1, 2, 3, 7, 40);
}
/// <summary>
/// Reads five bytes from a concatenation of two in-memory streams and checks
/// they span the boundary between the underlying streams in order.
/// </summary>
public void Read_Piece()
{
    var parts = new Stream[]
    {
        new MemoryStream(new byte[] { 1, 2, 3 }),
        new MemoryStream(new byte[] { 7, 40, 80, 78 })
    };
    using var concatenation = SequenceStream.Concatenation(parts, ArrayPool<byte>.Shared, 2);
    var buffer = new byte[5];

    concatenation.Read(buffer, 0, 5);

    buffer.Should().Equal(1, 2, 3, 7, 40);
}
/// <summary>
/// For varying chunk sizes and read lengths, verifies that disposing the
/// concatenation disposes every underlying stream it actually read from.
/// </summary>
public void Dispose_Used_Streams_On_Dispose([Range(1, 5)] int chunkSize, [Values(1, 3, 5, 7, 10, 12, 17)] int read)
{
    var streams = new[]
    {
        new Disposable(new MemoryStream(new byte[] { 7, 40, 80, 78 })),
        new Disposable(new MemoryStream(new byte[] { 20, 40, 70 })),
        new Disposable(new MemoryStream(new byte[] { 17, 50, 233, 89, 54 }))
    };

    using (var concatenation = SequenceStream.Concatenation(streams, ArrayPool<byte>.Shared, chunkSize))
    {
        concatenation.Read(new byte[read], 0, read);
    }

    // A stream counts as "used" iff its position advanced; every used stream must be disposed.
    streams.Should().OnlyContain(stream => stream.Position == 0 || stream.Disposed);
}
/// <summary>
/// Requests more bytes than the concatenation holds and verifies the read
/// count equals the total content length and all bytes arrive in order.
/// </summary>
public void Read_Entire_Content()
{
    var parts = new Stream[]
    {
        new MemoryStream(new byte[] { 1, 2, 3 }),
        new MemoryStream(new byte[] { 7, 40, 80, 78 })
    };
    using var concatenation = SequenceStream.Concatenation(parts, ArrayPool<byte>.Shared, 3);
    var buffer = new byte[100];

    var bytesRead = concatenation.Read(buffer, 0, 100);

    bytesRead.Should().Be(7);
    buffer.Take(bytesRead).Should().Equal(1, 2, 3, 7, 40, 80, 78);
}
public void Test() { using var source = new CancellationTokenSource(); var cancelled = false; using var strings = SequenceStream.Using(stream => new StreamWriter(stream, Encoding.UTF8)) .On(Yield()) .WithCancellation(source.Token) .Writing( (output, @string) => output.Write(@string) ); using var streamReader = new StreamReader(strings); Assert.ThrowsAsync <TaskCanceledException>( streamReader.ReadToEndAsync ); cancelled.Should().BeTrue(); async IAsyncEnumerable <string> Yield(
/// <summary>
/// Validates the training parameters and trains a perceptron sequence model
/// over the given events.
/// </summary>
/// <exception cref="System.ArgumentException">
/// Thrown when the parameters are invalid or do not select a sequence algorithm.
/// </exception>
public static AbstractModel train(SequenceStream<Event> events, IDictionary<string, string> trainParams, IDictionary<string, string> reportMap)
{
    if (!isValid(trainParams))
    {
        throw new System.ArgumentException("trainParams are not valid!");
    }

    if (!isSequenceTraining(trainParams))
    {
        throw new System.ArgumentException("Algorithm must be a sequence algorithm!");
    }

    int iterations = getIntParam(trainParams, ITERATIONS_PARAM, ITERATIONS_DEFAULT, reportMap);
    int cutoff = getIntParam(trainParams, CUTOFF_PARAM, CUTOFF_DEFAULT, reportMap);
    bool useAverage = getBooleanParam(trainParams, "UseAverage", true, reportMap);

    var trainer = new SimplePerceptronSequenceTrainer();
    return trainer.trainModel(iterations, events, cutoff, useAverage);
}
/// <summary>
/// Writes each name prefixed with its index; the first item is preceded by a
/// "Names" header line and every item except the last is followed by a newline.
/// </summary>
public static Stream ItemMetaInformationOnBuilder(IEnumerable<string> names)
{
    return SequenceStream
        .Using(stream => new StreamWriter(stream, Encoding.UTF8))
        .On(names)
        .AsItems()
        .Writing(
            (writer, item) =>
            {
                var isFirst = item.Kind.IsFirst();
                var isLast = item.Kind.IsLast();

                if (isFirst)
                {
                    writer.WriteLine("Names");
                }

                writer.Write($"{item.Index}: {item.Value}");

                if (!isLast)
                {
                    writer.WriteLine();
                }
            }
        );
}
// Handler for the Play button: stops any current playback and, when input
// files are listed, plays the PCM data of all of them back-to-back by wrapping
// each file's data region in a SubStream and concatenating via SequenceStream.
private void mPlayButton_Click(object sender, EventArgs e)
{
    mPlaybackDevice.stopPlayback();
    if (mInputFilesListView.Items.Count == 0)
    {
        MessageBox.Show(this, "There are no input files to play", "Play");
        return;
    }
    /* Playback of all files using both SubStream and SequenceStream */
    List<Stream> ifStreams = new List<Stream>(mInputFilesListView.Items.Count);
    foreach (ListViewItem inputFileItem in mInputFilesListView.Items)
    {
        // SubItems[2] is used as the file path — TODO confirm against the list view's column layout.
        FileStream ifs = new FileStream(inputFileItem.SubItems[2].Text, FileMode.Open, FileAccess.Read);
        // Parsing the RIFF header advances the stream; its position afterwards is
        // taken as the start of the PCM data.
        PCMDataInfo pcmInfo = PCMDataInfo.ParseRiffWaveHeader(ifs);
        long startPos = ifs.Position;
        // Rewind before wrapping; presumably SubStream seeks to startPos itself — verify.
        ifs.Position = 0;
        SubStream subIfs = new SubStream(ifs, startPos, pcmInfo.DataLength);
        ifStreams.Add(subIfs);
    }
    // NOTE(review): the FileStreams opened above are never explicitly disposed here;
    // presumably SequenceStream/SubStream take ownership when playback ends — confirm,
    // otherwise this leaks file handles on every click.
    mPCMInputStream = new SequenceStream(ifStreams);
    mPlaybackDevice.play(mPCMInputStream);
}
/// <summary>
/// Starts a sequence-stream builder whose per-stream context is a
/// <see cref="JsonWriter"/> produced by the given factory.
/// </summary>
public static SequenceStream.Builder<JsonWriter> Using(JsonWriterFactory factory)
{
    return SequenceStream.Using(factory.Create);
}
/// <summary>
/// Indexes the event sequences, initializes the perceptron parameter tables,
/// runs <c>findParameters</c> for the given number of iterations, and returns
/// the resulting model (averaged parameters when <paramref name="useAverage"/>).
/// </summary>
public virtual AbstractModel trainModel(int iterations, SequenceStream sequenceStream, int cutoff, bool useAverage)
{
    this.iterations = iterations;
    this.sequenceStream = sequenceStream;
    // Index events once; 'false' presumably disables sorting — TODO confirm against OnePassDataIndexer.
    DataIndexer di = new OnePassDataIndexer(new SequenceStreamEventStream(sequenceStream), cutoff, false);
    numSequences = 0;
    // Count the sequences by full enumeration (the stream exposes no count).
    foreach (Sequence<Event> s in sequenceStream)
    {
        numSequences++;
    }
    outcomeList = di.OutcomeList;
    predLabels = di.PredLabels;
    pmap = new IndexHashTable<string>(predLabels, 0.7d);
    display("Incorporating indexed data for training... \n");
    this.useAverage = useAverage;
    numEvents = di.NumEvents;
    // NOTE(review): this.iterations was already assigned at the top — redundant reassignment.
    this.iterations = iterations;
    outcomeLabels = di.OutcomeLabels;
    // Map each outcome label to its index.
    omap = new Dictionary<string, int?>();
    for (int oli = 0; oli < outcomeLabels.Length; oli++)
    {
        omap[outcomeLabels[oli]] = oli;
    }
    // NOTE(review): outcomeList was already assigned from di.OutcomeList above — redundant.
    outcomeList = di.OutcomeList;
    numPreds = predLabels.Length;
    numOutcomes = outcomeLabels.Length;
    if (useAverage)
    {
        // Per (predicate, outcome) bookkeeping triples used by parameter averaging.
        updates = RectangularArrays.ReturnRectangularIntArray(numPreds, numOutcomes, 3);
    }
    display("done.\n");
    display("\tNumber of Event Tokens: " + numEvents + "\n");
    display("\t Number of Outcomes: " + numOutcomes + "\n");
    display("\t Number of Predicates: " + numPreds + "\n");
    parameters = new MutableContext[numPreds];
    if (useAverage)
    {
        averageParams = new MutableContext[numPreds];
    }
    // Every predicate uses the same full outcome pattern [0..numOutcomes).
    allOutcomesPattern = new int[numOutcomes];
    for (int oi = 0; oi < numOutcomes; oi++)
    {
        allOutcomesPattern[oi] = oi;
    }
    // Zero-initialize (averaged) parameters for every predicate/outcome pair.
    for (int pi = 0; pi < numPreds; pi++)
    {
        parameters[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
        if (useAverage)
        {
            averageParams[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
        }
        for (int aoi = 0; aoi < numOutcomes; aoi++)
        {
            parameters[pi].setParameter(aoi, 0.0);
            if (useAverage)
            {
                averageParams[pi].setParameter(aoi, 0.0);
            }
        }
    }
    modelDistribution = new double[numOutcomes];
    display("Computing model parameters...\n");
    findParameters(iterations);
    display("...done.\n");
    // Create and return the model.
    string[] updatedPredLabels = predLabels;
    if (useAverage)
    {
        return(new PerceptronModel(averageParams, updatedPredLabels, outcomeLabels));
    }
    else
    {
        return(new PerceptronModel(parameters, updatedPredLabels, outcomeLabels));
    }
}
/// <summary>
/// Produces a UTF-8 stream of the given strings separated by the platform
/// newline sequence.
/// </summary>
public static Stream Strings(IEnumerable<string> strings)
{
    return SequenceStream.FromStrings(strings, Encoding.UTF8, Environment.NewLine);
}
/// <summary>
/// Produces a stream whose content is the given byte chunks written
/// back-to-back.
/// </summary>
public static Stream Bytes(IEnumerable<byte[]> chunks) =>
    SequenceStream.UsingStream()
        .On(chunks)
        .Writing((stream, bytes) => stream.Write(bytes, 0, bytes.Length));
/// <summary>
/// Captures an enumerator over the given sequence stream; the event stream's
/// iteration state lives in <c>sequenceIterator</c>.
/// </summary>
public SequenceStreamEventStream(SequenceStream<Event> sequenceStream)
{
    this.sequenceIterator = sequenceStream.GetEnumerator();
}