Example #1
        internal static IObservable<StreamEvent<TPayload>> ToStreamEventObservable<TPayload>(
            this IStreamable<Empty, TPayload> stream,
            QueryContainer container,
            string identifier,
            ReshapingPolicy reshapingPolicy)
        {
            Invariant.IsNotNull(stream, nameof(stream));

            // Fast path: when the upstream operator can fuse its egress and no reshaping is
            // requested, emit StreamEvents directly from the fused pipeline.
            if (stream is IFusibleStreamable<Empty, TPayload> f && f.CanFuseEgressObservable)
            {
                if (reshapingPolicy == ReshapingPolicy.None)
                {
                    return f.FuseEgressObservable((s, e, p, k) => new StreamEvent<TPayload>(s, e, p), container, identifier);
                }
            }

            // General path: with ReshapingPolicy.None egress the stream as-is; otherwise
            // coalesce end edges into interval events before egress.
            return (reshapingPolicy == ReshapingPolicy.None)
                ? new StreamEventObservable<TPayload>(stream, container, identifier)
                : new StreamEventObservable<TPayload>(stream.ToEndEdgeFreeStream(), container, identifier);
        }
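
This internal overload sits behind Trill's public egress surface. Below is a minimal usage sketch, not a definitive example: it assumes the public ToStreamable/ToStreamEventObservable extensions, the StreamEvent factory helpers, and ReshapingPolicy behave as in the Microsoft.StreamProcessing samples; the EgressSketch class and the sample data are hypothetical.

using System;
using System.Linq;
using System.Reactive.Linq;
using Microsoft.StreamProcessing;

public static class EgressSketch
{
    public static void Run()
    {
        // Ingress: three point events at sync times 1, 2, 3, followed by a punctuation
        // so the ingress operator flushes everything it has buffered.
        var input = Enumerable.Range(1, 3)
            .Select(i => StreamEvent.CreatePoint((long)i, i))
            .Concat(new[] { StreamEvent.CreatePunctuation<int>(StreamEvent.InfinitySyncTime) })
            .ToObservable()
            .ToStreamable();

        // Query, then egress back to StreamEvent form. Passing
        // ReshapingPolicy.CoalesceEndEdges instead would exercise the
        // ToEndEdgeFreeStream branch shown above.
        input.Where(x => x % 2 == 1)
             .ToStreamEventObservable()
             .ForEachAsync(e => Console.WriteLine(e))
             .Wait();
    }
}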
Example #2
        /// <summary>
        /// Caches the complete results computed by the streamable upon an immediate call to Subscribe. This
        /// call blocks until the underlying query has fully executed and the results have been cached.
        /// </summary>
        /// <typeparam name="TKey">Type of key for the stream</typeparam>
        /// <typeparam name="TPayload">Type of payload for the stream</typeparam>
        /// <param name="stream">Instance of the stream to be cached</param>
        /// <param name="limit">Limit on the number of events to store in the cached stream</param>
        /// <param name="inferProperties">Specifies whether each stream event in the incoming stream should be examined to
        /// infer the no-intervals and constant-duration properties</param>
        /// <param name="coalesceEndEdges">Whether end edges should be coalesced with their corresponding start edges into interval events</param>
        /// <returns>A streamable cache instance</returns>
        public static StreamCache<TKey, TPayload> Cache<TKey, TPayload>(this IStreamable<TKey, TPayload> stream, ulong limit = 0, bool inferProperties = false, bool coalesceEndEdges = false)
        {
            var elements = new List<QueuedMessage<StreamMessage<TKey, TPayload>>>();

            if (coalesceEndEdges)
            {
                stream = stream.ToEndEdgeFreeStream();
            }

            var observable = new QueuedMessageObservable<TKey, TPayload>(stream);
            var limitSpecified = limit > 0;
            var memoryPool = MemoryManager.GetMemoryPool<TKey, TPayload>(stream.Properties.IsColumnar);
            long lastSync = -1;

            try
            {
                observable.SynchronousForEach(t =>
                {
                    if (t.Kind == MessageKind.DataBatch)
                    {
                        // Data batches must arrive in nondecreasing sync-time order.
                        var mt = t.Message.MinTimestamp;
                        if (mt < lastSync)
                        {
                            throw new StreamProcessingOutOfOrderException("Out-of-order event received during Cache() call");
                        }

                        lastSync = mt;

                        // REVIEW: What if there are a *lot* of data batches after we want to stop?
                        // But we need to wait to see all of the remaining punctuations?
                        if (limitSpecified)
                        {
                            if (limit > 0)
                            {
                                ulong count = (ulong)t.Message.Count;
                                if (count <= limit)
                                {
                                    // whole batch fits
                                    limit -= count;
                                    elements.Add(t);
                                }
                                else
                                {
                                    // Only part of this batch fits: copy the first `limit` rows
                                    // into a freshly allocated batch and free the original.
                                    memoryPool.Get(out StreamMessage<TKey, TPayload> newMessage);
                                    newMessage.Allocate();

                                    for (ulong i = 0; i < limit; i++)
                                    {
                                        newMessage.Add(t.Message.vsync.col[i], t.Message.vother.col[i], t.Message.key.col[i], t.Message[(int)i]);
                                    }
                                    elements.Add(new QueuedMessage<StreamMessage<TKey, TPayload>> {
                                        Kind = MessageKind.DataBatch, Message = newMessage
                                    });
                                    limit = 0;
                                    t.Message.Free();
                                }
                            }
                            else
                            {
                                // Limit already exhausted: drop any further data batches.
                                t.Message.Free();
                            }
                        }
                        else
                        {
                            // No limit was specified: retain every data batch.
                            elements.Add(t);
                        }
                    }
                    else
                    {
                        // Non-data messages (e.g. punctuations) are always retained.
                        elements.Add(t);
                    }
                });
            }
            catch (Exception)
            {
                // On failure, release any batches captured so far before rethrowing.
                for (int i = 0; i < elements.Count; i++)
                {
                    elements[i].Message?.Free();
                }
                throw;
            }
            }

            var p = stream.Properties;

            if (inferProperties)
            {
                InferProperties(elements, out bool noIntervals, out bool constantDuration, out long duration);
                if (noIntervals)
                {
                    ; // Intentional nop until internal variable is available to set property
                }
                if (constantDuration)
                {
                    p = p.ToConstantDuration(true, duration);
                }
            }
            return new StreamCache<TKey, TPayload>(p, elements);
        }
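
A minimal usage sketch of this method follows. It assumes the public ToStreamable/ToStreamEventObservable extensions and StreamEvent factory helpers from Microsoft.StreamProcessing, and that the returned StreamCache can be queried like any other IStreamable, as in the Trill samples; the CacheSketch class and the sample data are hypothetical.

using System;
using System.Linq;
using System.Reactive.Linq;
using Microsoft.StreamProcessing;

public static class CacheSketch
{
    public static void Run()
    {
        // Cache a small in-memory stream once; Cache() blocks until the source has
        // been fully consumed and all of its batches are held in memory. Passing
        // coalesceEndEdges: true would first rewrite start/end edge pairs into
        // interval events, as shown in the method above.
        var cached = Enumerable.Range(0, 10)
            .Select(i => StreamEvent.CreateInterval((long)i, i + 5, i))
            .ToObservable()
            .ToStreamable()
            .Cache();

        // The cache can then back multiple queries without re-ingressing the data.
        cached.Where(x => x % 2 == 0)
              .ToStreamEventObservable()
              .Where(e => e.IsData)
              .ForEachAsync(e => Console.WriteLine(e))
              .Wait();
    }
}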