        internal override void InitializeOperator(int shardId, ShardingInfo shardingInfo)
        {
            _hasSplittedOutput = HasSplittedOutput();

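            // Single-count latches that gate the deploy and run phases of the one subscribe input and one subscribe output.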
            _deploySubscribeInput  = new CountdownEvent(1);
            _deploySubscribeOutput = new CountdownEvent(1);

            _runSubscribeInput  = new CountdownEvent(1);
            _runSubscribeOutput = new CountdownEvent(1);

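            // Resolve this vertex's input connection using the part of the vertex name before any '$' suffix.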
            string[] toEndpoints = GetEndpointNamesForVertex(VertexName.Split('$')[0], _toFromConnections);
            var      fromTuple   = _toFromConnections[new Tuple<string, string>(VertexName.Split('$')[0], toEndpoints[0])];

            if (!fromTuple.Item4)
            {
                AddAsyncInputEndpoint(toEndpoints[0], new ShardedSubscribeInput(this, shardId, shardingInfo.AllShards.Length, toEndpoints[0]));
            }
            else
            {
                throw new NotImplementedException("Shared secondary endpoints are not supported in subscribe operators!!");
            }

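            // Wire the single output endpoint the same way; shared secondary connections are rejected.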
            string[] fromEndpoints = GetEndpointNamesForVertex(VertexName.Split('$')[0], _fromToConnections);
            var      toTuple       = _fromToConnections[new Tuple<string, string>(VertexName.Split('$')[0], fromEndpoints[0])];

            if (!toTuple.Item4)
            {
                AddAsyncOutputEndpoint(fromEndpoints[0], new ShardedSubscribeOutput(this, shardId, shardingInfo.AllShards.Length, fromEndpoints[0]));
            }
            else
            {
                throw new NotImplementedException("Shared secondary endpoints are not supported in subscribe operators!!");
            }
        }
Example #2
        internal override void InitializeOperator(int shardId, ShardingInfo shardingInfo)
        {
            _hasSplittedOutput = HasSplittedOutput();

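            // Total the entries of the deployment's instances map to get the number of shards this operator connects to.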
            _numShardsConnectingTo = 0;
            var instancesMap = _task.DeployDescriptor.InstancesMap();

            foreach (var entry in instancesMap.Keys)
            {
                _numShardsConnectingTo += instancesMap[entry];
            }

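            // Output latches are sized to the number of connecting shards; the input side expects a single signal.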
            _deploySubscribeInput  = new CountdownEvent(1);
            _deploySubscribeOutput = new CountdownEvent(_numShardsConnectingTo);

            _runSubscribeInput  = new CountdownEvent(1);
            _runSubscribeOutput = new CountdownEvent(_numShardsConnectingTo);

            string[] toEndpoints = GetEndpointNamesForVertex(VertexName.Split('$')[0], _toFromConnections);
            var      fromTuple   = _toFromConnections[new Tuple<string, string>(VertexName.Split('$')[0], toEndpoints[0])];

            if (!fromTuple.Item4)
            {
                AddAsyncInputEndpoint(toEndpoints[0], new ShardedSubscribeClientInput(this, shardId, shardingInfo.AllShards.Length, toEndpoints[0]));
            }
            else
            {
                throw new NotImplementedException("Shared secondary endpoints are not supported in subscribe operators!!");
            }

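            // The client-facing output endpoint gets a unique GUID-suffixed name instead of one from the connection tables.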
            string fromEndpoint = "OutputToClient" + Guid.NewGuid().ToString();

            AddAsyncOutputEndpoint(fromEndpoint, new ShardedSubscribeClientOutput(this, shardId, shardingInfo.AllShards.Length, fromEndpoint));
        }
Example #3
        internal override void InitializeOperator(int shardId, ShardingInfo shardingInfo)
        {
            _hasSplittedOutput = HasSplittedOutput();
            string[] toEndpoints   = GetEndpointNamesForVertex(VertexName.Split('$')[0], _toFromConnections);
            string[] fromEndpoints = GetEndpointNamesForVertex(VertexName.Split('$')[0], _fromToConnections);

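            // Classify each outgoing connection as secondary (Item4 set) or ordinary.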
            int secondaryOutputsCount = 0;
            int ordinaryOutputsCount  = 0;

            foreach (var fromEndpoint in fromEndpoints)
            {
                var toTuple = _fromToConnections[new Tuple<string, string>(VertexName.Split('$')[0], fromEndpoint)];
                if (toTuple.Item4)
                {
                    secondaryOutputsCount++;
                }
                else
                {
                    ordinaryOutputsCount++;
                }
            }
            int deployShuffleInputCount = secondaryOutputsCount;

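            // With split output, expect one countdown signal per shard; otherwise one per ordinary output connection.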
            if (_hasSplittedOutput)
            {
                deployShuffleInputCount += shardingInfo.AllShards.Length;
            }
            else
            {
                deployShuffleInputCount += ordinaryOutputsCount;
            }
            _deployShuffleInput = new CountdownEvent(deployShuffleInputCount);
            _runShuffleInput    = new CountdownEvent(deployShuffleInputCount);

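            // Count the incoming connections marked as secondary.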
            int secondaryInputsCount = 0;

            foreach (var toEndpoint in toEndpoints)
            {
                var fromTuple = _toFromConnections[new Tuple<string, string>(VertexName.Split('$')[0], toEndpoint)];
                if (fromTuple.Item4)
                {
                    secondaryInputsCount++;
                }
            }
            _deployShuffleOutput = new CountdownEvent(shardingInfo.AllShards.Length + secondaryInputsCount);
            _runShuffleOutput    = new CountdownEvent(shardingInfo.AllShards.Length + secondaryInputsCount);

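            // Reserve one split-dataset slot per shard for this shard's input.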
            _inputSplitDatasets[shardId] = new object[shardingInfo.AllShards.Length];

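            // Wire inputs: ordinary shuffle inputs directly, secondary inputs behind dataset-creation latches.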
            foreach (var toEndpoint in toEndpoints)
            {
                var fromTuple = _toFromConnections[new Tuple<string, string>(VertexName.Split('$')[0], toEndpoint)];
                if (!fromTuple.Item4)
                {
                    AddAsyncInputEndpoint(toEndpoint, new ShardedShuffleInput(this, shardId, shardingInfo.AllShards.Length, toEndpoint));
                }
                else
                {
                    _startCreatingSecondaryDatasets[fromTuple.Item1]  = new CountdownEvent(1);
                    _finishCreatingSecondaryDatasets[fromTuple.Item1] = new CountdownEvent(1);
                    AddAsyncInputEndpoint(toEndpoint, new ShardedShuffleSecondaryInput(this, shardId, shardingInfo.AllShards.Length, toEndpoint));
                }
            }

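            // Wire outputs, choosing the secondary variant for shared connections.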
            foreach (var fromEndpoint in fromEndpoints)
            {
                var toTuple = _fromToConnections[new Tuple<string, string>(VertexName.Split('$')[0], fromEndpoint)];
                if (!toTuple.Item4)
                {
                    AddAsyncOutputEndpoint(fromEndpoint, new ShardedShuffleOutput(this, shardId, shardingInfo.AllShards.Length, fromEndpoint));
                }
                else
                {
                    AddAsyncOutputEndpoint(fromEndpoint, new ShardedShuffleSecondaryOutput(this, shardId, shardingInfo.AllShards.Length, fromEndpoint));
                }
            }
        }
Example #4
        internal override void InitializeOperator(int shardId, ShardingInfo shardingInfo)
        {
            _hasSplittedOutput = HasSplittedOutput();
            string[] toEndpoints   = GetEndpointNamesForVertex(VertexName.Split('$')[0], _toFromConnections);
            string[] fromEndpoints = GetEndpointNamesForVertex(VertexName.Split('$')[0], _fromToConnections);

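            // Count secondary (Item4 set) and ordinary outgoing connections, as in the shuffle operator above.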
            int secondaryOutputsCount = 0;
            int ordinaryOutputsCount  = 0;

            foreach (var fromEndpoint in fromEndpoints)
            {
                var toTuple = _fromToConnections[new Tuple<string, string>(VertexName.Split('$')[0], fromEndpoint)];
                if (toTuple.Item4)
                {
                    secondaryOutputsCount++;
                }
                else
                {
                    ordinaryOutputsCount++;
                }
            }
            int deployProduceInputCount = secondaryOutputsCount;

            if (_hasSplittedOutput)
            {
                deployProduceInputCount += shardingInfo.AllShards.Length;
            }
            else
            {
                deployProduceInputCount += ordinaryOutputsCount;
            }
            _deployProduceInput = new CountdownEvent(deployProduceInputCount);
            _runProduceInput    = new CountdownEvent(deployProduceInputCount);

            int secondaryInputsCount = 0;

            foreach (var toEndpoint in toEndpoints)
            {
                var fromTuple = _toFromConnections[new Tuple<string, string>(VertexName.Split('$')[0], toEndpoint)];
                if (fromTuple.Item4)
                {
                    secondaryInputsCount++;
                }
            }
            _deployProduceOutput = new CountdownEvent(secondaryInputsCount);
            _runProduceOutput    = new CountdownEvent(secondaryInputsCount);

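            // Gate released once the dataset transformations have been applied.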
            _continueAfterTransformation = new CountdownEvent(1);

            if (secondaryInputsCount > 0)
            {
                _hasSecondaryInput = true;
            }

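            // Produce operators accept only secondary inputs; an ordinary input connection is rejected.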
            foreach (var toEndpoint in toEndpoints)
            {
                var fromTuple = _toFromConnections[new Tuple<string, string>(VertexName.Split('$')[0], toEndpoint)];
                if (!fromTuple.Item4)
                {
                    throw new NotImplementedException("Shared input endpoints are not supported in produce operators!!");
                }
                else
                {
                    _startCreatingSecondaryDatasets[fromTuple.Item1]  = new CountdownEvent(1);
                    _finishCreatingSecondaryDatasets[fromTuple.Item1] = new CountdownEvent(1);
                    AddAsyncInputEndpoint(toEndpoint, new ShardedProducerSecondaryInput(this, shardId, shardingInfo.AllShards.Length, toEndpoint));
                }
            }

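            // Without secondary inputs, the dataset can be created and transformed immediately.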
            if (!_hasSecondaryInput)
            {
                CreateAndTransformDataset(shardId);
                _isTransformationsApplied = true;
                _continueAfterTransformation.Signal();
            }

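            // Wire outputs: ordinary producer outputs, or secondary outputs for shared connections.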
            foreach (var fromEndpoint in fromEndpoints)
            {
                var toTuple = _fromToConnections[new Tuple<string, string>(VertexName.Split('$')[0], fromEndpoint)];
                if (!toTuple.Item4)
                {
                    AddAsyncOutputEndpoint(fromEndpoint, new ShardedProducerOutput(this, shardId, shardingInfo.AllShards.Length, fromEndpoint));
                }
                else
                {
                    AddAsyncOutputEndpoint(fromEndpoint, new ShardedProducerSecondaryOutput(this, shardId, shardingInfo.AllShards.Length, fromEndpoint));
                }
            }
        }