Example #1
            /// <summary>
            /// This is a callback from the remote host side to get the asset associated with a checksum from VS.
            /// </summary>
            public async Task RequestAssetAsync(int sessionId, byte[][] checksums, string streamName)
            {
                try
                {
                    Contract.ThrowIfFalse(_owner._currentSessionId == sessionId);

                    using (Logger.LogBlock(FunctionId.JsonRpcSession_RequestAssetAsync, streamName, _source.Token))
                        using (var stream = await DirectStream.GetAsync(streamName, _source.Token).ConfigureAwait(false))
                        {
                            using (var writer = new StreamObjectWriter(stream))
                            {
                                writer.WriteInt32(sessionId);

                                await WriteAssetAsync(writer, checksums).ConfigureAwait(false);
                            }

                            await stream.FlushAsync(_source.Token).ConfigureAwait(false);
                        }
                }
                catch (IOException)
                {
                    // remote host side is cancelled (client stream connection is closed)
                    // can happen if pinned solution scope is disposed
                }
                catch (OperationCanceledException)
                {
                    // rpc connection is closed.
                    // can happen if pinned solution scope is disposed
                }
            }
Example #2
                /// <summary>
                /// This is a callback from the remote host side to get the asset associated with a checksum from VS.
                /// </summary>
                public async Task RequestAssetAsync(int serviceId, byte[][] checksums, string streamName)
                {
                    try
                    {
                        using (var stream = await DirectStream.GetAsync(streamName, _source.Token).ConfigureAwait(false))
                        {
                            using (var writer = new ObjectWriter(stream))
                            {
                                writer.WriteInt32(serviceId);

                                await WriteAssetAsync(writer, checksums).ConfigureAwait(false);
                            }

                            await stream.FlushAsync(_source.Token).ConfigureAwait(false);
                        }
                    }
                    catch (IOException)
                    {
                        // remote host side is cancelled (client stream connection is closed)
                        // can happen if pinned solution scope is disposed
                    }
                    catch (OperationCanceledException)
                    {
                        // rpc connection is closed.
                        // can happen if pinned solution scope is disposed
                    }
                }
Example #3
            /// <summary>
            /// This is a callback from the remote host side to get the asset associated with a checksum from VS.
            /// </summary>
            public async Task RequestAssetAsync(int scopeId, Checksum[] checksums, string streamName)
            {
                try
                {
                    using (Logger.LogBlock(FunctionId.JsonRpcSession_RequestAssetAsync, streamName, _source.Token))
                        using (var stream = await DirectStream.GetAsync(streamName, _source.Token).ConfigureAwait(false))
                        {
                            var scope = _owner.PinnedRemotableDataScope;
                            using (var writer = new ObjectWriter(stream))
                            {
                                writer.WriteInt32(scopeId);

                                await WriteAssetAsync(writer, scope, checksums).ConfigureAwait(false);
                            }

                            await stream.FlushAsync(_source.Token).ConfigureAwait(false);
                        }
                }
                catch (IOException)
                {
                    // remote host side is cancelled (client stream connection is closed)
                    // can happen if pinned solution scope is disposed
                }
                catch (OperationCanceledException)
                {
                    // rpc connection is closed.
                    // can happen if pinned solution scope is disposed
                }
            }
Example #4
        /// <summary>
        /// This is a callback from the remote host side to get the asset associated with a checksum from VS.
        /// </summary>
        public async Task RequestAssetAsync(int scopeId, Checksum[] checksums, string streamName, CancellationToken cancellationToken)
        {
            try
            {
                using (var source = CancellationTokenSource.CreateLinkedTokenSource(_shutdownCancellationSource.Token, cancellationToken))
                    using (Logger.LogBlock(FunctionId.JsonRpcSession_RequestAssetAsync, streamName, source.Token))
                        using (var stream = await DirectStream.GetAsync(streamName, source.Token).ConfigureAwait(false))
                        {
                            using (var writer = new ObjectWriter(stream, source.Token))
                            {
                                writer.WriteInt32(scopeId);

                                await WriteAssetAsync(writer, scopeId, checksums, source.Token).ConfigureAwait(false);
                            }

                            await stream.FlushAsync(source.Token).ConfigureAwait(false);
                        }
            }
            catch (IOException)
            {
                // direct stream can throw if cancellation happens since direct stream still uses
                // disconnection for cancellation
            }
            catch (OperationCanceledException)
            {
                // this can happen if connection got shutdown
            }
        }
Example #5
                /// <summary>
                /// This is a callback from the remote host side to get the asset associated with a checksum from VS.
                /// </summary>
                public async Task RequestAssetAsync(int serviceId, byte[] checksum, string streamName)
                {
                    try
                    {
                        var service = ChecksumScope.Workspace.Services.GetRequiredService<ISolutionChecksumService>();

                        using (var stream = await DirectStream.GetAsync(streamName, _source.Token).ConfigureAwait(false))
                        {
                            using (var writer = new ObjectWriter(stream))
                            {
                                writer.WriteInt32(serviceId);
                                writer.WriteArray(checksum);

                                var checksumObject = service.GetChecksumObject(new Checksum(checksum), _source.Token);
                                writer.WriteString(checksumObject.Kind);

                                await checksumObject.WriteToAsync(writer, _source.Token).ConfigureAwait(false);
                            }

                            await stream.FlushAsync(_source.Token).ConfigureAwait(false);
                        }
                    }
                    catch (IOException)
                    {
                        // remote host side is cancelled (client stream connection is closed)
                        // can happen if pinned solution scope is disposed
                    }
                    catch (OperationCanceledException)
                    {
                        // rpc connection is closed.
                        // can happen if pinned solution scope is disposed
                    }
                }
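Note: Example #5 spells out the response layout that the other RequestAssetAsync examples hide behind a WriteAssetAsync helper: the service id, the raw checksum bytes, the asset's kind, and finally the asset payload. A rough, self-contained sketch of that ordering, using BinaryWriter as a stand-in (Roslyn's ObjectWriter has its own internal wire format, so this illustrates the shape only, not the actual bytes):

        // Hypothetical illustration of the response ordering seen in Example #5.
        // BinaryWriter is a stand-in; the real code uses Roslyn's internal ObjectWriter.
        internal static class AssetResponseSketch
        {
            public static byte[] WriteResponse(int serviceId, byte[] checksum, string kind, byte[] assetPayload)
            {
                using (var stream = new System.IO.MemoryStream())
                {
                    using (var writer = new System.IO.BinaryWriter(stream))
                    {
                        writer.Write(serviceId);            // which request the response belongs to
                        writer.Write(checksum.Length);      // checksum bytes, length-prefixed
                        writer.Write(checksum);
                        writer.Write(kind);                 // the checksum object's Kind
                        writer.Write(assetPayload.Length);  // asset body, length-prefixed
                        writer.Write(assetPayload);
                    }

                    return stream.ToArray();
                }
            }
        }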
Example #6
        private static void SynthToCam(string text, CameraWindow cw)
        {
            var synthFormat = new System.Speech.AudioFormat.SpeechAudioFormatInfo(System.Speech.AudioFormat.EncodingFormat.Pcm, 11025, 16, 1, 22100, 2, null);

            using (var synthesizer = new SpeechSynthesizer())
            {
                using (var waveStream = new MemoryStream())
                {
                    //write some silence to the stream to allow camera to initialise properly
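                    //(22050 bytes = 1 second of 11,025 Hz, 16-bit mono PCM)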
                    var silence = new byte[1 * 22050];
                    waveStream.Write(silence, 0, silence.Length);

                    var pbuilder = new PromptBuilder();
                    var pStyle   = new PromptStyle
                    {
                        Emphasis = PromptEmphasis.Strong,
                        Rate     = PromptRate.Slow,
                        Volume   = PromptVolume.ExtraLoud
                    };

                    pbuilder.StartStyle(pStyle);
                    pbuilder.StartParagraph();
                    pbuilder.StartVoice(VoiceGender.Male, VoiceAge.Adult, 2);
                    pbuilder.StartSentence();
                    pbuilder.AppendText(text);
                    pbuilder.EndSentence();
                    pbuilder.EndVoice();
                    pbuilder.EndParagraph();
                    pbuilder.EndStyle();

                    synthesizer.SetOutputToAudioStream(waveStream, synthFormat);
                    synthesizer.Speak(pbuilder);
                    synthesizer.SetOutputToNull();

                    //write some silence to the stream to allow camera to end properly
                    waveStream.Write(silence, 0, silence.Length);
                    waveStream.Seek(0, SeekOrigin.Begin);

                    var ds = new DirectStream(waveStream)
                    {
                        RecordingFormat = new WaveFormat(11025, 16, 1)
                    };
                    var talkTarget = TalkHelper.GetTalkTarget(cw.Camobject, ds);
                    ds.Start();
                    talkTarget.Start();
                    while (ds.IsRunning)
                    {
                        Thread.Sleep(100);
                    }
                    ds.Stop();
                    talkTarget.Stop();
                    talkTarget = null;
                    ds         = null;

                    waveStream.Close();
                }
            }
        }
Example #7
        private static void SynthToCam(string fileName, CameraWindow cw)
        {
            using (var waveStream = new MemoryStream())
            {
                //write some silence to the stream to allow camera to initialise properly
                var silence = new byte[1 * 22050];
                waveStream.Write(silence, 0, silence.Length);

                //read in and convert the wave stream into our format
                using (var reader = new WaveFileReader(fileName))
                {
                    var    newFormat = new WaveFormat(11025, 16, 1);
                    byte[] buff      = new byte[22050];

                    using (var conversionStream = new WaveFormatConversionStream(newFormat, reader))
                    {
                        do
                        {
                            int i = conversionStream.Read(buff, 0, 22050);
                            waveStream.Write(buff, 0, i);
                            if (i < 22050)
                            {
                                break;
                            }
                        } while (true);
                    }
                }


                //write some silence to the stream to allow camera to end properly
                waveStream.Write(silence, 0, silence.Length);

                waveStream.Seek(0, SeekOrigin.Begin);

                var ds = new DirectStream(waveStream)
                {
                    RecordingFormat = new WaveFormat(11025, 16, 1)
                };
                var talkTarget = TalkHelper.GetTalkTarget(cw.Camobject, ds);

                ds.Start();
                talkTarget.Start();
                while (ds.IsRunning)
                {
                    Thread.Sleep(100);
                }
                ds.Stop();
                talkTarget.Stop();
                talkTarget = null;
                ds         = null;

                waveStream.Close();
            }
        }
Example #8
        /// <summary>
        /// This is a callback from the remote host side to get the asset associated with a checksum from VS.
        /// </summary>
        public async Task RequestAssetAsync(int scopeId, Checksum[] checksums, string streamName, CancellationToken cancellationToken)
        {
            try
            {
                using (var combinedCancellationToken = _shutdownCancellationSource.Token.CombineWith(cancellationToken))
                    using (Logger.LogBlock(FunctionId.JsonRpcSession_RequestAssetAsync, streamName, combinedCancellationToken.Token))
                        using (var stream = await DirectStream.GetAsync(streamName, combinedCancellationToken.Token).ConfigureAwait(false))
                        {
                            using (var writer = new ObjectWriter(stream, combinedCancellationToken.Token))
                            {
                                writer.WriteInt32(scopeId);

                                await WriteAssetAsync(writer, scopeId, checksums, combinedCancellationToken.Token).ConfigureAwait(false);
                            }

                            await stream.FlushAsync(combinedCancellationToken.Token).ConfigureAwait(false);
                        }
            }
            catch (Exception ex) when (ReportUnlessCanceled(ex, cancellationToken))
            {
                // only expected exceptions are caught; otherwise, report NFW (non-fatal Watson) and let them propagate
                Debug.Assert(cancellationToken.IsCancellationRequested || ex is IOException);
            }
        }
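Note: the CombineWith helper in Example #8 is not part of the BCL and its implementation is not shown here. A minimal sketch of one plausible shape, inferred only from how it is used above (a disposable wrapper around CancellationTokenSource.CreateLinkedTokenSource that exposes the combined token):

        using System;
        using System.Threading;

        // Hypothetical sketch of the CombineWith helper used in Example #8; the real
        // implementation is not shown above and may differ.
        internal static class CancellationTokenExtensions
        {
            public static CombinedCancellationToken CombineWith(this CancellationToken first, CancellationToken second)
                => new CombinedCancellationToken(CancellationTokenSource.CreateLinkedTokenSource(first, second));
        }

        internal sealed class CombinedCancellationToken : IDisposable
        {
            private readonly CancellationTokenSource _source;

            public CombinedCancellationToken(CancellationTokenSource source) => _source = source;

            // cancelled when either of the two linked tokens is cancelled
            public CancellationToken Token => _source.Token;

            public void Dispose() => _source.Dispose();
        }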
Example #9
        private static void SynthToCam(string fileName, CameraWindow cw)
        {
            try
            {
                using (var waveStream = new MemoryStream())
                {
                    //write some silence to the stream to allow camera to initialise properly
                    var silence = new byte[1 * 22050];
                    waveStream.Write(silence, 0, silence.Length);

                    var newFormat = new WaveFormat(11025, 16, 1);

                    try
                    {
                        if (File.Exists(fileName))
                        {
                            //read in and convert the wave stream into our format
                            var reader = new WaveFileReader(fileName);
                            var buff   = new byte[22050];

                            using (var conversionStream = new WaveFormatConversionStream(newFormat, reader))
                            {
                                while (true)
                                {
                                    var i = conversionStream.Read(buff, 0, buff.Length);
                                    waveStream.Write(buff, 0, i);
                                    if (i < 22050)
                                    {
                                        break;
                                    }
                                }
                            }
                        }
                        else
                        {
                            // no readable wave file on disk; throw so the catch block below
                            // falls back to decoding the source with AudioReader
                            throw new FileNotFoundException("Wave file not found", fileName);
                        }
                    }
                    catch
                    {
                        const int BUFFER_LIMIT = 1024000;

                        using (var ar = new AudioReader(newFormat.SampleRate, newFormat.Channels))
                        {
                            ar.ReadSamples(fileName, (b, c) =>
                            {
                                waveStream.Write(b, 0, c);
                                return waveStream.Length >= BUFFER_LIMIT;
                            });
                        }
                    }

                    //write some silence to the stream to allow camera to end properly
                    waveStream.Write(silence, 0, silence.Length);
                    waveStream.Seek(0, SeekOrigin.Begin);

                    var ds = new DirectStream(waveStream)
                    {
                        RecordingFormat = new WaveFormat(11025, 16, 1)
                    };
                    var talkTarget = TalkHelper.GetTalkTarget(cw.Camobject, ds);
                    ds.Start();
                    talkTarget.Start();
                    while (ds.IsRunning)
                    {
                        Thread.Sleep(100);
                    }
                    ds.Stop();
                    talkTarget.Stop();
                    talkTarget = null;
                    ds         = null;
                }
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "SynthToCam");
            }
        }
Example #10
        private static void SynthToCam(string text, CameraWindow cw)
        {
            var synthFormat = new System.Speech.AudioFormat.SpeechAudioFormatInfo(System.Speech.AudioFormat.EncodingFormat.Pcm, 11025, 16, 1, 22100, 2, null);

            using (var synthesizer = new SpeechSynthesizer())
            {
                using (var waveStream = new MemoryStream())
                {
                    //write some silence to the stream to allow camera to initialise properly
                    var silence = new byte[1 * 22050];
                    waveStream.Write(silence, 0, silence.Length);

                    var pbuilder = new PromptBuilder();
                    var pStyle   = new PromptStyle
                    {
                        Emphasis = PromptEmphasis.Strong,
                        Rate     = PromptRate.Slow,
                        Volume   = PromptVolume.ExtraLoud
                    };

                    pbuilder.StartStyle(pStyle);
                    pbuilder.StartParagraph();
                    pbuilder.StartVoice(VoiceGender.Male, VoiceAge.Adult, 2);
                    pbuilder.StartSentence();
                    pbuilder.AppendText(text);
                    pbuilder.EndSentence();
                    pbuilder.EndVoice();
                    pbuilder.EndParagraph();
                    pbuilder.EndStyle();

                    synthesizer.SetOutputToAudioStream(waveStream, synthFormat);
                    synthesizer.Speak(pbuilder);
                    synthesizer.SetOutputToNull();

                    //write some silence to the stream to allow camera to end properly
                    waveStream.Write(silence, 0, silence.Length);

                    waveStream.Seek(0, SeekOrigin.Begin);

                    ITalkTarget talkTarget = null;

                    var ds = new DirectStream(waveStream)
                    {
                        RecordingFormat = new WaveFormat(11025, 16, 1)
                    };
                    switch (cw.Camobject.settings.audiomodel)
                    {
                    case "Foscam":
                        ds.Interval   = 40;
                        ds.PacketSize = 882;     // (40ms packet at 22050 bytes per second)
                        talkTarget    = new TalkFoscam(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport,
                                                       cw.Camobject.settings.audiousername,
                                                       cw.Camobject.settings.audiopassword, ds);
                        break;

                    case "NetworkKinect":
                        ds.Interval   = 40;
                        ds.PacketSize = 882;
                        talkTarget    = new TalkNetworkKinect(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport, ds);
                        break;

                    case "iSpyServer":
                        ds.Interval   = 40;
                        ds.PacketSize = 882;
                        talkTarget    = new TalkiSpyServer(cw.Camobject.settings.audioip,
                                                           cw.Camobject.settings.audioport,
                                                           ds);
                        break;

                    case "Axis":
                        talkTarget = new TalkAxis(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport,
                                                  cw.Camobject.settings.audiousername,
                                                  cw.Camobject.settings.audiopassword, ds);
                        break;

                    default:
                        //local playback
                        talkTarget = new TalkLocal(ds);

                        break;
                    }
                    ds.Start();
                    talkTarget.Start();
                    while (ds.IsRunning)
                    {
                        Thread.Sleep(100);
                    }
                    ds.Stop();
                    if (talkTarget != null)
                    {
                        talkTarget.Stop();
                    }
                    talkTarget = null;
                    ds         = null;

                    waveStream.Close();
                }
            }
        }
Example #11
        private static void SynthToCam(string fileName, CameraWindow cw)
        {
            using (var waveStream = new MemoryStream())
            {
                //write some silence to the stream to allow camera to initialise properly
                var silence = new byte[1 * 22050];
                waveStream.Write(silence, 0, silence.Length);

                //read in and convert the wave stream into our format
                using (var reader = new WaveFileReader(fileName))
                {
                    var    newFormat = new WaveFormat(11025, 16, 1);
                    byte[] buff      = new byte[22050];

                    using (var conversionStream = new WaveFormatConversionStream(newFormat, reader))
                    {
                        do
                        {
                            int i = conversionStream.Read(buff, 0, 22050);
                            waveStream.Write(buff, 0, i);
                            if (i < 22050)
                            {
                                break;
                            }
                        } while (true);
                    }
                }


                //write some silence to the stream to allow camera to end properly
                waveStream.Write(silence, 0, silence.Length);

                waveStream.Seek(0, SeekOrigin.Begin);

                ITalkTarget talkTarget;

                var ds = new DirectStream(waveStream)
                {
                    RecordingFormat = new WaveFormat(11025, 16, 1)
                };
                switch (cw.Camobject.settings.audiomodel)
                {
                case "Foscam":
                    ds.Interval   = 40;
                    ds.PacketSize = 882;     // (40ms packet at 22050 bytes per second)
                    talkTarget    = new TalkFoscam(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport,
                                                   cw.Camobject.settings.audiousername,
                                                   cw.Camobject.settings.audiopassword, ds);
                    break;

                case "NetworkKinect":
                    ds.Interval   = 40;
                    ds.PacketSize = 882;
                    talkTarget    = new TalkNetworkKinect(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport, ds);
                    break;

                case "iSpyServer":
                    ds.Interval   = 40;
                    ds.PacketSize = 882;
                    talkTarget    = new TalkiSpyServer(cw.Camobject.settings.audioip,
                                                       cw.Camobject.settings.audioport,
                                                       ds);
                    break;

                case "Axis":
                    talkTarget = new TalkAxis(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport,
                                              cw.Camobject.settings.audiousername,
                                              cw.Camobject.settings.audiopassword, ds);
                    break;

                default:
                    //local playback
                    talkTarget = new TalkLocal(ds);

                    break;
                }
                ds.Start();
                talkTarget.Start();
                while (ds.IsRunning)
                {
                    Thread.Sleep(100);
                }
                ds.Stop();
                if (talkTarget != null)
                {
                    talkTarget.Stop();
                }
                talkTarget = null;
                ds         = null;

                waveStream.Close();
            }
        }
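Note: all of the SynthToCam examples share the same playback core: buffer 11,025 Hz, 16-bit mono PCM into a MemoryStream, wrap it in a DirectStream with a matching RecordingFormat, start a talk target, and poll IsRunning until the buffered audio has been consumed. A minimal sketch of just that pattern, assuming the DirectStream, WaveFormat and TalkLocal types from the iSpy/NAudio code the examples are taken from:

        // Minimal sketch of the shared playback pattern; DirectStream, WaveFormat and
        // TalkLocal are assumed to be the iSpy/NAudio types used by the examples above.
        private static void PlayPcmBuffer(byte[] pcm11KhzMono16Bit)
        {
            using (var waveStream = new MemoryStream(pcm11KhzMono16Bit))
            {
                var ds = new DirectStream(waveStream)
                {
                    RecordingFormat = new WaveFormat(11025, 16, 1)
                };
                var talkTarget = new TalkLocal(ds);   // local playback, as in Example #10's default case

                ds.Start();
                talkTarget.Start();

                // poll until the buffered audio has been consumed, as the examples above do
                while (ds.IsRunning)
                {
                    Thread.Sleep(100);
                }

                ds.Stop();
                talkTarget.Stop();
            }
        }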