// Test-fixture constructor: pre-builds the graph stages shared by the GraphInterpreter specs.
// NOTE(review): assumes the enclosing class declares the _identity/_detach/_zip/_broadcast/_merge/_balance fields.
public GraphInterpreterSpec(ITestOutputHelper output = null) : base(output)
{
    _identity = GraphStages.Identity<int>();
    _detach = new Detacher<int>();
    _zip = new Zip<int, string>();
    _broadcast = new Broadcast<int>(2);   // fan-out to 2 downstreams
    _merge = new Merge<int>(2);           // fan-in from 2 upstreams
    _balance = new Balance<int>(2);
}
// Handles an incoming broadcast: prepends it to the broadcast-message label on the
// UI thread (newest message on top) and, for HostBroadcast packets, registers the
// host with the observer.
public void ReceiveBroadcast(Broadcast data)
{
    if (data != null)
    {
        // ThreadSafeMethod marshals the delegate onto the UI thread before touching controls.
        this.ThreadSafeMethod(new MethodInvoker(delegate()
        {
            string str = this.lbBroadcastMessage.Text;
            // New entry first, previous text appended below.
            this.lbBroadcastMessage.Text = string.Format("{0}\r\n{1}", data, str);
            HostBroadcast host = data as HostBroadcast;
            if (host != null && this.observer != null)
            {
                this.observer.AddDataObserver(host);
            }
        }));
    }
}
/// <summary>
/// Saves a broadcast (insert when no BroadcastID querystring, update otherwise),
/// then redirects to the broadcasts admin page. Any failure redirects to the error page.
/// </summary>
protected void btnBroadcast_Click(object sender, EventArgs e)
{
    try
    {
        // use EF to connect to SQL Server
        using (wfbEntities2 db = new wfbEntities2())
        {
            // use the broadcast model to save the new record
            Broadcast b = new Broadcast();
            Int32 BroadcastID = 0;

            // check the querystring for an id so we can determine add / update
            if (Request.QueryString["BroadcastID"] != null)
            {
                // get the id from the url
                BroadcastID = Convert.ToInt32(Request.QueryString["BroadcastID"]);

                // get the current broadcast from EF
                b = (from objB in db.Broadcasts
                     where objB.BroadcastID == BroadcastID
                     select objB).FirstOrDefault();

                // BUGFIX: FirstOrDefault returns null when the id does not exist;
                // previously execution fell through and threw NullReferenceException
                // on the property assignments below.
                if (b == null)
                {
                    Response.Redirect("/error.aspx");
                    return;
                }
            }

            b.MessageBroadcast = txtMessage.Text;
            b.Reference_CompanyID = 1;
            b.SignatureBroadcast = txtName.Text;
            b.TitleBroadcast = txtTitle.Text;

            // call add only if we have no broadcast ID
            if (BroadcastID == 0)
            {
                db.Broadcasts.Add(b);
            }

            // run the update or insert
            db.SaveChanges();

            // redirect to the updated broadcasts page
            Response.Redirect("/admin/broadcasts.aspx");
        }
    }
    catch (Exception)
    {
        // Swallow-and-redirect kept for parity with the rest of the admin pages;
        // consider logging the exception before redirecting.
        Response.Redirect("/error.aspx");
    }
}
/// <summary>
/// Persists a new bot broadcast record for the authenticated user and returns a
/// JSON status response (200 on success, exception response otherwise).
/// </summary>
// NOTE(review): "Sended"/"SubmitedUser" are the entity's existing property names — not renamed here.
// NOTE(review): parameters dataCode and count are unused by this method — confirm whether callers rely on them.
public JsonResult Save(BotType type, string text, string textFa, string fileName, string dataCode, int?count)
{
    Response response;
    try
    {
        var user = GetAuthenticatedUser();
        using (var db = new KiaGalleryContext())
        {
            var item = new Broadcast()
            {
                BroadcastType = type,
                Text = text,
                TextFa = textFa,
                FileName = fileName,
                FileId = null,          // file id assigned later, once the bot uploads the file
                CreatedDate = DateTime.Now,
                Sended = false,         // not yet dispatched by the bot
                SubmitedUser = user.Id
            };
            db.BotBroadcast.Add(item);
            db.SaveChanges();
        }
        // Success message is intentionally localized (Persian): "The public message was saved."
        response = new Response() { status = 200, message = "پیام عمومی ثبت شد." };
    }
    catch (Exception ex)
    {
        response = Core.GetExceptionResponse(ex);
    }
    return(Json(response, JsonRequestBehavior.AllowGet));
}
/// <summary>
/// Shows the detail view for a broadcast. A missing id yields a placeholder
/// entity with the sentinel id -1 (new-item mode); an unknown id yields 404.
/// </summary>
public async Task <IActionResult> Detail(int?id)
{
    Broadcast item;

    if (id == null)
    {
        // No id supplied: present an empty placeholder for creation.
        item = new Broadcast { Id = -1 };
    }
    else
    {
        item = await this.Db.Broadcasts
            .FirstOrDefaultAsync(x => x.Id == id.Value);
    }

    if (item == null)
    {
        return NotFound();
    }

    return View(item);
}
/// <summary>
/// Tracks a discovered peer: adds it to the node map on first sight (with a UI
/// message) and refreshes its last-seen timestamp on every discovery.
/// </summary>
private void OnPeerDiscovered(Broadcast broadcast)
{
    try
    {
        if (!Nodes.TryGetValue(broadcast.Address, out Node existingNode))
        {
            existingNode = new Node { Address = broadcast.Address, Name = broadcast.Data };
            // NOTE(review): TryGetValue-then-TryAdd can race with a concurrent discovery;
            // losing the race is harmless apart from a duplicate UI message — confirm
            // Nodes is a ConcurrentDictionary.
            Nodes.TryAdd(existingNode.Address, existingNode);
            RunOnUiThread(() => Messages.Add($"Discovered: {existingNode.Name}"));
        }
        // Refresh last-seen time whether the node is new or known.
        existingNode.DiscoveredAt = DateTime.UtcNow;
    }
    catch (Exception ex)
    {
        RunOnUiThread(() => Messages.Add($"Exception: {ex.Message}"));
    }
}
/// <summary>
/// CPU forward pass: reshapes the weight (and optional bias) so they can be
/// broadcast against the input, then computes x * weight (+ bias).
/// </summary>
protected NdArray ForwardCpu(NdArray x)
{
    int[] dataShape = x.Shape;
    int[] weightShape = this.Weight.Shape;

    // Pad the weight's shape with leading 1s (one per axis before this.Axis)
    // and trailing 1s so it aligns with the input's rank for broadcasting.
    List<int> padded = new List<int>();
    for (int axis = 0; axis < this.Axis; axis++)
    {
        padded.Add(1);
    }
    padded.AddRange(weightShape);
    int trailing = dataShape.Length - this.Axis - weightShape.Length;
    for (int axis = 0; axis < trailing; axis++)
    {
        padded.Add(1);
    }

    int[] preShape = padded.ToArray();

    // Reshape then broadcast the weight up to the input's full shape.
    NdArray reshapedWeight = new Reshape(preShape).Forward(this.Weight)[0];
    NdArray scale = new Broadcast(dataShape).Forward(reshapedWeight)[0];

    if (BiasTerm)
    {
        NdArray reshapedBias = new Reshape(preShape).Forward(this.Bias)[0];
        NdArray bias = new Broadcast(dataShape).Forward(reshapedBias)[0];
        return x * scale + bias;
    }

    return x * scale;
}
/// <summary>
/// Subscribes to MyChannel with the given parameter and optionally sends a test
/// message immediately afterwards (used by integration tests).
/// </summary>
/// <param name="parameter">Channel parameter used for both subscription and send.</param>
/// <param name="sendMessageAfterSubscribing">When true, a message is pushed right after subscribing.</param>
/// <returns>The created channel subscription.</returns>
public ChannelSubscription SubscribeToMyChannel(
    string parameter,
    bool sendMessageAfterSubscribing
    )
{
    var sub = Broadcast.Channel <MyChannel>()
              .WithParameters(parameter)
              .CreateSubscription();

    if (sendMessageAfterSubscribing)
    {
        Broadcast.Channel <MyChannel>()
        .WithParameters(parameter)
        .Send(new MyMessage { foo = "Message after subscribing" });
        // make sure the server really delivers the message
        // before a subscription handler can be registered
        Thread.Sleep(2000);
    }
    return(sub);
}
/// <summary>
/// Fixture setup: stubs the broadcast SOAP service so QueryBroadcasts returns a
/// single RUNNING IVR broadcast whenever the request matches ExpectedQueryBroadcast.
/// </summary>
public void FixtureSetup()
{
    BroadcastServiceMock = MockRepository.GenerateStub <IBroadcastServicePortTypeClient>();
    Client = new SoapBroadcastClient(BroadcastServiceMock);

    // Canned service response: one running IVR broadcast.
    var queryBroadcast = new Broadcast[1];
    BroadcastId = 1;
    BroadcastName = "broadcast";
    BroadcastLastModified = DateTime.Now;
    queryBroadcast[0] = new Broadcast(BroadcastId, BroadcastName, BroadcastStatus.RUNNING, BroadcastLastModified, BroadcastType.IVR, null);

    // Expected client-side query the stub should be matched against.
    CfBroadcastType[] broadcastType = { CfBroadcastType.Ivr };
    ExpectedQueryBroadcast = new CfQueryBroadcasts(5, 0, broadcastType, true, "labelName");

    var cfBroadcastQueryResult = new BroadcastQueryResult(1, queryBroadcast);
    // Only requests whose paging, type and label match the expectation get the canned result.
    BroadcastServiceMock
    .Stub(b => b.QueryBroadcasts(Arg <QueryBroadcasts> .Matches(x => x.MaxResults == ExpectedQueryBroadcast.MaxResults &&
                                                                x.FirstResult == ExpectedQueryBroadcast.FirstResult &&
                                                                x.Type == BroadcastType.IVR.ToString() &&
                                                                x.LabelName == ExpectedQueryBroadcast.LabelName)))
    .Return(cfBroadcastQueryResult);
}
/// <summary>
/// Verifies a zip -> broadcast graph: once both sinks have demand and both sources
/// have emitted, the zipped tuple is delivered to both sinks and new demand is
/// propagated upstream.
/// </summary>
public void GraphInterpreter_should_implement_zip_broadcast()
{
    WithTestSetup((setup, builder, lastEvents) =>
    {
        var source1 = setup.NewUpstreamProbe <int>("source1");
        var source2 = setup.NewUpstreamProbe <int>("source2");
        var sink1 = setup.NewDownstreamProbe <Tuple <int, int> >("sink1");
        var sink2 = setup.NewDownstreamProbe <Tuple <int, int> >("sink2");
        var zip = new Zip <int, int>();
        var broadcast = new Broadcast <Tuple <int, int> >(2);

        // Wire: source1/source2 -> zip -> broadcast -> sink1/sink2.
        builder(new IGraphStageWithMaterializedValue <Shape, object>[] { broadcast, zip })
        .Connect(source1, zip.In0)
        .Connect(source2, zip.In1)
        .Connect(zip.Out, broadcast.In)
        .Connect(broadcast.Out(0), sink1)
        .Connect(broadcast.Out(1), sink2)
        .Init();

        lastEvents().Should().BeEmpty();

        // First sink demand propagates through broadcast+zip to both sources.
        sink1.RequestOne();
        lastEvents().Should().BeEquivalentTo(new RequestOne(source1), new RequestOne(source2));

        sink2.RequestOne();
        source1.OnNext(1);
        // Zip waits for both inputs before emitting.
        lastEvents().Should().BeEmpty();

        source2.OnNext(2);
        // NOTE(review): .Equal asserts an exact event ORDER here; a sibling copy of
        // this test (see the other zip_broadcast spec in this file) expects a
        // different ordering — confirm which interpreter version each targets.
        lastEvents()
        .Should()
        .Equal(new OnNext(sink1, new Tuple <int, int>(1, 2)), new RequestOne(source1), new RequestOne(source2),
               new OnNext(sink2, new Tuple <int, int>(1, 2)));
    });
}
/// <summary>
/// Splits the payload into fixed-size segments and broadcasts each one wrapped
/// in a Package tagged with a shared sequence id, so the receiver can reassemble.
/// </summary>
/// <param name="payload">Raw bytes to send; must not be null.</param>
/// <returns>Total number of bytes sent across all packages.</returns>
public override async Task <int> SendAsync(byte[] payload)
{
    if (payload == null)
    {
        throw new ArgumentNullException(nameof(payload));
    }

    // One sequence id ties all chunks of this payload together.
    var sequenceGuid = Guid.NewGuid();
    var chunks = payload.Chunk(_payloadSegmentLength).ToArray();
    var totalChunks = chunks.Length;

    var sentBytes = 0;
    for (var index = 0; index < totalChunks; index++)
    {
        var package = new Package(sequenceGuid, totalChunks, new Segment(index, chunks[index]));
        var serialized = _formatter.OneLineSerialize(package);
        sentBytes += await Broadcast.SendAsync(serialized);
    }

    return sentBytes;
}
/// <summary>
/// Verifies a broadcast variable works inside a UDF and that using it after
/// Destroy() fails on the JVM side.
/// </summary>
public void TestDestroy(string isEncryptionEnabled)
{
    _spark.SparkContext.GetConf().Set("spark.io.encryption.enabled", isEncryptionEnabled);

    var obj1 = new TestBroadcastVariable(5, "destroy");
    Broadcast <TestBroadcastVariable> bc1 = _spark.SparkContext.Broadcast(obj1);

    Func <Column, Column> udf = Udf <string, string>(
        str => $"{str} {bc1.Value().StringValue}, {bc1.Value().IntValue}");

    var expected = new string[] { "hello destroy, 5", "world destroy, 5" };
    string[] actual = ToStringArray(_df.Select(udf(_df["_1"])));
    Assert.Equal(expected, actual);

    bc1.Destroy();

    // Throws the following exception:
    // ERROR Utils: Exception encountered
    // org.apache.spark.SparkException: Attempted to use Broadcast(0) after it was destroyed(destroy at NativeMethodAccessorImpl.java:0)
    // at org.apache.spark.broadcast.Broadcast.assertValid(Broadcast.scala:144)
    // at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$writeObject$1.apply$mcV$sp(TorrentBroadcast.scala:203)
    // at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$writeObject$1.apply(TorrentBroadcast.scala:202)
    // at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$writeObject$1.apply(TorrentBroadcast.scala:202)
    // at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1326)
    // at org.apache.spark.broadcast.TorrentBroadcast.writeObject(TorrentBroadcast.scala:202)
    // at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    //
    // BUGFIX: the previous try { ...; Assert.True(false); } catch (Exception e) { Assert.NotNull(e); }
    // pattern could NEVER fail: when Collect() did not throw, the Assert.True(false)
    // exception was itself caught and satisfied Assert.NotNull. Use ThrowsAny instead.
    Assert.ThrowsAny <Exception>(() => _df.Select(udf(_df["_1"])).Collect().ToArray());
}
/// <summary>
/// Creates a broadcast bound to the bot application matching collection.BotId,
/// persists it when the model is valid, sends it, and redirects to Index.
/// </summary>
public async Task <ActionResult> Create(Broadcast collection)
{
    BotApplication botApplication = db.BotApplications.Where(x => x.BotId == collection.BotId).FirstOrDefault();
    collection.BotApplication = botApplication;
    try
    {
        if (ModelState.IsValid)
        {
            db.Broadcasts.Add(collection);
            db.SaveChanges();
            await SendMessage(collection);
        }
        //await ShowDistributionListAsync(botApplication.BotAuthorizationTokenApi);
        return(RedirectToAction("Index"));
    }
    catch (Exception ex)
    {
        // BUGFIX: View(ex.Message) treated the exception TEXT as a view NAME,
        // producing a "view not found" error instead of showing the failure.
        // Surface the error on the Create form instead.
        ModelState.AddModelError(string.Empty, ex.Message);
        return(View(collection));
    }
}
/// <summary>
/// Downloads all replay chunks of a broadcast in parallel: high-priority chunks
/// first, then (optionally) low-priority chunks that overlap the live stream.
/// </summary>
private void DownloadReplayVideos(Broadcast broadcast, AccessPublic accessPublic)
{
    // Count of completed downloads, shared by all parallel workers.
    int completed = 0;
    Uri replayUrl = new Uri(accessPublic.replay_url);
    string baseUrl = replayUrl.Scheme + "://" + replayUrl.DnsSafeHost + "/" + accessPublic.channel + "/";

    List<string> highPriorityList;
    List<string> lowPriorityList;
    GetPlaylist(broadcast, baseUrl, out highPriorityList, out lowPriorityList);
    var totalCount = highPriorityList.Count + lowPriorityList.Count;
    Console.WriteLine(highPriorityList.Count + " high " + lowPriorityList.Count + " low priority files exists.");

    Parallel.ForEach(highPriorityList,
        new ParallelOptions { MaxDegreeOfParallelism = _settings.ApplicationSettings.ParallelSaveLimit },
        chunk =>
        {
            string message;
            DownloadChunk(broadcast, baseUrl, chunk, out message);
            // BUGFIX: the old "i++" from multiple parallel lambdas raced, producing
            // duplicate/skipped progress numbers. Interlocked gives an accurate count.
            int done = System.Threading.Interlocked.Increment(ref completed);
            Console.WriteLine("[" + done + "/" + totalCount + "] " + message);
        });

    if (_settings.UserSettings.DownloadReplayChunkIfLiveStreamChunkExists)
    {
        Parallel.ForEach(lowPriorityList,
            new ParallelOptions { MaxDegreeOfParallelism = _settings.ApplicationSettings.ParallelSaveLimit },
            chunk =>
            {
                string message;
                DownloadChunk(broadcast, baseUrl, chunk, out message);
                int done = System.Threading.Interlocked.Increment(ref completed);
                Console.WriteLine("[" + done + "/" + totalCount + "] " + message);
            });
    }
}
/// <summary>
/// On close, clears the singleton reference so the instance can be collected
/// and a fresh one created next time the view opens.
/// </summary>
public override void OnClosing()
{
    base.OnClosing();
    _instance = null;
}
/// <summary>
/// Builds the channel-processing graph: a queue source is split into sync data,
/// analogs and digitals; each analog gets a pass-through / filter / digital-creating
/// flow per its config; generated digitals are merged with the input digitals; and
/// everything is recombined into ChannelData and sent to the target actor.
/// Buffer+Skip flows are inserted only when temporal offsets require alignment.
/// </summary>
/// <param name="target">Actor that receives the recombined ChannelData.</param>
/// <param name="configs">Per-channel adjuster configuration (scale/offset/option/temporal offset).</param>
/// <param name="sample">Sample frame used to resolve channel indices.</param>
/// <returns>A runnable graph materialising the backpressured source queue.</returns>
private static RunnableGraph <ISourceQueueWithComplete <ChannelData <float> > > CreateGraph(IActorRef target, List <ChannelAdjusterConfig> configs, ChannelData <float> sample)
{
    /*
     * Digital Merger is only necessary when there are additional digitals created and the same goes for the
     * Broadcast following the Analog Splitter. A broadcast is only required when the analog channel is produces
     * the additional digitals. Otherwise the analog is pushed straight to the merger
     +---------------+--------------+-----------------------------------------------------------------------+
     |               |              | SyncData                                                              |
     |               |              |             +-------------+----------------+-------------------------+
     | QueueSource   | Channel Data |             |             | FilterFlow     |                         |
     |               | Splitter     | Analog      | ================             |                         |
     |               |              | Splitter    | Broadcast => Filter          | Merger                  |
     |               |              |             | ----------------+----------------+                     |
     |               |              |             | \=> -FullScale  |                |                     |
     |               |              |             |  \=> +FullScale | Digital        |                     |
     |               |              |             |   \=> FlatLining| Merger         |                     |
     |               |              |             +-------------+-------------------------+                |
     |               |              | Digitals    |             |                         |                |
     +---------------+--------------+-----------------------------------------------------------------------+
     */
    var indices = GetIndices(sample, configs);
    var number = indices.Count();
    // Temporal alignment: negated offsets (plus a 0 for the sync lane) become
    // per-lane skip counts relative to the minimum; bufferSize covers the largest skip.
    var temporalOffsets = configs.Select(x => - x.TemporalOffset).Append(0);
    var temp = temporalOffsets.Select(x => x - temporalOffsets.Min()).ToList();
    var skipIndices = temp.Take(temp.Count - 1).ToList();
    var zerothIndex = temp.Last();
    var bufferSize = temp.Max() + 1;
    var skipFlowsNeeded = skipIndices.Any(x => x != 0);
    var graph = GraphDsl.Create(Source.Queue <ChannelData <float> >(10000, OverflowStrategy.Backpressure), (builder, source) =>
    {
        //Split channel data into sync data, analogs and digitals
        var channelDataSplitter = new UnzipWith < ChannelData <float>, ISyncData, IReadOnlyList <DataChannel <float> >, IReadOnlyList <DataChannel <bool> > >(cd => Tuple.Create(cd as ISyncData, cd.Analogs, cd.Digitals));
        var channelDataSplitterShape = builder.Add(channelDataSplitter);
        //Split, filter and reorder the analog channels into the required data channels
        var analogSplitter = new UnzipEnumerable < IReadOnlyList <DataChannel <float> >, DataChannel <float> >(list => indices.Select(i => list[i]).ToImmutableList(), number );
        var analogSplitterShape = builder.Add(analogSplitter);
        //Re-combine the filtered analog channels
        var analogMerger = new ZipN <DataChannel <float> >(number);
        var analogMergerShape = builder.Add(analogMerger);
        //Digital additional flows
        var additionalDigitalFlows = new List <FlowShape <DataChannel <float>, DataChannel <bool> > >();
        //Create the appropriate analog filtering flows.
        for (int i = 0; i < configs.Count(); i++)
        {
            var skipValue = skipIndices[i];
            //Create new flows for the analogs
            switch (configs[i].Option)
            {
                // 1a) Each cfg generates one analog flow...
                case FilterOption.PassThrough:
                    if (skipFlowsNeeded)
                    {
                        builder.From(analogSplitterShape.Out(i))
                            .Via(
                                builder.Add(
                                    Flow.Create <DataChannel <float> >()
                                        .Buffer(bufferSize, OverflowStrategy.Backpressure)
                                        .Skip(skipValue)
                                        .Log("AnalogLog")
                                )
                            )
                            .To(analogMergerShape.In(i));
                    }
                    else
                    {
                        // Pass through channels can be connected straight from the splitter to the merger.
                        builder.From(analogSplitterShape.Out(i)).To(analogMergerShape.In(i));
                    }
                    break;
                case FilterOption.Filter:
                    // Filtered channels create a single flow and connected from the splitter to the merger.
                    var scale = configs[i].Scale;
                    var offset = configs[i].Offset;
                    var filterFlow = skipFlowsNeeded
                        ? Flow.Create <DataChannel <float> >()
                            .Buffer(bufferSize, OverflowStrategy.Backpressure)
                            .Skip(skipValue)
                            .Select(x => new DataChannel <float>(x.Name, x.Value * scale + offset, x.Units))
                        : Flow.Create <DataChannel <float> >()
                            .Select(x => new DataChannel <float>(x.Name, x.Value * scale + offset, x.Units));
                    builder.From(analogSplitterShape.Out(i)).Via(builder.Add(filterFlow)).To(analogMergerShape.In(i));
                    break;
                // 1b) OR One analog flow and 3 additional digital flows.
                case FilterOption.CreateDigitals:
                    // Filtered channels that create digitals creates a broadcaster for the analog channel first...
                    var analogBroadcaster = new Broadcast <DataChannel <float> >(4);
                    // ...then three flows for the digitals
                    var d1Flow = builder.Add(Flow.Create <DataChannel <float> >().Select(x => new DataChannel <bool>($"{x.Name}_+FullScale", false)));
                    var d2Flow = builder.Add(Flow.Create <DataChannel <float> >().Select(x => new DataChannel <bool>($"{x.Name}_-FullScale", false)));
                    var d3Flow = builder.Add(Flow.Create <DataChannel <float> >().Select(x => new DataChannel <bool>($"{x.Name}_Flatlining", false)));
                    // ...add the digital flow shapes to be connected later
                    additionalDigitalFlows.Add(d1Flow);
                    additionalDigitalFlows.Add(d2Flow);
                    additionalDigitalFlows.Add(d3Flow);
                    // ...create the broadcaster shape
                    var analogBroadcasterShape = builder.Add(analogBroadcaster);
                    // ...create the filter flow and connect the broadcaster to the merger via the filter
                    var scaler = configs[i].Scale;
                    var offsetter = configs[i].Offset;
                    var filter = skipFlowsNeeded
                        ? Flow.Create <DataChannel <float> >()
                            .Buffer(bufferSize, OverflowStrategy.Backpressure)
                            .Skip(skipValue)
                            .Select(x => new DataChannel <float>(x.Name, x.Value * scaler + offsetter, x.Units))
                        : Flow.Create <DataChannel <float> >()
                            .Select(x => new DataChannel <float>(x.Name, x.Value * scaler + offsetter, x.Units));
                    // ...link the analog splitter output to the broadcaster
                    builder.From(analogSplitterShape.Out(i))
                        .Via(filter)
                        .To(analogBroadcasterShape);
                    builder.From(analogBroadcasterShape.Out(0)).To(analogMergerShape.In(i));
                    // ...link the broadcaster channels to the additional digital flows
                    builder.From(analogBroadcasterShape.Out(1)).Via(d1Flow);
                    builder.From(analogBroadcasterShape.Out(2)).Via(d2Flow);
                    builder.From(analogBroadcasterShape.Out(3)).Via(d3Flow);
                    break;
                case FilterOption.NotSet:
                    throw new ArgumentException("Filter Option Not Set is not allowed.");
            }
        }
        //Merge everything back together
        var channelDataMerger = ZipWith.Apply < ISyncData, IImmutableList <DataChannel <float> >, IReadOnlyList <DataChannel <bool> >, ChannelData <float> >(
            (sync, analogs, digitals) => new ChannelData <float>
            (
                analogs,
                digitals,
                sync.TimeStamp,
                sync.TachometerCount,
                sync.MasterSyncIncrement,
                sync.MasterSyncState,
                sync.SampleIndex
            )
        );
        var channelDataMergerShape = builder.Add(channelDataMerger);
        //Sink
        var sink = Sink.ActorRef <ChannelData <float> >(target, false);
        var sinkShape = builder.Add(sink);
        //_________Link stages_________
        //=====Source=====
        //Source to the channel data splitter
        if (skipFlowsNeeded)
        {
            builder.From(source)
                .Via(builder.Add(Flow.Create <ChannelData <float> >().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                .To(channelDataSplitterShape.In);
            //=====Splitter=====
            //Splitter sync data to merger.
            builder.From(channelDataSplitterShape.Out0)
                .Via(builder.Add(Flow.Create <ISyncData>().Buffer(bufferSize, OverflowStrategy.Backpressure).Skip(zerothIndex)))
                .To(channelDataMergerShape.In0);
            //Splitter analogs to analog splitter.
            builder.From(channelDataSplitterShape.Out1)
                .Via(builder.Add(Flow.Create <IReadOnlyList <DataChannel <float> > >().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                .To(analogSplitterShape.In);
            //=====AdditionalDigitalFlows=====
            if (additionalDigitalFlows.Count > 0)
            {
                // Additonal Digital Merger
                var additionalDigitalMerger = new ZipWithN <DataChannel <bool>, IImmutableList <DataChannel <bool> > >(channel => channel, additionalDigitalFlows.Count);
                var additionalDigitalMergerShape = builder.Add(additionalDigitalMerger);
                //Combine the input digitals with the generated additional digitals
                var digitalMerger = ZipWith.Apply <List <DataChannel <bool> >, ImmutableList <DataChannel <bool> >, IReadOnlyList <DataChannel <bool> > >((channel1, channel2) => channel1.Concat(channel2).ToList());
                var digitalMergerShape = builder.Add(digitalMerger);
                //Splitter digitals to digital merger.
                builder.From(channelDataSplitterShape.Out2)
                    .Via(builder.Add(Flow.Create <IReadOnlyList <DataChannel <bool> > >().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                    .To(digitalMergerShape.In0);
                // Merge all additional flows together.
                for (int i = 0; i < additionalDigitalFlows.Count; i++)
                {
                    builder.From(additionalDigitalFlows[i]).To(additionalDigitalMergerShape.In(i));
                }
                //Additional digitals to digital merger
                builder.From(additionalDigitalMergerShape.Out).To(digitalMergerShape.In1);
                //=====DigitalMerger=====
                //Digital merger to channel data merger
                builder.From(digitalMergerShape.Out).To(channelDataMergerShape.In2);
            }
            else
            {
                // Splitter digitals to final merger.
                builder.From(channelDataSplitterShape.Out2)
                    .Via(builder.Add(Flow.Create <IReadOnlyList <DataChannel <bool> > >().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                    .To(channelDataMergerShape.In2);
            }
            // Analog merger to final merger.
            builder.From(analogMergerShape.Out).To(channelDataMergerShape.In1);
            //=====Merger=====
            //Channel Data Merger to sink
            builder.From(channelDataMergerShape.Out).To(sinkShape);
        }
        else
        {
            builder.From(source).To(channelDataSplitterShape.In);
            //=====Splitter=====
            //Splitter sync data to merger.
            builder.From(channelDataSplitterShape.Out0).To(channelDataMergerShape.In0);
            //Splitter analogs to analog splitter.
            builder.From(channelDataSplitterShape.Out1).To(analogSplitterShape.In);
            //=====AdditionalDigitalFlows=====
            if (additionalDigitalFlows.Count > 0)
            {
                // Additonal Digital Merger
                var additionalDigitalMerger = new ZipWithN <DataChannel <bool>, IImmutableList <DataChannel <bool> > >(channel => channel, additionalDigitalFlows.Count);
                var additionalDigitalMergerShape = builder.Add(additionalDigitalMerger);
                //Combine the input digitals with the generated additional digitals
                var digitalMerger = ZipWith.Apply <List <DataChannel <bool> >, ImmutableList <DataChannel <bool> >, IReadOnlyList <DataChannel <bool> > >((channel1, channel2) => channel1.Concat(channel2).ToList());
                var digitalMergerShape = builder.Add(digitalMerger);
                //Splitter digitals to digital merger.
                builder.From(channelDataSplitterShape.Out2).To(digitalMergerShape.In0);
                // Merge all additional flows together.
                for (int i = 0; i < additionalDigitalFlows.Count; i++)
                {
                    builder.From(additionalDigitalFlows[i]).To(additionalDigitalMergerShape.In(i));
                }
                //Additional digitals to digital merger
                builder.From(additionalDigitalMergerShape.Out).To(digitalMergerShape.In1);
                //=====DigitalMerger=====
                //Digital merger to channel data merger
                builder.From(digitalMergerShape.Out).To(channelDataMergerShape.In2);
            }
            else
            {
                // Splitter digitals to final merger.
                builder.From(channelDataSplitterShape.Out2).To(channelDataMergerShape.In2);
            }
            // Analog merger to final merger.
            builder.From(analogMergerShape.Out).To(channelDataMergerShape.In1);
            //=====Merger=====
            //Channel Data Merger to sink
            builder.From(channelDataMergerShape.Out).To(sinkShape);
        }
        return ClosedShape.Instance;
    });
    return(RunnableGraph.FromGraph(graph));
}
/// <summary>
/// Refreshes the chunk playlist with newly published live chunks and removes
/// chunks already downloaded locally. Sets liveStream to false when the
/// chunklist can no longer be fetched (the broadcast has ended).
/// </summary>
private void UpdateChunklist(Broadcast broadcast, string baseUrl, ref List<string> playlist, ref bool liveStream)
{
    List<string> currentChunks;
    try
    {
        currentChunks = GetPlaylistUrls(baseUrl + "chunklist.m3u8");
    }
    catch (Exception)
    {
        // A fetch failure is treated as "stream over", not as an error.
        liveStream = false;
        return;
    }
    // Append only chunks we have not seen yet, then drop already-saved files.
    playlist.AddRange(currentChunks.Except(playlist));
    playlist = playlist.Except(_localFileActions.GetValidFiles(broadcast.username, broadcast.id)).ToList();
}
/// <summary>
/// Intentionally a no-op: this implementation keeps no per-show state to reset.
/// </summary>
public void ResetBroadcast(Broadcast show)
{
}
/// <summary>
/// Polls the station's now-playing endpoint and derives the current broadcast
/// (caption, performer, start/end time). Any failure clears CurrentBroadcast.
/// </summary>
protected override async Task UpdateBroadcasts()
{
    PreviousBroadcast = CurrentBroadcast;
    // NextBroadcast is always null, because only the currently playing song is known.
    try
    {
        // Sample payload:
        // {"status":0,"result":{"duration_sec":"341","title":"\u041b\u0410\u0421\u041a\u041e\u0412\u042b\u0419 \u041c\u0410\u0419 - \u0410 \u042f \u0422\u0430\u043a \u0416\u0434\u0443","id":"57966","start_time":"1361529715","finish_time":1361530056,"mdb_idtrack":"3702","mdb_idexecutor":"376","mdb_retry":"0","module":"channel","track_title":"\u0410 \u042f \u0422\u0430\u043a \u0416\u0434\u0443","executor_title":"\u041b\u0410\u0421\u041a\u041e\u0412\u042b\u0419 \u041c\u0410\u0419","end_time":125,"sample":""},"errorCode":0,"errorMsg":""}
        /* <root type="object">
         *   <status type="number">0</status>
         *   <result type="object">
         *     <duration_sec type="string">284</duration_sec>
         *     <title type="string">DIGITAL EMOTION - Get Up Action</title>
         *     <id type="string">74493</id>
         *     <start_time type="string">1362592329</start_time>
         *     <finish_time type="number">1362592613</finish_time>
         *     <mdb_idtrack type="string">31325</mdb_idtrack>
         *     <mdb_idexecutor type="string">9362</mdb_idexecutor>
         *     <mdb_retry type="string">0</mdb_retry>
         *     <module type="string">channel</module>
         *     <track_title type="string">Get Up Action</track_title>
         *     <executor_title type="string">DIGITAL EMOTION</executor_title>
         *     <end_time type="number">65</end_time>
         *     <sample type="string">http://wz5.101.ru/full/1/74493.mp3</sample>
         *   </result>
         *   <errorCode type="number">0</errorCode>
         *   <errorMsg type="string"></errorMsg>
         * </root> */
        DateTime now = DateTime.Now;
        XElement result = (await client.GetEncodedJson(currentUrl)).Element("result");
        if (result.Element("track_title") == null)
        {
            // On-air stations do not identify themselves in any way, so the non-working programme list cannot simply be disabled.
            // result contains a title element with the text "Ожидание..." plus incorrect time elements, so they are replaced here with the text shown on the website.
            CurrentBroadcast = new Broadcast(now, now.AddHours(1), "Прямой эфир");
        }
        else
        {
            string caption = WebUtility.HtmlDecode(result.Element("track_title").Value),
                description = WebUtility.HtmlDecode(result.Element("executor_title").Value);
            if (description != null)
            {
                // Performer. Replaces ALL CAPS with normal casing, admittedly mangling ABBA and a few other band names.
                int splitIdx = description.IndexOf(',');
                if (splitIdx != -1) // Swap first and last name. For example, TOZZI, Umberto => Umberto Tozzi
                {
                    description = description.Substring(splitIdx + 2) + ' ' + description.Substring(0, splitIdx).ToCapitalized();
                }
                else
                {
                    description = description.ToCapitalized();
                }
            }
            short duration = short.Parse(result.Element("duration_sec").Value); // Song duration in seconds.
            short delta = short.Parse(result.Element("end_time").Value); // Seconds remaining until the song ends.
            if (delta <= 0 || duration <= 0) // Channel advert, premature call, or some other delay.
            {
                CurrentBroadcast = new Broadcast(now, now.AddSeconds(4), caption, description);
            }
            else
            {
                CurrentBroadcast = new Broadcast(now.AddSeconds(delta - duration), now.AddSeconds(delta), caption, description);
            }
            // start_time and finish_time would be convenient if they were accurate, but finish_time often turns out to be in the past while end_time is still valid.
        }
    }
    catch
    {
        CurrentBroadcast = null;
    }
}
/// <summary>
/// Dispatches a broadcast from the teacher host to the foremost open form (when
/// it implements IReceiveBroadcast), and on a HostCloseBroadcast matching this
/// client's host id/ip, notifies the user and force-closes all open forms.
/// </summary>
/// <param name="data">Broadcast packet from the host; ignored when null.</param>
private void ReceiveHostBroadcast(Broadcast data)
{
    if (data != null)
    {
        try
        {
            Form frm = Application.OpenForms[0];
            if (frm != null && (frm is IReceiveBroadcast) && !frm.IsDisposed && frm.IsHandleCreated)
            {
                // NOTE(review): the broadcast is only delivered when InvokeRequired is
                // true — there is no else branch for the already-on-UI-thread case.
                // Looks like a missed delivery path; confirm intended behavior.
                if (frm.InvokeRequired)
                {
                    frm.Invoke(new MethodInvoker(delegate() { ((IReceiveBroadcast)frm).ReceiveBroadcast(data); }));
                }
            }
            if (data is HostCloseBroadcast)
            {
                #region Host-close broadcast handling.
                if ((((HostCloseBroadcast)data).HostID == (string)this["host_id"]) && (((HostCloseBroadcast)data).UIP == (string)this["host_ip"]))
                {
                    MessageBox.Show("教师主机结束上课或被强制下线!", "学生客户端关闭", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                    this.isHostClose = this.ForceQuit = true;
                    // Close every open form from a worker thread, marshaling each Close
                    // back onto the UI thread, then mark the client as reset.
                    ThreadPool.QueueUserWorkItem(new WaitCallback(delegate(object obj)
                    {
                        Thread.Sleep(500);
                        this.ForceQuit = true;
                        while (Application.OpenForms.Count > 0)
                        {
                            Form f = Application.OpenForms[0];
                            if (f != null)
                            {
                                f.Invoke(new MethodInvoker(delegate() { f.Close(); }));
                            }
                        }
                        this.Rest = true;
                    }));
                }
                #endregion
            }
        }
        catch (Exception e)
        {
            this.RaiseChanged("发送异常错误:" + e.Message);
        }
    }
}
/// <summary>
/// Downloads one video chunk with retries, saves it locally and reports progress
/// via the out message. On failure the last error message (timestamped) is returned.
/// </summary>
private void DownloadChunk(Broadcast broadcast, string baseUrl, string chunk, out string message)
{
    message = "";
    Uri videoUri = new Uri(baseUrl + chunk);
    for (int i = 1; i <= _settings.ApplicationSettings.NumberOfRetries; i++)
    {
        try
        {
            byte[] buffer;
            // Fresh WebClient per attempt, reusing the shared headers (auth/cookies).
            using (var client = new WebClient { Headers = _client.Headers })
            {
                buffer = client.DownloadData(videoUri);
            }
            // NOTE(review): _totalBufferLength appears to be shared across parallel
            // downloads; += is not atomic — the running total may drift. Confirm.
            _totalBufferLength += buffer.Length;
            _localFileActions.SaveFile(buffer, broadcast.username, broadcast.id, chunk);
            message = chunk + " saved. Total size " + _totalBufferLength + " bytes.";
            return;
        }
        catch (Exception ex)
        {
            message = ex.Message + " " + chunk;
            if (i < _settings.ApplicationSettings.NumberOfRetries)
                Thread.Sleep(_settings.ApplicationSettings.DelayOnRetry);
        }
        finally
        {
            // Runs on success and failure alike: prefix the elapsed time, so the
            // final message is always timestamped (even before the early return).
            message = (DateTime.Now - _startTime).ToString("g") + " " + message;
        }
    }
}
/// <summary>
/// Verifies a zip -> broadcast graph: once both sinks have demand and both sources
/// have emitted, the zipped tuple reaches both sinks and demand propagates upstream.
/// NOTE(review): this is a near-duplicate of the other zip_broadcast spec in this
/// file, differing only in the expected EVENT ORDER (requests before OnNext here);
/// confirm which interpreter version each variant targets.
/// </summary>
public void GraphInterpreter_should_implement_zip_broadcast()
{
    WithTestSetup((setup, builder, lastEvents) =>
    {
        var source1 = setup.NewUpstreamProbe<int>("source1");
        var source2 = setup.NewUpstreamProbe<int>("source2");
        var sink1 = setup.NewDownstreamProbe<Tuple<int, int>>("sink1");
        var sink2 = setup.NewDownstreamProbe<Tuple<int, int>>("sink2");
        var zip = new Zip<int, int>();
        var broadcast = new Broadcast<Tuple<int, int>>(2);

        // Wire: source1/source2 -> zip -> broadcast -> sink1/sink2.
        builder(new IGraphStageWithMaterializedValue<Shape, object>[] {broadcast, zip})
        .Connect(source1, zip.In0)
        .Connect(source2, zip.In1)
        .Connect(zip.Out, broadcast.In)
        .Connect(broadcast.Out(0), sink1)
        .Connect(broadcast.Out(1), sink2)
        .Init();

        lastEvents().Should().BeEmpty();

        // First sink demand propagates through broadcast+zip to both sources.
        sink1.RequestOne();
        lastEvents().Should().BeEquivalentTo(new RequestOne(source1), new RequestOne(source2));

        sink2.RequestOne();
        source1.OnNext(1);
        // Zip waits for both inputs before emitting.
        lastEvents().Should().BeEmpty();

        source2.OnNext(2);
        lastEvents()
        .Should()
        .Equal(new RequestOne(source1), new RequestOne(source2),
               new OnNext(sink1, new Tuple<int, int>(1, 2)),
               new OnNext(sink2, new Tuple<int, int>(1, 2)));
    });
}
/// <summary>
/// Downloads all videos of a broadcast: prepares the local directory, saves the
/// broadcast metadata, then fetches either the live stream or the replay chunks
/// depending on the broadcast's state. Cleans up the directory if it stays empty.
/// </summary>
/// <param name="broadcast">Broadcast to download.</param>
/// <param name="downloadLiveStream">When false, live broadcasts are skipped.</param>
public void DownloadVideos(Broadcast broadcast, bool downloadLiveStream = true)
{
    _localFileActions.CreateNotExistingDirectory(broadcast.username, broadcast.id);
    // Persist the broadcast metadata next to the video files.
    _localFileActions.SaveFile(JsonConvert.SerializeObject(broadcast, Formatting.Indented), broadcast.username, broadcast.id, "broadcast_info.json");
    #region Download Videos
    AccessPublic accessPublic = GetAccessPublic(broadcast.id);
    var referer = new Uri(Constants.PeriscopeUrl + broadcast.username + "/" + broadcast.id);
    UpdateClient(referer, accessPublic);
    if (accessPublic.IsLiveStream)
    {
        if (downloadLiveStream)
            DownloadLiveStream(broadcast, accessPublic);
    }
    else
        DownloadReplayVideos(broadcast, accessPublic);
    #endregion
    // Remove the directory again when nothing was downloaded.
    _localFileActions.DeleteEmptyDirectory(broadcast.username, broadcast.id);
}
/// <summary>
/// Maps a broadcast to a (start time converted to the station's timezone, id) pair.
/// NOTE(review): the unconditional (NrcuBroadcast) cast assumes this subclass only
/// ever receives NrcuBroadcast instances — confirm with callers.
/// </summary>
protected override Tuple <DateTime, int> DataFromBroadcast(Broadcast broadcast)
{
    return(Tuple.Create(TimeZoneInfo.ConvertTime(broadcast.StartTime, TimeZoneInfo.Local, timezone), ((NrcuBroadcast)broadcast).Id));
}
/// <summary>
/// IReceiveBroadcast implementation; intentionally a no-op in this form.
/// (A previous implementation that mirrored the broadcast text into the
/// lbBroadcast label was disabled; recover it from source control if needed.)
/// </summary>
/// <param name="data">Incoming broadcast; ignored.</param>
public void ReceiveBroadcast(Broadcast data)
{
}
/// <summary>
/// True when the broadcast fully covers the timeslot: it starts no later than
/// the slot opens and ends no earlier than the slot closes.
/// </summary>
private static bool CoversEntireTimeslot(Broadcast broadcast, Timeslot timeslot)
{
    if (broadcast.StartTime > timeslot.Start)
    {
        // Starts too late to cover the slot's beginning.
        return false;
    }

    return broadcast.EndTime >= timeslot.End;
}
// Captures the broadcast variable so the helper can read it inside worker closures.
internal UpdateStateHelper(Broadcast <int> b)
{
    this.b = b;
}
/// <summary>
/// Creates the picker bot with its board cells, persistence and broadcast services.
/// </summary>
public PickerBot(IReadOnlyCollection <Cell> cells, ISaver saver, Broadcast broadcast)
{
    _cells = cells;
    _saver = saver;
    _broadcast = broadcast;
}
// Diagnostic handler: logs broadcasts that failed validation, with their status.
static void _bm_ReceiveInvalidBroadcast(Broadcast broadcast)
{
    Console.WriteLine("Invalid broadcast " + broadcast.Status);
}
// Stores the broadcast service used to announce discovered images.
public ImageDiscovery(Broadcast broadcast)
{
    _broadcast = broadcast;
}
/// <summary>
/// Fetches the broadcast's playlist and partitions the not-yet-downloaded chunks
/// into high- and low-priority lists (chunks already saved locally are excluded).
/// </summary>
private void GetPlaylist(Broadcast broadcast, string baseUrl, out List<string> highPriorityList, out List<string> lowPriorityList)
{
    var playlist = GetPlaylistUrls(baseUrl + "playlist.m3u8");
    List<string> downloadedFiles = _localFileActions.GetValidFiles(broadcast.username, broadcast.id).ToList();
    _localFileActions.GetRemainingFiles(playlist, downloadedFiles, out highPriorityList, out lowPriorityList);
}
// Captures the broadcast variable so the helper can read it inside worker closures.
internal BroadcastHelper(Broadcast <T[]> broadcastVar)
{
    this.broadcastVar = broadcastVar;
}
/// <summary>
/// Finds up to five people who may be able to answer a question about
/// <paramref name="subjectName"/>: up to three top-scoring experts (5+ points in
/// the skill matrix) plus randomly chosen fallbacks, then opens a broadcast so
/// their answers can be routed back to the asker.
/// </summary>
/// <param name="subjectName">Subject the asker needs help with; matched in lower case.</param>
/// <param name="message">Incoming message from the asker.</param>
/// <returns>A reply prompting for the question, or a "no experts found" reply.</returns>
private Message FindExpert(string subjectName, Message message)
{
    subjectName = subjectName.ToLower();
    Subject subject = Subject.GetAll().FirstOrDefault(p => p.Name == subjectName);

    // Bug fix: the original dereferenced FirstOrDefault(...).Username directly and
    // threw NullReferenceException when the sender was not yet a known Person.
    Person sender = Person.GetAll().FirstOrDefault(p => p.Id == message.From.Id);
    string senderName = sender != null ? sender.Username : message.From.Name;
    string question = $"{senderName} needs help with {subjectName}";

    if (subject == null)
    {
        return BroadcastMessage(subjectName, question, message);
    }

    // Rank everyone except the asker by their points for this subject; 5+ counts as expert.
    List<Person> persons = Person.GetAll().Where(p => p.Id != message.From.Id).ToList();
    List<KeyValuePair<Person, int>> potentialExperts = new List<KeyValuePair<Person, int>>();
    foreach (var person in persons)
    {
        int points = Matrix.GetPoints(person, subject);
        if (points >= 5)
        {
            potentialExperts.Add(new KeyValuePair<Person, int>(person, points));
        }
    }
    potentialExperts.Sort((p1, p2) => p2.Value - p1.Value);

    List<ChannelAccount> potentialAnswerers = new List<ChannelAccount>();
    List<string> experts = new List<string>();
    List<string> chosenIds = new List<string>();

    // Take up to three experts for whom we actually have a channel account.
    for (int i = 0; i < potentialExperts.Count && i < 3; ++i)
    {
        string chosenId = potentialExperts[i].Key.Id;
        if (accountsForId.ContainsKey(chosenId))
        {
            chosenIds.Add(chosenId);
            potentialAnswerers.Add(accountsForId[chosenId]);
            experts.Add("@" + accountsForId[chosenId].Name);
        }
    }

    // Everyone else (excluding the asker and chosen experts) is a fallback candidate.
    List<string> allIds = accountsForId.Keys.ToList();
    allIds.Remove(message.From.Id);
    foreach (string id in chosenIds)
    {
        allIds.Remove(id);
    }

    _ShuffleInPlace(allIds);

    // Top up to five answerers total with random fallbacks.
    // Bug fix: the original loop condition re-read potentialAnswerers.Count while
    // the loop was growing it, so only about half the intended fallbacks were added.
    int fallbacksNeeded = 5 - potentialAnswerers.Count;
    for (int i = 0; i < fallbacksNeeded && i < allIds.Count; ++i)
    {
        potentialAnswerers.Add(accountsForId[allIds[i]]);
    }
    // (The original also built an unused "randoms" name list; removed as dead code.)

    if (potentialAnswerers.Count > 0)
    {
        Broadcast.Add(subjectName, message.From, potentialAnswerers, experts.Count);
        return message.CreateReplyMessage("And what was your question?", "en");
    }

    return message.CreateReplyMessage(Reply.GetReply(ReplyType.NoExpertsFound).Text);
}

/// <summary>Fisher–Yates in-place shuffle (same algorithm the original inlined).</summary>
private static void _ShuffleInPlace(List<string> items)
{
    Random random = new Random();
    for (int n = items.Count - 1; n > 0; n--)
    {
        int k = random.Next(n + 1);
        string tmp = items[k];
        items[k] = items[n];
        items[n] = tmp;
    }
}
/// <summary>Writes the newly received broadcast to the console.</summary>
static void NewBroadcast(Broadcast broadcast)
{
    Console.WriteLine($"NewBroadcast {broadcast}");
}
/// <summary>Pairs a broadcast with the request id it belongs to.</summary>
public BroadcastRequest(string requestId, Broadcast broadcast)
{
    Broadcast = broadcast;
    RequestId = requestId;
}
/// <summary>Keeps <c>NextBroadcast</c> in sync when the listed guide's upcoming broadcast changes.</summary>
private void listedGuide_NextBroadcastChanged(Broadcast nextBroadcast)
{
    NextBroadcast = nextBroadcast;
}
/// <summary>
/// Subscribes to the broadcast; the subscription is released when
/// <paramref name="unsubscribe"/> is cancelled.
/// </summary>
public ISubscriberResult Subscribe(CancellationToken unsubscribe)
{
    return Broadcast.Subscribe(unsubscribe);
}
// NOTE(review): protobuf-generated-style hash — XORs the hash of every scalar field
// that differs from its default value, plus the users_/rooms_ collections. Left
// byte-identical on purpose: if this type is code-generated, regenerate instead of
// hand-editing, otherwise equality and hashing drift out of sync.
public override int GetHashCode() { int hash = 1; if (Key.Length != 0) { hash ^= Key.GetHashCode(); } if (Type != 0) { hash ^= Type.GetHashCode(); } if (Broadcast != false) { hash ^= Broadcast.GetHashCode(); } if (Name.Length != 0) { hash ^= Name.GetHashCode(); } if (Avatar.Length != 0) { hash ^= Avatar.GetHashCode(); } if (Seat != 0) { hash ^= Seat.GetHashCode(); } if (RoomKey.Length != 0) { hash ^= RoomKey.GetHashCode(); } if (RoomName.Length != 0) { hash ^= RoomName.GetHashCode(); } if (Message.Length != 0) { hash ^= Message.GetHashCode(); } if (BeginPoint != 0D) { hash ^= BeginPoint.GetHashCode(); } if (EndPoint != 0D) { hash ^= EndPoint.GetHashCode(); } if (PenSize != 0) { hash ^= PenSize.GetHashCode(); } if (Color != 0) { hash ^= Color.GetHashCode(); } hash ^= users_.GetHashCode(); hash ^= rooms_.GetHashCode(); if (Code != 0) { hash ^= Code.GetHashCode(); } if (Error.Length != 0) { hash ^= Error.GetHashCode(); } if (Id != 0) { hash ^= Id.GetHashCode(); } return(hash); }
/// <summary>
/// Serializes an RPC invocation (packet type, broadcast scope, RPC id, routing,
/// validation mode, cache flag and payload) and sends it over the given protocol.
/// NOTE(review): the write order below is the wire format — receivers must read
/// the fields back in exactly this order.
/// </summary>
protected static void SendRPC(NeutronObject mThis, int RPCID, ValidationPacket validationType, object[] parameters, SendTo sendTo, bool cached, ProtocolType protocolType, Broadcast broadcast)
{
    using (NeutronWriter rpcWriter = new NeutronWriter())
    {
        // Payload: the owner's id followed by the caller-supplied parameters.
        object[] payload = { mThis.Infor.ownerID, parameters };

        rpcWriter.WritePacket(Packet.RPC);
        rpcWriter.WritePacket(broadcast);
        rpcWriter.Write(RPCID);
        rpcWriter.WritePacket(sendTo);
        rpcWriter.WritePacket(validationType);
        rpcWriter.Write(cached);
        rpcWriter.Write(payload.Serialize());

        Send(rpcWriter.GetBuffer(), protocolType);
    }
}
/// <summary>
/// Checks whether <paramref name="message"/> is an answer to an open broadcast.
/// Expected format: the text contains "@askerUsername ..." — the first "@" token
/// names the asker. When a matching broadcast is found the answer is enqueued;
/// if the broadcast was not already WaitingForApproval, the status is set and the
/// answer is forwarded to the asker for approval via a new ConnectorClient.
/// Returns true when the message was consumed as a broadcast answer.
/// NOTE(review): the Regex is built from the raw "@" token without Regex.Escape —
/// a username containing regex metacharacters would misbehave; confirm usernames
/// are restricted. Also presumably only the first matching broadcast per asker is
/// intended — verify against Broadcast.GetAll() ordering.
/// </summary>
private bool CheckForBroadcastAnswer(Message message) { if (!message.Text.Contains("@")) { return(false); } string sub = message.Text.Substring(message.Text.IndexOf("@")); string username = string.Empty; if (sub.Contains(" ")) { username = sub.Substring(0, sub.IndexOf(" ")); } else { username = sub; } Person asker = Person.GetAll().FirstOrDefault(p => p.Username == username); if (asker == null || asker.Id == message.From.Id) { return(false); } List <Broadcast> currentBroadcasts = Broadcast.GetAll(); foreach (Broadcast broadcast in currentBroadcasts) { if (broadcast.Asker.Id != asker.Id) { continue; } string[] split = message.Text.Split(' '); Regex rgx = new Regex(split.FirstOrDefault(s => s.StartsWith("@")) + " "); string answerString = rgx.Replace(message.Text, "", 1); if (broadcast.Status == BroadcastStatus.WaitingForApproval && broadcast.Answers.Count > 0) { broadcast.Answers.Enqueue(new BroadcastAnswer { Answerer = message.From, MessageText = answerString }); return(true); } broadcast.Status = BroadcastStatus.WaitingForApproval; broadcast.Answers.Enqueue(new BroadcastAnswer { Answerer = message.From, MessageText = answerString }); var connector = new ConnectorClient(); string answerer = "@" + message.From.Name; Message answerAck = message.CreateReplyMessage(answerer + ": "); Message replyMessage = new Message(); replyMessage.From = answerAck.From; replyMessage.Language = "en"; replyMessage.To = broadcast.Asker; replyMessage.Text = answerAck.Text + answerString + " - Is it a good answer?"; connector.Messages.SendMessage(replyMessage); return(true); } return(false); }
/// <summary>
/// Downloads a live broadcast: repeatedly refreshes the HLS chunklist from the
/// base URL (scheme + host + first four path segments of https_hls_url) and
/// downloads each chunk as it appears, until UpdateChunklist reports the stream
/// ended; finally re-enters DownloadVideos with DownloadLiveStream = false so
/// the finished replay is fetched.
/// NOTE(review): tight polling loop with no explicit delay between chunklist
/// refreshes — confirm UpdateChunklist/DownloadChunk block long enough to avoid
/// hammering the server. Also assumes https_hls_url has at least four path
/// segments — TODO confirm.
/// </summary>
private void DownloadLiveStream(Broadcast broadcast, AccessPublic accessPublic) { List<string> chunklist = new List<string>(); Uri httpsHlsUrl = new Uri(accessPublic.https_hls_url); string baseUrl = httpsHlsUrl.Scheme + "://" + httpsHlsUrl.DnsSafeHost + httpsHlsUrl.Segments[0] + httpsHlsUrl.Segments[1] + httpsHlsUrl.Segments[2] + httpsHlsUrl.Segments[3]; bool liveStream = true; while (liveStream) { UpdateChunklist(broadcast, baseUrl, ref chunklist, ref liveStream); foreach (var chunk in chunklist.ToList()) { string message; chunklist.Remove(chunk); DownloadChunk(broadcast, baseUrl, chunk, out message); Console.WriteLine(message); } } var pDownload = new PDownload { User = broadcast.username, Broadcast = broadcast.id, DownloadLiveStream = false }; DownloadVideos(pDownload); }
/// <summary cref="IValueVisitor.Visit(Broadcast)"/>
public void Visit(Broadcast broadcast)
{
    CodeGenerator.GenerateCode(broadcast);
}
/// <summary>Creates a helper that tracks state updates for the given broadcast variable.</summary>
internal UpdateStateHelper(Broadcast<int> b)
{
    this.b = b;
}
/// <summary>Creates a palette reducer over the image collection, reporting via the broadcast channel.</summary>
public PaletteReducer(ImageCollection images, Broadcast broadcast)
{
    _broadcast = broadcast;
    _images = images;
}
/// <summary>Wraps one page of broadcast query results together with the total match count.</summary>
public BroadcastQueryResult(long totalResults, Broadcast[] broadcasts)
{
    Broadcast = broadcasts;
    TotalResults = totalResults;
}