/// <summary>
/// Specify where the ad should interrupt the main video. This code provides a
/// procedural abstraction for the Google IMA Plugin setup code.
/// </summary>
/// <param name="source">The video source; HLS delivery suppresses the mid-roll.</param>
private void setupCuePoints(Source source)
{
    string adCueType = "ad";
    IDictionary<string, object> cueProperties = new Dictionary<string, object>();
    IDictionary<string, object> eventDetails = new Dictionary<string, object>();

    // Pre-roll: plays before the main content starts.
    CuePoint preroll = new CuePoint(CuePoint.PositionType.BEFORE, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = preroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);

    // Mid-roll at 10 seconds.
    // Due to HLS bugs in the Android MediaPlayer, midrolls are not supported.
    if (!source.DeliveryType.Equals(DeliveryType.HLS))
    {
        int midrollMillis = 10 * (int)DateUtils.SECOND_IN_MILLIS;
        CuePoint midroll = new CuePoint(midrollMillis, adCueType, cueProperties);
        eventDetails[Event.CUE_POINT] = midroll;
        eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
        // Add a marker on the seek bar where the ad will be.
        mediaController.BrightcoveSeekBar.addMarker(midrollMillis);
    }

    // Post-roll: plays after the main content finishes.
    CuePoint postroll = new CuePoint(CuePoint.PositionType.AFTER, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = postroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
}
/// <summary>
/// Specify where the ad should interrupt the main video. This code provides a
/// procedural abstraction for the Google IMA Plugin setup code.
/// </summary>
/// <param name="source">The video source; HLS delivery suppresses the mid-roll.</param>
private void setupCuePoints(Source source)
{
    string adCueType = "ad";
    IDictionary<string, object> cueProperties = new Dictionary<string, object>();
    IDictionary<string, object> eventDetails = new Dictionary<string, object>();

    // Pre-roll: plays before the main content starts.
    CuePoint preroll = new CuePoint(CuePoint.PositionType.BEFORE, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = preroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);

    // Mid-roll at 10 seconds.
    // Due to HLS bugs in the Android MediaPlayer, midrolls are not supported.
    if (!source.DeliveryType.Equals(DeliveryType.HLS))
    {
        CuePoint midroll = new CuePoint(10 * (int)DateUtils.SECOND_IN_MILLIS, adCueType, cueProperties);
        eventDetails[Event.CUE_POINT] = midroll;
        eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
    }

    // Post-roll: plays after the main content finishes.
    CuePoint postroll = new CuePoint(CuePoint.PositionType.AFTER, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = postroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
}
/// <summary>
/// Creates a labeled region starting at <paramref name="Start"/> (sample position)
/// with the given <paramref name="Length"/>, wiring up the backing cue point and
/// 'ltxt' chunk in the RIFF structure.
/// </summary>
/// <param name="Start">Sample position where the region begins.</param>
/// <param name="Length">Region length, in samples.</param>
/// <returns>The newly created region.</returns>
public Region AddRegion(UInt32 Start, UInt32 Length)
{
    // Cue point — reuse the existing 'cue ' chunk or create one; IDs are 1-based.
    CkCue cueChunk = (CkCue)GetChunk <CkCue>() ?? (CkCue)AddChunk(CkType.cue);
    var cuePoint = new CuePoint(Start)
    {
        ID = (UInt32)cueChunk.CuePoints.Count + 1
    };
    cueChunk.CuePoints.Add(cuePoint);

    // Matching 'ltxt' entry carries the region length, keyed by the cue point ID.
    CkList listChunk = (CkList)GetChunk <CkList>() ?? (CkList)AddChunk(CkType.LIST);
    var ltxt = new LiCkLtxt(cuePoint.ID, Length);
    listChunk.Chunks.Add(ltxt);

    // The region object ties the cue point and the ltxt chunk together.
    return new Region(cuePoint)
    {
        Length = Length,
        LtxtChunk = ltxt
    };
}
//--Methods
/// <summary>
/// Reads a cue chunk from the stream: a 32-bit cue-point count followed by
/// that many serialized cue points.
/// </summary>
/// <param name="id">Chunk identifier.</param>
/// <param name="size">Chunk size in bytes.</param>
/// <param name="reader">Reader positioned at the start of the chunk payload.</param>
public CueChunk(string id, int size, BinaryReader reader)
    : base(id, size)
{
    int count = reader.ReadInt32();
    cues = new CuePoint[count];
    for (int i = 0; i < count; i++)
    {
        cues[i] = new CuePoint(reader);
    }
}
/// <summary>
/// Adds a marker cue point at the given sample position and refreshes the
/// marker enumeration.
/// </summary>
/// <param name="SamplePos">Sample position for the new marker.</param>
/// <returns>The marker wrapping the new cue point.</returns>
public Marker AddMarker(UInt32 SamplePos)
{
    // Reuse the existing 'cue ' chunk, or create one if the file has none yet.
    var cueChunk = (CkCue)GetChunk <CkCue>() ?? (CkCue)AddChunk(CkType.cue);
    var marker = new Marker(cueChunk.AddCuePoint(SamplePos));
    // Re-enumerate so marker bookkeeping reflects the insert.
    EnumerateMarkers();
    return marker;
}
/// <summary>
/// Builds an update request for an existing annotation.
/// </summary>
/// <param name="id">Identifier of the annotation to update.</param>
/// <param name="annotation">The annotation (cue point) payload to apply.</param>
public AnnotationUpdateRequestBuilder(string id, CuePoint annotation)
    : this()
{
    Id = id;
    Annotation = annotation;
}
/// <summary>
/// Rewrites the cue ('cue ') and associated-data-list (LIST/adtl) chunks of a WAV
/// file so that it contains exactly the markers given in <paramref name="Markers"/>.
/// The file is rewritten in place via a temp file and <see cref="File.Replace(string, string, string)"/>.
/// </summary>
/// <param name="WaveFile">Path of the WAV file to update.</param>
/// <param name="Markers">Map of marker time (seconds, converted via the file's sample rate) to marker label.</param>
public static void AddMarkers(String WaveFile, Dictionary <double, string> Markers)
{
    var file = new RiffFile(WaveFile, false);
    var reader = new WaveFileReader(WaveFile);
    WaveFormat fmt = reader.WaveFormat;
    // BUG FIX: the directory and file name were concatenated without a path
    // separator, producing a mangled name outside the intended directory.
    // Path.Combine inserts the separator correctly.
    string temp1name = Path.Combine(
        Path.GetDirectoryName(WaveFile),
        Path.GetFileNameWithoutExtension(WaveFile) + "-temp" + Path.GetExtension(WaveFile));
    // find old cue chunk, or add new
    var cueChunk = file.GetChunk <CkCue>() as CkCue;
    if (cueChunk == null)
    {
        cueChunk = file.AddChunk(CkType.cue) as CkCue;
    }
    else
    {
        cueChunk.CuePoints.Clear();
    }
    // find an adtl LIST chunk, or add a new one; clear any existing labels
    var listChunk = file.GetChunk <CkList>() as CkList;
    if (listChunk == null || listChunk.TypeID != LiCkType.adtl)
    {
        listChunk = file.AddChunk(CkType.LIST) as CkList;
        if (listChunk != null)
        {
            listChunk.TypeID = LiCkType.adtl;
        }
    }
    if (listChunk != null)
    {
        listChunk.Chunks.Clear();
    }
    // one cue point plus one 'labl' entry per marker; cue IDs are 1-based
    uint cueCounter = 0;
    foreach (var pair in Markers)
    {
        var samplePos = (uint)(pair.Key * fmt.SampleRate); // seconds -> sample position
        var cp = new CuePoint(samplePos)
        {
            ID = ++cueCounter
        };
        if (cueChunk != null)
        {
            cueChunk.CuePoints.Add(cp);
        }
        var labl = new LiCkInfoLabl(cp, pair.Value);
        if (listChunk != null)
        {
            listChunk.Chunks.Add(labl);
        }
    }
    reader.Close();
    reader.Dispose();
    // save to the temp file, then swap it in place of the original
    file.Save(temp1name);
    file.Close();
    File.Replace(temp1name, WaveFile, null);
}
/// <summary>
/// Removes <paramref name="Seconds"/> seconds of padding from both the start and the
/// end of a WAV file's sample data, then shifts or trims the file's cue points and
/// region/timecode metadata to match the shorter audio. The file is rewritten in
/// place; the original is briefly kept as "&lt;name&gt;-old" during the swap.
/// </summary>
/// <param name="WaveFile">Path of the WAV file to modify.</param>
/// <param name="Seconds">Seconds of audio to strip from each end.</param>
public static void StripPadding(string WaveFile, int Seconds)
{
    var reader = new WaveFileReader(WaveFile);
    WaveFormat fmt = reader.WaveFormat;
    // BUG FIX: Path.GetExtension already includes the leading dot, so the old
    // "-temp." + extension concatenation produced names like "file-temp..wav".
    // Path.Combine also replaces the manual separator handling.
    string temp1name = Path.Combine(
        Path.GetDirectoryName(WaveFile),
        Path.GetFileNameWithoutExtension(WaveFile) + "-temp" + Path.GetExtension(WaveFile));
    string temp2name = Path.Combine(
        Path.GetDirectoryName(WaveFile),
        Path.GetFileNameWithoutExtension(WaveFile) + "-old" + Path.GetExtension(WaveFile));
    int samples = fmt.SampleRate * Seconds;                  // sample frames to strip per side
    int sampleSize = (fmt.BitsPerSample / 8) * fmt.Channels; // bytes per sample frame
    int padBytes = samples * sampleSize;                     // bytes to strip per side
    var bufsize = (int)(reader.Length - (padBytes * 2));
    var buf = new byte[bufsize];
    int newSampleCount = bufsize / sampleSize;
    // Skip the leading padding and read the retained middle section.
    // (A dead BinaryWriter/MemoryStream that captured the read count but was
    // never used or disposed has been removed; only the read side effect matters.)
    reader.Seek(padBytes, SeekOrigin.Begin);
    reader.Read(buf, 0, buf.Length);
    reader.Close();
    reader.Dispose();
    var file = new RiffFile(WaveFile, false);
    file.GetChunk <CkData>().Data = buf;
    // adjust marker positions, remove if outside
    foreach (Chunk ch in file.Chunks)
    {
        if (ch.GetType() == typeof(CkCue))
        {
            var me = ch as CkCue;
            if (me != null)
            {
                // iterate backwards so removal does not disturb the loop
                for (int i = me.CuePoints.Count - 1; i >= 0; i--)
                {
                    CuePoint cp = me.CuePoints[i];
                    // NOTE(review): unsigned subtraction wraps for cue points inside the
                    // stripped lead-in; the wrapped (huge) value is then caught by the
                    // out-of-range check below and the cue point is removed.
                    cp.Position -= (uint)samples;
                    cp.SampleOffset -= (uint)samples;
                    if (cp.Position > newSampleCount)
                    {
                        me.CuePoints.Remove(cp);
                    }
                }
            }
        }
        // trim down region lengths (ugly, but necessary due to Vegas rounding rather than truncating)
        else if (ch is CkList)
        {
            var me = ch as CkList;
            foreach (ListChunk lch in me.Chunks)
            {
                if (lch.GetType() == typeof(LiCkLtxt))
                {
                    var lt = lch as LiCkLtxt;
                    if (lt != null && lt.SampleLength > newSampleCount)
                    {
                        lt.SampleLength = (uint)newSampleCount;
                    }
                }
                else if (lch.GetType() == typeof(LiCkInfoTCOD))
                {
                    var tch = lch as LiCkInfoTCOD;
                    if (tch != null)
                    {
                        tch.Position += (uint)samples;
                    }
                }
                else if (lch.GetType() == typeof(LiCkInfoTCDO))
                {
                    var tch = lch as LiCkInfoTCDO;
                    if (tch != null)
                    {
                        tch.Position -= (uint)samples;
                    }
                }
            }
        }
    }
    // write the shortened file, then swap it in place of the original
    file.Save(temp1name);
    File.Move(WaveFile, temp2name);
    File.Move(temp1name, WaveFile);
    File.Delete(temp2name);
}
/// <summary>
/// Creates a request builder that updates the cue point with the given id.
/// </summary>
/// <param name="id">Identifier of the cue point to update.</param>
/// <param name="cuePoint">The cue point payload to apply.</param>
/// <returns>A configured update request builder.</returns>
public static CuePointUpdateRequestBuilder Update(string id, CuePoint cuePoint)
{
    return new CuePointUpdateRequestBuilder(id, cuePoint);
}
/// <summary>
/// Specify where the ad should interrupt the main video. This code provides a
/// procedural abstraction for the Google IMA Plugin setup code.
/// </summary>
/// <param name="source">The video source; HLS delivery suppresses the mid-roll.</param>
private void setupCuePoints(Source source)
{
    string adCueType = "ad";
    IDictionary<string, object> cueProperties = new Dictionary<string, object>();
    IDictionary<string, object> eventDetails = new Dictionary<string, object>();

    // Pre-roll: plays before the main content starts.
    CuePoint preroll = new CuePoint(CuePoint.PositionType.BEFORE, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = preroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);

    // Mid-roll at 10 seconds.
    // Due to HLS bugs in the Android MediaPlayer, midrolls are not supported.
    if (!source.DeliveryType.Equals(DeliveryType.HLS))
    {
        CuePoint midroll = new CuePoint(10 * (int)DateUtils.SECOND_IN_MILLIS, adCueType, cueProperties);
        eventDetails[Event.CUE_POINT] = midroll;
        eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
    }

    // Post-roll: plays after the main content finishes.
    CuePoint postroll = new CuePoint(CuePoint.PositionType.AFTER, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = postroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
}
/// <summary>
/// Creates a labeled region starting at <paramref name="Start"/> (sample position)
/// with the given <paramref name="Length"/>, wiring up the backing cue point and
/// 'ltxt' chunk in the RIFF structure.
/// </summary>
/// <param name="Start">Sample position where the region begins.</param>
/// <param name="Length">Region length, in samples.</param>
/// <returns>The newly created region.</returns>
public Region AddRegion(UInt32 Start, UInt32 Length)
{
    // Cue point — reuse the existing 'cue ' chunk or create one; IDs are 1-based.
    CkCue cueChunk = (CkCue)GetChunk<CkCue>() ?? (CkCue)AddChunk(CkType.cue);
    var cuePoint = new CuePoint(Start)
    {
        ID = (UInt32)cueChunk.CuePoints.Count + 1
    };
    cueChunk.CuePoints.Add(cuePoint);

    // Matching 'ltxt' entry carries the region length, keyed by the cue point ID.
    CkList listChunk = (CkList)GetChunk<CkList>() ?? (CkList)AddChunk(CkType.LIST);
    var ltxt = new LiCkLtxt(cuePoint.ID, Length);
    listChunk.Chunks.Add(ltxt);

    // The region object ties the cue point and the ltxt chunk together.
    return new Region(cuePoint)
    {
        Length = Length,
        LtxtChunk = ltxt
    };
}
/// <summary>
/// Creates a request builder that updates the annotation with the given id.
/// </summary>
/// <param name="id">Identifier of the annotation to update.</param>
/// <param name="annotation">The annotation (cue point) payload to apply.</param>
/// <returns>A configured update request builder.</returns>
public static AnnotationUpdateRequestBuilder Update(string id, CuePoint annotation)
{
    return new AnnotationUpdateRequestBuilder(id, annotation);
}
/// <summary>
/// Builds an add request for a new annotation.
/// </summary>
/// <param name="annotation">The annotation (cue point) to add.</param>
public AnnotationAddRequestBuilder(CuePoint annotation)
    : this()
{
    Annotation = annotation;
}
// Fires the behavior attached to a cue point and flags it as consumed so it
// won't re-trigger. "Start" begins audio playback; each "Move N" cue tweens
// the camera (and sometimes the beat grid) to a new shot over c.Duration.
// Unrecognized cue names are silently ignored.
void TriggerCuePoint(CuePoint c){
	c.Triggered = true;
	switch (c.Name) {
	case "Start":
		MAudio.MainAudioSource.Play ();
		break;
	case "Move 0": // sweep up to top
		MCamera.TweenPosition (new Vector3 (0, 4, -32), c.Duration);
		MCamera.TweenRotation (new Vector3 (5, 0, 0), c.Duration);
		break;
	case "Move 1": // outside upper angle
		MCamera.TweenPosition (new Vector3 (0, 10, -45), c.Duration);
		MCamera.TweenRotation (new Vector3 (5, 0, 0), c.Duration);
		MGrid.TweenBeatScale(new Vector3 (1 << 11,0,0), c.Duration);
		break;
	case "Move 2": // back home
		MCamera.TweenPosition (new Vector3 (0, 0, 0), c.Duration);
		MCamera.TweenRotation (new Vector3 (0, 0, 0), c.Duration);
		MGrid.TweenBeatScale(new Vector3 (1 << 14,0,0), c.Duration);
		break;
	case "Move 3": // down the cone
		MCamera.TweenPosition (new Vector3 (0, 2, 0), c.Duration);
		MCamera.TweenRotation (new Vector3 (90, 0, 0), c.Duration);
		break;
	case "Move 4": // pull out wide
		MCamera.TweenPosition (new Vector3 (0, 200, 0), c.Duration);
		MCamera.TweenRotation (new Vector3 (90, 0, 0), c.Duration);
		break;
	case "Move 5": // pull out wider
		// NOTE(review): labeled "wider" but the y height (85) is smaller than
		// Move 4's (200) — confirm whether the label or the value is intended.
		MCamera.TweenPosition (new Vector3 (0, 85, 0), c.Duration);
		MCamera.TweenRotation (new Vector3 (90, 0, 0), c.Duration);
		break;
	case "Move 6": // come down to the side
		MCamera.TweenPosition (new Vector3 (0, 30, -40), c.Duration);
		MCamera.TweenRotation (new Vector3 (90 - 36.86f, 0, 0), c.Duration);
		break;
	case "Move 7": // rotate prisms
		// MGrid.PrismRotation = new Vector3(0, 0, 45);
		// move 1/8 around column
		MCamera.TweenPosition (new Vector3 (-10, 30, -30), c.Duration);
		MCamera.TweenRotation (new Vector3 (90 - 36.86f, 45, 0), c.Duration);
		break;
	case "Move 8": // finish move
		MCamera.TweenPosition (new Vector3 (-40, 30, 0), c.Duration);
		MCamera.TweenRotation (new Vector3 (90 - 36.86f, 90, 0), c.Duration);
		break;
	case "Move 9": // back home
		MCamera.TweenPosition (new Vector3 (0, 0, 0), c.Duration);
		MCamera.TweenRotation (new Vector3 (0, 0, 0), c.Duration);
		break;
	}
}
/// <summary>
/// Appends a cue point to the timeline. Its CueTime is scheduled at the end of
/// the previous cue (CueTime + Duration) plus its own Delay; the first cue is
/// scheduled at its Delay alone.
/// </summary>
/// <param name="c">The cue point to schedule and append.</param>
/// <returns>The same cue point, for chaining.</returns>
private CuePoint setupCuePoint(CuePoint c){
	if (CuePoints.Count == 0) {
		c.CueTime = c.Delay;
	} else {
		CuePoint last = CuePoints [CuePoints.Count - 1];
		c.CueTime = last.CueTime + last.Duration + c.Delay;
	}
	CuePoints.Add(c);
	return c;
}
/// <summary>
/// Wraps the given cue point as a marker.
/// </summary>
/// <param name="CuePoint">The cue point this marker represents.</param>
public Marker(CuePoint CuePoint) { this.CuePoint = CuePoint; }
/// <summary>
/// Creates a request builder that adds the given annotation.
/// </summary>
/// <param name="annotation">The annotation (cue point) to add.</param>
/// <returns>A configured add request builder.</returns>
public static AnnotationAddRequestBuilder Add(CuePoint annotation)
{
    return new AnnotationAddRequestBuilder(annotation);
}
/// <summary>
/// Parses a WAV file and extracts the times (in milliseconds) of its "start" and
/// "end" markers, read from the file's cue chunk plus the labels in its LIST chunk.
/// Outputs are left at -1.0f when the file is not RIFF/WAVE or the marker is absent.
/// Known-but-irrelevant chunks (bext, JUNK, MLo2, acid, dpas, dprn, fact) are skipped;
/// an unrecognized chunk id stops the scan.
/// </summary>
/// <param name="assetPath">Path of the WAV file to read.</param>
/// <param name="startTime">Out: "start" marker time in msec, or -1.0f if not found.</param>
/// <param name="endTime">Out: "end" marker time in msec, or -1.0f if not found.</param>
public void LoadWaveData(string assetPath, out float startTime, out float endTime)
{
    WavHeader theHeader = new WavHeader();
    CueHeader theCueHeader = new CueHeader();
    //CuePoint theCuePoint = new CuePoint();
    ListHeader theListHeader = new ListHeader();
    //LabelChunk theLabelChunk = new LabelChunk();
    ArrayList markerTimeList = new ArrayList(); // marker times (msec) in cue-chunk order
    //List<short> lDataList = new List<short>();
    //List<short> rDataList = new List<short>();
    startTime = -1.0f; // sentinel: not found
    endTime = -1.0f;   // sentinel: not found
    uint riffProgress = 0; // running byte count of how much of the RIFF we've consumed
    //using (FileStream fs = new FileStream("/Users/msweet/Desktop/Chimes_with_marker_24.wav", System.IO.FileMode.Open, System.IO.FileAccess.Read))
    using (FileStream fs = new FileStream(assetPath, System.IO.FileMode.Open, System.IO.FileAccess.Read))
        using (BinaryReader br = new BinaryReader(fs))
        {
            try
            {
                OIMSDebug("Trying to load the WAV data: " + assetPath);
                theHeader.chunkID = br.ReadBytes(4); // should be "RIFF"
                riffProgress += 4;
                theHeader.size = br.ReadUInt32(); // size of the total file
                riffProgress += 4;
                theHeader.typeID = br.ReadBytes(4); // should be "WAVE"
                riffProgress += 4;
                // if the file is not RIFF and WAVE then we should exit here
                if (System.Text.Encoding.ASCII.GetString(theHeader.chunkID) != "RIFF")
                {
                    OIMSDebug("The file is not RIFF encoded!! " + System.Text.Encoding.ASCII.GetString(theHeader.chunkID) + " " + assetPath);
                    return;
                }
                if (System.Text.Encoding.ASCII.GetString(theHeader.typeID) != "WAVE")
                {
                    OIMSDebug("The file is not WAVE encoded!! " + System.Text.Encoding.ASCII.GetString(theHeader.typeID) + " " + assetPath);
                    return;
                }
                OIMSDebug("We have a wave file - continuing on to parse the wave data.");
                // next we need to parse the format ("fmt ") header fields
                theHeader.fmtID = br.ReadBytes(4); // should equal 'fmt ' with the trailing space
                riffProgress += 4;
                theHeader.fmtSize = br.ReadUInt32(); // remaining chunk length after header (16 for plain PCM)
                riffProgress += 4;
                theHeader.format = br.ReadUInt16(); // format tag (author observed 1 — presumably PCM vs. ADPCM; confirm)
                riffProgress += 2;
                theHeader.channels = br.ReadUInt16(); // 1 for mono, 2 for stereo etc.
                riffProgress += 2;
                theHeader.sampleRate = br.ReadUInt32(); // 22050, 44100, 48000, etc.
                riffProgress += 4;
                theHeader.bytePerSec = br.ReadUInt32(); // bytes per second = (SampleRate * BitsPerSample * Channels) / 8
                riffProgress += 4;
                theHeader.blockSize = br.ReadUInt16(); // block align = (BitsPerSample * Channels) / 8
                riffProgress += 2;
                theHeader.bit = br.ReadUInt16(); // bit depth e.g. 8, 16, 24
                riffProgress += 2;
                theHeader.dataID = br.ReadBytes(4); // this should be 'data'
                riffProgress += 4;
                theHeader.dataSize = br.ReadUInt32(); // how many bytes of sample data follow
                riffProgress += 4;
                // OIMSDebug("theHeader.typeID: " + System.Text.Encoding.ASCII.GetString(theHeader.typeID));
                // OIMSDebug("theHeader.fmtID: " + System.Text.Encoding.ASCII.GetString(theHeader.fmtID));
                // OIMSDebug("theHeader.fmtSize: " + theHeader.fmtSize);
                // OIMSDebug("theHeader.format: " + theHeader.format);
                // OIMSDebug("theHeader.channels: " + theHeader.channels);
                // OIMSDebug("theHeader.sampleRate: " + theHeader.sampleRate);
                // OIMSDebug("theHeader.bytePerSec: " + theHeader.bytePerSec);
                // OIMSDebug("theHeader.blockSize: " + theHeader.blockSize);
                // OIMSDebug("theHeader.bit: " + theHeader.bit);
                // OIMSDebug("theHeader.dataID: " + System.Text.Encoding.ASCII.GetString(theHeader.dataID));
                // OIMSDebug("theHeader.dataSize: " + theHeader.dataSize);
                // Skip over the sample data one block at a time; only the metadata
                // chunks that follow it are of interest here.
                for (int i = 0; i < theHeader.dataSize / theHeader.blockSize; i++)
                {
                    //byte[] x = br.ReadBytes(theHeader.blockSize);
                    br.ReadBytes(theHeader.blockSize);
                    riffProgress = riffProgress + theHeader.blockSize;
                    //lDataList.Add((short)br.ReadUInt16());
                    //rDataList.Add((short)br.ReadUInt16());
                    //OIMSDebug("i: " + theHeader.blockSize + " " + i);
                }
                // After the data chunk it could be any type of chunk next, so
                // examine them one at a time until we run out or hit one we
                // don't recognize.
                bool done = false;
                while (!done && riffProgress <= theHeader.size)
                {
                    OIMSDebug("riffProgress: " + riffProgress + " out of the full file size:" + theHeader.size + " = " + (theHeader.size - riffProgress) + " left.");
                    // peek at the next chunk's four-character id
                    byte[] chunkID = br.ReadBytes(4);
                    riffProgress += 4;
                    OIMSDebug("The chunk id is: " + System.Text.Encoding.ASCII.GetString(chunkID));
                    bool chunkThatWeRecognize = false;
                    OIMSDebug("System.Text.Encoding.ASCII.GetString(chunkID).Length = " + System.Text.Encoding.ASCII.GetString(chunkID).Length);
                    // what is it? 'bext' or 'cue ' or something else
                    // (only the first 3 chars are compared because 'cue ' has a trailing space)
                    if (System.Text.Encoding.ASCII.GetString(chunkID).Substring(0, 3) == "cue")
                    {
                        // Cue chunk: collect each cue point's sample offset as a
                        // millisecond time in markerTimeList.
                        OIMSDebug("Found cue chunk!");
                        theCueHeader.size = br.ReadUInt32(); // the size
                        riffProgress += 4;
                        theCueHeader.numberOfCuePoints = br.ReadUInt32(); // number of cue points
                        riffProgress += 4;
                        //OIMSDebug("1theCueHeader.chunkID: " + System.Text.Encoding.ASCII.GetString(theCueHeader.chunkID));
                        OIMSDebug("1theCueHeader.size: " + theCueHeader.size);
                        OIMSDebug("1theCueHeader.numberOfCuePoints: " + theCueHeader.numberOfCuePoints);
                        // next iterate through the cue points (6 UInt32 fields each)
                        for (int i = 0; i < theCueHeader.numberOfCuePoints; i++)
                        {
                            CuePoint thisCuePoint = new CuePoint();
                            thisCuePoint.dataID = br.ReadUInt32();
                            riffProgress += 4;
                            thisCuePoint.position = br.ReadUInt32();
                            riffProgress += 4;
                            thisCuePoint.dataChunkID = br.ReadUInt32();
                            riffProgress += 4;
                            thisCuePoint.chunkStart = br.ReadUInt32();
                            riffProgress += 4;
                            thisCuePoint.blockStart = br.ReadUInt32();
                            riffProgress += 4;
                            thisCuePoint.sampleOffset = br.ReadUInt32();
                            riffProgress += 4;
                            OIMSDebug("thisCuePoint.sampleOffset: " + thisCuePoint.sampleOffset);
                            // samples -> milliseconds via the file's sample rate
                            float sampleOffset = (float)thisCuePoint.sampleOffset / (float)theHeader.sampleRate * 1000.0f;
                            OIMSDebug("thisCuePoint.sampleOffset in msec is: " + sampleOffset);
                            markerTimeList.Add(sampleOffset);
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "LIST")
                    {
                        // LIST chunk: label ('labl') entries pair a cue point id with
                        // text; labels beginning with "start"/"end" select which
                        // markerTimeList entries become the out parameters.
                        OIMSDebug("Found list chunk!");
                        theListHeader.size = br.ReadUInt32(); // the size
                        riffProgress += 4;
                        theListHeader.typeID = br.ReadBytes(4); // should be 'adtl'
                        riffProgress += 4;
                        // then we need to iterate through the data
                        // j counts bytes consumed within the LIST payload; k indexes markerTimeList
                        int j = 0;
                        int k = 0;
                        while (j < theListHeader.size)
                        {
                            // NOTE(review): RIFF sub-chunks are word-aligned, so an
                            // odd-sized label would normally be followed by a pad
                            // byte that this loop does not account for — confirm
                            // against the files this is expected to read.
                            LabelChunk thisLabelChunk = new LabelChunk();
                            thisLabelChunk.chunkID = br.ReadBytes(4); // should be 'labl'
                            riffProgress += 4;
                            OIMSDebug("thisLabelChunk.chunkID: " + System.Text.Encoding.ASCII.GetString(thisLabelChunk.chunkID));
                            j += 4;
                            thisLabelChunk.size = br.ReadUInt32(); // the size
                            riffProgress += 4;
                            j += 4;
                            thisLabelChunk.cuePointID = br.ReadUInt32();
                            riffProgress += 4;
                            j += 4;
                            // label text is the chunk payload minus the 4-byte cuePointID
                            thisLabelChunk.text = br.ReadBytes((int)thisLabelChunk.size - 4);
                            riffProgress = riffProgress + thisLabelChunk.size - 4;
                            OIMSDebug("thisLabelChunk.text: " + System.Text.Encoding.ASCII.GetString(thisLabelChunk.text));
                            string tempString = System.Text.Encoding.ASCII.GetString(thisLabelChunk.text).Trim().ToLower();
                            OIMSDebug("tempString: " + tempString);
                            OIMSDebug("the length of tempString is: " + tempString.Length);
                            if (tempString.Length > 5)
                            {
                                if (String.Equals(tempString.Substring(0, 5), "start"))
                                {
                                    OIMSDebug("START TIME RETRIEVED!");
                                    if (markerTimeList[k] != null)
                                    {
                                        startTime = (float)markerTimeList[k];
                                    }
                                }
                            }
                            if (tempString.Length > 3)
                            {
                                if (String.Equals(tempString.Substring(0, 3), "end"))
                                {
                                    OIMSDebug("END TIME RETRIEVED!");
                                    if (markerTimeList[k] != null)
                                    {
                                        endTime = (float)markerTimeList[k];
                                    }
                                }
                            }
                            j += (int)thisLabelChunk.size;
                            k++;
                        }
                        chunkThatWeRecognize = true;
                    }
                    // The remaining chunk types are recognized only so they can be
                    // skipped byte-by-byte: size field first, then that many bytes.
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "bext")
                    {
                        OIMSDebug("Found bext chunk!");
                        uint size = br.ReadUInt32();
                        riffProgress += 4;
                        for (int i = 0; i < size; i++)
                        {
                            //byte[] x = br.ReadBytes(1);
                            br.ReadBytes(1);
                            riffProgress += 1;
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "JUNK")
                    {
                        OIMSDebug("Found JUNK chunk!");
                        uint size = br.ReadUInt32();
                        riffProgress += 4;
                        for (int i = 0; i < size; i++)
                        {
                            //byte[] x = br.ReadBytes(1);
                            br.ReadBytes(1);
                            riffProgress += 1;
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "MLo2")
                    {
                        OIMSDebug("Found MLo2 chunk!");
                        uint size = br.ReadUInt32();
                        riffProgress += 4;
                        for (int i = 0; i < size; i++)
                        {
                            //byte[] x = br.ReadBytes(1);
                            br.ReadBytes(1);
                            riffProgress += 1;
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "acid")
                    {
                        OIMSDebug("Found acid chunk!");
                        uint size = br.ReadUInt32();
                        riffProgress += 4;
                        for (int i = 0; i < size; i++)
                        {
                            //byte[] x = br.ReadBytes(1);
                            br.ReadBytes(1);
                            riffProgress += 1;
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "dpas")
                    {
                        OIMSDebug("Found dpas chunk!");
                        uint size = br.ReadUInt32();
                        riffProgress += 4;
                        for (int i = 0; i < size; i++)
                        {
                            //byte[] x = br.ReadBytes(1);
                            br.ReadBytes(1);
                            riffProgress += 1;
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "dprn")
                    {
                        OIMSDebug("Found dprn chunk!");
                        uint size = br.ReadUInt32();
                        riffProgress += 4;
                        for (int i = 0; i < size; i++)
                        {
                            //byte[] x = br.ReadBytes(1);
                            br.ReadBytes(1);
                            riffProgress += 1;
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (System.Text.Encoding.ASCII.GetString(chunkID) == "fact")
                    {
                        OIMSDebug("Found fact chunk!");
                        uint size = br.ReadUInt32();
                        riffProgress += 4;
                        for (int i = 0; i < size; i++)
                        {
                            //byte[] x = br.ReadBytes(1);
                            br.ReadBytes(1);
                            riffProgress += 1;
                        }
                        chunkThatWeRecognize = true;
                    }
                    if (!chunkThatWeRecognize)
                    {
                        // Unknown chunk: we can't know its layout, so stop scanning.
                        done = true;
                    }
                }
                OIMSDebug("FINAL riffProgress: " + riffProgress + " out of the full file size:" + theHeader.size + " = " + (theHeader.size - riffProgress) + " left.");
            }
            // The finally block runs any code that must execute even if there is an
            // exception; control is passed here regardless of how the try exits.
            finally
            {
                if (br != null) //binary reader
                {
                    br.Close();
                }
                if (fs != null) //file stream
                {
                    fs.Close();
                }
            }
        }
    OIMSDebug("WAV File opened.");
    //OIMSDebug();
    //Processing would happen here (passing for the time being)
}
/// <summary>
/// Specify where the ad should interrupt the main video. This code provides a
/// procedural abstraction for the Google IMA Plugin setup code.
/// </summary>
/// <param name="source">The video source; HLS delivery suppresses the mid-roll.</param>
private void setupCuePoints(Source source)
{
    string adCueType = "ad";
    IDictionary<string, object> cueProperties = new Dictionary<string, object>();
    IDictionary<string, object> eventDetails = new Dictionary<string, object>();

    // Pre-roll: plays before the main content starts.
    CuePoint preroll = new CuePoint(CuePoint.PositionType.BEFORE, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = preroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);

    // Mid-roll at 10 seconds.
    // Due to HLS bugs in the Android MediaPlayer, midrolls are not supported.
    if (!source.DeliveryType.Equals(DeliveryType.HLS))
    {
        int midrollMillis = 10 * (int)DateUtils.SECOND_IN_MILLIS;
        CuePoint midroll = new CuePoint(midrollMillis, adCueType, cueProperties);
        eventDetails[Event.CUE_POINT] = midroll;
        eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
        // Add a marker on the seek bar where the ad will be.
        mediaController.BrightcoveSeekBar.addMarker(midrollMillis);
    }

    // Post-roll: plays after the main content finishes.
    CuePoint postroll = new CuePoint(CuePoint.PositionType.AFTER, adCueType, cueProperties);
    eventDetails[Event.CUE_POINT] = postroll;
    eventEmitter.emit(EventType.SET_CUE_POINT, eventDetails);
}
/// <summary>
/// Rewrites the cue ('cue ') and associated-data-list (LIST/adtl) chunks of a WAV
/// file so that it contains exactly the markers given in <paramref name="Markers"/>.
/// The file is rewritten in place via a temp file and <see cref="File.Replace(string, string, string)"/>.
/// </summary>
/// <param name="WaveFile">Path of the WAV file to update.</param>
/// <param name="Markers">Map of marker time (seconds, converted via the file's sample rate) to marker label.</param>
public static void AddMarkers(String WaveFile, Dictionary<double, string> Markers)
{
    var file = new RiffFile(WaveFile, false);
    var reader = new WaveFileReader(WaveFile);
    WaveFormat fmt = reader.WaveFormat;
    // BUG FIX: the directory and file name were concatenated without a path
    // separator, producing a mangled name outside the intended directory.
    // Path.Combine inserts the separator correctly.
    string temp1name = Path.Combine(
        Path.GetDirectoryName(WaveFile),
        Path.GetFileNameWithoutExtension(WaveFile) + "-temp" + Path.GetExtension(WaveFile));
    // find old cue chunk, or add new
    var cueChunk = file.GetChunk<CkCue>() as CkCue;
    if (cueChunk == null)
    {
        cueChunk = file.AddChunk(CkType.cue) as CkCue;
    }
    else
    {
        cueChunk.CuePoints.Clear();
    }
    // find an adtl LIST chunk, or add a new one; clear any existing labels
    var listChunk = file.GetChunk<CkList>() as CkList;
    if (listChunk == null || listChunk.TypeID != LiCkType.adtl)
    {
        listChunk = file.AddChunk(CkType.LIST) as CkList;
        if (listChunk != null)
        {
            listChunk.TypeID = LiCkType.adtl;
        }
    }
    if (listChunk != null)
    {
        listChunk.Chunks.Clear();
    }
    // one cue point plus one 'labl' entry per marker; cue IDs are 1-based
    uint cueCounter = 0;
    foreach (var pair in Markers)
    {
        var samplePos = (uint)(pair.Key * fmt.SampleRate); // seconds -> sample position
        var cp = new CuePoint(samplePos) { ID = ++cueCounter };
        if (cueChunk != null)
        {
            cueChunk.CuePoints.Add(cp);
        }
        var labl = new LiCkInfoLabl(cp, pair.Value);
        if (listChunk != null)
        {
            listChunk.Chunks.Add(labl);
        }
    }
    reader.Close();
    reader.Dispose();
    // save to the temp file, then swap it in place of the original
    file.Save(temp1name);
    file.Close();
    File.Replace(temp1name, WaveFile, null);
}
/// <summary>Creates a CodeDOM object-creation expression for a meta MIDI event.</summary>
/// <param name="ev">The event to create.</param>
/// <returns>The object creation expression for the event, or null for an unrecognized event type.</returns>
private static CodeObjectCreateExpression CreateMetaEvent(MidiEvent ev)
{
    CodeObjectCreateExpression newEvent = null;
    // Every generated constructor call begins with the event's delta-time.
    CodeExpression delta = new CodePrimitiveExpression(ev.DeltaTime);

    // The branch ORDER below is significant and matches the original chain:
    // the first matching type wins.
    // SEQUENCE NUMBER
    if (ev is SequenceNumber)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(SequenceNumber),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((SequenceNumber)ev).Number) });
    }
    // TEXT
    else if (ev is Text)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(Text),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((Text)ev).Text) });
    }
    // Copyright
    else if (ev is Copyright)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(Copyright),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((Copyright)ev).Text) });
    }
    // SEQUENCE TRACK NAME
    else if (ev is SequenceTrackName)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(SequenceTrackName),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((SequenceTrackName)ev).Text) });
    }
    // INSTRUMENT
    else if (ev is Instrument)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(Instrument),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((Instrument)ev).Text) });
    }
    // Lyric
    else if (ev is Lyric)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(Lyric),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((Lyric)ev).Text) });
    }
    // Marker
    else if (ev is Marker)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(Marker),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((Marker)ev).Text) });
    }
    // CuePoint
    else if (ev is CuePoint)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(CuePoint),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((CuePoint)ev).Text) });
    }
    // ProgramName
    else if (ev is ProgramName)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(ProgramName),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((ProgramName)ev).Text) });
    }
    // DeviceName (the original comment here wrongly said "ProgramName")
    else if (ev is DeviceName)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(DeviceName),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((DeviceName)ev).Text) });
    }
    // ChannelPrefix
    else if (ev is ChannelPrefix)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(ChannelPrefix),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((ChannelPrefix)ev).Prefix) });
    }
    // MidiPort
    else if (ev is MidiPort)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(MidiPort),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((MidiPort)ev).Port) });
    }
    // EndOfTrack: delta-time only
    else if (ev is EndOfTrack)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(EndOfTrack),
            new CodeExpression[] { delta });
    }
    // Tempo
    else if (ev is Tempo)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(Tempo),
            new CodeExpression[] { delta, new CodePrimitiveExpression(((Tempo)ev).Value) });
    }
    // SMPTEOffset
    else if (ev is SMPTEOffset)
    {
        SMPTEOffset offset = (SMPTEOffset)ev;
        newEvent = new CodeObjectCreateExpression(
            typeof(SMPTEOffset),
            new CodeExpression[] {
                delta,
                new CodePrimitiveExpression(offset.Hours),
                new CodePrimitiveExpression(offset.Minutes),
                new CodePrimitiveExpression(offset.Seconds),
                new CodePrimitiveExpression(offset.Frames),
                new CodePrimitiveExpression(offset.FractionalFrames)
            });
    }
    // TimeSignature
    else if (ev is TimeSignature)
    {
        TimeSignature ts = (TimeSignature)ev;
        newEvent = new CodeObjectCreateExpression(
            typeof(TimeSignature),
            new CodeExpression[] {
                delta,
                new CodePrimitiveExpression(ts.Numerator),
                new CodePrimitiveExpression(ts.Denominator),
                new CodePrimitiveExpression(ts.MidiClocksPerClick),
                new CodePrimitiveExpression(ts.NumberOfNotated32nds)
            });
    }
    // KeySignature: enum values are emitted as casted byte literals
    else if (ev is KeySignature)
    {
        KeySignature ks = (KeySignature)ev;
        newEvent = new CodeObjectCreateExpression(
            typeof(KeySignature),
            new CodeExpression[] {
                delta,
                new CodeCastExpression(typeof(Key), new CodePrimitiveExpression((byte)ks.Key)),
                new CodeCastExpression(typeof(Tonality), new CodePrimitiveExpression((byte)ks.Tonality))
            });
    }
    // Proprietary
    else if (ev is Proprietary)
    {
        newEvent = new CodeObjectCreateExpression(
            typeof(Proprietary),
            new CodeExpression[] { delta, CreateDataArray(((Proprietary)ev).Data) });
    }
    // UnknownMetaMidiEvent
    else if (ev is UnknownMetaMidiEvent)
    {
        UnknownMetaMidiEvent unknown = (UnknownMetaMidiEvent)ev;
        newEvent = new CodeObjectCreateExpression(
            typeof(UnknownMetaMidiEvent),
            new CodeExpression[] {
                delta,
                new CodePrimitiveExpression(unknown.MetaEventID),
                CreateDataArray(unknown.Data)
            });
    }

    // Return the event (null if no branch matched)
    return(newEvent);
}
/// <summary>Parse a meta MIDI event from the data stream.</summary>
/// <param name="deltaTime">The previously parsed delta-time for this event.</param>
/// <param name="eventType">The previously parsed type of message we're expecting to find.</param>
/// <param name="data">The data stream from which to read the event information.</param>
/// <param name="pos">The position of the start of the event information; advanced past the event on return.</param>
/// <returns>The parsed meta MIDI event.</returns>
/// <exception cref="MidiParserException">Wraps any failure (e.g. truncated data) with the position at which it occurred.</exception>
private static MidiEvent ParseMetaEvent(long deltaTime, byte eventType, byte [] data, ref long pos)
{
    try
    {
        MidiEvent tempEvent = null;

        // Create the correct meta event based on its meta event id/type.
        // Fixed-size events skip their length byte (the pos++ at the top of
        // each case) and then read that many data bytes.
        switch (eventType)
        {
            // Sequence number
            case 0x00:
                pos++;     // skip past the 0x02 length byte
                // 16-bit big-endian sequence number
                int number = ((data[pos] << 8) | data[pos + 1]);
                tempEvent = new SequenceNumber(deltaTime, number);
                pos += 2;  // skip read values
                break;

            // Text events (copyright, lyrics, etc); ReadASCIIText advances pos itself
            case 0x01: tempEvent = new Text(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x02: tempEvent = new Copyright(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x03: tempEvent = new SequenceTrackName(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x04: tempEvent = new Instrument(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x05: tempEvent = new Lyric(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x06: tempEvent = new Marker(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x07: tempEvent = new CuePoint(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x08: tempEvent = new ProgramName(deltaTime, ReadASCIIText(data, ref pos)); break;
            case 0x09: tempEvent = new DeviceName(deltaTime, ReadASCIIText(data, ref pos)); break;

            // Channel prefix
            case 0x20:
                pos++; // skip 0x1
                tempEvent = new ChannelPrefix(deltaTime, data[pos]);
                pos++; // skip read value
                break;

            // Port number
            case 0x21:
                pos++; // skip 0x1
                tempEvent = new MidiPort(deltaTime, data[pos]);
                pos++; // skip read value
                break;

            // End of track
            case 0x2F:
                pos++; // skip 0x0
                tempEvent = new EndOfTrack(deltaTime);
                break;

            // Tempo
            case 0x51:
                pos++; // skip 0x3
                // 24-bit big-endian tempo value
                int tempo = ((data[pos] << 16) | data[pos + 1] << 8 | data[pos + 2]);
                tempEvent = new Tempo(deltaTime, tempo);
                pos += 3;
                break;

            // SMPTE offset: hours, minutes, seconds, frames, fractional frames
            case 0x54:
                pos++; // skip 0x5
                tempEvent = new SMPTEOffset(deltaTime,
                    data[pos], data[pos + 1], data[pos + 2], data[pos + 3], data[pos + 4]);
                pos += 5;
                break;

            // Time signature: numerator, denominator, clocks/click, notated 32nds
            case 0x58:
                pos++; // skip past 0x4
                tempEvent = new TimeSignature(deltaTime,
                    data[pos], data[pos + 1], data[pos + 2], data[pos + 3]);
                pos += 4;
                break;

            // Key signature: key, tonality
            case 0x59:
                pos++; // skip past 0x2
                tempEvent = new KeySignature(deltaTime, (Key)data[pos], (Tonality)data[pos + 1]);
                pos += 2;
                break;

            // Proprietary (sequencer-specific)
            case 0x7F:
                // Read in the variable length and that much data, then store it
                long length = ReadVariableLength(data, ref pos);
                byte [] propData = new byte[length];
                Array.Copy(data, (int)pos, propData, 0, (int)length);
                tempEvent = new Proprietary(deltaTime, propData);
                pos += length;
                break;

            // An unknown meta event!
            default:
                // Read in the variable length and that much data, then store it.
                // ('length' is the same variable declared in case 0x7F — C# switch
                // sections share one scope.)
                length = ReadVariableLength(data, ref pos);
                byte [] unknownData = new byte[length];
                Array.Copy(data, (int)pos, unknownData, 0, (int)length);
                tempEvent = new UnknownMetaMidiEvent(deltaTime, eventType, unknownData);
                pos += length;
                break;
        }
        return(tempEvent);
    }
    // Something bad happened; wrap it in a parser exception
    catch (Exception exc)
    {
        throw new MidiParserException("Unable to parse meta MIDI event.", exc, pos);
    }
}
/// <summary>
/// Creates a request builder that adds the given cue point.
/// </summary>
/// <param name="cuePoint">The cue point to add.</param>
/// <returns>A configured add request builder.</returns>
public static CuePointAddRequestBuilder Add(CuePoint cuePoint)
{
    return new CuePointAddRequestBuilder(cuePoint);
}