/// <summary>
/// AIRThreadedSound experiment: spins up a background Thread (jsc maps this to
/// an AIR Worker) that owns a dynamic Sound and fills each sampleData request
/// with a generated stereo sine tone (louder on the left channel).
/// </summary>
// GearVR native api available
// for AIR via http://www.adobe.com/devnet/air/articles/extending-air.html
// http://blog.aboutme.be/2011/12/14/udp-native-extension-for-air-mobile-now-with-android-support/
// https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2014/201403/20140310
// http://16bit.signt.com/post/31487077697/extendable-as3-worker-class
// http://performancebydesign.blogspot.com/2011/11/measuring-thread-execution-state-using.html
// http://16bit.signt.com/post/31601682385/utilizing-multiple-worker-in-as3
// http://www.blixtsystems.com/2010/11/audio-mixing-on-air-for-android/
// http://coenraets.org/blog/2010/07/voicenotes-for-android-sample-app-using-flex-air-and-the-microphone-api/
// can we get a code analyzer to suggest a new nuget
// 20150107, until when clippy starts selling us things?:D
// Error 1 The type or namespace name 'net' could not be found (are you missing a using directive or an assembly reference?) X:\jsc.svn\examples\actionscript\air\AIRThreadedSound\AIRThreadedSound\ApplicationSprite.cs 44 17 AIRThreadedSound
public ApplicationSprite()
{
    // on-screen FPS / memory counter overlay
    new net.hires.debug.Stats().AttachTo(this);

    new Thread(
        // jsc, whats the scope sharing analysis for this new block
        // can you show it on the other UHD display?
        // jsc zombie server, analysis server
        delegate (object scope)
        {
            // can our Console.WriteLine
            // be redirected over udp from android
            // to the jsc studio running over at the chrome?
            // AIR has to use native plugin to do lan udp broadcast?

            // can we thread left and right audio on separate threads?
            // http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/media/Sound.html
            var mySound = new Sound();

            // would it make any sense to use the async workflow instead?
            mySound.sampleData += e =>
            {
                // does it work on tab?
                // lets attatch the tab to find out.
                // cmd /K c:\util\android-sdk-windows\platform-tools\adb.exe logcat
                // works at 60fps
                // works!
                // could we add the nuget packages at runtime?
                // /jsc the debugger view

                // 8192 stereo sample pairs per callback; e.position keeps the
                // sine phase continuous across successive sampleData events.
                for (var c = 0; c < 8192; c++)
                {
                    // i wonder, can we use the orientation
                    // or magnetic north here?
                    // prep for Gear VR?

                    // left channel at 0.4 amplitude, right at 0.1
                    e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.4);
                    e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.1);
                }
            };

            mySound.play();
        }
    ).Start(default(object));
}
// X:\jsc.svn\examples\actionscript\test\TestResolveNativeImplementationExtension\TestResolveNativeImplementationExtension\Class1.cs

/// <summary>
/// Returns the cached <c>SoundTasks</c> wrapper for <paramref name="that"/>,
/// creating and registering one on first use (lazy per-instance lookup).
/// </summary>
/// <param name="that">The Sound whose async task wrapper is requested.</param>
/// <returns>The existing or newly created SoundTasks for this Sound.</returns>
public static SoundTasks get_async(Sound that)
{
    //Console.WriteLine("InteractiveObject get_async");

    // TryGetValue instead of ContainsKey + indexer: single dictionary probe
    // instead of two for the common (already cached) case.
    SoundTasks tasks;
    if (!SoundTasks.InternalLookup.TryGetValue(that, out tasks))
    {
        tasks = new SoundTasks { that_Sound = that };
        SoundTasks.InternalLookup[that] = tasks;
    }

    return tasks;
}
/// <summary>
/// Begins loading the MP3 at <paramref name="url"/> and, once the download
/// completes, routes it through the pitch-shifting proxy built by f() and
/// starts playback at the default rate of 1.0.
/// </summary>
/// <param name="url">Address of the MP3 to load.</param>
public MP3Pitch(string url)
{
    _rate = 1.0;

    var source = new Sound();

    // when loading finishes, wrap the decoded audio in the sampleData
    // resampler sound and play that instead of the source directly
    source.complete += e => f(source).play();

    source.load(new URLRequest(url));
}
// Decoded source loop; samples are pulled out of it via Sound.extract.
private Sound mp3; // Use for decoding

//[Embed("/assets/MP3LoopExperiment/loop.mp3")]
//static Class __mp3;

/// <summary>
/// MP3LoopExperiment entry point: resolves the embedded loop asset, computes
/// the usable sample count (compensating for the encoder/decoder delay), sets
/// up the status TextField + click-to-toggle handler, and starts playback.
/// </summary>
public ApplicationSprite()
{
    //this.InvokeWhenStageIsReady(
    //    () =>
    //    {

    //var ref0 = "assets/MP3LoopExperiment/loop.mp3";
    //mp3 = __mp3.ToSoundAsset();
    mp3 = KnownEmbeddedResources.Default["assets/MP3LoopExperiment/loop.mp3"].ToSoundAsset();

    // mp3.length is milliseconds -> samples at 44.1kHz, minus 1.5x the
    // MAGIC_DELAY padding (presumably the LAME encoder delay — TODO confirm
    // against the MAGIC_DELAY declaration elsewhere in this file).
    samplesTotal = System.Convert.ToInt32((mp3.length * 44100 / 1000) - MAGIC_DELAY - MAGIC_DELAY / 2);

    //stage.align = StageAlign.TOP_LEFT;
    //stage.scaleMode = StageScaleMode.NO_SCALE;

    textField.autoSize = TextFieldAutoSize.LEFT;
    textField.selectable = false;
    //textField.defaultTextFormat = new TextFormat("Verdana", 10, 0xFFFFFF);
    textField.text = "loading...";
    addChild(textField);

    // clicking anywhere toggles the enabled flag and refreshes the label
    this.click += delegate { enabled = !enabled; updateText(); };

    updateText();
    startPlayback();

    enabled = true;
    updateText();

    //    }
    //);
}
/// <summary>
/// Unsubscribes <paramref name="value"/> from the Sound's
/// <c>ProgressEvent.PROGRESS</c> event (counterpart of add_progress).
/// </summary>
public static void remove_progress(Sound that, Action<ProgressEvent> value) =>
    CommonExtensions.RemoveDelegate(that, value, ProgressEvent.PROGRESS);
/// <summary>
/// Unsubscribes <paramref name="value"/> from the Sound's
/// <c>Event.OPEN</c> event (counterpart of add_open).
/// </summary>
public static void remove_open(Sound that, Action<Event> value) =>
    CommonExtensions.RemoveDelegate(that, value, Event.OPEN);
/// <summary>
/// Subscribes <paramref name="value"/> to the Sound's
/// <c>Event.OPEN</c> event (counterpart of remove_open).
/// </summary>
public static void add_open(Sound that, Action<Event> value) =>
    CommonExtensions.CombineDelegate(that, value, Event.OPEN);
/// <summary>
/// Unsubscribes <paramref name="value"/> from the Sound's
/// <c>IOErrorEvent.IO_ERROR</c> event (counterpart of add_ioError).
/// </summary>
public static void remove_ioError(Sound that, Action<IOErrorEvent> value) =>
    CommonExtensions.RemoveDelegate(that, value, IOErrorEvent.IO_ERROR);
/// <summary>
/// Unsubscribes <paramref name="value"/> from the Sound's
/// <c>Event.COMPLETE</c> event (counterpart of add_complete).
/// </summary>
public static void remove_complete(Sound that, Action<Event> value) =>
    CommonExtensions.RemoveDelegate(that, value, Event.COMPLETE);
/// <summary>
/// Subscribes <paramref name="value"/> to the Sound's
/// <c>Event.COMPLETE</c> event (counterpart of remove_complete).
/// </summary>
public static void add_complete(Sound that, Action<Event> value) =>
    CommonExtensions.CombineDelegate(that, value, Event.COMPLETE);
// X:\jsc.svn\examples\actionscript\air\AIRAudioWorker\AIRAudioWorker\ApplicationSprite.cs

// would jsc be able to translate it into
// a property with events for cross thread sync?
// Error 1 'AIRThreadedSoundAsyncLoop.ApplicationSprite.volume':
// a volatile field cannot be of the type 'double' X:\jsc.svn\examples\actionscript\air\AIRThreadedSoundAsyncLoop\AIRThreadedSoundAsyncLoop\ApplicationSprite.cs 13 25 AIRThreadedSoundAsyncLoop

// or, a GC like syncer should see what fields are being read and written
// and on the fly bridge the data flow if usage is found
//volatile double volume;

// http://msdn.microsoft.com/en-us/library/aa645755%28v=vs.71%29.aspx
// http://stackoverflow.com/questions/4727068/why-not-volatile-on-system-double-and-system-long
// http://theburningmonk.com/2010/03/threading-understanding-the-volatile-modifier-in-csharp/

// Application ApplicationWebService sync would also benefit from such
// usage analysis
//volatile float volume;

/// <summary>
/// AIRThreadedSoundAsyncLoop: same sine-tone generator as AIRThreadedSound,
/// but driven by an async lambda on a worker Thread that awaits each
/// sampleData event (s.async.sampleData) in an endless loop instead of
/// subscribing a handler.
/// </summary>
public ApplicationSprite()
{
    // GearVR native api available
    // for AIR via http://www.adobe.com/devnet/air/articles/extending-air.html
    // http://blog.aboutme.be/2011/12/14/udp-native-extension-for-air-mobile-now-with-android-support/
    // https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2014/201403/20140310
    // http://16bit.signt.com/post/31487077697/extendable-as3-worker-class
    // http://performancebydesign.blogspot.com/2011/11/measuring-thread-execution-state-using.html
    // http://16bit.signt.com/post/31601682385/utilizing-multiple-worker-in-as3
    // http://www.blixtsystems.com/2010/11/audio-mixing-on-air-for-android/
    // http://coenraets.org/blog/2010/07/voicenotes-for-android-sample-app-using-flex-air-and-the-microphone-api/

    // on-screen console: a TextField promoted to Console output target
    var t = new TextField
    {
        multiline = true,
        autoSize = TextFieldAutoSize.LEFT,
        text = "..."
        // X:\jsc.svn\examples\actionscript\Test\TestWorkerConsole\TestWorkerConsole\ApplicationSprite.cs
    }.AttachToSprite().AsConsole();

    //new Thread(
    //    // jsc, whats the scope sharing analysis for this new block
    //    // can you show it on the other UHD display?
    //    // jsc zombie server, analysis server
    //    //async
    //    delegate (object scope)
    //{

    // can our Console.WriteLine
    // be redirected over udp from android
    // to the jsc studio running over at the chrome?
    // AIR has to use native plugin to do lan udp broadcast?

    // can we thread left and right audio on separate threads?
    // http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/media/Sound.html
    new net.hires.debug.Stats().AttachToSprite();

    // can we loose the arg0
    // can we print to console from thread?
    // can we share and sync scope variables?
    var tt = new Thread(async arg0 =>
        {
            // jsc shall augment the scope so we can sync it between contexts
            var s = new Sound();
            //var sw = System.Diagnostics.Stopwatch.StartNew();

            // endless generator: each iteration awaits one sampleData request
            // and fills it with 8192 stereo sample pairs
            while (true)
            {
                // X:\jsc.svn\examples\actionscript\Test\TestWorkerSoundAssetLoop\TestWorkerSoundAssetLoop\ApplicationSprite.cs
                var e = await s.async.sampleData;

                // can we get the pitch from another device over lan?
                // can we have framerate as audio?
                for (var c = 0; c < 8192; c++)
                {
                    // i wonder, can we use the orientation
                    // or magnetic north here?
                    // prep for Gear VR?

                    // Sin/PI are unqualified — brought in via using static Math
                    e.data.writeFloat(Sin(((c + e.position) / PI * 0.3)) * 0.4);
                    e.data.writeFloat(Sin(((c + e.position) / PI * 0.3)) * 0.1);
                }
            }
        }
    );
    tt.Start(null);
}
/// <summary>
/// TestWorkerSoundAssetLoop: each click spawns a worker Thread that decodes an
/// embedded engine-loop MP3 into a ByteArray once, then perpetually streams it
/// back out through a dynamic Sound's sampleData events at reduced volume,
/// wrapping the read position to loop.
/// </summary>
public ApplicationSprite()
{
    // well does the assets library do something with mp3 for us?
    //jeepengine
    // not for AIR.
    // lets move into embedded resources
    var t = new TextField
    {
        multiline = true,
        autoSize = TextFieldAutoSize.LEFT,
        text = "click to start",
        x = 100
        // X:\jsc.svn\examples\actionscript\Test\TestWorkerConsole\TestWorkerConsole\ApplicationSprite.cs
    }.AttachToSprite().AsConsole();

    new net.hires.debug.Stats().AttachToSprite();

    // X:\jsc.svn\examples\actionscript\air\AIRThreadedSoundAsyncLoop\AIRThreadedSoundAsyncLoop\ApplicationSprite.cs
    var threadId = 0;

    t.click += delegate
    {
        threadId++;
        Console.WriteLine(new { threadId });

        var tt = new Thread(async arg0 =>
            {
                // https://www.packtpub.com/books/content/flash-development-android-audio-input-microphone
                Console.WriteLine("enter thread");

                var SourceAudio = KnownEmbeddedResources.Default[
                    "assets/TestWorkerSoundAssetLoop/jeepengine.mp3"
                ].ToSoundAsset();

                // sometimes wont reach here, why? timerace issue?
                Console.WriteLine("SourceAudio " + new { SourceAudio.bytesTotal, SourceAudio.id3 });

                // decode target; float32 PCM is little endian
                var SourceAudioBytes = new ByteArray { endian = Endian.LITTLE_ENDIAN };

                //2647ms SourceAudio { { bytesTotal = 34625, id3 = [object ID3Info] } }
                //2669ms SourceAudio { { samplesperchannel = 105984 } }

                // decode the whole asset up-front (up to 0x100000 sample pairs)
                var samplesperchannel = (int)SourceAudio.extract(
                    target: SourceAudioBytes,
                    length: 0x100000,
                    startPosition: 0
                );

                //var MAGIC_DELAY = 2257u; // LAME 3.98.2 + flash.media.Sound Delay
                SourceAudioBytes.position = 0;

                // cyclical binary reader?
                // 63ms enter thread
                //66ms SourceAudio { { bytesTotal = 34625, id3 = [object ID3Info] } }
                // 95ms SourceAudio { { samplesperchannel = 105984 } }
                Console.WriteLine("SourceAudio " + new { samplesperchannel });

                // can we await for a click here?
                // what if the parameter is the onclick task? /event

                // should we prerender our audio loop into the pitch we would need?
                //var loopjeep = new Abstractatech.ActionScript.Audio.MP3PitchLoop(SourceAudio);

                //it works and keeps the fps
                // on android it sounds choppy. why?
                // nexus seems to be able to do 16sounds with 60fps.
                //loopjeep.Sound.play();

                var s = new Sound();

                while (true)
                {
                    // X:\jsc.svn\examples\actionscript\Test\TestWorkerSoundAssetLoop\TestWorkerSoundAssetLoop\ApplicationSprite.cs
                    var e = await s.async.sampleData;

                    // ftt
                    //1831ms { { position = 0 } }
                    //1847ms { { position = 65536 } }

                    // wrap: 8192 stereo pairs * 8 bytes each; rewind before the
                    // upcoming block would read past the decoded data
                    if (8192 * 8 + SourceAudioBytes.position > SourceAudioBytes.length)
                    {
                        Console.WriteLine(new { SourceAudioBytes.position });
                        SourceAudioBytes.position = 0;
                    }

                    // can we get the pitch from another device over lan?
                    // can we have framerate as audio?
                    for (var c = 0; c < 8192; c++)
                    {
                        // mipmap?
                        // 4 bytes: left sample
                        var q0 = SourceAudioBytes.readFloat();
                        // 4 bytes: right sample
                        var q1 = SourceAudioBytes.readFloat();

                        // i wonder, can we use the orientation
                        // or magnetic north here?
                        // prep for Gear VR?

                        // attenuate to 70% volume on both channels
                        e.data.writeFloat(q0 * 0.7);
                        e.data.writeFloat(q1 * 0.7);
                    }
                }
            }
        );
        tt.Start(null);
    };

    // can we get heatzeeker to be like a small earth ball?
}
/// <summary>
/// Builds a proxy Sound whose sampleData handler resamples <paramref name="_mp3"/>
/// at the current _rate using linear interpolation between adjacent stereo
/// samples (classic MP3Pitch technique). The returned Sound plays the
/// pitch-shifted audio; _position tracks the read head into the source.
/// </summary>
/// <param name="_mp3">Fully loaded source Sound to extract samples from.</param>
/// <returns>A new Sound that renders the pitched audio when played.</returns>
private Sound f(Sound _mp3)
{
    // scratch buffer reused across callbacks; captured by the lambda below
    var _target = new ByteArray();
    // fractional read position (in sample pairs) into the source sound
    var _position = 0.0;

    var _sound = new Sound();

    _sound.sampleData += e =>
    {
        //-- REUSE INSTEAD OF RECREATION
        _target.position = 0;

        //-- SHORTCUT
        var data = e.data;

        // how many source samples one output block consumes at this rate
        var scaledBlockSize = BLOCK_SIZE * _rate;
        var positionInt = Convert.ToInt32(_position);
        // fractional part -> interpolation weight between sample n and n+1
        var alpha = _position - positionInt;

        var positionTargetNum = alpha;
        var positionTargetInt = -1;

        //-- COMPUTE NUMBER OF SAMPLES NEED TO PROCESS BLOCK (+2 FOR INTERPOLATION)
        var need = Convert.ToInt32(Math.Ceiling(scaledBlockSize) + 2);

        //-- EXTRACT SAMPLES
        var read = (int)_mp3.extract(_target, need, positionInt);

        // if the source ran short (end of stream), shrink the output count
        var n = BLOCK_SIZE;
        if (read != need)
            n = Convert.ToInt32(read / _rate);

        var l0 = .0;
        var r0 = .0;
        var l1 = .0;
        var r1 = .0;

        var i = 0;
        for (; i < n; i++)
        {
            //-- AVOID READING EQUAL SAMPLES, IF RATE < 1.0
            if (Convert.ToInt32(positionTargetNum) != positionTargetInt)
            {
                positionTargetInt = Convert.ToInt32(positionTargetNum);

                //-- SET TARGET READ POSITION
                // << 3: one stereo float32 pair is 8 bytes
                _target.position = (uint)(positionTargetInt << 3);

                //-- READ TWO STEREO SAMPLES FOR LINEAR INTERPOLATION
                l0 = _target.readFloat();
                r0 = _target.readFloat();

                l1 = _target.readFloat();
                r1 = _target.readFloat();
            }

            //-- WRITE INTERPOLATED AMPLITUDES INTO STREAM
            data.writeFloat(l0 + alpha * (l1 - l0));
            data.writeFloat(r0 + alpha * (r1 - r0));

            //-- INCREASE TARGET POSITION
            positionTargetNum += _rate;

            //-- INCREASE FRACTION AND CLAMP BETWEEN 0 AND 1
            alpha += _rate;
            while (alpha >= 1.0)
                --alpha;
        }

        //-- FILL REST OF STREAM WITH ZEROs
        // a partially filled block must still deliver BLOCK_SIZE pairs
        if (i < BLOCK_SIZE)
        {
            while (i < BLOCK_SIZE)
            {
                data.writeFloat(0.0);
                data.writeFloat(0.0);

                ++i;
            }
        }

        //-- INCREASE SOUND POSITION
        _position += scaledBlockSize;
    };

    return _sound;
}
/// <summary>
/// Wires up a game actor: stores its animation frames / corpse / blood art,
/// prepares the death sound, builds the Kill routine (corpse then blood decay
/// on randomized delays, each step aborted if the actor is revived), and
/// starts the 15fps frame-advance timer.
/// </summary>
/// <param name="frames">Animation frames cycled by the interval timer.</param>
/// <param name="corpse">Optional corpse bitmap shown on death (may be null).</param>
/// <param name="blood">Blood stain shown after the corpse fades.
/// NOTE(review): used without a null check inside the corpse branch — confirm
/// callers always pass a non-null blood when corpse is non-null.</param>
/// <param name="death">Sound played when the actor is killed.</param>
public Actor(Bitmap[] frames, Bitmap corpse, Bitmap blood, Sound death)
{
    this._frames = frames;
    this._corpse = corpse;
    this._blood = blood;

    this.mouseEnabled = false;

    PlayDeathSound = death.ToAction();

    Kill = delegate
    {
        IsAlive = false;

        PlayDeathSound();

        // remove all animation frames from the display list
        foreach (var v in frames)
            v.Orphanize();

        #region corpse
        if (corpse != null)
        {
            corpse.MoveToCenter().AttachTo(this);

            // corpse lingers 10-20 seconds before decaying into blood
            (10000 + 10000.FixedRandom()).AtDelay(
                delegate
                {
                    // actor was revived in the meantime — keep the corpse
                    if (IsAlive)
                        return;

                    corpse.Orphanize();

                    blood.x = -blood.width / 2;
                    blood.y = -blood.height / 2;
                    blood.AttachTo(this);

                    // blood lingers 20-30 seconds before vanishing
                    ((20000 + 10000.FixedRandom())).AtDelay(
                        delegate
                        {
                            if (IsAlive)
                                return;

                            blood.Orphanize();

                            IsCorpseAndBloodGone = true;
                            if (CorpseAndBloodGone != null)
                                CorpseAndBloodGone();
                        }
                    );

                    IsCorpseGone = true;
                    if (CorpseGone != null)
                        CorpseGone();
                }
            );
        }
        #endregion

        if (Die != null)
            Die();
    };

    //this.Moved +=
    //    delegate
    //    {

    // advance the animation at ~15fps; the timer stops itself once dead
    RunAnimationTimer = (1000 / 15).AtInterval(
        t =>
        {
            if (!IsAlive)
            {
                t.stop();
                return;
            }

            if (!RunAnimation)
                return;

            ShowFrame(t.currentCount);
        }
    );

    ShowFrame(0);
}
/// <summary>
/// Unsubscribes <paramref name="value"/> from the Sound's
/// <c>SampleDataEvent.SAMPLE_DATA</c> event (counterpart of add_sampleData).
/// </summary>
public static void remove_sampleData(Sound that, Action<SampleDataEvent> value) =>
    CommonExtensions.RemoveDelegate(that, value, SampleDataEvent.SAMPLE_DATA);
/// <summary>
/// Seamless-looping pitch-shifted MP3 player. The constructor:
/// 1) decodes the whole source into floats and auto-detects leading ("yellow")
///    and trailing ("red") silence/padding so the loop points skip it;
/// 2) installs a sampleData handler that resamples the source at Rate with
///    linear interpolation, wraps at the loop end, and cross-smooths the seam
///    to suppress the audible click.
/// </summary>
/// <param name="SourceAudio">Fully decoded source Sound to loop.</param>
/// <param name="autoplay">When true, starts playback immediately.</param>
public MP3PitchLoop(Sound SourceAudio, bool autoplay = false)
{
    MP3PitchLoopCount++;

    this.SourceAudio = SourceAudio;
    this.Rate = 1.0;

    // X:\jsc.svn\examples\actionscript\air\AIRThreadedSoundAsyncLoop\AIRThreadedSoundAsyncLoop\ApplicationSprite.cs
    // X:\jsc.svn\examples\actionscript\air\AIRAudioWorker\AIRAudioWorker\ApplicationSprite.cs
    // on mobile AIR, something behaves differently?
    var SourceAudioBytes = new ByteArray { endian = Endian.LITTLE_ENDIAN };
    var samplesperchannel = (int)SourceAudio.extract(SourceAudioBytes, 0x100000, 0);

    #region Autoconfigure
    {
        // re-read the decoded bytes as float32 samples (interleaved L,R)
        var bytes = SourceAudioBytes.ToMemoryStream();
        bytes.Position = 0;
        var r = new BinaryReader(bytes);
        var floats = new double[bytes.Length / 4];
        //Console.WriteLine("floats " + new { floats.Length });
        for (int i = 0; i < floats.Length; i++)
        {
            floats[i] = r.ReadSingle();
        }

        // "yellow" = leading padding scan state, "red" = trailing padding scan state
        var paddingmode_yellow = true;
        var paddingsamples_yellow = 0;
        var paddingmode_yellow_agg = 0.0;
        var paddingmode_yellow_grace = 411;

        var paddingmode_red = true;
        var paddingsamples_red = 0;
        var paddingmode_red_agg = 0.0;
        var paddingmode_red_grace = 411;

        #region max
        // find the peak left-channel amplitude to normalize the thresholds below
        var min = 0.0;
        var minset = false;
        var max = 0.0;
        var maxset = false;

        for (int ix = 0; ix < floats.Length; ix += 2)
        {
            // arg[0] is typeof System.Single
            //script: error JSC1000: No implementation found for this native method, please implement [static System.Console.WriteLine(System.Single)]
            var l0 = floats[ix];
            var r0 = floats[ix + 1];

            if (l0 != 0)
                if (minset)
                {
                    min = Math.Min(min, l0);
                }
                else
                {
                    min = l0;
                    minset = true;
                }

            if (maxset)
            {
                max = Math.Max(max, l0);
            }
            else
            {
                max = l0;
                maxset = true;
            }
        }

        var absmax = max.Max(Math.Abs(min));
        #endregion

        #region paddingmode_yellow
        // forward scan: count leading samples until sustained signal is found
        for (int ix = 0; ix < floats.Length; ix += 2)
        {
            // arg[0] is typeof System.Single
            //script: error JSC1000: No implementation found for this native method, please implement [static System.Console.WriteLine(System.Single)]
            var l0 = floats[ix];
            var r0 = floats[ix + 1];

            if (paddingmode_yellow)
            {
                // discard noise
                if (Math.Abs(l0) > 0.08 * absmax)
                    paddingmode_yellow_agg += Math.Abs(l0);
            }

            // enough energy accumulated: stop at the next near-zero crossing
            if (paddingmode_yellow_agg > absmax * 2.1)
            {
                if (Math.Abs(l0) < 0.02 * absmax)
                {
                    paddingmode_yellow = false;
                }
            }

            if (paddingmode_yellow)
            {
                paddingsamples_yellow++;

                if (paddingmode_yellow_agg > absmax * 3.2)
                {
                    if (paddingmode_yellow_grace > 0)
                    {
                        paddingmode_yellow_grace--;
                    }
                    else
                    {
                        // rollback
                        paddingsamples_yellow -= 411;
                        paddingmode_yellow = false;
                    }
                }
            }
        }
        #endregion

        // count down while near zero, then wait for zero
        #region paddingmode_red
        // backward scan: same algorithm from the tail to find trailing padding.
        // NOTE(review): ix starts at floats.Length - 1, so floats[ix + 1] indexes
        // one past the end on the first iteration — likely should start at
        // floats.Length - 2; confirm against the jsc/AS3 array semantics.
        for (int ix = floats.Length - 1; ix >= 0; ix -= 2)
        {
            var l0 = floats[ix];
            var r0 = floats[ix + 1];

            if (paddingmode_red)
            {
                // discard noise
                if (Math.Abs(l0) > 0.08 * absmax)
                    paddingmode_red_agg += Math.Abs(l0);
            }

            if (paddingmode_red_agg > absmax * 2.1)
            {
                if (Math.Abs(l0) < 0.02 * absmax)
                {
                    paddingmode_red = false;
                }
            }

            if (paddingmode_red)
            {
                paddingsamples_red++;

                if (paddingmode_red_agg > absmax * 3.2)
                {
                    if (paddingmode_red_grace > 0)
                    {
                        paddingmode_red_grace--;
                    }
                    else
                    {
                        // rollback
                        paddingsamples_red -= 411;
                        paddingmode_red = false;
                    }
                }
            }
        }
        #endregion

        // loop start skips the detected leading padding; loop end keeps clear
        // of the trailing padding
        this.SourceAudioInitialPosition = Convert.ToDouble(paddingsamples_yellow);
        this.SourceAudioPosition = Convert.ToDouble(paddingsamples_yellow);
        this.SourceAudioPaddingRight = Convert.ToDouble(paddingsamples_red);
    }
    #endregion

    // http://stackoverflow.com/questions/16733369/stuttering-playback-when-playing-a-stream-received-via-udp-socket
    // https://forums.adobe.com/message/4187920#4187920
    // https://forums.adobe.com/message/3932678#3932678
    // https://forums.adobe.com/message/3260161#3260161
    // http://stackoverflow.com/questions/4944351/flash-10-1-as3-applying-realtime-effects-to-microphone-stutter-problems
    // shat causes audio stutter on android?

    // this costs us 58% of total time?
    Sound.sampleData += e =>
    {
        var i = 0;
        var TargetAudioStream = e.data;

        // effectively muted: emit silence and skip all resampling work
        if (MasterVolume < 0.05)
        {
            while (i < BLOCK_SIZE)
            {
                if (glitchmode)
                {
                    glitch.Add(0);
                    glitch.Add(0);
                }

                TargetAudioStream.writeFloat(0.0);
                TargetAudioStream.writeFloat(0.0);

                ++i;
            }

            return;
        }

        if (glitchmode)
        {
            //glitchmode = false;
            glitch.Clear();
            //foreach (var item in glitch)
            //{
            //    TargetAudioStream.writeFloat(item);
            //}
            //return;
        }

        try
        {
            //-- REUSE INSTEAD OF RECREATION
            LoopAudioStream.position = 0;

            var scaledBlockSize = BLOCK_SIZE * Rate;

            // this costs us 13% of total time?
            //var positionInt = Convert.ToInt32(SourceAudioPosition);
            var positionInt = (int)(SourceAudioPosition);

            // fractional read position -> interpolation weight
            var alpha = SourceAudioPosition - positionInt;

            var positionTargetNum = alpha;
            var positionTargetInt = -1;

            //-- COMPUTE NUMBER OF SAMPLES NEED TO PROCESS BLOCK (+2 FOR INTERPOLATION)
            //var need = Convert.ToInt32(Math.Ceiling(scaledBlockSize) + 2);
            var need = (int)(Math.Ceiling(scaledBlockSize) + 2);

            var nextposition = SourceAudioPosition + scaledBlockSize;

            //-- EXTRACT SAMPLES
            //var need1 = Math.Min(
            //    positionInt + need,
            //    SourceAudio.bytesTotal
            //- SourceAudioPaddingRight
            //) - positionInt;

            var need1offset = positionInt + need;

            // http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/media/Sound.html#extract()
            // clamp so we never read into the trailing padding
            var need1 = Math.Min(positionInt + need,
                // in bytes or in samples??
                samplesperchannel - this.SourceAudioPaddingRight) - positionInt;

            //if (AtDiagnostics != null)
            //    AtDiagnostics(new { positionInt, samplesperchannel, SourceAudioBytes.length, need1, need }.ToString());

            // Scout tells this costs us 5% of total time
            #region SourceAudio.extract
            var read = (int)SourceAudio.extract(LoopAudioStream, need1, positionInt);
            var n = BLOCK_SIZE;

            // short read = we hit the loop end: wrap to the loop start,
            // extract the remainder, and smooth the seam
            if (read != need)
            {
                var p0 = LoopAudioStream.position;
                var need2 = n - read;
                nextposition = SourceAudioInitialPosition;

                var read2 = (int)SourceAudio.extract(LoopAudioStream, need2, nextposition);

                // we need to ease the edges now to make the click effect to o away.
                //var samples = read + read2;
                //var trace0 = new { samples, read, read2, LoopAudioStream.length };
                //if (AtDiagnostics != null)
                //    AtDiagnostics("fixup_LoopAudioStream " + trace0);
                //var fixup_LoopAudioStream = new ByteArray();

                var p1 = LoopAudioStream.position;

                read += read2;
                nextposition += read2;

                //for (int fixup_i = 0; fixup_i < (read + read2); fixup_i++)

                #region ReadFloat32At
                // reads one stereo pair at byte offset p, wrapping inside the
                // read*8 byte window of LoopAudioStream
                Action<int, Action<double, double>> ReadFloat32At = (p, y) =>
                {
                    // wrap by 8 bytes!
                    LoopAudioStream.position = (uint)(((p + 0) + (read * 8)) % (read * 8));
                    var fixup_l0 = LoopAudioStream.readFloat();

                    LoopAudioStream.position = (uint)(((p + 4) + (read * 8)) % (read * 8));
                    var fixup_r0 = LoopAudioStream.readFloat();

                    y(fixup_l0, fixup_r0);
                };
                #endregion

                #region WriteFloat32At
                // writes one stereo pair at byte offset p, same wrapping rule
                Action<int, double, double> WriteFloat32At = (p, fixup_l0, fixup_r0) =>
                {
                    // wrap by 8 bytes!
                    LoopAudioStream.position = (uint)(((p + 0) + (read * 8)) % (read * 8));
                    LoopAudioStream.writeFloat(fixup_l0);

                    LoopAudioStream.position = (uint)(((p + 4) + (read * 8)) % (read * 8));
                    LoopAudioStream.writeFloat(fixup_r0);
                };
                #endregion

                #region DoSmoothingAt
                // linearly blends 2*radius sample pairs around position pp so
                // the wrap seam has no step discontinuity (anti-click)
                Action<int, int> DoSmoothingAt = (pp, radius) =>
                {
                    ReadFloat32At((int)pp - 8 * radius, (fixup_l0, fixup_r0) =>
                    {
                        ReadFloat32At((int)pp + 8 * radius, (fixup_l1, fixup_r1) =>
                        {
                            // mark it, can we see it?
                            var dl = fixup_l1 - fixup_l0;
                            var dr = fixup_r1 - fixup_r0;

                            for (var fixupi = 0; fixupi < radius * 2; fixupi++)
                            {
                                var p = (double)fixupi / (double)(radius * 2);

                                WriteFloat32At(
                                    (int)pp + 8 * (fixupi - radius),
                                    fixup_l0 + dl * p,
                                    fixup_r0 + dr * p
                                );
                            }
                        }
                        );
                    }
                    );
                };

                // does it actually help us??
                DoSmoothingAt((int)p0, 40);
                //DoSmoothingAt(0, 40);
                #endregion

                LoopAudioStream.position = p1;

                //if (AtDiagnostics != null)
                //    AtDiagnostics("fixup_LoopAudioStream done! " + new { fixup_LoopAudioStream.length, trace0 });

                //LoopAudioStream = fixup_LoopAudioStream;
                glitchmode = true;
            }
            #endregion

            if (read != need)
            {
                //n = Convert.ToInt32(read / Rate);
                n = (int)(read / Rate);
            }

            #region TargetAudioStream.writeFloat
            var l0 = .0;
            var r0 = .0;
            var l1 = .0;
            var r1 = .0;

            for (; i < n; i++)
            {
                //-- AVOID READING EQUAL SAMPLES, IF RATE < 1.0
                if (Convert.ToInt32(positionTargetNum) != positionTargetInt)
                {
                    //positionTargetInt = Convert.ToInt32(positionTargetNum);
                    positionTargetInt = (int)(positionTargetNum);

                    //-- SET TARGET READ POSITION
                    LoopAudioStream.position = (uint)(positionTargetInt << 3);

                    //-- READ TWO STEREO SAMPLES FOR LINEAR INTERPOLATION
                    l0 = LoopAudioStream.readFloat() * LeftVolume * MasterVolume;
                    r0 = LoopAudioStream.readFloat() * RightVolume * MasterVolume;

                    l1 = LoopAudioStream.readFloat() * LeftVolume * MasterVolume;
                    r1 = LoopAudioStream.readFloat() * RightVolume * MasterVolume;
                }

                //-- WRITE INTERPOLATED AMPLITUDES INTO STREAM
                var tl0 = l0 + alpha * (l1 - l0);
                if (glitchmode) glitch.Add(tl0);
                TargetAudioStream.writeFloat(tl0);

                var tr0 = r0 + alpha * (r1 - r0);
                if (glitchmode) glitch.Add(tr0);
                TargetAudioStream.writeFloat(tr0);

                //-- INCREASE TARGET POSITION
                positionTargetNum += Rate;

                //-- INCREASE FRACTION AND CLAMP BETWEEN 0 AND 1
                alpha += Rate;
                while (alpha >= 1.0)
                    --alpha;
            }
            #endregion

            #region -- FILL REST OF STREAM WITH ZEROs
            //if (i < BLOCK_SIZE)
            //{
            while (i < BLOCK_SIZE)
            {
                if (glitchmode)
                {
                    glitch.Add(0);
                    glitch.Add(0);
                }

                TargetAudioStream.writeFloat(0.0);
                TargetAudioStream.writeFloat(0.0);

                ++i;
            }
            //}
            #endregion

            //-- INCREASE SOUND POSITION
            SourceAudioPosition = nextposition;

            if (SourceAudioPositionChanged != null)
                SourceAudioPositionChanged();
        }
        catch (Exception ex)
        {
            var StackTrace = ((ScriptCoreLib.ActionScript.Error)(object)ex).getStackTrace();

            if (AtDiagnostics != null)
                AtDiagnostics("error: " + new { ex.Message, StackTrace });
        }
    };

    if (autoplay)
        Sound.play();
}
/// <summary>
/// AIR Worker split entry point: when running as the primordial worker it
/// clones itself into a background worker (from its own SWF bytes) and shows
/// the Stats overlay; when running as the spawned worker it generates a sine
/// tone via Sound.sampleData at equal 0.25 amplitude on both channels.
/// </summary>
public ApplicationSprite()
{
    // "X:\jsc.svn\examples\actionscript\air\AIRThreadedSound\AIRThreadedSound.sln"
    // https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2015/201501/20150107

    // ! jsc inserts a check here now
    // if ((__Thread.InternalWorkerInvoke_4ebbe596_0600112e(this)))

    // https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2014/201403/20140310
    // http://16bit.signt.com/post/31487077697/extendable-as3-worker-class
    // http://performancebydesign.blogspot.com/2011/11/measuring-thread-execution-state-using.html
    // http://16bit.signt.com/post/31601682385/utilizing-multiple-worker-in-as3
    // http://www.blixtsystems.com/2010/11/audio-mixing-on-air-for-android/
    // http://coenraets.org/blog/2010/07/voicenotes-for-android-sample-app-using-flex-air-and-the-microphone-api/
    if (Worker.current.isPrimordial)
    {
        // main worker: spawn a second instance of this very SWF as a worker,
        // keep the UI/stats here, and do no audio work on this side
        var w = WorkerDomain.current.createWorker(
            this.loaderInfo.bytes
        );

        w.start();

        new net.hires.debug.Stats().AttachTo(this);
        return;
    }

    // --- from here on we are the background worker ---

    // http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/media/Sound.html
    var mySound = new Sound();

    mySound.sampleData += e =>
    {
        // does it work on tab?
        // lets attatch the tab to find out.
        // cmd /K c:\util\android-sdk-windows\platform-tools\adb.exe logcat
        // works at 60fps
        for (var c = 0; c < 8192; c++)
        {
            e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.25);
            e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.25);
        }
    };

    // i cannot hear a thing!
    mySound.play();

    //var loopdiesel2 = new MP3PitchLoop(
    //    KnownEmbeddedResources.Default[
    //        "assets/Abstractatech.ActionScript.Audio/diesel4.mp3"
    //    ].ToSoundAsset()
    //    );

    //// on android this feels choppy. why?
    //loopdiesel2.Sound.play();
}
// method: CompareTo
// Object reference not set to an instance of an object.
// at System.Collections.Generic.Dictionary`2.Insert(TKey key, TValue value, Boolean add)
//at System.Collections.Generic.Dictionary`2.set_Item(TKey key, TValue value)
//at jsc.Script.CompilerBase.DIACache.GetVariableName(Type t, MethodBase m, LocalVariableInfo var, CompilerBase z) in x:\jsc.internal.git\compiler\jsc\Languages\CompilerBase.DIA.cs:line 264
//at jsc.Script.CompilerBase.<WriteVariableName>b__0(Type t, MethodBase m, LocalVariableInfo v) in x:\jsc.internal.git\compiler\jsc\Languages\CompilerBase.DIA.cs:line 289
//at jsc.Script.CompilerBase.<>c__DisplayClass5.<>c__DisplayClass8.<WriteVariableName>b__2(LocalVariableInfo vx) in x:\jsc.internal.git\compiler\jsc\Languages\CompilerBase.DIA.cs:line 303

// X:\jsc.svn\examples\actionscript\air\AIRAudioWorker\AIRAudioWorker\ApplicationSprite.cs

// would jsc be able to translate it into
// a property with events for cross thread sync?
// Error 1 'AIRThreadedSoundAsync.ApplicationSprite.volume':
// a volatile field cannot be of the type 'double' X:\jsc.svn\examples\actionscript\air\AIRThreadedSoundAsync\AIRThreadedSoundAsync\ApplicationSprite.cs 13 25 AIRThreadedSoundAsync

// or, a GC like syncer should see what fields are being read and written
// and on the fly bridge the data flow if usage is found
//volatile double volume;

// http://msdn.microsoft.com/en-us/library/aa645755%28v=vs.71%29.aspx
// http://stackoverflow.com/questions/4727068/why-not-volatile-on-system-double-and-system-long
// http://theburningmonk.com/2010/03/threading-understanding-the-volatile-modifier-in-csharp/

// Application ApplicationWebService sync would also benefit from such
// usage analysis
//volatile float volume;

/// <summary>
/// AIRThreadedSoundAsync experiment: drives the sine-tone sampleData generator
/// through a hand-rolled TaskCompletionSource pump (x_init re-arms a fresh TCS
/// for every sampleData event and chains itself via ContinueWith), instead of
/// a plain event handler or the async/await variant kept disabled under
/// "#if xx" below.
/// </summary>
public ApplicationSprite()
{
    // GearVR native api available
    // for AIR via http://www.adobe.com/devnet/air/articles/extending-air.html
    // http://blog.aboutme.be/2011/12/14/udp-native-extension-for-air-mobile-now-with-android-support/
    // https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2014/201403/20140310
    // http://16bit.signt.com/post/31487077697/extendable-as3-worker-class
    // http://performancebydesign.blogspot.com/2011/11/measuring-thread-execution-state-using.html
    // http://16bit.signt.com/post/31601682385/utilizing-multiple-worker-in-as3
    // http://www.blixtsystems.com/2010/11/audio-mixing-on-air-for-android/
    // http://coenraets.org/blog/2010/07/voicenotes-for-android-sample-app-using-flex-air-and-the-microphone-api/

    // on-screen console
    var t = new TextField
    {
        multiline = true,
        autoSize = TextFieldAutoSize.LEFT,
        text = "..."
        //}.AttachTo(__Thread.InternalPrimordialSprite); // .AsConsole
    }.AttachToSprite().AsConsole();

    //new Thread(
    //    // jsc, whats the scope sharing analysis for this new block
    //    // can you show it on the other UHD display?
    //    // jsc zombie server, analysis server
    //    //async
    //    delegate (object scope)
    //{

    // can our Console.WriteLine
    // be redirected over udp from android
    // to the jsc studio running over at the chrome?
    // AIR has to use native plugin to do lan udp broadcast?

    // can we thread left and right audio on separate threads?
    // http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/media/Sound.html
    var mySound = new Sound();

    var awaiting_sampleData_i = 0;
    // the currently pending completion source; replaced by x_init each cycle
    var awaiting_sampleData = default(TaskCompletionSource<SampleDataEvent>);

    // fills one sampleData request with the sine tone (the actual "work")
    Action<SampleDataEvent> y = e =>
    {
        awaiting_sampleData_i++;

        ////t.text = new { i } + " sampleData";
        Console.WriteLine(new { awaiting_sampleData_i } + " sampleData");

        // can we have framerate as audio?
        for (var c = 0; c < 8192; c++)
        {
            // i wonder, can we use the orientation
            // or magnetic north here?
            // prep for Gear VR?
            e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.4);
            e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.1);
        }
    };

    var listening_sampleData = false;

    // self-rescheduling pump: create a TCS, (once) attach the sampleData
    // handler that completes it, then continue with y + the next x_init
    var x_init = default(Action);
    x_init = delegate
    {
        Console.WriteLine("await sampleData " + new { listening_sampleData, awaiting_sampleData_i });

        awaiting_sampleData = new TaskCompletionSource<SampleDataEvent>();

        if (!listening_sampleData)
        {
            listening_sampleData = true;

            //can we have only the event in another thread?
            mySound.sampleData += e =>
            {
                // bail to keep log small
                //if (awaiting_sampleData_i >= 2)
                //    return;

                Console.WriteLine(" at sampleData " + new { awaiting_sampleData_i, awaiting_sampleData });

                //y(e);
                // why does this work yet the async variant not?
                awaiting_sampleData.SetResult(e);

                Console.WriteLine(" at exit sampleData " + new { awaiting_sampleData_i, awaiting_sampleData });
            };

            // https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2014/201411/2014
            // await for the next frame
            Task.Delay(1).ContinueWith(
                delegate
                {
                    mySound.play();
                }
            );
        }

        Console.WriteLine("await sampleData exit " + new { awaiting_sampleData });

        // ??
        awaiting_sampleData.Task.ContinueWith(
            tt =>
            {
                y(tt.Result);

                x_init();
            }
        );
    };

    //reusable TaskCompletionSource ?
    x_init();

    ////can we have only the event in another thread?
    //mySound.sampleData += e =>
    //{
    //    // bail to keep log small
    //    if (awaiting_sampleData_i >= 2)
    //        return;

    //    Console.WriteLine(" at sampleData " + new { awaiting_sampleData_i, awaiting_sampleData });

    //    //y(e);
    //    // why does this work yet the async variant not?
    //    awaiting_sampleData.SetResult(e);

    //    Console.WriteLine(" at exit sampleData " + new { awaiting_sampleData_i, awaiting_sampleData });
    //};

    //mySound.play();

    //11ms await sampleData { listening_sampleData = false, awaiting_sampleData_i = 0 }
    //16ms at sampleData { awaiting_sampleData_i = 1, awaiting_sampleData = [object __TaskCompletionSource_1] }
    //17ms at exit sampleData { awaiting_sampleData_i = 1, awaiting_sampleData = }
    //17ms await sampleData exit { awaiting_sampleData = }
    //20ms frame1 enter
    //25ms frame1 complete
    //25ms await sampleData { listening_sampleData = true, awaiting_sampleData_i = 1 }
    //26ms await sampleData exit { awaiting_sampleData = [object __TaskCompletionSource_1] }

    //await sampleData { listening_sampleData = true, awaiting_sampleData_i = 1 }
    //await exit sampleData

#if xx
        new { }.With(
            async scope =>
            {
                // sampleData { listening_sampleData = false, awaiting_sampleData_i = 0 }
                //sampleData { listening_sampleData = true, awaiting_sampleData_i = 1 }
                {
                    var e = await mySound.async.sampleData;
                    Console.WriteLine("frame1 enter");
                    y(e);
                }
                Console.WriteLine("frame1 complete");

                {
                    var e = await mySound.async.sampleData;
                    Console.WriteLine("frame2 enter");
                    y(e);
                }
                Console.WriteLine("frame2 complete");

                {
                    var e = await mySound.async.sampleData;
                    y(e);
                }
                Console.WriteLine("frame3 complete");

                {
                    // await mySound.async.sampleData2
                    //Console.WriteLine("await mySound.async.sampleData2");

                    var e = await mySound.async.sampleData;
                    y(e);
                }
            }
        );
#endif

    // call play automatically after subscribing?
    //mySound.play();

    //var go = default(Action);

    //go = delegate
    //{
    //    t.text = "enter go";

    //    mySound.async.sampleData.ContinueWith(
    //        tt =>
    //        {
    //            // ?
    //            t.text = "enter sampleData";

    //            var e = tt.Result;

    //            for (var c = 0; c < 8192; c++)
    //            {
    //                // i wonder, can we use the orientation
    //                // or magnetic north here?
    //                // prep for Gear VR?
    //                e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.4);
    //                e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.1);
    //            }

    //            go();
    //        }
    //    );
    //};

    //go();

    //while (true)
    //{
    //    // would it make any sense to use the async workflow instead?
    //    var e = await mySound.async.sampleData;

    //    // does it work on tab?
    //    // lets attatch the tab to find out.
    //    // cmd /K c:\util\android-sdk-windows\platform-tools\adb.exe logcat
    //    // works at 60fps
    //    // works!

    //    // could we add the nuget packages at runtime?
    //    // /jsc the debugger view
    //    for (var c = 0; c < 8192; c++)
    //    {
    //        // i wonder, can we use the orientation
    //        // or magnetic north here?
    //        // prep for Gear VR?
    //        e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.4);
    //        e.data.writeFloat(Math.Sin(((c + e.position) / Math.PI / 2)) * 0.1);
    //    }

    //    // can we plot it?
    //}

    //}
    //).Start(default(object));
}