public MonoGameGame(LSetting config, Loon game) : base(config, game)
{
    this._start = JavaSystem.NanoTime();
    this._log = new MonoGameLog();
    this._assets = new MonoGameAssets(this);
    this._asyn = new MonoGameAsyn<object>(this._log, frame);
}
protected void USleep(int waitTime)
{
    // Busy-wait: spin until the configured pulse interval has elapsed,
    // then reset the reference time for the next pulse.
    do
    {
        difTime2 = JavaSystem.NanoTime();
    }
    while ((difTime2 - difTime1) / waitTime * 1000 < msOnPulse);
    difTime1 = JavaSystem.NanoTime();
}
public MonoGameGame(LSetting config, Loon game) : base(config, game)
{
    this._plat = game;
    this._start = JavaSystem.NanoTime();
    this._contentManager = new MonoGameContentManager(game.GetContentManager().ServiceProvider, game.GetContentManager().RootDirectory);
    // Create the log before the async runner that depends on it
    this._log = new MonoGameLog(config.appName);
    this._asyn = new MonoGameAsyn<object>(_log, frame);
    this._support = new NativeSupport();
    this._assets = new MonoGameAssets(this);
    this._inputer = new MonoGameInputMake(this);
    this._graphics = new MonoGameGraphics(this, game.GetGraphicsDevice(), config.Width, config.Height);
    this.InitProcess();
}
public void OnDrawFrame(IGL10 unused)
{
    GLES20.GlClear(GLES20.GlColorBufferBit);
    drawRectangle(yuvTextures[1], remoteVertices);
    drawRectangle(yuvTextures[0], localVertices);
    ++numFramesSinceLastLog;
    long now = JavaSystem.NanoTime();
    if (lastFPSLogTime == -1 || now - lastFPSLogTime > 1e9)
    {
        double fps = numFramesSinceLastLog / ((now - lastFPSLogTime) / 1e9);
        Log.Debug(TAG, "Rendered FPS: " + fps);
        lastFPSLogTime = now;
        numFramesSinceLastLog = 1;
    }
    checkNoGLES2Error();
}
/**
 * Updates the number of packets sent, and the total amount of data sent.
 * @param length The length of the packet.
 * @param rtpts  The RTP timestamp.
 * @throws IOException
 **/
public void update(int length, long rtpts)
{
    mPacketCount += 1;
    mOctetCount += length;
    setLong(mPacketCount, 20, 24);
    setLong(mOctetCount, 24, 28);
    now = SystemClock.ElapsedRealtime();
    delta += oldnow != 0 ? now - oldnow : 0;
    oldnow = now;
    if (interval > 0 && delta >= interval)
    {
        // We send a Sender Report
        send(JavaSystem.NanoTime(), rtpts);
        delta = 0;
    }
}
public void Run()
{
    long duration = 0;
    Log.Debug(TAG, "H264 packetizer started !");
    stats.reset();
    count = 0;
    if (inputStream.GetType() == typeof(MediaCodecInputStream))
    {
        streamType = 1;
        socket.setCacheSize(0);
    }
    else
    {
        streamType = 0;
        socket.setCacheSize(400);
    }
    try
    {
        while (!Thread.Interrupted())
        {
            oldtime = JavaSystem.NanoTime();
            // We read a NAL unit from the input stream and we send it
            send();
            // We measure how long it took to receive the NAL unit from the phone
            duration = JavaSystem.NanoTime() - oldtime;
            stats.push(duration);
            // Computes the average duration of a NAL unit
            delay = stats.average();
            //Log.d(TAG,"duration: "+duration/1000000+" delay: "+delay/1000000);
        }
    }
    catch (IOException e) { }
    catch (InterruptedException e) { }
    Log.Debug(TAG, "H264 packetizer stopped !");
}
private void resync()
{
    int type;
    Log.Error(TAG, "Packetizer out of sync ! Let's try to fix that...(NAL length: " + naluLength + ")");
    while (true)
    {
        header[0] = header[1];
        header[1] = header[2];
        header[2] = header[3];
        header[3] = header[4];
        header[4] = (byte)inputStream.Read();
        type = header[4] & 0x1F;
        if (type == 5 || type == 1)
        {
            naluLength = header[3] & 0xFF | (header[2] & 0xFF) << 8 | (header[1] & 0xFF) << 16 | (header[0] & 0xFF) << 24;
            if (naluLength > 0 && naluLength < 100000)
            {
                oldtime = JavaSystem.NanoTime();
                Log.Error(TAG, "A NAL unit may have been found in the bit stream !");
                break;
            }
            if (naluLength == 0)
            {
                Log.Error(TAG, "NAL unit with NULL size found...");
            }
            else if (header[3] == 0xFF && header[2] == 0xFF && header[1] == 0xFF && header[0] == 0xFF)
            {
                Log.Error(TAG, "NAL unit with 0xFFFFFFFF size found...");
            }
        }
    }
}
public void Run()
{
    long time, duration = 0;
    int i = 0, j = 0, tr;
    bool firstFragment = true;
    byte[] nextBuffer;
    stats.reset();
    try
    {
        while (!Thread.Interrupted())
        {
            if (j == 0)
            {
                buffer = socket.requestBuffer();
            }
            socket.updateTimestamp(ts);
            // Each packet we send has a two byte long header (See section 5.1 of RFC 4629)
            buffer[rtphl] = 0;
            buffer[rtphl + 1] = 0;
            time = JavaSystem.NanoTime();
            if (fill(rtphl + j + 2, MAXPACKETSIZE - rtphl - j - 2) < 0)
            {
                return;
            }
            duration += JavaSystem.NanoTime() - time;
            j = 0;
            // Each h263 frame starts with: 0000 0000 0000 0000 1000 00??
            // Here we search where the next frame begins in the bit stream
            for (i = rtphl + 2; i < MAXPACKETSIZE - 1; i++)
            {
                if (buffer[i] == 0 && buffer[i + 1] == 0 && (buffer[i + 2] & 0xFC) == 0x80)
                {
                    j = i;
                    break;
                }
            }
            // Parse the temporal reference
            tr = (buffer[i + 2] & 0x03) << 6 | (buffer[i + 3] & 0xFF) >> 2;
            //Log.d(TAG,"j: "+j+" buffer: "+printBuffer(rtphl, rtphl+5)+" tr: "+tr);
            if (firstFragment)
            {
                // This is the first fragment of the frame -> header is set to 0x0400
                buffer[rtphl] = 4;
                firstFragment = false;
            }
            else
            {
                buffer[rtphl] = 0;
            }
            if (j > 0)
            {
                // We have found the end of the frame
                stats.push(duration);
                ts += stats.average();
                duration = 0;
                //Log.d(TAG,"End of frame ! duration: "+stats.average());
                // The last fragment of a frame has to be marked
                socket.markNextPacket();
                send(j);
                nextBuffer = socket.requestBuffer();
                JavaSystem.Arraycopy(buffer, j + 2, nextBuffer, rtphl + 2, MAXPACKETSIZE - j - 2);
                buffer = nextBuffer;
                j = MAXPACKETSIZE - j - 2;
                firstFragment = true;
            }
            else
            {
                // We have not found the beginning of another frame
                // The whole packet is a fragment of a frame
                send(MAXPACKETSIZE);
            }
        }
    }
    catch (IOException e) { }
    catch (InterruptedException e) { }
    Log.Debug(TAG, "H263 Packetizer stopped !");
}
public override int Tick()
{
    return (int)((JavaSystem.NanoTime() - _start) / 1000000L);
}
public static long NanoTime()
{
    return JavaSystem.NanoTime();
}
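For context, a minimal sketch of how a wrapper like this is typically used to measure elapsed time, assuming the ported JavaSystem.NanoTime() shown above is available; the TimingSample class and MeasureMillis helper are illustrative names, not part of the original code.

// Hypothetical helper: times an arbitrary action with the nanosecond clock.
public static class TimingSample
{
    public static long MeasureMillis(System.Action action)
    {
        long start = JavaSystem.NanoTime();
        action();
        // NanoTime() returns nanoseconds; divide by 1,000,000 to get milliseconds.
        return (JavaSystem.NanoTime() - start) / 1000000L;
    }
}

Because the value is a monotonic nanosecond counter rather than wall-clock time, only differences between two readings are meaningful, which is exactly how the examples above use it.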
private void encodeCameraToMpeg()
{
    // arbitrary but popular values
    int encWidth = 640;
    int encHeight = 480;
    int encBitRate = 6000000;   // bits per second (6 Mbps)
    Log.Debug(TAG, MIME_TYPE + " output " + encWidth + "x" + encHeight + " @" + encBitRate);
    try
    {
        prepareCamera(encWidth, encHeight);
        prepareEncoder(encWidth, encHeight, encBitRate);
        _inputSurface.MakeCurrent();
        prepareSurfaceTexture();
        _camera.StartPreview();
        long startWhen = JavaSystem.NanoTime();
        long desiredEnd = startWhen + DURATION_SEC * 1000000000L;
        var st = _outputSurface.SurfaceTexture;
        int frameCount = 0;
        var curShad = false;
        while (JavaSystem.NanoTime() < desiredEnd)
        {
            // Feed any pending encoder output into the muxer.
            drainEncoder(false);
            // Toggle the fragment shader every 24 frames.
            if ((frameCount % 24) == 0)
            {
                curShad = !curShad;
            }
            if (curShad)
            {
                _outputSurface.ChangeFragmentShader(FRAGMENT_SHADER1);
            }
            else
            {
                _outputSurface.ChangeFragmentShader(FRAGMENT_SHADER2);
            }
            frameCount++;
            // Acquire a new frame of input, and render it to the Surface. If we had a
            // GLSurfaceView we could switch EGL contexts and call drawImage() a second
            // time to render it on screen. The texture can be shared between contexts by
            // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
            // argument.
            _outputSurface.AwaitNewImage();
            _outputSurface.DrawImage();
            // Set the presentation time stamp from the SurfaceTexture's time stamp. This
            // will be used by MediaMuxer to set the PTS in the video.
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "present: " + ((st.Timestamp - startWhen) / 1000000.0) + "ms");
            }
            _inputSurface.SetPresentationTime(st.Timestamp);
            // Submit it to the encoder. The eglSwapBuffers call will block if the input
            // is full, which would be bad if it stayed full until we dequeued an output
            // buffer (which we can't do, since we're stuck here). So long as we fully drain
            // the encoder before supplying additional input, the system guarantees that we
            // can supply another frame without blocking.
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "sending frame to encoder");
            }
            _inputSurface.SwapBuffers();
        }
        // send end-of-stream to encoder, and drain remaining output
        drainEncoder(true);
    }
    finally
    {
        // release everything we grabbed
        releaseCamera();
        releaseEncoder();
        releaseSurfaceTexture();
    }
}
private long timestamp()
{
    // Nanoseconds to microseconds
    return JavaSystem.NanoTime() / 1000;
}