/// <summary>
/// Populates <paramref name="sourceMedia"/> with metadata extracted from the media at
/// <paramref name="uri"/> — overall size, duration, and one <c>MediaTrackFormat</c> entry
/// per track (video / audio / generic) — then raises its change notification.
/// </summary>
/// <param name="sourceMedia">Target container to fill; its <c>tracks</c> list is replaced.</param>
/// <param name="uri">Content URI of the media to inspect.</param>
protected void updateSourceMedia(SourceMedia sourceMedia, Android.Net.Uri uri)
{
    sourceMedia.uri = uri;
    sourceMedia.size = TranscoderUtils.GetSize(this, uri);
    // getMediaDuration returns milliseconds; sourceMedia.duration is stored in seconds.
    sourceMedia.duration = getMediaDuration(uri) / 1000f;

    MediaExtractor mediaExtractor = new MediaExtractor();
    try
    {
        mediaExtractor.SetDataSource(this, uri, null);
        sourceMedia.tracks = new List<MediaTrackFormat>(mediaExtractor.TrackCount);
        for (int track = 0; track < mediaExtractor.TrackCount; track++)
        {
            MediaFormat mediaFormat = mediaExtractor.GetTrackFormat(track);
            var mimeType = mediaFormat.GetString(MediaFormat.KeyMime);
            if (mimeType == null)
            {
                // A track without a MIME type cannot be classified; skip it.
                continue;
            }

            if (mimeType.StartsWith("video"))
            {
                VideoTrackFormat videoTrack = new VideoTrackFormat(track, mimeType);
                videoTrack.width = getInt(mediaFormat, MediaFormat.KeyWidth);
                videoTrack.height = getInt(mediaFormat, MediaFormat.KeyHeight);
                videoTrack.duration = getLong(mediaFormat, MediaFormat.KeyDuration);
                // -1 sentinel when the container does not report frame rate / I-frame interval.
                videoTrack.frameRate = MediaFormatUtils.GetFrameRate(mediaFormat, new Java.Lang.Integer(-1)).IntValue();
                videoTrack.keyFrameInterval = MediaFormatUtils.GetIFrameInterval(mediaFormat, new Java.Lang.Integer(-1)).IntValue();
                videoTrack.rotation = getInt(mediaFormat, TrackMetadataUtil.KEY_ROTATION, 0);
                videoTrack.bitrate = getInt(mediaFormat, MediaFormat.KeyBitRate);
                sourceMedia.tracks.Add(videoTrack);
            }
            else if (mimeType.StartsWith("audio"))
            {
                AudioTrackFormat audioTrack = new AudioTrackFormat(track, mimeType);
                audioTrack.channelCount = getInt(mediaFormat, MediaFormat.KeyChannelCount);
                audioTrack.samplingRate = getInt(mediaFormat, MediaFormat.KeySampleRate);
                audioTrack.duration = getLong(mediaFormat, MediaFormat.KeyDuration);
                audioTrack.bitrate = getInt(mediaFormat, MediaFormat.KeyBitRate);
                sourceMedia.tracks.Add(audioTrack);
            }
            else
            {
                // Neither video nor audio (e.g. subtitles/metadata) — keep it as a generic track.
                sourceMedia.tracks.Add(new GenericTrackFormat(track, mimeType));
            }
        }
    }
    catch (IOException ex)
    {
        // Best-effort: keep whatever metadata was gathered so far and log the failure.
        System.Diagnostics.Debug.WriteLine($"Failed to extract sourceMedia: {ex.Message}");
    }
    finally
    {
        // Bug fix: MediaExtractor holds native resources and was previously never released,
        // leaking them on both the success and the failure path.
        mediaExtractor.Release();
    }

    sourceMedia.NotifyChange();
}
/// <summary>
/// Builds the list of OpenGL filters needed to render the bitmap overlay configured on
/// <paramref name="targetTrack"/>. The overlay's height is derived from the requested
/// <paramref name="overlayWidth"/> (both in normalized 0..1 video coordinates) so the
/// bitmap keeps its aspect ratio, accounting for 90°/270° source rotation.
/// </summary>
/// <param name="sourceMedia">Source media whose video track supplies pixel dimensions and rotation.</param>
/// <param name="targetTrack">Target track carrying the overlay URI; may be null or have no overlay.</param>
/// <param name="overlayWidth">Desired overlay width as a fraction of the video width.</param>
/// <param name="position">Normalized overlay position.</param>
/// <param name="rotation">Overlay rotation in degrees.</param>
/// <returns>A list with one overlay filter, or null when there is no overlay or decoding fails.</returns>
private List<IGlFilter> createGlFilters(SourceMedia sourceMedia, TargetVideoTrack targetTrack, float overlayWidth, PointF position, float rotation)
{
    List<IGlFilter> glFilters = null;
    if (targetTrack != null && targetTrack.overlay != null)
    {
        try
        {
            Bitmap bitmap;
            // Bug fix: dispose the content stream once the bitmap has been decoded
            // (it was previously left open).
            using (var overlayStream = context.ContentResolver.OpenInputStream(targetTrack.overlay))
            {
                bitmap = BitmapFactory.DecodeStream(overlayStream);
            }

            if (bitmap != null)
            {
                float overlayHeight;
                VideoTrackFormat sourceVideoTrackFormat = (VideoTrackFormat)sourceMedia.tracks[targetTrack.sourceTrackIndex];
                if (sourceVideoTrackFormat.rotation == 90 || sourceVideoTrackFormat.rotation == 270)
                {
                    // Rotated source: width/height are swapped in display space, so the
                    // normalized width maps onto the source's pixel height and vice versa.
                    float overlayWidthPixels = overlayWidth * sourceVideoTrackFormat.height;
                    float overlayHeightPixels = overlayWidthPixels * bitmap.Height / bitmap.Width;
                    overlayHeight = overlayHeightPixels / sourceVideoTrackFormat.width;
                }
                else
                {
                    float overlayWidthPixels = overlayWidth * sourceVideoTrackFormat.width;
                    float overlayHeightPixels = overlayWidthPixels * bitmap.Height / bitmap.Width;
                    overlayHeight = overlayHeightPixels / sourceVideoTrackFormat.height;
                }

                // Bug fix: the bitmap is only needed for the aspect-ratio math above —
                // createGlFilter re-reads the overlay from its URI — so free it now.
                bitmap.Recycle();

                PointF size = new PointF(overlayWidth, overlayHeight);
                IGlFilter filter = TransformationUtil.createGlFilter(context, targetTrack.overlay, size, position, rotation);
                if (filter != null)
                {
                    glFilters = new List<IGlFilter>();
                    glFilters.Add(filter);
                }
            }
        }
        catch (System.Exception err)
        {
            // Bug fix: the message previously said "Failed to extract audio track metadata",
            // a copy/paste error — this method creates overlay GL filters.
            System.Diagnostics.Debug.WriteLine($"Failed to create overlay GL filter: {err.Message}");
        }
    }
    return glFilters;
}