Example #1
    private static GameObject CreateObjectFromStreamInfo(StreamInfo stream, bool visible)
    {
        GameObject streamObj = new GameObject("Stream #" + streamID++);

        //Need a mesh filter and a mesh renderer for the stream's mesh rendering
        MeshFilter filter = streamObj.AddComponent("MeshFilter") as MeshFilter;
        filter.mesh = Object.Instantiate(stream.mesh) as Mesh;

        MeshRenderer renderer = streamObj.AddComponent("MeshRenderer") as MeshRenderer;
        renderer.material = Object.Instantiate(stream.material) as Material;
        renderer.enabled = visible;

        //Add a mesh collider
        MeshCollider hitBox = streamObj.AddComponent("MeshCollider") as MeshCollider;
        hitBox.transform.parent = streamObj.transform;

        //Add proper stream script
        string streamScript = "FluidStream";
        if(stream.type == Source.SourceType.Electricity)
            streamScript = "ElectricityStream";
        else if (stream.type == Source.SourceType.Wind)
            streamScript = "WindStream";

        streamObj.AddComponent(streamScript);

        return streamObj;
    }
Example #2
 internal AudioStreamWrapper(StreamInfo adaptiveAudioStream)
 {
     AdaptiveAudioStream = adaptiveAudioStream;
     base.Name = adaptiveAudioStream.GetName();
     base.Language = adaptiveAudioStream.GetLanguage();
     //base.Language = new CultureInfo(adaptiveAudioStream.GetLanguage()).DisplayName;
 }
Example #3
 public override CustomObjectInfo SerializeObject()
 {
     StreamInfo x = new StreamInfo();
     x.BasicSerialization(this);
     x.direction = direction;
     x.power = power;
     return x;
 }
Example #4
		public StreamInfo GetVideoMedatada()
		{
			StreamInfo sinfo = new StreamInfo();

			sinfo.Author = currentUrl;
			sinfo.Title = currentUrl;

			return sinfo;
		}
Example #5
        private static void AddBytes(IBinaryStorage storage, string key, byte[] data)
        {
            var streamInfo = new StreamInfo();
            using (var md5 = MD5.Create()) {
                streamInfo.Hash = md5.ComputeHash(data);
            }
            streamInfo.Length = data.Length;
            streamInfo.IsCompressed = false;

            using (var ms = new MemoryStream(data)) {
                storage.Add(key, ms, streamInfo);
            }
        }
Example #6
        internal StreamInfo Clone()
        {
            StreamInfo clone = new StreamInfo
            {
                SectionName = SectionName,
                ConfigSource = ConfigSource,
                StreamName = StreamName,
                IsMonitored = IsMonitored,
                Version = Version
            };


            return clone;
        }
Example #7
		public StreamInfo GetVideoMedatada()
		{
			StreamInfo sinfo = new StreamInfo();

			sinfo.Author = GetCanonicalUrl();
			sinfo.Title = GetCanonicalUrl();

			if (localInitData.Config.GetBoolean(ConfigurationConstants.ApiInternetAccess,
				true, false))
			{
				try
				{
					// fetch information about the video
					string yt_url = GetCanonicalUrl();
					string oembed = "http://www.youtube.com/oembed";

					{
						var qstr = HttpUtility.ParseQueryString(string.Empty);
						qstr["url"] = yt_url;
						qstr["format"] = "xml";

						oembed += "?" + qstr.ToString();
					}

					WebRequest wr = WebRequest.Create(oembed);
					WebResponse wre = wr.GetResponse();
					Stream data = wre.GetResponseStream();

					System.Xml.XmlDocument xmldoc = new System.Xml.XmlDocument();
					xmldoc.Load(data);
					data.Close();

					sinfo.Author = xmldoc.GetElementsByTagName("author_name")[0].InnerText;
					sinfo.Title = xmldoc.GetElementsByTagName("title")[0].InnerText;
					sinfo.CanonicalUrl = GetCanonicalUrl();
				}
				catch (Exception)
				{
					sinfo.Author = "Unknown author";
					sinfo.Title = "Unknown title";
					sinfo.CanonicalUrl = GetCanonicalUrl();
				}
			}

			return sinfo;
		}
Example #8
 public static IList<string> GetADSes(this FileInfo info)
 {
     List<string> result = new List<string>();
     using (FileStream stream = new StreamInfo(info.FullName).Open(FileMode.Open,
         FileAccess.Read, FileShare.ReadWrite))
     using (SafeFileHandle streamHandle = stream.SafeFileHandle)
     {
         NativeMethods.FILE_STREAM_INFORMATION[] streams = GetADSes(streamHandle);
         foreach (NativeMethods.FILE_STREAM_INFORMATION streamInfo in streams)
         {
             string streamName = streamInfo.StreamName.Substring(1,
                 streamInfo.StreamName.LastIndexOf(':') - 1);
             if (streamName.Length != 0)
                 result.Add(streamName);
         }
     }
     return result.AsReadOnly();
 }
Example #9
 public static StreamInfo[] resolve_stream(string pred, int minimum, double timeout)
 {
     IntPtr[] buf = new IntPtr[1024];
     int num = dll.lsl_resolve_bypred(buf, (uint)buf.Length, pred, minimum, timeout);
     StreamInfo[] res = new StreamInfo[num];
     for (int k = 0; k < num; k++)
         res[k] = new StreamInfo(buf[k]);
     return res;
 }
Example #10
 public StreamOutlet(StreamInfo info, int chunk_size)
 {
     obj = dll.lsl_create_outlet(info.handle(), chunk_size, 360);
 }
Example #11
        private void PutFileInternal(string s3Filename, string filename, StreamInfo file) {
            var tmpfile = Path.Combine(_tempDirectory, Guid.NewGuid() + ".cache");
            try {
                using(file) {
                    Tuplet<string, TaskTimer, DateTime?> entry = null;

                    // create tmp file
                    try {

                        // copy stream to tmp file
                        using(Stream stream = File.Create(tmpfile)) {
                            file.Stream.CopyTo(stream, file.Length, new Result<long>(TimeSpan.MaxValue)).Wait();
                        }

                        // create cached entry
                        if(_cacheTtl != TimeSpan.Zero) {
                            lock(_cache) {
                                if(_cache.TryGetValue(s3Filename, out entry)) {
                                    entry.Item2.Change(_cacheTtl, TaskEnv.None);
                                    entry.Item3 = file.Modified;
                                } else {
                                    var timer = _timerFactory.New(_cacheTtl, OnTimer, s3Filename, TaskEnv.None);
                                    _cache[s3Filename] = entry = new Tuplet<string, TaskTimer, DateTime?>(tmpfile, timer, file.Modified);
                                }
                            }
                        }
                    } catch(Exception e) {
                        try {

                            // delete tmp file and clear out timer and cache, if any exist
                            SafeFileDelete(tmpfile);
                            if(entry != null) {
                                lock(_cache) {
                                    entry.Item2.Cancel();
                                    _cache.Remove(s3Filename);
                                }
                            }
                        } catch(Exception e2) {
                            _log.WarnFormat("Failed cleaned-up post tmp file creation failure for attachment {0}: {1}", s3Filename, e2.Message);
                        }
                        throw new DreamInternalErrorException(string.Format("Unable to cache file attachment to '{0}' ({1})", s3Filename, e.Message));
                    }
                }

                // forward cached file to S3
                Stream filestream = File.Open(tmpfile, FileMode.Open, FileAccess.Read, FileShare.Read);
                file = new StreamInfo(filestream, file.Length, file.Type);
                var s3Msg = DreamMessage.Ok(file.Type, file.Length, file.Stream);
                s3Msg.Headers.ContentDisposition = new ContentDisposition(true, DateTime.UtcNow, null, null, filename, file.Length);

                // Note (arnec): The timeout is just a workaround for Plug not having some kind of heartbeat on progress. Ideally 30 seconds of inactivity
                // should be perfectly fine, as long as we track uploads that are proceeding as active
                _s3.AtPath(s3Filename).WithTimeout(TimeSpan.FromMinutes(30)).Put(s3Msg);
            } finally {
                if(_cacheTtl == TimeSpan.Zero) {
                    SafeFileDelete(tmpfile);
                }
            }
        }
Example #12
 public void PutFile(ResourceBE attachment, SizeType size, StreamInfo file) {
     CheckDisposed();
     PutFileInternal(BuildS3Filename(attachment, size), attachment.Name, file);
 }
Example #13
		/// ------------------------------------------------------------------------------------
		/// <summary>
		/// Processes the GUID stream.
		/// </summary>
		/// <param name="streamInfo">The stream info.</param>
		/// ------------------------------------------------------------------------------------
		private void ProcessGuidStream(StreamInfo streamInfo)
		{
			// blank out the GUIDs
			int nGuidSize = Marshal.SizeOf(typeof(Guid));
			int nGuids = streamInfo.Size / nGuidSize;
			byte[] buffer = new byte[nGuidSize];
			for (int i = 0; i < nGuids; i++)
				m_writer.Write(buffer);
		}
Example #14
		/// ------------------------------------------------------------------------------------
		/// <summary>
		/// Processes the stream header.
		/// </summary>
		/// <param name="nMetaDataRoot">The meta data root position.</param>
		/// <param name="streamInfo">The stream info.</param>
		/// ------------------------------------------------------------------------------------
		private void ProcessStreamHeader(long nMetaDataRoot, StreamInfo streamInfo)
		{
			long nStreamBase = m_stream.Position;

			streamInfo.Offset = m_reader.ReadInt32();
			streamInfo.Size = m_reader.ReadInt32();

			// Read Name. This is a null-terminated string with max length of 32
			streamInfo.Name = ReadName();
			m_stream.Position = nMetaDataRoot + streamInfo.Offset;
			ProcessStream(streamInfo);

			int nameFieldLength = streamInfo.Name.Length + 1;
			if (nameFieldLength % 4 > 0)
				nameFieldLength += 4;
			m_stream.Position = nStreamBase + 8 + (nameFieldLength / 4) * 4;
		}
Example #15
        void element_MediaOpened(object sender, RoutedEventArgs e)
        {
            if (this.element.IsLive)
            {
                this.element.StartSeekToLive();
            }

            foreach (SegmentInfo segment in this.element.ManifestInfo.Segments)
            {
                audioTracks = new List<StreamInfo>();
                textTracks = new List<StreamInfo>();
                IList<StreamInfo> streamInfoList = segment.AvailableStreams;
                List<StreamInfo> selectStreams = segment.SelectedStreams.ToList<StreamInfo>();

                foreach (StreamInfo stream in streamInfoList)
                {
                    if (stream.Type == MediaStreamType.Video)
                    {
                        playingStream = stream;
                        tracks = stream.AvailableTracks.ToList<TrackInfo>();

                        ManifestEventArgs args = new ManifestEventArgs(tracks.ToList<Object>());
                        BitratesReady(this, args);
                    }
                    else if (stream.Type == MediaStreamType.Audio)
                    {
                        audioTracks.Add(stream);
                    }
                    //subtitles
                    else if (stream.Type == MediaStreamType.Script && stream.Subtype == "CAPT")
                    {
                        textTracks.Add(stream);
                    }
                }

                if (MediaOpened != null)
                {
                    MediaOpened(sender, e);
                }
                ManifestEventArgs audioArgs = new ManifestEventArgs(audioTracks.ToList<Object>());
                AudioTracksReady(this, audioArgs);

                ManifestEventArgs textArgs = new ManifestEventArgs(textTracks.ToList<Object>());
                TextTracksReady(this, textArgs);
            }
        }
Example #16
        private void AddVideoResource(DlnaOptions options, XmlWriter writer, IHasMediaSources video, string deviceId, Filter filter, StreamInfo streamInfo = null)
        {
            if (streamInfo == null)
            {
                var sources = _mediaSourceManager.GetStaticMediaSources(video, true, _user).ToList();

                streamInfo = new StreamBuilder(_mediaEncoder, GetStreamBuilderLogger(options)).BuildVideoItem(new VideoOptions
                {
                    ItemId       = GetClientId(video),
                    MediaSources = sources,
                    Profile      = _profile,
                    DeviceId     = deviceId,
                    MaxBitrate   = _profile.MaxStreamingBitrate
                });
            }

            var targetWidth  = streamInfo.TargetWidth;
            var targetHeight = streamInfo.TargetHeight;

            var contentFeatureList = new ContentFeatureBuilder(_profile).BuildVideoHeader(streamInfo.Container,
                                                                                          streamInfo.TargetVideoCodec,
                                                                                          streamInfo.TargetAudioCodec,
                                                                                          targetWidth,
                                                                                          targetHeight,
                                                                                          streamInfo.TargetVideoBitDepth,
                                                                                          streamInfo.TargetVideoBitrate,
                                                                                          streamInfo.TargetTimestamp,
                                                                                          streamInfo.IsDirectStream,
                                                                                          streamInfo.RunTimeTicks,
                                                                                          streamInfo.TargetVideoProfile,
                                                                                          streamInfo.TargetVideoLevel,
                                                                                          streamInfo.TargetFramerate,
                                                                                          streamInfo.TargetPacketLength,
                                                                                          streamInfo.TranscodeSeekInfo,
                                                                                          streamInfo.IsTargetAnamorphic,
                                                                                          streamInfo.IsTargetInterlaced,
                                                                                          streamInfo.TargetRefFrames,
                                                                                          streamInfo.TargetVideoStreamCount,
                                                                                          streamInfo.TargetAudioStreamCount,
                                                                                          streamInfo.TargetVideoCodecTag,
                                                                                          streamInfo.IsTargetAVC);

            foreach (var contentFeature in contentFeatureList)
            {
                AddVideoResource(writer, video, deviceId, filter, contentFeature, streamInfo);
            }

            var subtitleProfiles = streamInfo.GetSubtitleProfiles(false, _serverAddress, _accessToken)
                                   .Where(subtitle => subtitle.DeliveryMethod == SubtitleDeliveryMethod.External)
                                   .ToList();

            foreach (var subtitle in subtitleProfiles)
            {
                var subtitleAdded = AddSubtitleElement(writer, subtitle);

                if (subtitleAdded && _profile.EnableSingleSubtitleLimit)
                {
                    break;
                }
            }
        }
Example #17
 private bool EnableClientSideSeek(StreamInfo info)
 {
     return(info.IsDirectStream);
 }
Example #18
        private void AddVideoResource(XmlWriter writer, IHasMediaSources video, string deviceId, Filter filter, string contentFeatures, StreamInfo streamInfo)
        {
            writer.WriteStartElement(string.Empty, "res", NS_DIDL);

            var url = streamInfo.ToDlnaUrl(_serverAddress, _accessToken);

            var mediaSource = streamInfo.MediaSource;

            if (mediaSource.RunTimeTicks.HasValue)
            {
                writer.WriteAttributeString("duration", TimeSpan.FromTicks(mediaSource.RunTimeTicks.Value).ToString("c", _usCulture));
            }

            if (filter.Contains("res@size"))
            {
                if (streamInfo.IsDirectStream || streamInfo.EstimateContentLength)
                {
                    var size = streamInfo.TargetSize;

                    if (size.HasValue)
                    {
                        writer.WriteAttributeString("size", size.Value.ToString(_usCulture));
                    }
                }
            }

            var totalBitrate     = streamInfo.TargetTotalBitrate;
            var targetSampleRate = streamInfo.TargetAudioSampleRate;
            var targetChannels   = streamInfo.TargetAudioChannels;

            var targetWidth  = streamInfo.TargetWidth;
            var targetHeight = streamInfo.TargetHeight;

            if (targetChannels.HasValue)
            {
                writer.WriteAttributeString("nrAudioChannels", targetChannels.Value.ToString(_usCulture));
            }

            if (filter.Contains("res@resolution"))
            {
                if (targetWidth.HasValue && targetHeight.HasValue)
                {
                    writer.WriteAttributeString("resolution", string.Format("{0}x{1}", targetWidth.Value, targetHeight.Value));
                }
            }

            if (targetSampleRate.HasValue)
            {
                writer.WriteAttributeString("sampleFrequency", targetSampleRate.Value.ToString(_usCulture));
            }

            if (totalBitrate.HasValue)
            {
                writer.WriteAttributeString("bitrate", totalBitrate.Value.ToString(_usCulture));
            }

            var mediaProfile = _profile.GetVideoMediaProfile(streamInfo.Container,
                                                             streamInfo.TargetAudioCodec,
                                                             streamInfo.TargetVideoCodec,
                                                             streamInfo.TargetAudioBitrate,
                                                             targetWidth,
                                                             targetHeight,
                                                             streamInfo.TargetVideoBitDepth,
                                                             streamInfo.TargetVideoProfile,
                                                             streamInfo.TargetVideoLevel,
                                                             streamInfo.TargetFramerate,
                                                             streamInfo.TargetPacketLength,
                                                             streamInfo.TargetTimestamp,
                                                             streamInfo.IsTargetAnamorphic,
                                                             streamInfo.IsTargetInterlaced,
                                                             streamInfo.TargetRefFrames,
                                                             streamInfo.TargetVideoStreamCount,
                                                             streamInfo.TargetAudioStreamCount,
                                                             streamInfo.TargetVideoCodecTag,
                                                             streamInfo.IsTargetAVC);

            var filename = url.Substring(0, url.IndexOf('?'));

            var mimeType = mediaProfile == null || string.IsNullOrEmpty(mediaProfile.MimeType)
               ? GetMimeType(filename)
               : mediaProfile.MimeType;

            writer.WriteAttributeString("protocolInfo", String.Format(
                                            "http-get:*:{0}:{1}",
                                            mimeType,
                                            contentFeatures
                                            ));

            writer.WriteString(url);

            writer.WriteFullEndElement();
        }
Example #19
 public void Save(StreamInfo output, int versionIndex = 0)
 {
     _format.Save(output.FileData);
 }
Example #20
 public void Load(StreamInfo input)
 {
     _format = new MSG(input.FileData);
 }
Example #21
 public bool Identify(StreamInfo file, BaseReadOnlyDirectoryNode fileSystem)
 {
     using (var br = new BinaryReaderX(file.FileData, LeaveOpen))
         return(br.ReadString(8) == "ARC TEST");
 }
Example #22
        public string GetItemDidl(DlnaOptions options, BaseItem item, User user, BaseItem context, string deviceId, Filter filter, StreamInfo streamInfo)
        {
            var settings = new XmlWriterSettings
            {
                Encoding           = Encoding.UTF8,
                CloseOutput        = false,
                OmitXmlDeclaration = true,
                ConformanceLevel   = ConformanceLevel.Fragment
            };

            StringWriter builder = new StringWriterWithEncoding(Encoding.UTF8);

            using (XmlWriter writer = XmlWriter.Create(builder, settings))
            {
                //writer.WriteStartDocument();

                writer.WriteStartElement(string.Empty, "DIDL-Lite", NS_DIDL);

                writer.WriteAttributeString("xmlns", "dc", null, NS_DC);
                writer.WriteAttributeString("xmlns", "dlna", null, NS_DLNA);
                writer.WriteAttributeString("xmlns", "upnp", null, NS_UPNP);
                //didl.SetAttribute("xmlns:sec", NS_SEC);

                WriteXmlRootAttributes(_profile, writer);

                WriteItemElement(options, writer, item, user, context, null, deviceId, filter, streamInfo);

                writer.WriteFullEndElement();
                //writer.WriteEndDocument();
            }

            return(builder.ToString());
        }
Example #23
 public Task <Stream> GetFileStream(StreamInfo info)
 {
     return(Task.FromResult <Stream>(null));
 }
Example #24
        public void selectTextTrack(int trackIndex)
        {
            if (textTracks != null && textTracks.Count > trackIndex)
            {
                currentTextTrack = textTracks[trackIndex];
                var segment = element.ManifestInfo.Segments[element.CurrentSegmentIndex.Value];
                var newStreams = new List<StreamInfo>();
                // use current video streams
                var selectedVideoStreams = segment.SelectedStreams.Where(i => i.Type != MediaStreamType.Script).ToList();
                newStreams.AddRange(selectedVideoStreams);
                // add a new text stream
                newStreams.Add(currentTextTrack);
                // replace old streams by new ones
                segment.SelectStreamsAsync(newStreams);

                textChunks = currentTextTrack.ChunkList.ToList<ChunkInfo>();
                //clear previous language markers
                this.element.Markers.Clear();
                textTrackLoaded = false;
                getNextTextChunks(null, null);
                if (_capt_timer == null)
                {
                    _capt_timer = new DispatcherTimer();
                    _capt_timer.Interval = new TimeSpan(0, 0, 0, CAPT_TIMER_INTERVAL, 0); // 10 seconds
                    _capt_timer.Tick += getNextTextChunks;
                }

                if (element.CurrentState == SmoothStreamingMediaElementState.Playing)
                {
                    _capt_timer.Start();
                }
                else
                {
                    _capt_timer.Stop();
                }
            }
        }
Example #25
 /// <summary>
 /// Initializes a new instance of the <see cref="Decoder{TFrame}"/> class.
 /// </summary>
 /// <param name="stream">The multimedia stream.</param>
 /// <param name="owner">The container that owns the stream.</param>
 public Decoder(AVCodecContext *codec, AVStream *stream, InputContainer owner)
     : base(codec)
 {
     OwnerFile = owner;
     Info      = new StreamInfo(stream, owner);
 }
Example #26
		/// ------------------------------------------------------------------------------------
		/// <summary>
		/// Processes the meta data.
		/// </summary>
		/// <param name="size">The size.</param>
		/// ------------------------------------------------------------------------------------
		private void ProcessMetaData(int size)
		{
			long nMetaDataBase = m_stream.Position;
			int signature = m_reader.ReadInt32();
			Debug.Assert(signature == 0x424A5342);
			m_stream.Position = nMetaDataBase + 12;
			int nVersionLength = m_reader.ReadInt32();
			m_stream.Position += nVersionLength + 2;
			short nStreams = m_reader.ReadInt16();

			m_StreamInfo = new StreamInfo[nStreams];

			for (int i = 0; i < nStreams; i++)
			{
				m_StreamInfo[i] = new StreamInfo();
				ProcessStreamHeader(nMetaDataBase, m_StreamInfo[i]);
			}

		}
Example #27
 internal static extern uint AVIStreamInfo(IntPtr streamPtr, ref StreamInfo psi, int lSize);
Example #28
		/// ------------------------------------------------------------------------------------
		/// <summary>
		/// Processes the stream.
		/// </summary>
		/// <param name="streamInfo">The stream info.</param>
		/// ------------------------------------------------------------------------------------
		private void ProcessStream(StreamInfo streamInfo)
		{
			switch (streamInfo.Name)
			{
				case "#GUID":
					ProcessGuidStream(streamInfo);
					break;
				case "#~":
					// At the moment we don't do anything with the tilde stream, so we don't
					// need to call that method...
					//ProcessTildeStream(streamInfo);
					break;
				default:
					break;
			}
		}
Example #29
 /**
 * Obtain the set of currently present streams on the network (i.e. resolve result).
 * @return An array of matching stream info objects (excluding their meta-data), any of
 *         which can subsequently be used to open an inlet.
 */
 public StreamInfo[] results()
 {
     IntPtr[] buf = new IntPtr[1024];
     int num = dll.lsl_resolver_results(obj,buf,(uint)buf.Length);
     StreamInfo[] res = new StreamInfo[num];
     for (int k = 0; k < num; k++)
         res[k] = new StreamInfo(buf[k]);
     return res;
 }
Example #30
		/// ------------------------------------------------------------------------------------
		/// <summary>
		/// Processes the #~ stream.
		/// </summary>
		/// <param name="streamInfo">The stream info.</param>
		/// ------------------------------------------------------------------------------------
		private void ProcessTildeStream(StreamInfo streamInfo)
		{
			long nStreamBase = m_stream.Position;
			m_stream.Position += 8;
			ulong vValid = m_reader.ReadUInt64();
			int cValid = CountSetBits(vValid);
			ulong vSorted = m_reader.ReadUInt64();

			uint[] rows = new uint[cValid];
			for (int i = 0; i < cValid; i++)
			{
				rows[i] = m_reader.ReadUInt32();
			}

			// Handle tables
			for (int i = 0; i < cValid; i++)
			{
				if (((vValid >> i) & 0x01) == 0)
					continue;

				for (int j = 0; j < rows[i]; j++)
				{
					//Debug.WriteLine(string.Format("Reading row {1} of table {0} at position 0x{2:x}",
					//    m_MetaDataTable[i], j, m_stream.Position));
					switch (i)
					{
						case 0:
							ProcessModule();
							break;
						case 1:
							ProcessTypeRef();
							break;
						case 6:
							ProcessMethodDef();
							break;
						default:
							break;
					}
				}
			}
		}
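
ProcessTildeStream above depends on a CountSetBits helper that is not included in this listing; it counts how many metadata tables are flagged in the 64-bit valid mask so the per-table row counts can be read. A minimal sketch of one plausible implementation follows (hypothetical, inferred only from how cValid is used above):

 // Hypothetical helper, not part of the original listing: counts the bits set
 // in the #~ stream's 64-bit "valid tables" mask read into vValid above.
 private static int CountSetBits(ulong value)
 {
     int count = 0;
     while (value != 0)
     {
         count += (int)(value & 1UL);
         value >>= 1;
     }
     return count;
 }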
Example #31
 public StreamInlet(StreamInfo info, int max_buflen, int max_chunklen)
 {
     obj = dll.lsl_create_inlet(info.handle(), max_buflen, max_chunklen, 1);
 }
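
Examples #9 and #31 together cover the consuming side of this LSL wrapper: resolve matching streams by predicate, then open an inlet on one of the results. The sketch below shows only an assumed usage pattern; channel_count() and pull_sample() are taken from the standard liblsl C# binding and do not appear in the snippets above.

 // Usage sketch (assumptions noted above): resolve an EEG stream and read one sample.
 StreamInfo[] results = resolve_stream("type='EEG'", 1, 5.0);   // predicate, minimum, timeout
 if (results.Length > 0)
 {
     StreamInlet inlet = new StreamInlet(results[0], 360, 0);    // max_buflen, max_chunklen
     float[] sample = new float[results[0].channel_count()];     // channel_count() assumed
     double timestamp = inlet.pull_sample(sample, 5.0);          // pull_sample() assumed
 }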
Example #32
 public void PutSiteFile(string label, StreamInfo file) {
     CheckDisposed();
     PutFileInternal(BuildS3SiteFilename(label), string.Empty, file);
 }
Example #33
        static bool ReEncode(Options opt)
        {
            if (File.Exists(opt.OutputFile))
            {
                File.Delete(opt.OutputFile);
            }

            using (var transcoder = new Transcoder())
            {
                // In order to use the production release for testing (without a valid license),
                // the transcoder demo mode must be enabled.
                transcoder.AllowDemoMode = true;

                using (var mediaInfo = new MediaInfo())
                {
                    mediaInfo.Inputs[0].File = opt.InputFile;

                    if (!mediaInfo.Open())
                    {
                        PrintError("Open MediaInfo", mediaInfo.Error);
                        return(false);
                    }

                    // Add Inputs
                    {
                        var socket = MediaSocket.FromMediaInfo(mediaInfo);
                        transcoder.Inputs.Add(socket);
                    }
                }

                // Add Outputs
                {
                    // Create output socket
                    var socket   = new MediaSocket();
                    var inSocket = transcoder.Inputs[0];

                    socket.StreamType = inSocket.StreamType;
                    socket.File       = opt.OutputFile;

                    // Add pins with ReEncode parameter set to Use.On
                    foreach (var inPin in inSocket.Pins)
                    {
                        StreamInfo si  = (StreamInfo)inPin.StreamInfo.Clone();
                        var        pin = new MediaPin();
                        pin.StreamInfo = (StreamInfo)si.Clone();

                        if ((MediaType.Video == si.MediaType) && opt.ReEncodeVideo)
                        {
                            pin.Params.Add(Param.ReEncode, Use.On);
                        }

                        if ((MediaType.Audio == si.MediaType) && opt.ReEncodeAudio)
                        {
                            pin.Params.Add(Param.ReEncode, Use.On);
                        }

                        socket.Pins.Add(pin);
                    }

                    transcoder.Outputs.Add(socket);
                }


                bool result = transcoder.Open();
                PrintError("Open Transcoder", transcoder.Error);
                if (!result)
                {
                    return(false);
                }

                result = transcoder.Run();
                PrintError("Run Transcoder", transcoder.Error);
                if (!result)
                {
                    return(false);
                }

                transcoder.Close();
            }

            return(true);
        }
Example #34
 /**
 * Establish a new stream outlet. This makes the stream discoverable.
 * @param info The stream information to use for creating this stream. Stays constant over the lifetime of the outlet.
 * @param chunk_size Optionally the desired chunk granularity (in samples) for transmission. If unspecified,
 *                   each push operation yields one chunk. Inlets can override this setting.
 * @param max_buffered Optionally the maximum amount of data to buffer (in seconds if there is a nominal
 *                     sampling rate, otherwise x100 in samples). The default is 6 minutes of data.
 */
 public StreamOutlet(StreamInfo info)
 {
     obj = dll.lsl_create_outlet(info.handle(), 0, 360);
 }
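
Examples #10, #34, and #36 show the three StreamOutlet constructor overloads; the producer side is then just a matter of building a StreamInfo and pushing samples. A hedged sketch follows: the multi-argument StreamInfo constructor, the channel_format_t enum, and push_sample() are assumed from the standard liblsl C# binding and are not shown in the snippets above.

 // Usage sketch (assumed API, see note above): advertise a stream and push one sample.
 StreamInfo info = new StreamInfo("ExampleStream", "EEG", 8, 100,
     channel_format_t.cf_float32, "example-source-id");
 StreamOutlet outlet = new StreamOutlet(info);   // default chunking, ~6 minutes buffered
 float[] sample = new float[8];                  // one value per channel
 outlet.push_sample(sample);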
Example #35
 public void Read (TProtocol iprot)
 {
   bool isset_inputs = false;
   bool isset_streams = false;
   TField field;
   iprot.ReadStructBegin();
   while (true)
   {
     field = iprot.ReadFieldBegin();
     if (field.Type == TType.Stop) { 
       break;
     }
     switch (field.ID)
     {
       case 1:
         if (field.Type == TType.Map) {
           {
             Inputs = new Dictionary<GlobalStreamId, Grouping>();
             TMap _map12 = iprot.ReadMapBegin();
             for( int _i13 = 0; _i13 < _map12.Count; ++_i13)
             {
               GlobalStreamId _key14;
               Grouping _val15;
               _key14 = new GlobalStreamId();
               _key14.Read(iprot);
               _val15 = new Grouping();
               _val15.Read(iprot);
               Inputs[_key14] = _val15;
             }
             iprot.ReadMapEnd();
           }
           isset_inputs = true;
         } else { 
           TProtocolUtil.Skip(iprot, field.Type);
         }
         break;
       case 2:
         if (field.Type == TType.Map) {
           {
             Streams = new Dictionary<string, StreamInfo>();
             TMap _map16 = iprot.ReadMapBegin();
             for( int _i17 = 0; _i17 < _map16.Count; ++_i17)
             {
               string _key18;
               StreamInfo _val19;
               _key18 = iprot.ReadString();
               _val19 = new StreamInfo();
               _val19.Read(iprot);
               Streams[_key18] = _val19;
             }
             iprot.ReadMapEnd();
           }
           isset_streams = true;
         } else { 
           TProtocolUtil.Skip(iprot, field.Type);
         }
         break;
       case 3:
         if (field.Type == TType.I32) {
           Parallelism_hint = iprot.ReadI32();
         } else { 
           TProtocolUtil.Skip(iprot, field.Type);
         }
         break;
       case 4:
         if (field.Type == TType.String) {
           Json_conf = iprot.ReadString();
         } else { 
           TProtocolUtil.Skip(iprot, field.Type);
         }
         break;
       default: 
         TProtocolUtil.Skip(iprot, field.Type);
         break;
     }
     iprot.ReadFieldEnd();
   }
   iprot.ReadStructEnd();
   if (!isset_inputs)
     throw new TProtocolException(TProtocolException.INVALID_DATA);
   if (!isset_streams)
     throw new TProtocolException(TProtocolException.INVALID_DATA);
 }
Example #36
 public StreamOutlet(StreamInfo info, int chunk_size, int max_buffered)
 {
     obj = dll.lsl_create_outlet(info.handle(), chunk_size, max_buffered);
 }
Example #37
        public Mesh(AssetPreloadData MeshPD)
        {
            //Stream = new EndianStream(File.OpenRead(sourceFile.filePath), sourceFile.endianType);
            //Stream.endian = sourceFile.endianType;
            var version = MeshPD.sourceFile.version;
            a_Stream = MeshPD.sourceFile.a_Stream;
            a_Stream.Position = MeshPD.Offset;

            bool m_Use16BitIndices = true; //3.5.0 and newer always use 16-bit indices
            uint m_MeshCompression = 0;

            if (MeshPD.sourceFile.platform == -2)
            {
                uint m_ObjectHideFlags = a_Stream.ReadUInt32();
                PPtr m_PrefabParentObject = MeshPD.sourceFile.ReadPPtr();
                PPtr m_PrefabInternal = MeshPD.sourceFile.ReadPPtr();
            }

            m_Name = a_Stream.ReadAlignedString(a_Stream.ReadInt32());
            if (version[0] < 3 || (version[0] == 3 && version[1] < 5))
            {
                m_Use16BitIndices = a_Stream.ReadBoolean();
                a_Stream.Position += 3;
            }

            #region Index Buffer for 2.5.1 and earlier
            if (version[0] == 2 && version[1] <= 5)
            {
                int m_IndexBuffer_size = a_Stream.ReadInt32();

                if (m_Use16BitIndices)
                {
                    m_IndexBuffer = new uint[m_IndexBuffer_size / 2];
                    for (int i = 0; i < m_IndexBuffer_size / 2; i++) { m_IndexBuffer[i] = a_Stream.ReadUInt16(); }
                    a_Stream.AlignStream(4);
                }
                else
                {
                    m_IndexBuffer = new uint[m_IndexBuffer_size / 4];
                    for (int i = 0; i < m_IndexBuffer_size / 4; i++) { m_IndexBuffer[i] = a_Stream.ReadUInt32(); }
                }
            }
            #endregion

            int m_SubMeshes_size = a_Stream.ReadInt32();
            for (int s = 0; s < m_SubMeshes_size; s++)
            {
                m_SubMeshes.Add(new SubMesh());
                m_SubMeshes[s].firstByte = a_Stream.ReadUInt32();
                m_SubMeshes[s].indexCount = a_Stream.ReadUInt32(); //what is this in case of triangle strips?
                m_SubMeshes[s].topology = a_Stream.ReadInt32(); //isTriStrip
                if (version[0] < 4)
                {
                    m_SubMeshes[s].triangleCount = a_Stream.ReadUInt32();
                }
                if (version[0] >= 3)
                {
                    m_SubMeshes[s].firstVertex = a_Stream.ReadUInt32();
                    m_SubMeshes[s].vertexCount = a_Stream.ReadUInt32();
                    a_Stream.Position += 24; //Axis-Aligned Bounding Box
                }
            }

            #region m_Shapes for 4.1.0 and later, excluding 4.1.0 alpha
            if (version [0] >= 5 || (version[0] == 4 && (version[1] > 1 || (version[1] == 1 && MeshPD.sourceFile.buildType[0] != "a"))))
            {
                if (version[0] == 4 && version[1] <= 2) //4.1.0f4 - 4.2.2f1
                {
                    int m_Shapes_size = a_Stream.ReadInt32();
                    if (m_Shapes_size > 0)
                    {
                        bool stop = true;
                    }
                    for (int s = 0; s < m_Shapes_size; s++) //untested
                    {
                        string shape_name = a_Stream.ReadAlignedString(a_Stream.ReadInt32());
                        a_Stream.Position += 36; //uint firstVertex, vertexCount; Vector3f aabbMinDelta, aabbMaxDelta; bool hasNormals, hasTangents
                    }

                    int m_ShapeVertices_size = a_Stream.ReadInt32();
                    a_Stream.Position += m_ShapeVertices_size * 40; //vertex positions, normals, tangents & uint index
                }
                else //4.3.0 and later
                {
                    int m_ShapeVertices_size = a_Stream.ReadInt32();
                    a_Stream.Position += m_ShapeVertices_size * 40; //vertex positions, normals, tangents & uint index

                    int shapes_size = a_Stream.ReadInt32();
                    a_Stream.Position += shapes_size * 12; //uint firstVertex, vertexCount; bool hasNormals, hasTangents

                    int channels_size = a_Stream.ReadInt32();
                    for (int c = 0; c < channels_size; c++)
                    {
                        string channel_name = a_Stream.ReadAlignedString(a_Stream.ReadInt32());
                        a_Stream.Position += 12; //uint nameHash; int frameIndex, frameCount
                    }

                    int fullWeights_size = a_Stream.ReadInt32();
                    a_Stream.Position += fullWeights_size * 4; //floats

                    int m_BindPose_size = a_Stream.ReadInt32();
                    a_Stream.Position += m_BindPose_size * 16 * 4; //matrix 4x4

                    int m_BoneNameHashes_size = a_Stream.ReadInt32();
                    a_Stream.Position += m_BoneNameHashes_size * 4; //uints

                    uint m_RootBoneNameHash = a_Stream.ReadUInt32();
                }
            }
            #endregion

            #region Index Buffer for 2.6.0 and later
            if (version[0] >= 3 || (version[0] == 2 && version[1] >= 6))
            {
                m_MeshCompression = a_Stream.ReadByte();
                if (version[0] >= 4)
                {
                    if (version[0] < 5) { uint m_StreamCompression = a_Stream.ReadByte(); }
                    bool m_IsReadable = a_Stream.ReadBoolean();
                    bool m_KeepVertices = a_Stream.ReadBoolean();
                    bool m_KeepIndices = a_Stream.ReadBoolean();
                }
                a_Stream.AlignStream(4);

                int m_IndexBuffer_size = a_Stream.ReadInt32();

                if (m_Use16BitIndices)
                {
                    m_IndexBuffer = new uint[m_IndexBuffer_size / 2];
                    for (int i = 0; i < m_IndexBuffer_size / 2; i++) { m_IndexBuffer[i] = a_Stream.ReadUInt16(); }
                    a_Stream.AlignStream(4);
                }
                else
                {
                    m_IndexBuffer = new uint[m_IndexBuffer_size / 4];
                    for (int i = 0; i < m_IndexBuffer_size / 4; i++) { m_IndexBuffer[i] = a_Stream.ReadUInt32(); }
                    //align??
                }
            }
            #endregion

            #region Vertex Buffer for 3.4.2 and earlier
            if (version[0] < 3 || (version[0] == 3 && version[1] < 5))
            {
                m_VertexCount = a_Stream.ReadUInt32();
                m_Vertices = new float[m_VertexCount * 3];
                for (int v = 0; v < m_VertexCount * 3; v++) { m_Vertices[v] = a_Stream.ReadSingle(); }

                int m_Skin_size = a_Stream.ReadInt32();
                a_Stream.Position += m_Skin_size * 32; //4x float weights & 4x int boneIndices

                int m_BindPose_size = a_Stream.ReadInt32();
                a_Stream.Position += m_BindPose_size * 16 * 4; //matrix 4x4

                int m_UV1_size = a_Stream.ReadInt32();
                m_UV1 = new float[m_UV1_size * 2];
                for (int v = 0; v < m_UV1_size * 2; v++) { m_UV1[v] = a_Stream.ReadSingle(); }

                int m_UV2_size = a_Stream.ReadInt32();
                m_UV2 = new float[m_UV2_size * 2];
                for (int v = 0; v < m_UV2_size * 2; v++) { m_UV2[v] = a_Stream.ReadSingle(); }

                if (version[0] == 2 && version[1] <= 5)
                {
                    int m_TangentSpace_size = a_Stream.ReadInt32();
                    m_Normals = new float[m_TangentSpace_size * 3];
                    for (int v = 0; v < m_TangentSpace_size; v++)
                    {
                        m_Normals[v * 3] = a_Stream.ReadSingle();
                        m_Normals[v * 3 + 1] = a_Stream.ReadSingle();
                        m_Normals[v * 3 + 2] = a_Stream.ReadSingle();
                        a_Stream.Position += 16; //Vector3f tangent & float handedness
                    }
                }
                else //2.6.0 and later
                {
                    int m_Tangents_size = a_Stream.ReadInt32();
                    a_Stream.Position += m_Tangents_size * 16; //Vector4f

                    int m_Normals_size = a_Stream.ReadInt32();
                    m_Normals = new float[m_Normals_size * 3];
                    for (int v = 0; v < m_Normals_size * 3; v++) { m_Normals[v] = a_Stream.ReadSingle(); }
                }
            }
            #endregion
            #region Vertex Buffer for 3.5.0 and later
            else
            {
                #region read vertex stream
                int m_Skin_size = a_Stream.ReadInt32();
                a_Stream.Position += m_Skin_size * 32; //4x float weights & 4x int boneIndices

                if (version[0] <= 3 || (version[0] == 4 && version[1] <= 2))
                {
                    int m_BindPose_size = a_Stream.ReadInt32();
                    a_Stream.Position += m_BindPose_size * 16 * 4; //matrix 4x4
                }

                int m_CurrentChannels = a_Stream.ReadInt32();//defined as uint in Unity
                m_VertexCount = a_Stream.ReadUInt32();

                #region 3.5.0 - 3.5.7
                if (version[0] < 4)
                {
                    if (m_MeshCompression != 0 && version[2] == 0) //special case not just on platform 9
                    {
                        a_Stream.Position += 12;
                    }
                    else
                    {
                        m_Streams = new StreamInfo[4];
                        for (int s = 0; s < 4; s++)
                        {
                            m_Streams[s] = new StreamInfo();
                            m_Streams[s].channelMask = new BitArray(new int[1] { a_Stream.ReadInt32() });
                            m_Streams[s].offset = a_Stream.ReadInt32();
                            m_Streams[s].stride = a_Stream.ReadInt32();
                            m_Streams[s].align = a_Stream.ReadUInt32();
                        }
                    }
                }
                #endregion
                #region 4.0.0 and later
                else
                {
                    int singleStreamStride = 0;//used for Unity 5

                    m_Channels = new ChannelInfo[a_Stream.ReadInt32()];
                    for (int c = 0; c < m_Channels.Length; c++)
                    {
                        m_Channels[c] = new ChannelInfo();
                        m_Channels[c].stream = a_Stream.ReadByte();
                        m_Channels[c].offset = a_Stream.ReadByte();
                        m_Channels[c].format = a_Stream.ReadByte();
                        m_Channels[c].dimension = a_Stream.ReadByte();

                        //calculate stride for Unity 5
                        singleStreamStride += m_Channels[c].dimension * (m_Channels[c].format % 2 == 0 ? 4 : 2);//fingers crossed!
                    }

                    if (version[0] < 5)
                    {
                        m_Streams = new StreamInfo[a_Stream.ReadInt32()];
                        for (int s = 0; s < m_Streams.Length; s++)
                        {
                            m_Streams[s] = new StreamInfo();
                            m_Streams[s].channelMask = new BitArray(new int[1] { a_Stream.ReadInt32() });
                            m_Streams[s].offset = a_Stream.ReadInt32();
                            m_Streams[s].stride = a_Stream.ReadByte();
                            m_Streams[s].dividerOp = a_Stream.ReadByte();
                            m_Streams[s].frequency = a_Stream.ReadUInt16();
                        }
                    }
                    else //it's just easier to create my own stream here
                    {
                        m_Streams = new StreamInfo[1];
                        m_Streams[0] = new StreamInfo();
                        m_Streams[0].channelMask = new BitArray(new int[1] { m_CurrentChannels });
                        m_Streams[0].offset = 0;
                        m_Streams[0].stride = singleStreamStride;
                    }
                }
                #endregion

                //actual Vertex Buffer
                byte[] m_DataSize = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_DataSize, 0, m_DataSize.Length);
                #endregion

                #region compute FvF
                byte valueBufferSize = 0;
                byte[] valueBuffer;
                float[] dstArray;

                if (m_Channels != null)
                {
                    //it is better to loop channels instead of streams
                    //because channels are likely to be sorted by vertex property
                    #region 4.0.0 and later
                    foreach (var m_Channel in m_Channels)
                    {
                        if (m_Channel.dimension > 0)
                        {
                            var m_Stream = m_Streams[m_Channel.stream];

                            for (int b = 0; b < 6; b++)
                            {
                                if (m_Stream.channelMask.Get(b))
                                {
                                    switch (m_Channel.format)
                                    {
                                        case 0: //32bit
                                            valueBufferSize = 4;
                                            break;
                                        case 1: //16bit
                                            valueBufferSize = 2;
                                            break;
                                        case 2: //8bit
                                            valueBufferSize = 1;
                                            m_Channel.dimension = 4;//these are actually groups of 4 components
                                            break;
                                    }

                                    valueBuffer = new byte[valueBufferSize];
                                    dstArray = new float[m_VertexCount * m_Channel.dimension];

                                    for (int v = 0; v < m_VertexCount; v++)
                                    {
                                        for (int d = 0; d < m_Channel.dimension; d++)
                                        {
                                            int m_DataSizeOffset = m_Stream.offset + m_Channel.offset + m_Stream.stride * v + valueBufferSize * d;
                                            Buffer.BlockCopy(m_DataSize, m_DataSizeOffset, valueBuffer, 0, valueBufferSize);
                                            dstArray[v * m_Channel.dimension + d] = bytesToFloat(valueBuffer);
                                        }
                                    }

                                    switch (b)
                                    {
                                        case 0://1
                                            m_Vertices = dstArray;
                                            break;
                                        case 1://2
                                            m_Normals = dstArray;
                                            break;
                                        case 2://4
                                            m_Colors = dstArray;
                                            break;
                                        case 3://8
                                            m_UV1 = dstArray;
                                            break;
                                        case 4://16
                                            m_UV2 = dstArray;
                                            break;
                                        case 5://32
                                            m_Tangents = dstArray;
                                            break;
                                    }

                                    m_Stream.channelMask.Set(b, false); //is this needed?
                                    valueBuffer = null;
                                    dstArray = null;
                                    break; //go to next channel
                                }
                            }
                        }
                    }
                }
                #endregion
                #region 3.5.0 - 3.5.7
                else if (m_Streams != null)
                {
                    foreach (var m_Stream in m_Streams)
                    {
                        //a stream may have multiple vertex components but without channels there are no offsets, so I assume all vertex properties are in order
                        //Unity 3.5.x only uses floats, and that's probably why channels were introduced in Unity 4

                        ChannelInfo m_Channel = new ChannelInfo();//create my own channel so I can use the same methods
                        m_Channel.offset = 0;

                        for (int b = 0; b < 6; b++)
                        {
                            if (m_Stream.channelMask.Get(b))
                            {
                                switch (b)
                                {
                                    case 0:
                                    case 1:
                                        valueBufferSize = 4;
                                        m_Channel.dimension = 3;
                                        break;
                                    case 2:
                                        valueBufferSize = 1;
                                        m_Channel.dimension = 4;
                                        break;
                                    case 3:
                                    case 4:
                                        valueBufferSize = 4;
                                        m_Channel.dimension = 2;
                                        break;
                                    case 5:
                                        valueBufferSize = 4;
                                        m_Channel.dimension = 4;
                                        break;
                                }

                                valueBuffer = new byte[valueBufferSize];
                                dstArray = new float[m_VertexCount * m_Channel.dimension];

                                for (int v = 0; v < m_VertexCount; v++)
                                {
                                    for (int d = 0; d < m_Channel.dimension; d++)
                                    {
                                        int m_DataSizeOffset = m_Stream.offset + m_Channel.offset + m_Stream.stride * v + valueBufferSize * d;
                                        Buffer.BlockCopy(m_DataSize, m_DataSizeOffset, valueBuffer, 0, valueBufferSize);
                                        dstArray[v * m_Channel.dimension + d] = bytesToFloat(valueBuffer);
                                    }
                                }

                                switch (b)
                                {
                                    case 0:
                                        m_Vertices = dstArray;
                                        break;
                                    case 1:
                                        m_Normals = dstArray;
                                        break;
                                    case 2:
                                        m_Colors = dstArray;
                                        break;
                                    case 3:
                                        m_UV1 = dstArray;
                                        break;
                                    case 4:
                                        m_UV2 = dstArray;
                                        break;
                                    case 5:
                                        m_Tangents = dstArray;
                                        break;
                                }

                                m_Channel.offset += (byte)(m_Channel.dimension * valueBufferSize); //strides larger than 255 are unlikely
                                m_Stream.channelMask.Set(b, false); //is this needed?
                                valueBuffer = null;
                                dstArray = null;
                            }
                        }
                    }
                }
                #endregion
                #endregion
            }
            #endregion

            #region Compressed Mesh data for 2.6.0 and later - 160 bytes
            if (version[0] >= 3 || (version[0] == 2 && version[1] >= 6))
            {
                //remember there can be combinations of packed and regular vertex properties
                PackedBitVector m_Vertices_Packed = new PackedBitVector();
                m_Vertices_Packed.m_NumItems = a_Stream.ReadUInt32();
                m_Vertices_Packed.m_Range = a_Stream.ReadSingle();
                m_Vertices_Packed.m_Start = a_Stream.ReadSingle();
                m_Vertices_Packed.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_Vertices_Packed.m_Data, 0, m_Vertices_Packed.m_Data.Length);
                a_Stream.AlignStream(4);
                m_Vertices_Packed.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                if (m_Vertices_Packed.m_NumItems > 0)
                {
                    m_VertexCount = m_Vertices_Packed.m_NumItems / 3;
                    uint[] m_Vertices_Unpacked = UnpackBitVector(m_Vertices_Packed);
                    int bitmax = 0;//used to convert int value to float
                    for (int b = 0; b < m_Vertices_Packed.m_BitSize; b++) { bitmax |= (1 << b); }
                    m_Vertices = new float[m_Vertices_Packed.m_NumItems];
                    for (int v = 0; v < m_Vertices_Packed.m_NumItems; v++)
                    {
                        m_Vertices[v] = (float)m_Vertices_Unpacked[v] / bitmax * m_Vertices_Packed.m_Range + m_Vertices_Packed.m_Start;
                    }
                }

                PackedBitVector m_UV_Packed = new PackedBitVector(); //contains both channels
                m_UV_Packed.m_NumItems = a_Stream.ReadUInt32();
                m_UV_Packed.m_Range = a_Stream.ReadSingle();
                m_UV_Packed.m_Start = a_Stream.ReadSingle();
                m_UV_Packed.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_UV_Packed.m_Data, 0, m_UV_Packed.m_Data.Length);
                a_Stream.AlignStream(4); //align to 4 bytes, as with the other packed vectors
                m_UV_Packed.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                if (m_UV_Packed.m_NumItems > 0)
                {
                    uint[] m_UV_Unpacked = UnpackBitVector(m_UV_Packed);
                    int bitmax = 0;
                    for (int b = 0; b < m_UV_Packed.m_BitSize; b++) { bitmax |= (1 << b); } //use the UV vector's own bit size

                    m_UV1 = new float[m_VertexCount * 2];

                    for (int v = 0; v < m_VertexCount * 2; v++)
                    {
                        m_UV1[v] = (float)m_UV_Unpacked[v] / bitmax * m_UV_Packed.m_Range + m_UV_Packed.m_Start;
                    }

                    if (m_UV_Packed.m_NumItems == m_VertexCount * 4)
                    {
                        m_UV2 = new float[m_VertexCount * 2];
                        for (uint v = 0; v < m_VertexCount * 2; v++)
                        {
                            m_UV2[v] = (float)m_UV_Unpacked[v + m_VertexCount * 2] / bitmax * m_UV_Packed.m_Range + m_UV_Packed.m_Start;
                        }
                    }
                }

                if (version[0] < 5)
                {
                    PackedBitVector m_BindPoses_Packed = new PackedBitVector();
                    m_BindPoses_Packed.m_NumItems = a_Stream.ReadUInt32();
                    m_BindPoses_Packed.m_Range = a_Stream.ReadSingle();
                    m_BindPoses_Packed.m_Start = a_Stream.ReadSingle();
                    m_BindPoses_Packed.m_Data = new byte[a_Stream.ReadInt32()];
                    a_Stream.Read(m_BindPoses_Packed.m_Data, 0, m_BindPoses_Packed.m_Data.Length);
                    a_Stream.AlignStream(4);
                    m_BindPoses_Packed.m_BitSize = a_Stream.ReadByte();
                    a_Stream.Position += 3; //4 byte alignment
                }

                PackedBitVector m_Normals_Packed = new PackedBitVector();
                m_Normals_Packed.m_NumItems = a_Stream.ReadUInt32();
                m_Normals_Packed.m_Range = a_Stream.ReadSingle();
                m_Normals_Packed.m_Start = a_Stream.ReadSingle();
                m_Normals_Packed.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_Normals_Packed.m_Data, 0, m_Normals_Packed.m_Data.Length);
                a_Stream.AlignStream(4);
                m_Normals_Packed.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                PackedBitVector m_Tangents_Packed = new PackedBitVector();
                m_Tangents_Packed.m_NumItems = a_Stream.ReadUInt32();
                m_Tangents_Packed.m_Range = a_Stream.ReadSingle();
                m_Tangents_Packed.m_Start = a_Stream.ReadSingle();
                m_Tangents_Packed.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_Tangents_Packed.m_Data, 0, m_Tangents_Packed.m_Data.Length);
                a_Stream.AlignStream(4);
                m_Tangents_Packed.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                PackedBitVector m_Weights_Packed = new PackedBitVector();
                m_Weights_Packed.m_NumItems = a_Stream.ReadUInt32();
                m_Weights_Packed.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_Weights_Packed.m_Data, 0, m_Weights_Packed.m_Data.Length);
                a_Stream.AlignStream(4);
                m_Weights_Packed.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                PackedBitVector m_NormalSigns_packed = new PackedBitVector();
                m_NormalSigns_packed.m_NumItems = a_Stream.ReadUInt32();
                m_NormalSigns_packed.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_NormalSigns_packed.m_Data, 0, m_NormalSigns_packed.m_Data.Length);
                a_Stream.AlignStream(4);
                m_NormalSigns_packed.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                if (m_Normals_Packed.m_NumItems > 0)
                {
                    uint[] m_Normals_Unpacked = UnpackBitVector(m_Normals_Packed);
                    uint[] m_NormalSigns = UnpackBitVector(m_NormalSigns_packed);
                    int bitmax = 0;
                    for (int b = 0; b < m_Normals_Packed.m_BitSize; b++) { bitmax |= (1 << b); }
                    m_Normals = new float[m_Normals_Packed.m_NumItems / 2 * 3];
                    for (int v = 0; v < m_Normals_Packed.m_NumItems / 2; v++)
                    {
                        m_Normals[v * 3] = (float)((double)m_Normals_Unpacked[v * 2] / bitmax) * m_Normals_Packed.m_Range + m_Normals_Packed.m_Start;
                        m_Normals[v * 3 + 1] = (float)((double)m_Normals_Unpacked[v * 2 + 1] / bitmax) * m_Normals_Packed.m_Range + m_Normals_Packed.m_Start;
                        m_Normals[v * 3 + 2] = (float)Math.Sqrt(1 - m_Normals[v * 3] * m_Normals[v * 3] - m_Normals[v * 3 + 1] * m_Normals[v * 3 + 1]);
                        if (m_NormalSigns[v] == 0) { m_Normals[v * 3 + 2] *= -1; }
                    }
                }

                PackedBitVector m_TangentSigns = new PackedBitVector();
                m_TangentSigns.m_NumItems = a_Stream.ReadUInt32();
                m_TangentSigns.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_TangentSigns.m_Data, 0, m_TangentSigns.m_Data.Length);
                a_Stream.AlignStream(4);
                m_TangentSigns.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                if (version[0] >= 5)
                {
                    PackedBitVector m_FloatColors = new PackedBitVector();
                    m_FloatColors.m_NumItems = a_Stream.ReadUInt32();
                    m_FloatColors.m_Range = a_Stream.ReadSingle();
                    m_FloatColors.m_Start = a_Stream.ReadSingle();
                    m_FloatColors.m_Data = new byte[a_Stream.ReadInt32()];
                    a_Stream.Read(m_FloatColors.m_Data, 0, m_FloatColors.m_Data.Length);
                    a_Stream.AlignStream(4);
                    m_FloatColors.m_BitSize = a_Stream.ReadByte();
                    a_Stream.Position += 3; //4 byte alignment

                    if (m_FloatColors.m_NumItems > 0)
                    {
                        uint[] m_FloatColors_Unpacked = UnpackBitVector(m_FloatColors);
                        int bitmax = 0;
                        for (int b = 0; b < m_FloatColors.m_BitSize; b++) { bitmax |= (1 << b); } //use the color vector's own bit size

                        m_Colors = new float[m_FloatColors.m_NumItems];

                        for (int v = 0; v < m_FloatColors.m_NumItems; v++)
                        {
                            m_Colors[v] = (float)m_FloatColors_Unpacked[v] / bitmax * m_FloatColors.m_Range + m_FloatColors.m_Start;
                        }
                    }
                }

                PackedBitVector m_BoneIndices = new PackedBitVector();
                m_BoneIndices.m_NumItems = a_Stream.ReadUInt32();
                m_BoneIndices.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_BoneIndices.m_Data, 0, m_BoneIndices.m_Data.Length);
                a_Stream.AlignStream(4);
                m_BoneIndices.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                PackedBitVector m_Triangles = new PackedBitVector();
                m_Triangles.m_NumItems = a_Stream.ReadUInt32();
                m_Triangles.m_Data = new byte[a_Stream.ReadInt32()];
                a_Stream.Read(m_Triangles.m_Data, 0, m_Triangles.m_Data.Length);
                a_Stream.AlignStream(4);
                m_Triangles.m_BitSize = a_Stream.ReadByte();
                a_Stream.Position += 3; //4 byte alignment

                if (m_Triangles.m_NumItems > 0) { m_IndexBuffer = UnpackBitVector(m_Triangles); }
            }
            #endregion

            #region Colors & Collision triangles for 3.4.2 and earlier
            if (version[0] <= 2 || (version[0] == 3 && version[1] <= 4))
            {
                a_Stream.Position += 24; //Axis-Aligned Bounding Box
                int m_Colors_size = a_Stream.ReadInt32();
                m_Colors = new float[m_Colors_size * 4];
                for (int v = 0; v < m_Colors_size * 4; v++) { m_Colors[v] = (float)(a_Stream.ReadByte()) / 0xFF; }

                int m_CollisionTriangles_size = a_Stream.ReadInt32();
                a_Stream.Position += m_CollisionTriangles_size * 4; //UInt32 indices
                int m_CollisionVertexCount = a_Stream.ReadInt32();
            }
            #endregion
            #region Compressed colors & Local AABB for 3.5.0 to 4.x.x
            else //vertex colors are either in streams or packed bits
            {
                if (version[0] < 5)
                {
                    PackedBitVector m_Colors_Packed = new PackedBitVector();
                    m_Colors_Packed.m_NumItems = a_Stream.ReadUInt32();
                    m_Colors_Packed.m_Data = new byte[a_Stream.ReadInt32()];
                    a_Stream.Read(m_Colors_Packed.m_Data, 0, m_Colors_Packed.m_Data.Length);
                    a_Stream.AlignStream(4);
                    m_Colors_Packed.m_BitSize = a_Stream.ReadByte();
                    a_Stream.Position += 3; //4 byte alignment

                    if (m_Colors_Packed.m_NumItems > 0)
                    {
                        if (m_Colors_Packed.m_BitSize == 32)
                        {
                            //4 x 8bit color channels
                            m_Colors = new float[m_Colors_Packed.m_Data.Length];
                            for (int v = 0; v < m_Colors_Packed.m_Data.Length; v++)
                            {
                                m_Colors[v] = (float)m_Colors_Packed.m_Data[v] / 0xFF;
                            }
                        }
                        else //not tested
                        {
                            uint[] m_Colors_Unpacked = UnpackBitVector(m_Colors_Packed);
                            int bitmax = 0;//used to convert int value to float
                            for (int b = 0; b < m_Colors_Packed.m_BitSize; b++) { bitmax |= (1 << b); }
                            m_Colors = new float[m_Colors_Packed.m_NumItems];
                            for (int v = 0; v < m_Colors_Packed.m_NumItems; v++)
                            {
                                m_Colors[v] = (float)m_Colors_Unpacked[v] / bitmax;
                            }
                        }
                    }
                }

                a_Stream.Position += 24; //Axis-Aligned Bounding Box
            }
            #endregion

            int m_MeshUsageFlags = a_Stream.ReadInt32();

            if (version[0] >= 5)
            {
                //int m_BakedConvexCollisionMesh = a_Stream.ReadInt32();
                //a_Stream.Position += m_BakedConvexCollisionMesh;
                //int m_BakedTriangleCollisionMesh = a_Stream.ReadInt32();
                //a_Stream.Position += m_BakedTriangleCollisionMesh;
            }

            #region Build face indices
            for (int s = 0; s < m_SubMeshes_size; s++)
            {
                uint firstIndex = m_SubMeshes[s].firstByte / 2;
                if (!m_Use16BitIndices) { firstIndex /= 2; }

                if (m_SubMeshes[s].topology == 0)
                {
                    for (int i = 0; i < m_SubMeshes[s].indexCount / 3; i++)
                    {
                        m_Indices.Add(m_IndexBuffer[firstIndex + i * 3]);
                        m_Indices.Add(m_IndexBuffer[firstIndex + i * 3 + 1]);
                        m_Indices.Add(m_IndexBuffer[firstIndex + i * 3 + 2]);
                        m_materialIDs.Add(s);
                    }
                }
                else
                {
                    for (int i = 0; i < m_SubMeshes[s].indexCount - 2; i++)
                    {
                        uint fa = m_IndexBuffer[firstIndex + i];
                        uint fb = m_IndexBuffer[firstIndex + i + 1];
                        uint fc = m_IndexBuffer[firstIndex + i + 2];

                        if ((fa!=fb) && (fa!=fc) && (fc!=fb))
                        {
                            m_Indices.Add(fa);
                            if ((i % 2) == 0)
                            {
                                m_Indices.Add(fb);
                                m_Indices.Add(fc);
                            }
                            else
                            {
                                m_Indices.Add(fc);
                                m_Indices.Add(fb);
                            }
                            m_materialIDs.Add(s);
                        }
                    }
                }
            }
            #endregion
        }
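
The parser above relies on UnpackBitVector for every PackedBitVector, but the helper itself is not part of this snippet. Below is a minimal sketch of what it could look like, assuming the usual layout of m_NumItems values of m_BitSize bits each, packed least-significant-bit first into m_Data; the field names mirror the ones used above, everything else is illustrative:

private static uint[] UnpackBitVector(PackedBitVector packed)
{
    //each value occupies packed.m_BitSize bits and may straddle byte boundaries
    uint[] result = new uint[packed.m_NumItems];
    int bitPos = 0;

    for (int i = 0; i < packed.m_NumItems; i++)
    {
        uint value = 0;
        int bitsRead = 0;

        while (bitsRead < packed.m_BitSize)
        {
            int byteIndex = bitPos / 8;
            int bitOffset = bitPos % 8;
            int take = Math.Min(8 - bitOffset, packed.m_BitSize - bitsRead);

            uint chunk = (uint)(packed.m_Data[byteIndex] >> bitOffset) & (uint)((1 << take) - 1);
            value |= chunk << bitsRead;

            bitsRead += take;
            bitPos += take;
        }

        result[i] = value;
    }

    return result;
}

With a bit size of 32 each item is simply four consecutive bytes, which is why the m_BitSize == 32 color branch above can read m_Data directly instead of unpacking.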
Example #38
0
 public static StreamInfo[] resolve_streams(double wait_time)
 {
     IntPtr[] buf = new IntPtr[1024];
     int num = dll.lsl_resolve_all(buf, (uint)buf.Length, wait_time);
     StreamInfo[] res = new StreamInfo[num];
     for (int k = 0; k < num; k++)
         res[k] = new StreamInfo(buf[k]);
     return res;
 }
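
A short usage sketch for the resolver above, assuming the standard liblsl C# binding in which StreamInfo exposes name() and type() accessors:

 //wait up to two seconds for every stream currently advertised on the network
 StreamInfo[] streams = resolve_streams(2.0);
 foreach (StreamInfo info in streams)
     Console.WriteLine("found stream '{0}' of type '{1}'", info.name(), info.type());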
Example #39
0
        private void AddAudioResource(DlnaOptions options, XmlWriter writer, IHasMediaSources audio, string deviceId, Filter filter, StreamInfo streamInfo = null)
        {
            writer.WriteStartElement(string.Empty, "res", NS_DIDL);

            if (streamInfo == null)
            {
                var sources = _mediaSourceManager.GetStaticMediaSources(audio, true, _user).ToList();

                streamInfo = new StreamBuilder(_mediaEncoder, GetStreamBuilderLogger(options)).BuildAudioItem(new AudioOptions
                {
                    ItemId       = GetClientId(audio),
                    MediaSources = sources,
                    Profile      = _profile,
                    DeviceId     = deviceId
                });
            }

            var url = streamInfo.ToDlnaUrl(_serverAddress, _accessToken);

            var mediaSource = streamInfo.MediaSource;

            if (mediaSource.RunTimeTicks.HasValue)
            {
                writer.WriteAttributeString("duration", TimeSpan.FromTicks(mediaSource.RunTimeTicks.Value).ToString("c", _usCulture));
            }

            if (filter.Contains("res@size"))
            {
                if (streamInfo.IsDirectStream || streamInfo.EstimateContentLength)
                {
                    var size = streamInfo.TargetSize;

                    if (size.HasValue)
                    {
                        writer.WriteAttributeString("size", size.Value.ToString(_usCulture));
                    }
                }
            }

            var targetAudioBitrate  = streamInfo.TargetAudioBitrate;
            var targetSampleRate    = streamInfo.TargetAudioSampleRate;
            var targetChannels      = streamInfo.TargetAudioChannels;
            var targetAudioBitDepth = streamInfo.TargetAudioBitDepth;

            if (targetChannels.HasValue)
            {
                writer.WriteAttributeString("nrAudioChannels", targetChannels.Value.ToString(_usCulture));
            }

            if (targetSampleRate.HasValue)
            {
                writer.WriteAttributeString("sampleFrequency", targetSampleRate.Value.ToString(_usCulture));
            }

            if (targetAudioBitrate.HasValue)
            {
                writer.WriteAttributeString("bitrate", targetAudioBitrate.Value.ToString(_usCulture));
            }

            var mediaProfile = _profile.GetAudioMediaProfile(streamInfo.Container,
                                                             streamInfo.TargetAudioCodec,
                                                             targetChannels,
                                                             targetAudioBitrate,
                                                             targetSampleRate,
                                                             targetAudioBitDepth);

            var filename = url.Substring(0, url.IndexOf('?'));

            var mimeType = mediaProfile == null || string.IsNullOrEmpty(mediaProfile.MimeType)
                ? GetMimeType(filename)
                : mediaProfile.MimeType;

            var contentFeatures = new ContentFeatureBuilder(_profile).BuildAudioHeader(streamInfo.Container,
                                                                                       streamInfo.TargetAudioCodec,
                                                                                       targetAudioBitrate,
                                                                                       targetSampleRate,
                                                                                       targetChannels,
                                                                                       targetAudioBitDepth,
                                                                                       streamInfo.IsDirectStream,
                                                                                       streamInfo.RunTimeTicks,
                                                                                       streamInfo.TranscodeSeekInfo);

            writer.WriteAttributeString("protocolInfo", String.Format(
                                            "http-get:*:{0}:{1}",
                                            mimeType,
                                            contentFeatures
                                            ));

            writer.WriteString(url);

            writer.WriteFullEndElement();
        }
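
For reference, the protocolInfo attribute written above follows the four-field DLNA form protocol:network:contentFormat:additionalInfo. For an MP3 direct stream the value would come out looking roughly like http-get:*:audio/mpeg:DLNA.ORG_PN=MP3;DLNA.ORG_OP=01;DLNA.ORG_FLAGS=01500000000000000000000000000000, with the exact additional-info fields depending on what ContentFeatureBuilder emits for the active profile.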
Example #40
0
 /**
 * Construct a new stream inlet from a resolved stream info.
 * @param info A resolved stream info object (as coming from one of the resolver functions).
 *             Note: the stream_inlet may also be constructed with a fully-specified stream_info,
 *                   if the desired channel format and count is already known up-front, but this is
 *                   strongly discouraged and should only ever be done if there is no time to resolve the
 *                   stream up-front (e.g., due to limitations in the client program).
 * @param max_buflen Optionally the maximum amount of data to buffer (in seconds if there is a nominal
 *                   sampling rate, otherwise x100 in samples). Recording applications want to use a fairly
 *                   large buffer size here, while real-time applications would only buffer as much as
 *                   they need to perform their next calculation.
 * @param max_chunklen Optionally the maximum size, in samples, at which chunks are transmitted
 *                     (the default corresponds to the chunk sizes used by the sender).
 *                     Recording applications can use a generous size here (leaving it to the network how
 *                     to pack things), while real-time applications may want a finer (perhaps 1-sample) granularity.
 *                     If left unspecified (=0), the sender determines the chunk granularity.
 * @param recover Try to silently recover lost streams that are recoverable (=those that have a source_id set).
 *                In all other cases (recover is false or the stream is not recoverable) functions may throw a
 *                LostException if the stream's source is lost (e.g., due to an app or computer crash).
 */
 public StreamInlet(StreamInfo info)
 {
     obj = dll.lsl_create_inlet(info.handle(), 360, 0, 1);
 }
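
Putting the resolver and the inlet together, a minimal reading loop; this assumes the binding's usual channel_count() accessor on StreamInfo and the pull_sample(float[]) call on StreamInlet, which blocks and returns the sample's timestamp:

 //open an inlet on the first stream found and read samples indefinitely
 StreamInfo[] results = resolve_streams(1.0);
 StreamInlet inlet = new StreamInlet(results[0]);

 float[] sample = new float[results[0].channel_count()];
 while (true)
 {
     double timestamp = inlet.pull_sample(sample);
     Console.WriteLine("t={0:F3} ch0={1}", timestamp, sample[0]);
 }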
Example #41
0
    private void ParseStreams(IAMStreamSelect pStrm)
    {
      int cStreams = 0;
      pStrm.Count(out cStreams);

      _audioStreams.Clear();
      _subtitleStreams.Clear();

      for (int istream = 0; istream < cStreams; istream++)
      {
        AMMediaType sType;
        AMStreamSelectInfoFlags sFlag;
        int sPDWGroup;
        int sPLCid;
        string sName;
        object pppunk; 
        object ppobject;

        pStrm.Info(istream, out sType, out sFlag, out sPLCid, out sPDWGroup, out sName, out pppunk, out ppobject);

        if (sPDWGroup == 1)
        {
          StreamInfo info = new StreamInfo(istream, sName);
          _audioStreams.Add(info);
        }

        if (sPDWGroup == 2)
        {
          StreamInfo info = new StreamInfo(istream, sName);
          _subtitleStreams.Add(info);
        }
      }
    }
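
ParseStreams only enumerates the streams (group 1 is treated as audio, group 2 as subtitles); actually switching to one goes through the same IAMStreamSelect interface. A hedged sketch, assuming DirectShowLib's Enable method and AMStreamSelectEnableFlags, and assuming StreamInfo keeps the index it was constructed with in an Index property:

    private void SelectAudioStream(IAMStreamSelect pStrm, StreamInfo selected)
    {
      //Enable activates the chosen stream; the filter disables the other streams in the same group
      pStrm.Enable(selected.Index, AMStreamSelectEnableFlags.Enable);
    }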
Example #42
0
 public StreamInlet(StreamInfo info, int max_buflen, int max_chunklen, bool recover)
 {
     obj = dll.lsl_create_inlet(info.handle(), max_buflen, max_chunklen, recover?1:0);
 }
Example #43
0
 public static MediaFrame MediaSteamEntity2MediaFrame(MediaFrameEntity entity, ref StreamInfo streamInfo)
 {
     if (entity.MediaType == MediaType.VideoES)
     {
         if (entity.KeyFrame == 0)
         {
             MediaFrame mf = null;
             if (streamInfo != null && streamInfo.SPS_PPSInited && streamInfo.Width == entity.Width && streamInfo.Height == entity.Height)
             {
                 mf = new MediaFrame()
                 {
                     IsAudio           = 0,
                     IsKeyFrame        = 0,
                     Size              = entity.Length,
                     Height            = entity.Height,
                     Width             = entity.Width,
                     SPSLen            = (short)streamInfo.Video_SPS.Length,
                     PPSLen            = (short)streamInfo.Video_PPS.Length,
                     NTimetick         = ThreadEx.TickCount,
                     Offset            = 0,
                     Encoder           = MediaFrame.H264Encoder,
                     Ex                = 1,
                     MediaFrameVersion = 0x00,
                 };
                 mf.SetData(entity.Buffer);
             }
             return(mf);
         }
         else if (entity.KeyFrame == 1)
         {
             bool needResetCodec = false;
             if (streamInfo == null || streamInfo.Width != entity.Width || streamInfo.Height != entity.Height)
             {
                 streamInfo = new StreamInfo();
                 var sps_pps = GetSPS_PPS(entity.Buffer);
                 if (sps_pps != null)
                 {
                     streamInfo.Video_SPS       = sps_pps[0];
                     streamInfo.Video_PPS       = sps_pps[1];
                     streamInfo.Video_SPSString = streamInfo.Video_SPS.To16Strs();
                     streamInfo.Video_PPSString = streamInfo.Video_PPS.To16Strs();
                     streamInfo.SPS_PPSInited   = true;
                     streamInfo.Width           = entity.Width;
                     streamInfo.Height          = entity.Height;
                     needResetCodec             = true;
                 }
             }
             if (streamInfo.Video_SPS == null)
             {
                 var mf = new MediaFrame()
                 {
                     IsAudio    = 0,
                     IsKeyFrame = 1,
                     Size       = entity.Length,
                     Height     = entity.Height,
                     Width      = entity.Width,
                     SPSLen     = 0,
                     PPSLen     = 0,
                     NTimetick  = ThreadEx.TickCount,
                     Offset     = 0,
                     Encoder    = MediaFrame.H264Encoder,
                     Ex         = (byte)(needResetCodec ? 0 : 1),
                     //nEx=(byte)entity.Ex,
                     MediaFrameVersion = 0x01,
                 };
                 mf.SetData(entity.Buffer);
                 return(mf);
             }
             else
             {
                 var mf = new MediaFrame()
                 {
                     IsAudio    = 0,
                     IsKeyFrame = 1,
                     Size       = entity.Length,
                     Height     = entity.Height,
                     Width      = entity.Width,
                     SPSLen     = (short)streamInfo.Video_SPS.Length,
                     PPSLen     = (short)streamInfo.Video_PPS.Length,
                     NTimetick  = ThreadEx.TickCount,
                     Offset     = 0,
                     Encoder    = MediaFrame.H264Encoder,
                     Ex         = (byte)(needResetCodec ? 0 : 1),
                     //nEx=(byte)entity.Ex,
                     MediaFrameVersion = 0x01,
                 };
                 mf.SetData(entity.Buffer);
                 return(mf);
             }
         }
         else
         {
             throw new Exception("帧类型错误");
         }
     }
     else if (entity.MediaType == MediaType.AudioES)
     {
         if (streamInfo == null)
         {
             streamInfo = new StreamInfo();
         }
         try
         {
             var mf = new MediaFrame()
             {
                 IsAudio           = 1,
                 IsKeyFrame        = 1,
                 Size              = entity.Length,
                 Channel           = 1,
                 Frequency         = 32000,
                 AudioFormat       = 2,
                 NTimetick         = ThreadEx.TickCount,
                 Offset            = 0,
                 Encoder           = MediaFrame.AAC_Encoder,
                 Ex                = 1,
                 MediaFrameVersion = 0x01,
             };
             mf.SetData(entity.Buffer);
             //if (mf.nIsKeyFrame == 1)
             //    mf.nEx = 0;
             mf.StreamID = (short)entity.Index; //distinguish between the two audio streams
             streamInfo.IsFirstAudioFrame = false;
             return(mf);
         }
         catch (Exception)
         {
             throw; //rethrow without resetting the stack trace
         }
     }
     else
     {
         throw new Exception("流类型错误");
     }
 }
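
GetSPS_PPS above pulls the parameter sets out of a key frame but is not part of this snippet. A rough sketch, assuming entity.Buffer is an H.264 Annex B stream with 4-byte 00 00 00 01 start codes (NAL type 7 = SPS, type 8 = PPS); the real helper may also handle 3-byte start codes and other edge cases:

 private static byte[][] GetSPS_PPS(byte[] buffer)
 {
     byte[] sps = null, pps = null;
     int i = 0;
     while (i + 4 < buffer.Length)
     {
         //look for a 00 00 00 01 start code
         if (buffer[i] == 0 && buffer[i + 1] == 0 && buffer[i + 2] == 0 && buffer[i + 3] == 1)
         {
             int start = i + 4;
             int end = buffer.Length; //the NAL unit runs to the next start code or to the end of the buffer
             for (int j = start; j + 4 <= buffer.Length; j++)
             {
                 if (buffer[j] == 0 && buffer[j + 1] == 0 && buffer[j + 2] == 0 && buffer[j + 3] == 1)
                 {
                     end = j;
                     break;
                 }
             }
             int nalType = buffer[start] & 0x1F;
             byte[] nal = new byte[end - start];
             Array.Copy(buffer, start, nal, 0, nal.Length);
             if (nalType == 7) sps = nal;
             else if (nalType == 8) pps = nal;
             i = end;
         }
         else
         {
             i++;
         }
     }
     return (sps != null && pps != null) ? new byte[][] { sps, pps } : null;
 }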