public LogsListener(TimberWinR.Parser.LogParameters arguments, CancellationToken cancelToken)
            : base(cancelToken, "Win32-FileLog")
        {
            Stop = false;

            EnsureRollingCaught();

            _codecArguments = arguments.CodecArguments;
            if (_codecArguments != null && _codecArguments.Type == CodecArguments.CodecType.multiline)
                _codec = new Multiline(_codecArguments);

            if (!string.IsNullOrEmpty(arguments.Type))
                SetTypeName(arguments.Type);

            _receivedMessages = 0;
            _arguments = arguments;
            _pollingIntervalInSeconds = arguments.Interval;

            IsWildcardFilePattern = arguments.Location.Contains('*');

            foreach (string srcFile in _arguments.Location.Split(','))
            {
                string file = srcFile.Trim();
                string dir = Path.GetDirectoryName(file);
                if (string.IsNullOrEmpty(dir))
                    dir = Environment.CurrentDirectory;
                string fileSpec = Path.Combine(dir, file);

                Task.Factory.StartNew(() => FileWatcher(fileSpec));
            }
        }
        public TailFileListener(TimberWinR.Parser.TailFileArguments arguments,
            CancellationToken cancelToken)
            : base(cancelToken, "Win32-TailLog")
        {
            Stop = false;

            EnsureRollingCaught();

            _codecArguments = arguments.CodecArguments;
            if (_codecArguments != null && _codecArguments.Type == CodecArguments.CodecType.multiline)
                _codec = new Multiline(_codecArguments);

            if (!string.IsNullOrEmpty(arguments.Type))
                SetTypeName(arguments.Type);

            _receivedMessages = 0;
            _arguments = arguments;
            _pollingIntervalInSeconds = arguments.Interval;

            foreach (string srcFile in _arguments.Location.Split(','))
            {
                string file = srcFile.Trim();
                Task.Factory.StartNew(() => TailFileWatcher(file));
            }
        }
        public static string SmartFormat(ResolvedEvent evnt, ICodec targetCodec)
        {
            var dto = CreateDataDto(evnt);

            switch (targetCodec.ContentType)
            {
                case ContentType.Xml:
                case ContentType.ApplicationXml:
                    {
                        var serializeObject = JsonConvert.SerializeObject(dto.data);
                        var deserializeXmlNode = JsonConvert.DeserializeXmlNode(serializeObject, "data");
                        return deserializeXmlNode.InnerXml;
                    }
                case ContentType.Json:
                    return targetCodec.To(dto.data);

                case ContentType.Atom:
                case ContentType.EventXml:
                    {
                        var serializeObject = JsonConvert.SerializeObject(dto);
                        var deserializeXmlNode = JsonConvert.DeserializeXmlNode(serializeObject, "event");
                        return deserializeXmlNode.InnerXml;
                    }

                case ContentType.EventJson:
                    return targetCodec.To(dto);

                default:
                    throw new NotSupportedException();
            }
        }
Example #4
        public static void Connect(IPEndPoint endpoint, MMDevice device, ICodec codec)
        {
            var config = new NetPeerConfiguration("airgap");

            _client = new NetClient(config);
            _client.RegisterReceivedCallback(MessageReceived);

            _client.Start();

            _waveIn = new WasapiLoopbackCapture(device);
            _codec = codec;

            _sourceFormat = _waveIn.WaveFormat;
            _targetFormat = new WaveFormat(_codec.SampleRate, _codec.Channels); // format to convert to

            _waveIn.DataAvailable += SendData;
            _waveIn.RecordingStopped += (sender, args) => Console.WriteLine("Stopped");
            // TODO: RecordingStopped is called when you change the audio device settings, should recover from that

            NetOutgoingMessage formatMsg = _client.CreateMessage();
            formatMsg.Write(_targetFormat.Channels);
            formatMsg.Write(_targetFormat.SampleRate);
            formatMsg.Write(codec.Name);

            _client.Connect(endpoint, formatMsg);
        }
        //TODO GFY THERE IS WAY TOO MUCH COPYING/SERIALIZING/DESERIALIZING HERE!
        public static Event[] SmartParse(byte[] request, ICodec sourceCodec, Guid includedId, string includedType=null)
        {
            switch(sourceCodec.ContentType)
            {
                case ContentType.Raw:
                    return LoadRaw(request, includedId, includedType);
                case ContentType.Json:
                    return LoadRaw(sourceCodec.Encoding.GetString(request), true, includedId, includedType);
                case ContentType.EventJson:
                case ContentType.EventsJson:
                case ContentType.AtomJson:
                    var writeEvents = LoadFromJson(sourceCodec.Encoding.GetString(request));
                    if (writeEvents.IsEmpty())
                        return null;
                    return Parse(writeEvents);

                case ContentType.ApplicationXml:
                case ContentType.Xml:
                    return LoadRaw(sourceCodec.Encoding.GetString(request), false, includedId, includedType);
                case ContentType.EventXml:
                case ContentType.EventsXml:
                case ContentType.Atom:
                    var writeEvents2 = LoadFromXml(sourceCodec.Encoding.GetString(request));
                    if (writeEvents2.IsEmpty())
                        return null;
                    return Parse(writeEvents2);
                default:
                    return null;
            }
        }
        public HttpEntity(DateTime timeStamp,
                          ICodec requestCodec,
                          ICodec responseCodec,
                          HttpListenerContext context,
                          string[] allowedMethods,
                          Action<HttpEntity> onRequestSatisfied)
        {
            Ensure.NotNull(requestCodec, "requestCodec");
            Ensure.NotNull(responseCodec, "responseCodec");
            Ensure.NotNull(context, "context");
            Ensure.NotNull(allowedMethods, "allowedMethods");
            Ensure.NotNull(onRequestSatisfied, "onRequestSatisfied");

            TimeStamp = timeStamp;
            UserHostName = context.Request.UserHostName;

            RequestCodec = requestCodec;
            ResponseCodec = responseCodec;
            _context = context;

            Request = context.Request;
            Response = context.Response;

            Manager = new HttpEntityManager(this, allowedMethods, onRequestSatisfied);
        }
Example #7
        public InfraredRecorder(BinaryWriter writer)
        {
            this._bytes = new byte[217088 * 4]; // 217088 = 512 x 424, the Kinect v2 infrared frame resolution

            this._writer = writer;
            this._codec = new RawCodec();
        }
Example #8
 /// <inheritdoc cref="Owasp.Esapi.Interfaces.IEncoder.AddCodec(string, ICodec)" />
 public void AddCodec(string codecName, ICodec codec)
 {
     if (codecName == null) {
         throw new ArgumentNullException("codecName");
     }
     codecs.Add(codecName, codec);
 }
 static Func<IHttpEntity, IType, string, Task<object>> GetReader(ICodec instance)
 {
   var readerAsync = instance as IMediaTypeReaderAsync;
   if (readerAsync != null) return readerAsync.ReadFrom;
   return (obj, type, name) => Task.FromResult(
     ((IMediaTypeReader) instance).ReadFrom(obj, type, name));
 }
        public static string SmartFormat(ClientMessage.ReadEventCompleted completed, ICodec targetCodec)
        {
            var dto = new HttpClientMessageDto.ReadEventCompletedText(completed);
            if (completed.Record.Event.Flags.HasFlag(PrepareFlags.IsJson))
            {
                var deserializedData = Codec.Json.From<object>((string) dto.Data);
                var deserializedMetadata = Codec.Json.From<object>((string) dto.Metadata);

                if (deserializedData != null)
                    dto.Data = deserializedData;
                if (deserializedMetadata != null)
                    dto.Metadata = deserializedMetadata;
            }

            switch (targetCodec.ContentType)
            {
                case ContentType.Xml:
                case ContentType.ApplicationXml:
                case ContentType.Atom:
                {
                    var serializeObject = JsonConvert.SerializeObject(dto);
                    var deserializeXmlNode = JsonConvert.DeserializeXmlNode(serializeObject, "read-event-result");
                    return deserializeXmlNode.InnerXml;
                }

                default:
                    return targetCodec.To(dto);
            }
        }
 /// <summary>
 /// Converts the filter to its JSON representation.
 /// </summary>
 /// <param name="codec">The codec to use for encoding values.</param>
 public override JObject ConvertToJson(ICodec codec)
 {
     JObject json = base.ConvertToJson(codec);
     json[_familyPropertyName] = new JValue(codec.Encode(_family));
     json[_qualifierPropertyName] = new JValue(codec.Encode(_qualifier));
     json[_latestVersionPropertyName] = new JValue(_latestVersion);
     return json;
 }
        /// <summary>
        ///    Converts the filter to its JSON representation.
        /// </summary>
        /// <param name="codec">The codec to use for encoding values.</param>
        public override JObject ConvertToJson(ICodec codec)
        {
            JObject json = base.ConvertToJson(codec);

            json[_valuePropertyName] = GetValueJToken(codec);

            return json;
        }
        /// <summary>
        ///   Converts the filter to its JSON representation.
        /// </summary>
        /// <param name="codec">The codec to use for encoding values.</param>
        public override JObject ConvertToJson(ICodec codec)
        {
            JObject json = base.ConvertToJson(codec);

              json[_offsetPropertyName] = _offset;

              return json;
        }
 public static Event[] SmartParse(string request, ICodec sourceCodec)
 {
     var writeEvents = Load(request, sourceCodec);
     if (writeEvents.IsEmpty())
         return null;
     var events = Parse(writeEvents);
     return events;
 }
Example #15
        /// <summary>
        /// Converts the filter to its JSON representation.
        /// </summary>
        /// <param name="codec">The codec to use for encoding values.</param>
        public virtual JObject ConvertToJson(ICodec codec)
        {
            var json = new JObject();

            json[_typePropertyName] = new JValue(GetFilterType());

            return json;
        }
Example #16
        /// <summary>
        ///   Converts the filter to its JSON representation.
        /// </summary>
        /// <param name="codec">The codec to use for encoding values.</param>
        public override JObject ConvertToJson(ICodec codec)
        {
            JObject json = base.ConvertToJson(codec);

              json[_chancePropertyName] = _chance;

              return json;
        }
 /// <summary>
 /// Creates a video output directly to file based on the parameters
 /// </summary>
 /// <param name="filename">The file to output to</param>
 /// <param name="codec">The codec to encode with</param>
 /// <param name="width">The video width</param>
 /// <param name="height">The video height</param>
 /// <param name="depth">The video colour bit-depth</param>
 /// <param name="fps">The number of frames to encode per second</param>
 /// <param name="length">The length of the video</param>
 /// <param name="effects">The effects to apply to the video (null for none)</param>
 /// <returns>The video stream to write to</returns>
 public static IOutputStream CreateVideoStream(string filename, ICodec codec,
                                               int width, int height, int depth,
                                               int fps, TimeSpan length,
                                               IFrameEffect[] effects)
 {
     if ((int)(codec.Capabilities.OutputType & CodecCapabilities.OutputTypes.File) > 0)
         return ((Codec)codec).CreateStream(filename, width, height, depth, fps, effects);
     return null;
 }
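
 A hypothetical call sketch (MyFileCodec, the output path, and the 720p/30fps values are illustrative assumptions; effects is null for "no effects", as documented above):

     ICodec codec = new MyFileCodec();                // assumed ICodec implementation whose Capabilities include file output
     IOutputStream video = CreateVideoStream("out.avi", codec,
                                             1280, 720, 24, 30,
                                             TimeSpan.FromSeconds(10), null);
     if (video == null)
         Console.WriteLine("The selected codec cannot write directly to a file.");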
        /// <summary>
        ///    Converts the filter to its JSON representation.
        /// </summary>
        /// <param name="codec">The codec to use for encoding values.</param>
        public override JObject ConvertToJson(ICodec codec)
        {
            JObject json = base.ConvertToJson(codec);

            json[_operationPropertyName] = new JValue(_comparisonTypes[_comparison]);
            json[_comparatorPropertyName] = new BinaryComparator(_value).ConvertToJson(codec);

            return json;
        }
        public static Tuple<int, Event[]> SmartParse(string request, ICodec sourceCodec)
        {
            var write = Load(request, sourceCodec);
            if (write == null || write.Events == null || write.Events.Length == 0)
                return new Tuple<int, Event[]>(-1, null);

            var events = Parse(write.Events);
            return new Tuple<int, Event[]>(write.ExpectedVersion, events);
        }
        public StdinListener(TimberWinR.Parser.Stdin arguments, CancellationToken cancelToken)
            : base(cancelToken, "Win32-Console")
        {
            _codecArguments = arguments.CodecArguments;
            if (_codecArguments != null && _codecArguments.Type == CodecArguments.CodecType.multiline)
                _codec = new Multiline(_codecArguments);

            _listenThread = new Thread(new ThreadStart(ListenToStdin));
            _listenThread.Start();
        }
Example #21
 private void Read(bool read_tags, ReadStyle style, out uint aiff_size, out long tag_start, out long tag_end)
 {
     base.Seek(0L);
     if (base.ReadBlock(4) != FileIdentifier)
     {
         throw new CorruptFileException("File does not begin with AIFF identifier");
     }
     aiff_size = base.ReadBlock(4).ToUInt(true);
     tag_start = -1L;
     tag_end = -1L;
     if ((this.header_block == null) && (style != ReadStyle.None))
     {
         long offset = base.Find(CommIdentifier, 0L);
         if (offset == -1L)
         {
             throw new CorruptFileException("No Common chunk available in AIFF file.");
         }
         base.Seek(offset);
         this.header_block = base.ReadBlock(0x1a);
         StreamHeader header = new StreamHeader(this.header_block, (long) ((ulong) aiff_size));
         ICodec[] codecs = new ICodec[] { header };
         this.properties = new TagLib.Properties(TimeSpan.Zero, codecs);
     }
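      // Locate the ID3v2 chunk: if it appears before the Sound chunk, find it now; otherwise it is
      // searched for again after the sound data further below.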
     long num2 = -1L;
     if (base.Find(SoundIdentifier, 0L, ID3Identifier) == -1L)
     {
         num2 = base.Find(ID3Identifier, 0L);
     }
     long num3 = base.Find(SoundIdentifier, 0L);
     if (num3 == -1L)
     {
         throw new CorruptFileException("No Sound chunk available in AIFF file.");
     }
     base.Seek(num3 + 4L);
     long startPosition = (((long) base.ReadBlock(4).ToULong(true)) + num3) + 4L;
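      // startPosition now points just past the Sound chunk data, which is where a trailing ID3 chunk would sit.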
     if (num2 == -1L)
     {
         num2 = base.Find(ID3Identifier, startPosition);
     }
     if (num2 > -1L)
     {
         if (read_tags && (this.tag == null))
         {
             this.tag = new TagLib.Id3v2.Tag(this, num2 + 8L);
         }
         base.Seek(num2 + 4L);
         uint num6 = base.ReadBlock(4).ToUInt(true) + 8;
         long num7 = num2;
         base.InvariantStartPosition = num7;
         tag_start = num7;
         num7 = tag_start + num6;
         base.InvariantEndPosition = num7;
         tag_end = num7;
     }
 }
Example #22
        /// <summary>
        /// Converts the filter to its JSON representation.
        /// </summary>
        /// <param name="codec">The codec to use for encoding values.</param>
        public override JObject ConvertToJson(ICodec codec)
        {
            JObject json = base.ConvertToJson(codec);

            json[_minColumnPropertyName] = string.IsNullOrEmpty(_minColumn) ? null : new JValue(codec.Encode(_minColumn));
            json[_maxColumnPropertyName] = string.IsNullOrEmpty(_maxColumn) ? null : new JValue(codec.Encode(_maxColumn));
            json[_minColumnInclusivePropertyName] = _minColumnInclusive;
            json[_maxColumnInclusivePropertyName] = _maxColumnInclusive;

            return json;
        }
 public static byte[] CompressAudioData(float[] samples, int channels, out int sample_count, BandMode mode, ICodec Codec, float gain = 1f)
 {
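      // Convert the float samples to 16-bit PCM, encode them with the supplied codec, and return a
      // copy of the accumulated bytes; the intermediate buffers are handed back to the pool.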
     USpeakAudioClipCompressor.data.Clear();
     sample_count = 0;
     short[] shorts = USpeakAudioClipConverter.AudioDataToShorts(samples, channels, gain);
     byte[] numArray = Codec.Encode(shorts, mode);
     USpeakPoolUtils.Return(shorts);
     USpeakAudioClipCompressor.data.AddRange(numArray);
     USpeakPoolUtils.Return(numArray);
     return USpeakAudioClipCompressor.data.ToArray();
 }
Example #24
        public HttpRequest(ICodec codec)
        {
            RawHeaders = new Dictionary<string, string>();

            UserAgent = String.Format("EasyHttp HttpClient v{0}",
                                       Assembly.GetAssembly(typeof(HttpClient)).GetName().Version);

            Accept = String.Join(";", HttpContentTypes.TextHtml, HttpContentTypes.ApplicationXml,
                                 HttpContentTypes.ApplicationJson);
            _codec = codec;
        }
Example #25
        static Codecs()
        {
            RawColor = new RawCodec();
            JpegColor = new JpegCodec();

            RawDepth = new RawCodec();
            JpegDepth = new JpegCodec();
            PngDepth = new PngCodec();

            RawInfrared = new RawCodec();
            JpegInfrared = new JpegCodec();
            PngInfrared = new PngCodec();
        }
Example #26
        public static string ConvertOnRead(ClientMessage.ReadEventCompleted completed, ICodec responseCodec)
        {
            var dto = new ClientMessageDto.ReadEventCompletedText(completed);

            if (completed.Record.Flags.HasFlag(PrepareFlags.IsJson))
            {
                dto.Data = Codec.Json.From<object>((string) dto.Data);
                dto.Metadata = Codec.Json.From<object>((string) dto.Metadata);
            }

            var type = responseCodec.GetType();
            type = type == typeof (CustomCodec) ? ((CustomCodec) responseCodec).BaseCodec.GetType() : type;
            return type == typeof(XmlCodec) ? Codec.Json.ToXmlUsingJson(dto) : responseCodec.To(dto);
        }
Example #27
        internal static PlayDepthFrame FromReader(BinaryReader reader, ICodec codec)
        {
            var frame = new PlayDepthFrame();

            frame.FrameType = FrameTypes.Depth;
            frame.RelativeTime = TimeSpan.FromMilliseconds(reader.ReadDouble());
            frame.FrameSize = reader.ReadInt64();

            long frameStartPos = reader.BaseStream.Position;

            frame.Codec = codec;
            frame.Codec.ReadDepthHeader(reader, frame);

            frame.Stream = reader.BaseStream;
            frame.StreamPosition = frame.Stream.Position;
            frame.Stream.Position += frame.FrameDataSize;

            // Do Frame Integrity Check
            var isGoodFrame = false;
            try
            {
                if (reader.ReadString() == PlayFrame.EndOfFrameMarker)
                {
                    isGoodFrame = true;
                }
            }
            catch { }

            if (!isGoodFrame)
            {
                System.Diagnostics.Debug.WriteLine("BAD FRAME...RESETTING");
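                // Skip to where the next frame should start according to the recorded frame size,
                // then verify the end-of-frame marker before treating the recording as corrupt.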
                reader.BaseStream.Position = frameStartPos + frame.FrameSize;

                try
                {
                    if (reader.ReadString() != PlayFrame.EndOfFrameMarker)
                    {
                        throw new IOException("The recording appears to be corrupt.");
                    }
                    return null;
                }
                catch
                {
                    throw new IOException("The recording appears to be corrupt.");
                }

            }

            return frame;
        }
Example #28
        public InputBase(ConcurrentQueue<JObject> messageQueue, string codecString = null, string type = null)
        {
            _messageQueue = messageQueue;
            _type = type;

            switch (codecString)
            {
                case "json":
                    _codec = new JsonCodec();
                    break;
                default:
                    _codec = new PlainCodec();
                    break;
            }
        }
Example #29
        public RecordInfraredFrame(InfraredFrame frame)
        {
            this.Codec = Codecs.RawColor;

            this.FrameType = FrameTypes.Infrared;
            this.RelativeTime = frame.RelativeTime;

            this.Width = frame.FrameDescription.Width;
            this.Height = frame.FrameDescription.Height;
            this.BytesPerPixel = frame.FrameDescription.BytesPerPixel;

            _frameData = new ushort[this.Width * this.Height];

            frame.CopyFrameDataToArray(_frameData);
        }
        internal HttpEntityManager(
            HttpEntity httpEntity, string[] allowedMethods, Action<HttpEntity> onRequestSatisfied, ICodec requestCodec,
            ICodec responseCodec)
        {
            Ensure.NotNull(httpEntity, "httpEntity");
            Ensure.NotNull(allowedMethods, "allowedMethods");
            Ensure.NotNull(onRequestSatisfied, "onRequestSatisfied");

            HttpEntity = httpEntity;
            TimeStamp = DateTime.UtcNow;

            _allowedMethods = allowedMethods;
            _onRequestSatisfied = onRequestSatisfied;
            _requestCodec = requestCodec;
            _responseCodec = responseCodec;
            _requestedUrl = httpEntity.RequestedUrl;
        }
 private ResponseConfiguration OkResponseConfigurator <T>(ICodec codec, T message)
 {
     return(new ResponseConfiguration(200, "OK", codec.ContentType, Helper.UTF8NoBom));
 }
Example #32
 private string AutoFormatter <T>(ICodec codec, T result)
 {
     return(codec.To(result));
 }
Example #33
 private CodecToStreamingCodec(ICodec <T> codec)
 {
     _codec = codec;
 }
        internal HttpEntityManager(
            HttpEntity httpEntity, string[] allowedMethods, Action <HttpEntity> onRequestSatisfied, ICodec requestCodec,
            ICodec responseCodec, bool logHttpRequests)
        {
            Ensure.NotNull(httpEntity, "httpEntity");
            Ensure.NotNull(allowedMethods, "allowedMethods");
            Ensure.NotNull(onRequestSatisfied, "onRequestSatisfied");

            HttpEntity = httpEntity;
            TimeStamp  = DateTime.UtcNow;

            _allowedMethods     = allowedMethods;
            _onRequestSatisfied = onRequestSatisfied;
            _requestCodec       = requestCodec;
            _responseCodec      = responseCodec;
            _requestedUrl       = httpEntity.RequestedUrl;
            _logHttpRequests    = logHttpRequests;

            if (HttpEntity.Request != null && HttpEntity.Request.ContentLength64 == 0)
            {
                LogRequest(new byte[0]);
            }
        }
Example #35
        /// <summary>
        ///    Reads the contents of the current instance determining
        ///    the size of the riff data, the area the tagging is in,
        ///    and optionally reading in the tags and media properties.
        /// </summary>
        /// <param name="read_tags">
        ///    If <see langword="true" />, any tags found will be read
        ///    into the current instance.
        /// </param>
        /// <param name="style">
        ///    A <see cref="ReadStyle"/> value specifying how the media
        ///    data is to be read into the current instance.
        /// </param>
        /// <param name="riff_size">
        ///    A <see cref="uint"/> value reference to be filled with
        ///    the size of the RIFF data as read from the file.
        /// </param>
        /// <param name="tag_start">
        ///    A <see cref="long" /> value reference to be filled with
        ///    the absolute seek position at which the tagging data
        ///    starts.
        /// </param>
        /// <param name="tag_end">
        ///    A <see cref="long" /> value reference to be filled with
        ///    the absolute seek position at which the tagging data
        ///    ends.
        /// </param>
        /// <exception cref="CorruptFileException">
        ///    The file does not begin with <see cref="FileIdentifier"
        ///    />.
        /// </exception>
        private void Read(bool read_tags, ReadStyle style,
                          out uint riff_size, out long tag_start,
                          out long tag_end)
        {
            Seek(0);
            if (ReadBlock(4) != FileIdentifier)
            {
                throw new CorruptFileException(
                          "File does not begin with RIFF identifier");
            }

            riff_size = ReadBlock(4).ToUInt(false);
            ByteVector stream_format = ReadBlock(4);

            tag_start = -1;
            tag_end   = -1;

            long     position = 12;
            long     length   = Length;
            uint     size     = 0;
            TimeSpan duration = TimeSpan.Zero;

            ICodec [] codecs = new ICodec [0];

            // Read until there are less than 8 bytes to read.
            do
            {
                bool tag_found = false;

                Seek(position);
                string fourcc = ReadBlock(4).ToString(StringType.UTF8);
                size = ReadBlock(4).ToUInt(false);

                switch (fourcc)
                {
                // "fmt " is used by Wave files to hold the
                // WaveFormatEx structure.
                case "fmt ":
                    if (style == ReadStyle.None ||
                        stream_format != "WAVE")
                    {
                        break;
                    }

                    Seek(position + 8);
                    codecs = new ICodec [] {
                        new WaveFormatEx(ReadBlock(18))
                    };
                    break;

                // "data" contains the audio data for wave
                // files. Its contents represent the invariant
                // portion of the file and are used to determine
                // the duration of the file. It should always
                // appear after "fmt ".
                case "data":
                    if (stream_format != "WAVE")
                    {
                        break;
                    }

                    InvariantStartPosition = position;
                    InvariantEndPosition   = position + size;

                    if (style == ReadStyle.None ||
                        codecs.Length != 1 ||
                        !(codecs [0] is WaveFormatEx))
                    {
                        break;
                    }

                    duration += TimeSpan.FromSeconds(
                        (double)size / (double)
                        ((WaveFormatEx)codecs [0])
                        .AverageBytesPerSecond);

                    break;

                // Lists are used to store a variety of data
                // collections. Read the type and act on it.
                case "LIST":
                {
                    switch (ReadBlock(4).ToString(StringType.UTF8))
                    {
                    // "hdrl" is used by AVI files to hold
                    // a media header and BitmapInfoHeader
                    // and WaveFormatEx structures.
                    case "hdrl":
                        if (style == ReadStyle.None ||
                            stream_format != "AVI ")
                        {
                            continue;
                        }

                        AviHeaderList header_list =
                            new AviHeaderList(this,
                                              position + 12,
                                              (int)(size - 4));
                        duration = header_list.Header.Duration;
                        codecs   = header_list.Codecs;
                        break;

                    // "INFO" is a tagging format handled by
                    // the InfoTag class.
                    case "INFO":
                        if (read_tags && info_tag == null)
                        {
                            info_tag = new InfoTag(
                                this,
                                position + 12,
                                (int)(size - 4));
                        }

                        tag_found = true;
                        break;

                    // "MID " is a tagging format handled by
                    // the MovieIdTag class.
                    case "MID ":
                        if (read_tags && mid_tag == null)
                        {
                            mid_tag = new MovieIdTag(
                                this,
                                position + 12,
                                (int)(size - 4));
                        }

                        tag_found = true;
                        break;

                    // "movi" contains the media data for
                    // an AVI and its contents represent
                    // the invariant portion of the file.
                    case "movi":
                        if (stream_format != "AVI ")
                        {
                            break;
                        }

                        InvariantStartPosition = position;
                        InvariantEndPosition   = position + size;
                        break;
                    }
                    break;
                }

                // "ID32" is a custom box for this format that
                // contains an ID3v2 tag.
                case "ID32":
                    if (read_tags && id32_tag == null)
                    {
                        id32_tag = new Id3v2.Tag(this,
                                                 position + 8);
                    }

                    tag_found = true;
                    break;

                // "IDVX" is used by DivX and holds an ID3v1-
                // style tag.
                case "IDVX":
                    if (read_tags && divx_tag == null)
                    {
                        divx_tag = new DivXTag(this,
                                               position + 8);
                    }

                    tag_found = true;
                    break;

                // "JUNK" is a padding element that could be
                // associated with tag data.
                case "JUNK":
                    if (tag_end == position)
                    {
                        tag_end = position + 8 + size;
                    }
                    break;
                }

                // Determine the region of the file that
                // contains tags.
                if (tag_found)
                {
                    if (tag_start == -1)
                    {
                        tag_start = position;
                        tag_end   = position + 8 + size;
                    }
                    else if (tag_end == position)
                    {
                        tag_end = position + 8 + size;
                    }
                }

                // Move to the next item.
            } while ((position += 8 + size) + 8 < length);

            // If we're reading properties and none were found,
            // throw an exception. Otherwise, create the Properties
            // object.
            if (style != ReadStyle.None)
            {
                if (codecs.Length == 0)
                {
                    throw new UnsupportedFormatException(
                              "Unsupported RIFF type.");
                }

                properties = new Properties(duration, codecs);
            }

            // If we're reading tags, update the combined tag.
            if (read_tags)
            {
                tag.SetTags(id32_tag, info_tag, mid_tag, divx_tag);
            }
        }
Example #36
 private PipelineMessageCodec(ICodec <T> baseCodec)
 {
     BaseCodec = baseCodec;
 }
Example #37
 public void Convert(VideoFile videoFile, ICodec codec)
 {
     Console.WriteLine($"Convert {videoFile.Filename} to {codec.Description}");
 }
 private string ConflictFormatter(ICodec codec, ProjectionManagementMessage.OperationFailed message)
 {
     return(message.Reason);
 }
 private static string DefaultFormatter <T>(ICodec codec, T message)
 {
     return(codec.To(message));
 }
 private string NotAuthorizedFormatter(ICodec codec, ProjectionManagementMessage.NotAuthorized message)
 {
     return(message.Reason);
 }
 private ResponseConfiguration ConflictConfigurator(
     ICodec codec, ProjectionManagementMessage.OperationFailed message)
 {
     return(new ResponseConfiguration(409, "Conflict", "text/plain", Helper.UTF8NoBom));
 }
 private ResponseConfiguration NotAuthorizedConfigurator(
     ICodec codec, ProjectionManagementMessage.NotAuthorized message)
 {
     return(new ResponseConfiguration(401, "Not Authorized", "text/plain", Encoding.UTF8));
 }
 private ResponseConfiguration NotFoundConfigurator(ICodec codec, ProjectionManagementMessage.NotFound message)
 {
     return(new ResponseConfiguration(404, "Not Found", "text/plain", Helper.UTF8NoBom));
 }
 private ResponseConfiguration OkNoCacheResponseConfigurator <T>(ICodec codec, T message)
 {
     return(Configure.Ok(codec.ContentType, codec.Encoding, null, null, false));
 }
 private string QueryFormatter(ICodec codec, ProjectionManagementMessage.ProjectionQuery state)
 {
     return(state.Query);
 }
Example #46
 public HttpEntityManager CreateManager(
     ICodec requestCodec, ICodec responseCodec, string[] allowedMethods, Action <HttpEntity> onRequestSatisfied)
 {
     return(new HttpEntityManager(this, allowedMethods, onRequestSatisfied, requestCodec, responseCodec));
 }
 private ResponseConfiguration QueryConfigurator(ICodec codec, ProjectionManagementMessage.ProjectionQuery state)
 {
     return(Configure.Ok("application/javascript", Helper.UTF8NoBom, null, null, false));
 }
        private void ProcessRequest(HttpService httpService, HttpEntity httpEntity)
        {
            var request = httpEntity.Request;

            try {
                var allMatches = httpService.GetAllUriMatches(request.Url);
                if (allMatches.Count == 0)
                {
                    NotFound(httpEntity);
                    return;
                }

                var allowedMethods = GetAllowedMethods(allMatches);

                if (request.HttpMethod.Equals(HttpMethod.Options, StringComparison.OrdinalIgnoreCase))
                {
                    RespondWithOptions(httpEntity, allowedMethods);
                    return;
                }

                var match = allMatches.LastOrDefault(
                    m => m.ControllerAction.HttpMethod.Equals(request.HttpMethod, StringComparison.OrdinalIgnoreCase));
                if (match == null)
                {
                    MethodNotAllowed(httpEntity, allowedMethods);
                    return;
                }

                ICodec requestCodec           = null;
                var    supportedRequestCodecs = match.ControllerAction.SupportedRequestCodecs;
                if (supportedRequestCodecs != null && supportedRequestCodecs.Length > 0)
                {
                    requestCodec = SelectRequestCodec(request.HttpMethod, request.ContentType, supportedRequestCodecs);
                    if (requestCodec == null)
                    {
                        BadContentType(httpEntity, "Invalid or missing Content-Type");
                        return;
                    }
                }

                ICodec responseCodec = SelectResponseCodec(request.QueryString,
                                                           request.AcceptTypes,
                                                           match.ControllerAction.SupportedResponseCodecs,
                                                           match.ControllerAction.DefaultResponseCodec);
                if (responseCodec == null)
                {
                    BadCodec(httpEntity, "Requested URI is not available in requested format");
                    return;
                }


                try {
                    var manager =
                        httpEntity.CreateManager(requestCodec, responseCodec, allowedMethods, satisfied => { });
                    var reqParams = match.RequestHandler(manager, match.TemplateMatch);
                    if (!reqParams.IsDone)
                    {
                        _pending.Add(Tuple.Create(DateTime.UtcNow + reqParams.Timeout, manager));
                    }
                } catch (Exception exc) {
                    Log.ErrorException(exc, "Error while handling HTTP request '{url}'.", request.Url);
                    InternalServerError(httpEntity);
                }
            } catch (Exception exc) {
                Log.ErrorException(exc, "Unhandled exception while processing HTTP request at [{listenPrefixes}].",
                                   string.Join(", ", httpService.ListenPrefixes));
                InternalServerError(httpEntity);
            }

            PurgeTimedOutRequests();
        }
Example #49
        private ICodec SelectResponseCodec(NameValueCollection query, string[] acceptTypes, ICodec[] supported, ICodec @default)
        {
            var requestedFormat = GetFormatOrDefault(query);

            if (requestedFormat == null && acceptTypes.IsEmpty())
            {
                return(@default);
            }

            if (requestedFormat != null)
            {
                return(supported.FirstOrDefault(c => c.SuitableForResponse(MediaType.Parse(requestedFormat))));
            }

            return(acceptTypes.Select(MediaType.TryParse)
                   .Where(x => x != null)
                   .OrderByDescending(v => v.Priority)
                   .Select(type => supported.FirstOrDefault(codec => codec.SuitableForResponse(type)))
                   .FirstOrDefault(corresponding => corresponding != null));
        }
Example #50
 /// <summary>
 /// Create new NsMessageCodec.
 /// </summary>
 /// <param name="codec">The codec used to serialize message data</param>
 /// <param name="idFactory">Used to create identifier from string.</param>
 public NsMessageCodec(ICodec <T> codec, IIdentifierFactory idFactory)
 {
     _codec     = codec;
     _idFactory = idFactory;
 }
Example #51
 public static ICodec CreateCustom(ICodec codec, string contentType, string format)
 {
     return(new CustomCodec(codec, contentType, format));
 }
 /// <summary>
 /// Gets the token to use as the value.
 /// </summary>
 /// <param name="codec">The codec to use for encoding values.</param>
 protected override JToken GetValueJToken(ICodec codec)
 {
     return(new JValue(codec.Encode(_prefix)));
 }
Example #53
 /// <summary>
 ///   Converts the filter to its JSON representation.
 /// </summary>
 /// <param name="codec">Not used</param>
 public JObject ConvertToJson(ICodec codec)
 {
     return(_jObject);
 }
 /// <summary>
 ///    Gets the token to use as the value.
 /// </summary>
 /// <param name="codec">The codec to use for encoding values.</param>
 protected abstract JToken GetValueJToken(ICodec codec);
Example #55
 /// <summary>
 /// Initializes a new instance of the <see cref="XmlMimeConverter" /> class.
 /// </summary>
 /// <param name="valueConverter">The value converter.</param>
 /// <param name="codec">The codec.</param>
 public XmlMimeConverter(ISimpleValueConverter valueConverter, ICodec codec)
 {
     _valueConverter = valueConverter;
     _codec          = codec;
 }
Example #56
        public virtual int sceAudiocodecDecode(TPointer workArea, int codecType)
        {
            workArea.setValue32(8, 0);             // err FieldInfo

            AudiocodecInfo info = infos[workArea.Address];

            if (info == null)
            {
                Console.WriteLine(string.Format("sceAudiocodecDecode no info available for workArea={0}", workArea));
                return(-1);
            }

            int inputBuffer  = workArea.getValue32(24);
            int outputBuffer = workArea.getValue32(32);
            int unknown1     = workArea.getValue32(40);
            int codingMode   = 0;
            int channels     = info.outputChannels;

            int inputBufferSize;

            // Determine the size of one compressed input frame for this codec type.
            switch (codecType)
            {
            case PSP_CODEC_AT3PLUS:
                if (workArea.getValue32(48) == 0)
                {
                    inputBufferSize = workArea.getValue32(64) + 2;
                }
                else
                {
                    inputBufferSize = 0x100A;
                }

                // Skip any audio frame header (found in PSMF files)
                Memory mem = workArea.Memory;
                if (mem.read8(inputBuffer) == 0x0F && mem.read8(inputBuffer + 1) == 0xD0)
                {
                    int frameHeader23    = (mem.read8(inputBuffer + 2) << 8) | mem.read8(inputBuffer + 3);
                    int audioFrameLength = (frameHeader23 & 0x3FF) << 3;
                    inputBufferSize = audioFrameLength;
                    inputBuffer    += 8;
                }
                break;

            case PSP_CODEC_AT3:
                switch (workArea.getValue32(40))
                {
                case 0x4:
                    inputBufferSize = 0x180;
                    break;

                case 0x6:
                    inputBufferSize = 0x130;
                    break;

                case 0xB:
                    inputBufferSize = 0xC0;
                    codingMode      = 1;
                    break;                                     // JOINT_STEREO

                case 0xE:
                    inputBufferSize = 0xC0;
                    break;

                case 0xF:
                    inputBufferSize = 0x98;
                    channels        = 1;
                    break;                                     // MONO

                default:
                    Console.WriteLine(string.Format("sceAudiocodecDecode Atrac3 unknown value 0x{0:X} at offset 40", workArea.getValue32(40)));
                    inputBufferSize = 0x180;
                    break;
                }
                break;

            case PSP_CODEC_MP3:
                inputBufferSize = workArea.getValue32(40);
                break;

            case PSP_CODEC_AAC:
                if (workArea.getValue8(44) == 0)
                {
                    inputBufferSize = 0x600;
                }
                else
                {
                    inputBufferSize = 0x609;
                }
                break;

            case 0x1004:
                inputBufferSize = workArea.getValue32(40);
                break;

            default:
                return(-1);
            }

            int outputBufferSize = getOutputBufferSize(workArea, codecType);

            workArea.setValue32(36, outputBufferSize);

            //if (log.DebugEnabled)
            {
                Console.WriteLine(string.Format("sceAudiocodecDecode inputBuffer=0x{0:X8}, outputBuffer=0x{1:X8}, inputBufferSize=0x{2:X}, outputBufferSize=0x{3:X}", inputBuffer, outputBuffer, inputBufferSize, outputBufferSize));
                Console.WriteLine(string.Format("sceAudiocodecDecode unknown1=0x{0:X8}", unknown1));
                if (log.TraceEnabled)
                {
                    log.trace(string.Format("sceAudiocodecDecode inputBuffer: {0}", Utilities.getMemoryDump(inputBuffer, inputBufferSize)));
                }
            }

            ICodec codec = info.Codec;

            if (codec == null)
            {
                Console.WriteLine(string.Format("sceAudiocodecDecode no codec available for codecType=0x{0:X}", codecType));
                return(-1);
            }

            if (!info.CodecInitialized)
            {
                codec.init(inputBufferSize, channels, info.outputChannels, codingMode);
                info.setCodecInitialized();
            }

            int bytesConsumed = codec.decode(inputBuffer, inputBufferSize, outputBuffer);

            //if (log.DebugEnabled)
            {
                if (bytesConsumed < 0)
                {
                    Console.WriteLine(string.Format("codec.decode returned error 0x{0:X8}, data: {1}", bytesConsumed, Utilities.getMemoryDump(inputBuffer, inputBufferSize)));
                }
                else
                {
                    Console.WriteLine(string.Format("sceAudiocodecDecode bytesConsumed=0x{0:X}", bytesConsumed));
                }
            }

            if (codec is Mp3Decoder)
            {
                Mp3Header mp3Header = ((Mp3Decoder)codec).Mp3Header;
                if (mp3Header != null)
                {
                    // See https://github.com/uofw/uofw/blob/master/src/avcodec/audiocodec.c
                    workArea.setValue32(68, mp3Header.bitrateIndex);                     // MP3 bitrateIndex [0..14]
                    workArea.setValue32(72, mp3Header.rawSampleRateIndex);               // MP3 freqType [0..3]

                    int type;
                    if (mp3Header.mpeg25 != 0)
                    {
                        type = 2;
                    }
                    else if (mp3Header.lsf != 0)
                    {
                        type = 0;
                    }
                    else
                    {
                        type = 1;
                    }
                    workArea.setValue32(56, type);                     // type [0..2]

                    //if (log.DebugEnabled)
                    {
                        Console.WriteLine(string.Format("sceAudiocodecDecode MP3 bitrateIndex={0:D}, rawSampleRateIndex={1:D}, type={2:D}", mp3Header.bitrateIndex, mp3Header.rawSampleRateIndex, type));
                    }
                }
            }

            workArea.setValue32(28, bytesConsumed > 0 ? bytesConsumed : inputBufferSize);

            Modules.ThreadManForUserModule.hleKernelDelayThread(sceMpeg.atracDecodeDelay, false);

            return(0);
        }
Example #57
 public Channel(ICodec <TPacket> codec, Stream stream) : this(codec, stream, stream)
 {
 }
 private string QueryConfigFormatter(ICodec codec, ProjectionManagementMessage.ProjectionQuery state)
 {
     return(state.ToJson());
 }
 private ResponseConfiguration ProjectionConfigConfigurator(ICodec codec, ProjectionManagementMessage.ProjectionConfig state)
 {
     return(Configure.Ok("application/json", Helper.UTF8NoBom, null, null, false));
 }
 private string ProjectionConfigFormatter(ICodec codec, ProjectionManagementMessage.ProjectionConfig config)
 {
     return(config.ToJson());
 }