internal Codec(string name, CodecType type) { EncoderFeatureLevel = new FeatureLevel(); DecoderFeatureLevel = new FeatureLevel(); Name = name; Type = type; }
public AvroOutputFormatter(CodecType codec = CodecType.Null) { _codec = codec; this.SupportedMediaTypes.Clear(); this.SupportedMediaTypes.Add(MediaTypeHeaderValue.Parse(Consts.AvroHeader)); }
/// <summary> /// Initializes the codec with its payload type, retransmission payload type, priority and codec type. /// </summary> /// <param name="payloadType">The payload type of the codec.</param> /// <param name="rtxPayloadType">The retransmission (RTX) payload type.</param> /// <param name="priority">The relative priority of the codec.</param> /// <param name="codecType">The codec type.</param> protected AbstractCodec(byte payloadType, byte rtxPayloadType, int priority, CodecType codecType) { _payloadType = payloadType; _rtxPayloadType = rtxPayloadType; _priority = priority; _codecType = codecType; }
/// <summary> /// Converts Json object directly to Avro format /// Choosing <paramref name="codecType"/> reduces output object size /// </summary> public static byte[] Json2Avro <T>(string json, CodecType codecType) { var deserializedJson = JsonConvert.DeserializeObject <T>(json); var result = AvroConvert.Serialize(deserializedJson, codecType); return(result); }
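A minimal usage sketch for the Json2Avro helper above, assuming it is exposed on the same AvroConvert static class it calls into; the Person class is hypothetical, and CodecType.Null is the only codec value confirmed elsewhere in these snippets (a compression codec from the same enum, where available, would shrink the output as the summary notes).

// Hypothetical POCO, introduced only to make the generic call concrete.
public class Person
{
    public string Name { get; set; }
    public int Age { get; set; }
}

// CodecType.Null leaves the Avro container uncompressed; a compression codec
// from the same enum (if the library offers one) reduces the payload size.
string json = "{\"Name\":\"Ada\",\"Age\":36}";
byte[] avroBytes = AvroConvert.Json2Avro<Person>(json, CodecType.Null);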
public RTMAudioHeader(byte version, ContainerType containerType, CodecType codecType, string lang, long duration, int sampleRate) { this.version = version; this.containerType = containerType; this.codecType = codecType; this.lang = lang; this.duration = duration; this.sampleRate = sampleRate; MemoryStream stream = new MemoryStream(); Dictionary <string, object> infoData = new Dictionary <string, object>() { { "lang", this.lang }, { "dur", this.duration }, { "srate", this.sampleRate } }; byte[] bytes = new byte[0]; using (MemoryStream outputStream = new MemoryStream()) { MsgPacker.Pack(outputStream, infoData); bytes = outputStream.ToArray(); } stream.Write(new byte[4] { CurrentVersion, (byte)containerType, (byte)codecType, 1 }, 0, 4); stream.Write(BitConverter.GetBytes(bytes.Length), 0, 4); stream.Write(bytes, 0, bytes.Length); this.headerArray = stream.ToArray(); }
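For reference, the constructor above lays the header out as a 4-byte prefix (version byte, container type, codec type, a constant 1), followed by a 4-byte length written with BitConverter and then the MessagePack-encoded info dictionary. A minimal reader sketch under that assumption (unpacking the MessagePack body is out of scope here):

// Parses only the fixed-size prefix produced by RTMAudioHeader; names are illustrative.
static (byte version, byte containerType, byte codecType, int infoLength) ReadRtmAudioHeaderPrefix(byte[] headerArray)
{
    byte version = headerArray[0];
    byte containerType = headerArray[1];
    byte codecType = headerArray[2];
    // headerArray[3] holds the constant 1 written by the constructor.
    int infoLength = BitConverter.ToInt32(headerArray, 4); // length of the MessagePack payload
    return (version, containerType, codecType, infoLength);
}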
public AvroOutputFormatter(CodecType codec = CodecType.Null) { _codec = codec; this.SupportedMediaTypes.Clear(); this.SupportedMediaTypes.Add(MediaTypeHeaderValue.Parse("application/avro")); }
private void Codec(CodecType codec) { switch (codec) { case CodecType.EncodeUrl: if (string.IsNullOrWhiteSpace(_viewModel.SourceText)) { _viewModel.EncodeText = ""; return; } _viewModel.EncodeText = Uri.EscapeUriString(_viewModel.SourceText); break; case CodecType.DecodeUrl: if (string.IsNullOrWhiteSpace(_viewModel.EncodeText)) { _viewModel.SourceText = ""; return; } _viewModel.SourceText = Uri.UnescapeDataString(_viewModel.EncodeText); break; case CodecType.EncodeUrlComponent: if (string.IsNullOrWhiteSpace(_viewModel.SourceText)) { _viewModel.EncodeText = ""; return; } _viewModel.EncodeText = HttpUtility.UrlEncode(_viewModel.SourceText); break; case CodecType.DecodeUrlComponent: if (string.IsNullOrWhiteSpace(_viewModel.EncodeText)) { _viewModel.SourceText = ""; return; } _viewModel.SourceText = HttpUtility.UrlDecode(_viewModel.EncodeText); break; case CodecType.EncodeHtml: if (string.IsNullOrWhiteSpace(_viewModel.SourceText)) { _viewModel.EncodeText = ""; return; } _viewModel.EncodeText = HttpUtility.HtmlEncode(_viewModel.SourceText); break; case CodecType.DecodeHtml: if (string.IsNullOrWhiteSpace(_viewModel.EncodeText)) { _viewModel.SourceText = ""; return; } _viewModel.SourceText = HttpUtility.HtmlDecode(_viewModel.EncodeText); break; } }
/// <summary> /// Constructor for specific codecs. /// </summary> /// <param name="audioCodec">The audio codec to use.</param> /// <param name="videoCodec">The video codec to use.</param> public Codecs(eConf.eCodecType audioCodec, eConf.eCodecType videoCodec) { _AudioCodec = audioCodec; _VideoCodec = videoCodec; _Type = CodecType.Specific; // Boolean table initialized to true EconfClassPlayer.EConfPlayer.Instance.GetValidCodecs(out _Codecs); }
public void DecompressRecordAndWriteToStream(CodecType codecType, TlkFileRecord record, Stream stream) { Array.Clear(_compressedAudioBuffer, 0, _compressedAudioBuffer.Length); Array.Clear(_uncompressedAudioBuffer, 0, _uncompressedAudioBuffer.Length); GetCompressedAudioDataFromRecord(record, _compressedAudioBuffer, 0); DecompressAudioDataAsWAVToStream(codecType, _compressedAudioBuffer, 0, (int)record.compressedDataLength, stream); }
internal Codec(CodecType codecType, int index, string name, MediaApi api, bool hasConfigWindow = false) { _codecType = codecType; _index = index; _name = name; _api = api; _hasConfigWindow = hasConfigWindow; }
public static extern CodecID av_guess_codec(IntPtr pAVOutputFormat, [MarshalAs(UnmanagedType.LPTStr)] String short_name, [MarshalAs(UnmanagedType.LPTStr)] String filename, [MarshalAs(UnmanagedType.LPTStr)] String mime_type, CodecType type);
internal void WriteHeader(string schema, CodecType codecType) { GenerateSyncData(); _header.AddMetadata(DataFileConstants.CodecMetadataKey, AbstractCodec.CreateCodec(codecType).Name); _header.AddMetadata(DataFileConstants.SchemaMetadataKey, schema); _writer.WriteHeader(_header); }
public NativeEncodeConfig(int w, int h, int bitrate, int f, CodecType codectype, string path) { this.width = w; this.height = h; this.bitRate = bitrate; this.fps = f; this.codecType = (int)codectype; this.outPutPath = path; }
private static void DecompressAudioDataAsWAVToStream(CodecType codecType, byte[] compressedAudioBuffer, int compressedAudioBufferOffset, int compressedDataLength, Stream stream) { var uncompressedSize = DecompressAudioData(codecType, compressedAudioBuffer, compressedAudioBufferOffset, compressedDataLength, _uncompressedAudioBuffer, 0); WritePCMBitsAsWAVToStream(_uncompressedAudioBuffer, 0, uncompressedSize, stream); }
public static Codec FindCodec(CodecType codecType, string name) { CheckInit(); CodecList codecs = GetCodecs(codecType); Codec result = codecs.FindCodec(name); return(result); }
public static int CompressAudioToStream(CodecType codecType, Stream uncompressedDataStream, int uncompressedDataLength, Stream compressedDataStream) { var codec = GetCodec(codecType); using (codec) { return(codec.Encode(uncompressedDataStream, uncompressedDataLength, compressedDataStream)); } }
private void Initialize(CodecType type, string name) { ID = null; Type = type; Name = name; if (!string.IsNullOrEmpty(Name)) { CreateID(); } }
public NativeEncodeConfig(int w, int h, int bitrate, int f, CodecType codectype, string path, bool usealpha = false) { this.width = w; this.height = h; this.bitRate = bitrate; this.fps = f; this.codecType = (int)codectype; this.outPutPath = path; this.useStepTime = 0; this.useAlpha = usealpha; }
protected MediaStream(AVStream *pointer, CodecType codecType = CodecType.Decoder) { if (pointer == null) { throw new ArgumentNullException(nameof(pointer)); } this.pointer = pointer; Codec = Codec.Create(this, codecType); }
private static int DecompressAudioData(CodecType codecType, byte[] compressedDataBuffer, int compressedBufferOffset, int compressedDataLength, byte[] outputBuffer, int outputBufferOffset) { var codec = GetCodec(codecType); using (codec) { return(codec.Decode(compressedDataBuffer, compressedBufferOffset, compressedDataLength, ref outputBuffer, outputBufferOffset)); } }
public ActionResult Edit(CodecType model) { if (!string.IsNullOrWhiteSpace(model.Name)) { model.UpdatedBy = User.Identity.Name; _codecTypeRepository.Save(model); return(RedirectToAction("Index")); } ModelState.AddModelError("Name", Resources.Name_Required); return(View(model)); }
public static int DecompressAudioDataFromStream(CodecType codecType, Stream compressedDataStream, int compressedDataLength, int uncompressedLength, Stream uncompressedDataStream) { var codec = GetCodec(codecType); using (codec) { return(codec.Decode(compressedDataStream, compressedDataLength, uncompressedLength, uncompressedDataStream)); } }
/// <summary> Constructor. </summary> /// <param name="w"> The width.</param> /// <param name="h"> The height.</param> /// <param name="bitrate"> The bitrate.</param> /// <param name="f"> The frames per second.</param> /// <param name="codectype"> The codec type.</param> /// <param name="path"> Full pathname of the output file.</param> /// <param name="usealpha"> (Optional) True to use the alpha channel.</param> public NativeEncodeConfig(int w, int h, int bitrate, int f, CodecType codectype, string path, bool usealpha = false) { this.width = w; this.height = h; this.bitRate = bitrate; this.fps = f; this.codecType = (int)codectype; this.outPutPath = path; this.useStepTime = 0; this.useAlpha = usealpha; this.useLinnerTexture = NRRenderer.isLinearColorSpace; }
private CodecType _codecType; // codec type #endregion </Fields> #region <Ctors> public StreamInfo() { HasAudio = false; HasVideo = false; ContainerType = ContainerType.Unknown; _msNaturalDuration = 0; _frameWidth = 0; _frameHeight = 0; Bitrate = 0; _bitrateType = BitrateType.Unknown; _codecType = CodecType.Unknown; Fps = 0; }
/// <summary> /// Serializes given object to AVRO format (including header with metadata) /// Choosing <paramref name="codecType"/> reduces output object size /// </summary> public static byte[] Serialize(object obj, CodecType codecType) { using (MemoryStream resultStream = new MemoryStream()) { var schema = Schema.Create(obj); using (var writer = new Encoder(schema, resultStream, codecType)) { writer.Append(obj); } var result = resultStream.ToArray(); return(result); } }
public static byte[] SerializeHeadless(object obj, string schema, CodecType codecType = CodecType.Null) { using (MemoryStream resultStream = new MemoryStream()) { var encoder = new Writer(resultStream); var writer = Resolver.ResolveWriter(Schema.Schema.Parse(schema)); writer(obj, encoder); var result = resultStream.ToArray(); return(result); } }
/// <summary> /// Serializes given object to AVRO format (including header with metadata) /// Choosing <paramref name="codecType"/> reduces output object size /// </summary> public static byte[] Serialize(object obj, CodecType codecType) { using (MemoryStream resultStream = new MemoryStream()) { string schema = GenerateSchema(obj?.GetType()); using (var writer = new Encoder(Schema.Schema.Parse(schema), resultStream, codecType)) { writer.Append(obj); } var result = resultStream.ToArray(); return(result); } }
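A short usage sketch for the serializer above; the Measurement type is hypothetical, and the symmetric AvroConvert.Deserialize<T> call is assumed to exist alongside Serialize.

// Hypothetical record type for illustration only.
public class Measurement
{
    public string Sensor { get; set; }
    public double Value { get; set; }
}

var sample = new Measurement { Sensor = "temp-01", Value = 21.5 };

// CodecType.Null produces an uncompressed container; as the summary notes,
// choosing a compression codec reduces the output size.
byte[] payload = AvroConvert.Serialize(sample, CodecType.Null);
var roundTripped = AvroConvert.Deserialize<Measurement>(payload); // assumed counterpart API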
public void DecompressRecordAndWriteToFile(CodecType codecType, TlkFileRecord record, string fileName) { using (var stream = new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite)) { try { DecompressRecordAndWriteToStream(codecType, record, stream); } finally { stream.Close(); } } }
protected Codec(AVCodecContext *context, CodecType type) { Id = (context->codec_id).ToCodecId(); Type = type; this.pointer = context->codec; if (pointer == null) { pointer = Get(Id, Type); } this.context = new CodecContext(context); }
private static IAudioCodec GetCodec(CodecType codecType) { switch (codecType) { case CodecType.LH: return(new LHCodec()); case CodecType.SPX: return(new SpeexCodec()); default: return(null); } }
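Because the factory above returns null for an unsupported CodecType, callers that immediately dereference the result (such as CompressAudioToStream above) would fail with a NullReferenceException. A defensive wrapper is sketched below; the name GetRequiredCodec is illustrative and not part of the original API.

private static IAudioCodec GetRequiredCodec(CodecType codecType)
{
    var codec = GetCodec(codecType);
    if (codec == null)
    {
        // Fail fast with a descriptive error instead of a later NullReferenceException.
        throw new NotSupportedException($"Unsupported codec type: {codecType}");
    }
    return codec;
}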
/// <summary> /// Decode this PDU from the PduMarshaler. /// </summary> /// <param name="marshaler">This is used to decode the fields of this PDU.</param> public override bool Decode(PduMarshaler marshaler) { try { base.Decode(marshaler); this.surfaceId = marshaler.ReadUInt16(); pduLen += 2; this.codecId = (CodecType)marshaler.ReadUInt16(); pduLen += 2; this.codecContextId = marshaler.ReadUInt32(); pduLen += 4; this.pixelFormat = (PixelFormat)marshaler.ReadByte(); pduLen++; // Bitmap. this.bitmapDataLength = marshaler.ReadUInt32(); pduLen += 4; this.bitmapData = marshaler.ReadBytes((int)this.bitmapDataLength); pduLen += this.bitmapDataLength; return true; } catch { marshaler.Reset(); throw new PDUDecodeException(this.GetType(), marshaler.ReadToEnd()); } }
public Atrac(PspEmulatorContext PspEmulatorContext, CodecType CodecType) { this.CodecType = CodecType; }
public Atrac(PspEmulatorContext PspEmulatorContext, byte[] Data) { CodecType = CodecType.PSP_MODE_AT_3_PLUS; SetData(Data); }
public MediaFile(FileStream File, CodecType codecType) : this(File.Name) { }
public Atrac sceAtracGetAtracID(CodecType CodecType) { if (CodecType != CodecType.PSP_MODE_AT_3 && CodecType != CodecType.PSP_MODE_AT_3_PLUS) { throw(new SceKernelException(SceKernelErrors.ATRAC_ERROR_INVALID_CODECTYPE)); } return TryToAlloc(new Atrac(InjectContext, CodecType)); }
public Atrac(InjectContext InjectContext, byte* Data, int DataLength) { InjectContext.InjectDependencesTo(this); PrimaryBuffer = HleMemoryManager.GetPartition(MemoryPartitions.User).Allocate(1024); CodecType = CodecType.PSP_MODE_AT_3_PLUS; SetData(Data, DataLength); }
public bool UpdateCodecsAccessibility(VATRPAccount account, CodecType codecType) { var retValue = true; if (linphoneCore == IntPtr.Zero) throw new Exception("Linphone not initialized"); if (account == null) throw new ArgumentNullException("Account is not defined"); var cfgCodecs = codecType == CodecType.Video ? account.VideoCodecsList : account.AudioCodecsList; foreach (var cfgCodec in cfgCodecs) { var payloadPtr = LinphoneAPI.linphone_core_find_payload_type(linphoneCore, cfgCodec.CodecName, cfgCodec.IPBitRate, cfgCodec.Channels); if (payloadPtr == IntPtr.Zero) continue; if (cfgCodec.Status == (LinphoneAPI.linphone_core_payload_type_enabled(linphoneCore, payloadPtr) == 1)) continue; LinphoneAPI.linphone_core_enable_payload_type(linphoneCore, payloadPtr, cfgCodec.Status); } return retValue; }
/// <summary> /// Constructor, create a wire to surface message to transfer a bitmap. /// </summary> /// <param name="sId">This is used to indicate the target surface id.</param> /// <param name="cId">This is used to indicate the codecId.</param> /// <param name="pixFormat">This is used to indicate the pixel format to fill target surface.</param> /// <param name="bmRect">This is used to indicate border of bitmap on target surface.</param> /// <param name="bmData">This is used to indicate the bitmap data encoded by cId codec.</param> public RDPGFX_WIRE_TO_SURFACE_PDU_1(ushort sId, CodecType cId, PixelFormat pixFormat, RDPGFX_RECT16 bmRect, byte[] bmData) { this.Header.cmdId = PacketTypeValues.RDPGFX_CMDID_WIRETOSURFACE_1; this.Header.flags = 0x0; this.Header.pduLength = (uint)Marshal.SizeOf(Header); this.surfaceId = sId; this.codecId = cId; this.pixelFormat = pixFormat; this.destRect = bmRect; this.Header.pduLength += 13; this.bitmapData = bmData; if (bmData != null) { this.bitmapDataLength = (uint)bmData.Length; this.Header.pduLength += this.bitmapDataLength; // add length of bitmapData } this.Header.pduLength += 4; // 4 bytes for bitmapDataLength field. }
/** * Sets the video encoding parameters. * @param width : Video width. * @param height : Video height. * @param fps : Video frames per second. * @param type : Enumerator with encoding types (see CodecType doc.). * @param bitrate : Video bitrate, only applies when type = CBR or type = VBR_BITRATE. * @param quality : Video quality, only applies when type = VBR_QUALITY. * @param peak : Video bandwidth peak, only applies when type = VBR_PEAK. * @param key_interval : Time interval between key frames (in seconds). */ public DVRBResult SetVideoParams( int width, int height, int fps, CodecType type, int bitrate, int quality, int peak, ulong key_interval) { try { this.wmEncoderAudience.set_VideoWidth(0, width); this.wmEncoderAudience.set_VideoHeight(0, height); this.wmEncoderAudience.set_VideoFPS(0, fps * 1000); this.wmEncoderAudience.set_VideoKeyFrameDistance(0, key_interval * 1000); switch (type) { case CodecType.CBR: this.wmEncoderProfile.set_VBRMode(WMENC_SOURCE_TYPE.WMENC_VIDEO, 0, WMENC_PROFILE_VBR_MODE.WMENC_PVM_NONE); this.wmEncoderAudience.set_VideoBitrate(0, bitrate); break; case CodecType.VBR_QUALITY: this.wmEncoderProfile.set_VBRMode(WMENC_SOURCE_TYPE.WMENC_VIDEO, 0, WMENC_PROFILE_VBR_MODE.WMENC_PVM_UNCONSTRAINED); this.wmEncoderAudience.set_VideoCompressionQuality(0, quality); break; case CodecType.VBR_BITRATE: this.wmEncoderProfile.set_VBRMode(WMENC_SOURCE_TYPE.WMENC_VIDEO, 0, WMENC_PROFILE_VBR_MODE.WMENC_PVM_BITRATE_BASED); this.wmEncoderAudience.set_VideoBitrate(0, bitrate); break; case CodecType.VBR_PEAK: this.wmEncoderProfile.set_VBRMode(WMENC_SOURCE_TYPE.WMENC_VIDEO, 0, WMENC_PROFILE_VBR_MODE.WMENC_PVM_PEAK); this.wmEncoderAudience.set_VideoPeakBitrate(0, peak); break; } } catch (Exception e) { return new DVRBResult(DVRBResult.ERROR, e.Message); } return new DVRBResult(); }
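An illustrative call to SetVideoParams above; the encoder instance name and numeric values are placeholders, and the bitrate unit is assumed to be bits per second.

// Constant-bitrate example: per the switch above, quality and peak are ignored
// when type is CodecType.CBR, so placeholder values are passed for them.
DVRBResult result = encoder.SetVideoParams(
    1280, 720, 25,      // width, height, fps
    CodecType.CBR,
    2000000,            // bitrate (assumed bits per second)
    0, 0,               // quality and peak: not used for CBR
    4);                 // key frame interval in seconds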
/// <summary> /// Constructor, create a wire to surface message to transfer a bitmap. /// </summary> /// <param name="sId">This is used to indicate the target surface id.</param> /// <param name="codecCtxId">This is used to indicate the codecContextId.</param> /// <param name="pixFormat">This is used to indicate the pixel format to fill target surface.</param> /// <param name="bmData">This is used to indicate the bitmap data encoded by cId codec.</param> public RDPGFX_WIRE_TO_SURFACE_PDU_2(ushort sId, uint codecCtxId, PixelFormat pixFormat, byte[] bmData) { this.Header.cmdId = PacketTypeValues.RDPGFX_CMDID_WIRETOSURFACE_2; this.Header.flags = 0x0; this.Header.pduLength = (uint)Marshal.SizeOf(Header); this.surfaceId = sId; this.codecId = CodecType.RDPGFX_CODECID_CAPROGRESSIVE; this.codecContextId = codecCtxId; this.pixelFormat = pixFormat; this.bitmapDataLength = (uint)bmData.Length; this.Header.pduLength += 13; this.bitmapData = bmData; this.Header.pduLength += this.bitmapDataLength; }
/// <summary> /// Decode this PDU from the PduMarshaler. /// </summary> /// <param name="marshaler">This is used to decode the fields of this PDU.</param> public override bool Decode(PduMarshaler marshaler) { try { base.Decode(marshaler); this.surfaceId = marshaler.ReadUInt16(); pduLen += 2; this.codecId = (CodecType)marshaler.ReadUInt16(); pduLen += 2; this.pixelFormat = (PixelFormat)marshaler.ReadByte(); pduLen ++; // Destination rectangle. this.destRect.left = marshaler.ReadUInt16(); this.destRect.top = marshaler.ReadUInt16(); this.destRect.right = marshaler.ReadUInt16(); this.destRect.bottom = marshaler.ReadUInt16(); pduLen += 8; // Bitmap. this.bitmapDataLength = marshaler.ReadUInt32(); pduLen += 4; this.bitmapData = marshaler.ReadBytes((int)this.bitmapDataLength); pduLen += this.bitmapDataLength; return true; } catch { marshaler.Reset(); throw new PDUDecodeException(this.GetType(), marshaler.ReadToEnd()); } }
public static extern CodecID av_guess_codec(ref AVOutputFormat pAVOutputFormat, string short_name, string filename, string mime_type, CodecType type);
public Atrac(InjectContext InjectContext, CodecType CodecType) { InjectContext.InjectDependencesTo(this); PrimaryBuffer = HleMemoryManager.GetPartition(MemoryPartitions.User).Allocate(1024); this.CodecType = CodecType; }
/// <summary> /// Method to make a wire to surface Pdu1 /// </summary> /// <param name="sId">This is used to indicate the target surface id.</param> /// <param name="cId">This is used to indicate the codecId.</param> /// <param name="pixFormat">This is used to indicate the pixel format to fill target surface.</param> /// <param name="bmRect">This is used to indicate border of bitmap on target surface.</param> /// <param name="bmData">This is used to indicate the bitmap data encoded by cId codec.</param> void MakeWireToSurfacePdu1(ushort sId, CodecType cId, PixelFormat pixFormat, RDPGFX_RECT16 bmRect, byte[] bmData) { RDPGFX_WIRE_TO_SURFACE_PDU_1 wireToSurf1 = egfxServer.CreateWireToSurfacePdu1(sId, cId, pixFormat, bmRect, bmData); AddPdusToBuffer(wireToSurf1); }
public Atrac(PspEmulatorContext PspEmulatorContext, CodecType CodecType) { PspEmulatorContext.InjectDependencesTo(this); PrimaryBuffer = HleMemoryManager.GetPartition(Managers.HleMemoryManager.Partitions.User).Allocate(1024); this.CodecType = CodecType; }
public bool UpdateNativeCodecs(VATRPAccount account, CodecType codecType) { var retValue = true; if (linphoneCore == IntPtr.Zero) throw new Exception("Linphone not initialized"); if (account == null) throw new ArgumentNullException("Account is not defined"); var cfgCodecs = codecType == CodecType.Video ? account.VideoCodecsList : account.AudioCodecsList; var linphoneCodecs = codecType == CodecType.Video ? _videoCodecs : _audioCodecs; var tmpCodecs = new List<VATRPCodec>(); foreach (var cfgCodec in cfgCodecs) { // find cfgCodec in linphone codec list var pt = LinphoneAPI.linphone_core_find_payload_type(linphoneCore, cfgCodec.CodecName, LinphoneAPI.LINPHONE_FIND_PAYLOAD_IGNORE_RATE, cfgCodec.Channels); if (pt == IntPtr.Zero) { LOG.Warn(string.Format("Codec not found: {0} , Channels: {1} ", cfgCodec.CodecName, cfgCodec.Channels)); tmpCodecs.Add(cfgCodec); } } foreach (var codec in linphoneCodecs) { if (!cfgCodecs.Contains(codec)) { LOG.Info(string.Format("Adding codec into configuration: {0} , Channels: {1} ", codec.CodecName, codec.Channels)); cfgCodecs.Add(codec); } } foreach (var codec in tmpCodecs) { LOG.Info(string.Format("Removing Codec from configuration: {0} , Channels: {1} ", codec.CodecName, codec.Channels)); cfgCodecs.Remove(codec); } foreach (var codec in linphoneCodecs) { for (int i = 0; i < cfgCodecs.Count; i++) { if (cfgCodecs[i].CodecName == codec.CodecName && cfgCodecs[i].Rate == codec.Rate && cfgCodecs[i].Channels == codec.Channels) { cfgCodecs[i].Priority = codec.Priority; cfgCodecs[i].Status = codec.Status; } } } return retValue; }
public Atrac(PspEmulatorContext PspEmulatorContext, byte[] Data) { PspEmulatorContext.InjectDependencesTo(this); PrimaryBuffer = HleMemoryManager.GetPartition(Managers.HleMemoryManager.Partitions.User).Allocate(1024); CodecType = CodecType.PSP_MODE_AT_3_PLUS; SetData(Data); }
public void FillCodecsList(VATRPAccount account, CodecType codecType) { if (account == null) throw new ArgumentNullException("Account is not defined"); var cfgCodecs = codecType == CodecType.Video ? account.VideoCodecsList : account.AudioCodecsList; var linphoneCodecs = codecType == CodecType.Video ? _videoCodecs : _audioCodecs; cfgCodecs.Clear(); cfgCodecs.AddRange(linphoneCodecs); }
public int sceAtracGetAtracID(CodecType CodecType) { var Atrac = new Atrac(PspEmulatorContext, CodecType); var AtracId = AtracList.Create(Atrac); return AtracId; }