public static IPipelineFilterStep ForAcceleration( HardwareAccelerationMode accelMode, FrameState currentState, WatermarkState watermarkState, FrameSize resolution) => accelMode switch {
/// <summary>
/// Parses the <c>FrameSize</c> string (expected form "WIDTHxHEIGHT", e.g. "640x480")
/// into the <c>Width</c> and <c>Height</c> members.
/// </summary>
/// <returns>true when both dimensions parsed; false otherwise.</returns>
bool ParseFrameSize()
{
    if (string.IsNullOrEmpty(FrameSize))
    {
        return false;
    }

    Width = 0;
    Height = 0;

    var parts = FrameSize.Split('x');
    if (parts.Length != 2)
    {
        return false;
    }

    // int.TryParse avoids exceptions for the expected-failure path, and parsing
    // into temporaries avoids the old bug where Width was left assigned when
    // only the height component failed to parse.
    if (!int.TryParse(parts[0], out int width) || !int.TryParse(parts[1], out int height))
    {
        return false;
    }

    Width = width;
    Height = height;
    return true;
}
/// <summary>
/// Renders the barchart.com chart with the price box shown over it.
/// Data for the chart is added inside the chartDiv tag; the price box contains
/// open, close, min and max records and is populated by client-side javascript.
/// The "barchartStyle" css class controls how the price box looks.
/// </summary>
/// <param name="contractCode">The contract code.</param>
/// <param name="periodSize">BarChartProxy.Constants.PeriodSize.Monthly; other values are daily and weekly.</param>
/// <param name="frameSize">Size of the frame.</param>
/// <param name="movingAverages">SMA indicator periods; only values greater than 1 are used.</param>
/// <returns>The chart HTML fragment.</returns>
public string GetChart(string contractCode, Period periodSize, FrameSize frameSize, List<int> movingAverages)
{
    // Example target URLs:
    // http://www.barchart.com/chart.php?sym=KCZ10&indicators=
    // http://www.barchart.com/chart.php?sym={0}&style=technical{1}&d=M&sd=&ed=&size=M&log=0&t=BAR&v=2&g=1&evnt=1&late=1&o1=&o2=&o3=&sh=100{2}&txtDate=#jump
    string chartAddress = GetChartPath(contractCode, periodSize, frameSize, movingAverages);

    return GetChart(contractCode, chartAddress, frameSize, periodSize, movingAverages);
}
/// <summary>
/// Writes the new frame dimensions into a PRX video streamconfig node:
/// updates the rcsource/rctarget rectangle extents (right/bottom) and the
/// bitmapinfoheader biwidth/biheight values, when those child nodes exist.
/// </summary>
/// <param name="streamconfig">The streamconfig XML node to modify in place.</param>
/// <param name="newFrameSize">Dimensions to write.</param>
void AddFrameSizesToVideoStreamConfig(ref XmlNode streamconfig, FrameSize newFrameSize)
{
    XmlNode xnodewmmediatype;
    XmlNode xnodevidinfoheader;
    XmlNode xnode;

    // The unused "foo" capture of SetAttributeIfFound's return value has been
    // removed; attributes that are absent are simply skipped.
    if (FindChildByName(streamconfig, "wmmediatype", out xnodewmmediatype))
    {
        if (FindChildByName(xnodewmmediatype, "videoinfoheader", out xnodevidinfoheader))
        {
            // Source and target rectangles store their extents as right/bottom.
            if (FindChildByName(xnodevidinfoheader, "rcsource", out xnode))
            {
                SetAttributeIfFound(xnode, "right", newFrameSize.Width);
                SetAttributeIfFound(xnode, "bottom", newFrameSize.Height);
            }
            if (FindChildByName(xnodevidinfoheader, "rctarget", out xnode))
            {
                SetAttributeIfFound(xnode, "right", newFrameSize.Width);
                SetAttributeIfFound(xnode, "bottom", newFrameSize.Height);
            }
            if (FindChildByName(xnodevidinfoheader, "bitmapinfoheader", out xnode))
            {
                SetAttributeIfFound(xnode, "biwidth", newFrameSize.Width);
                SetAttributeIfFound(xnode, "biheight", newFrameSize.Height);
            }
        }
    }
}
public static IPipelineFilterStep ForAcceleration( HardwareAccelerationMode accelMode, FrameState currentState, FrameSize scaledSize, FrameSize paddedSize) => accelMode switch {
/// <summary>
/// Initializes voice settings with the default encoder configuration:
/// medium audio quality, medium frame size, and high noise suppression.
/// </summary>
public VoiceSettings()
{
    _quality = AudioQuality.Medium;
    _frameSize = FrameSize.Medium;
    _denoiseAmount = (int)NoiseSuppressionLevels.High;
}
/// <summary>
/// Creates a new <see cref="CommonFrameHeader"/> from given <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Buffer that contains data to parse.</param>
/// <param name="startIndex">Start index into buffer where valid data begins.</param>
/// <exception cref="InvalidOperationException">Thrown when the buffer does not begin with the expected 0xA5 0x46 header bytes.</exception>
public CommonFrameHeader(byte[] buffer, int startIndex)
{
    // Validate SEL Fast Message data image
    if (buffer[startIndex] != Common.HeaderByte1 || buffer[startIndex + 1] != Common.HeaderByte2)
    {
        throw new InvalidOperationException($"Bad data stream, expected header bytes 0xA546 as first bytes in SEL Fast Message frame, got 0x{buffer[startIndex].ToString("X").PadLeft(2, '0')}{buffer[startIndex + 1].ToString("X").PadLeft(2, '0')}");
    }

    ushort sampleCount;
    uint secondOfCentury;
    NtpTimeTag timetag;

    // Parse relevant common header values at their fixed byte offsets:
    // +2 frame size code, +12 ID code, +18 sample count, +20 second-of-century.
    m_frameSize = (FrameSize)buffer[startIndex + 2];
    m_idCode = BigEndian.ToUInt32(buffer, startIndex + 12);
    sampleCount = BigEndian.ToUInt16(buffer, startIndex + 18);
    secondOfCentury = BigEndian.ToUInt32(buffer, startIndex + 20);

    // We use an NTP time tag since SEL Fast Message SOC also starts at 1/1/1900
    timetag = new NtpTimeTag(secondOfCentury, 0);

    // Data frames have subsecond time information, so we add this fraction of time
    // to current seconds value (sampleCount * 50 ms, expressed in seconds).
    timetag = new NtpTimeTag(timetag.Value + sampleCount * 50.0M / 1000.0M);

    // Cache timestamp value
    m_timestamp = timetag.ToDateTime().Ticks;
}
/// <summary>
/// Uses an XmlDocument to walk a PRX (Windows Media profile) string and change
/// the frame size of the first video streamconfig element, then writes the
/// updated XML back into the caller's string.
/// </summary>
/// <param name="txtWMPrf">Profile XML; replaced in place with the updated document.</param>
/// <param name="fsize">Target frame size.</param>
private void SetProfileFrameSize(ref string txtWMPrf, FrameSize fsize)
{
    SendDebugMessage("Setting WMProfile Frame size to: " + fsize.Width.ToString() + " x " + fsize.Height.ToString());

    XmlDocument profileDoc = new XmlDocument();
    profileDoc.LoadXml(txtWMPrf);
    XmlNode root = profileDoc.DocumentElement;

    // Only the first located video stream is configured; any later ones are left alone.
    foreach (XmlNode rootChild in root.ChildNodes)
    {
        if (rootChild.Name != "streamconfig")
            continue;

        if (AttributeEqualsValue(rootChild.Attributes["majortype"], PRX_GuidVideoStream))
        {
            XmlNode videoStreamNode = rootChild;
            AddFrameSizesToVideoStreamConfig(ref videoStreamNode, fsize);
            break;
        }
    }

    // Serialize the modified document back out.
    txtWMPrf = profileDoc.InnerXml;
}
/// <summary>
/// Loads one animal for a user via the "GetOneAnimal" stored procedure and
/// materializes each returned row as a <see cref="Cattle"/> instance.
/// </summary>
/// <param name="UId">User id, passed to the procedure as @UID.</param>
/// <param name="AnimalID">Animal id, passed to the procedure as @AnimalID.</param>
/// <returns>List of matching cattle; empty when the procedure returns no rows.</returns>
public static List <Cattle> GetUserAnimals(int UId, int AnimalID)
{
    ac.OpenConnection();
    List <Cattle> cattle = new List <Cattle>();

    //parameter and variable of this Procedure
    string[] variables = new string[] { "@UID", "@AnimalID" };
    string[] values = new string[] { "" + UId, "" + AnimalID };

    //get data from database
    DataTable data = ac.ExecuteDataTableProcedure("GetOneAnimal", variables, values, ac);

    if (data.Rows.Count != 0)
    {
        foreach (DataRow row in data.Rows)
        {
            //offspring (not populated by this procedure; left empty)
            List <Cattle> offSpring = new List <Cattle>();

            //frame size
            FrameSize frmeSize = (FrameSize)Enum.Parse(typeof(FrameSize), row["CattleFrameSize"].ToString());
            //Breeding status
            BreedingStatus bStatus = (BreedingStatus)Enum.Parse(typeof(BreedingStatus), row["CattleBreedingStatus"].ToString());
            //Status Enum
            StatusEnum sStatus = (StatusEnum)Enum.Parse(typeof(StatusEnum), row["AnimalStatus"].ToString());
            //Gender Enum
            GenderEnum gEnum = (GenderEnum)Enum.Parse(typeof(GenderEnum), row["AnimalGender"].ToString());
            //animal type
            AnimalTypeEnum animalType = (AnimalTypeEnum)Enum.Parse(typeof(AnimalTypeEnum), row["AnimalType"].ToString());

            // NOTE(review): this relies entirely on positional arguments lining up
            // with a Cattle constructor overload; verify that columns row[4]/row[5]
            // are the identification fields and that the trailing "null, null, null"
            // matches the expected history-list parameters.
            cattle.Add
            (
                new Cattle
                (
                    int.Parse(row["AnimalTid"].ToString()), row["AnimalBreed"].ToString(), gEnum,
                    double.Parse(row["AnimalAge"].ToString()), int.Parse(row["AnimalYear"].ToString()),
                    int.Parse(row["AnimalMonth"].ToString()), int.Parse(row["AnimalDay"].ToString()),
                    animalType, row[4].ToString(), row[5].ToString(),
                    (int)(Math.Round(double.Parse(row["AnimalAge"].ToString()) / 24)),
                    int.Parse(row["personid"].ToString()), row["personname"].ToString(),
                    row["personSurname"].ToString(), DateTime.Parse(row["personDOB"].ToString()),
                    int.Parse(row["CattleID"].ToString()), int.Parse(row["CattleParentFatherID"].ToString()),
                    int.Parse(row["CattleParentMotherID"].ToString()), (row["CattleImage"].ToString()),
                    offSpring, sStatus, double.Parse(row["CattleScotralSize"].ToString()),
                    row["CattleColor"].ToString(), bStatus, frmeSize,
                    double.Parse(row["CattleBirthWeight"].ToString()), double.Parse(row["CattleWeaningWeight"].ToString()),
                    double.Parse(row["CattlePostWeaningWeight"].ToString()), double.Parse(row["CattleAdultSxWeight"].ToString()),
                    double.Parse(row["CattleCurrentAdultWeight"].ToString()),
                    DateTime.Parse(row["CattleCurrentWeightDateTaken"].ToString()),
                    null, null, null
                )
            );
        }
    }
    return(cattle);
}
/// <summary>
/// Rebuilds the preview and capture resolution dropdowns from the camera
/// manager, pre-selecting the entry matching the camera's current resolution.
/// The preview image rect is also resized to the current preview resolution.
/// (Method name typo "Drodown" is kept — callers depend on it.)
/// </summary>
public void UpdateDrodownLists()
{
    // ---- Preview resolutions ----
    PrevResDropdown.ClearOptions();
    previewResolutionsList = JMRCameraManager.Instance.GetPreviewResolutions();
    if (previewResolutionsList != null && previewResolutionsList.Count > 0)
    {
        FrameSize cr = JMRCameraManager.Instance.GetCurrentPreviewResolution();
        for (int i = 0; i < previewResolutionsList.Count; i++)
        {
            Dropdown.OptionData option = new Dropdown.OptionData();
            option.text = previewResolutionsList[i].frameSizeText;
            PrevResDropdown.options.Add(option);
            // Select the entry matching the current preview resolution and size
            // the preview image rect to match it.
            if (cr.Width == previewResolutionsList[i].Width && cr.Height == previewResolutionsList[i].Height)
            {
                PrevResDropdown.value = i;
                PrevResDropdown.captionText.text = option.text;
                PreviewImageRect.sizeDelta = new Vector2(cr.Width, cr.Height);
            }
        }
    }
    else
    {
        Debug.LogError("Preview Resolution List NULL");
        Dropdown.OptionData option = new Dropdown.OptionData(); // NOTE(review): created but never used
    }

    // ---- Capture resolutions ----
    CaptureResDropdown.ClearOptions();
    captureResolutionsList = JMRCameraManager.Instance.GetCaptureResolutions();
    if (captureResolutionsList != null)
    {
        FrameSize cr = JMRCameraManager.Instance.GetCurrentCaptureResolution();
        for (int i = 0; i < captureResolutionsList.Count; i++)
        {
            Dropdown.OptionData option = new Dropdown.OptionData();
            option.text = captureResolutionsList[i].frameSizeText;
            CaptureResDropdown.options.Add(option);
            if (cr.Width == captureResolutionsList[i].Width && cr.Height == captureResolutionsList[i].Height)
            {
                CaptureResDropdown.value = i;
                CaptureResDropdown.captionText.text = option.text;
            }
        }
    }
    else
    {
        Debug.LogError("Capture Resolution List NULL");
    }
}
/// <summary>
/// Serializes the flash movie: an 8-byte header (signature, version, total
/// file length) followed by the body (frame size, frame rate as 8.8 fixed
/// point, frame count, then every tag).
/// </summary>
/// <returns>The compiled byte buffer; also cached in <c>_buffer</c>.</returns>
public byte[] Compile()
{
    var bodyBuffer = new List<byte>();
    bodyBuffer.AddRange(FrameSize.ToBytes());
    // Frame rate is stored as 8.8 fixed point: low byte = fraction (0 here),
    // high byte = integer rate, written little-endian.
    bodyBuffer.AddRange(BitConverter.GetBytes((ushort)(FrameRate << 8)));
    bodyBuffer.AddRange(BitConverter.GetBytes((ushort)FrameCount));

    IList<IFlashTag> tags = Tags;
    foreach (IFlashTag tag in tags)
    {
        bodyBuffer.AddRange(tag.ToBytes());
    }

    // Header = signature (3) + version (1) + file length field (4) = 8 bytes.
    // Write the total length explicitly rather than relying on List.Capacity,
    // which only coincidentally equaled the file length and would silently
    // corrupt the header if the list ever grew.
    int fileLength = 8 + bodyBuffer.Count;
    var buffer = new List<byte>(fileLength);
    buffer.AddRange(Encoding.UTF8.GetBytes(FWS));
    buffer.Add((byte)Version);
    buffer.AddRange(BitConverter.GetBytes(fileLength));
    buffer.AddRange(bodyBuffer);

    _buffer = buffer.ToArray();
    _position = _buffer.Length;
    return _buffer;
}
/// <summary>
/// Reads the VP9 motion vector reference array for a frame from device memory.
/// The array holds one 16-byte <see cref="Vp9MvRef"/> entry per 8x8 block of
/// the frame (partial blocks rounded up).
/// </summary>
private static ReadOnlySpan <Vp9MvRef> GetMvsInput(MemoryManager gmm, FrameSize size, uint offset)
{
    // Frame dimensions in 8x8 block units.
    int blocksWide = BitUtils.DivRoundUp(size.Width, 8);
    int blocksHigh = BitUtils.DivRoundUp(size.Height, 8);

    int byteLength = blocksHigh * blocksWide * 16;
    ReadOnlySpan<byte> raw = gmm.DeviceGetSpan(offset, byteLength);

    return MemoryMarshal.Cast<byte, Vp9MvRef>(raw);
}
/// <summary>
/// Creates a new <see cref="ConfigurationFrame"/> from serialization parameters.
/// </summary>
/// <param name="info">The <see cref="SerializationInfo"/> with populated with data.</param>
/// <param name="context">The source <see cref="StreamingContext"/> for this deserialization.</param>
protected ConfigurationFrame(SerializationInfo info, StreamingContext context) : base(info, context)
{
    // Deserialize configuration frame
    m_frameSize = (FrameSize)info.GetValue("frameSize", typeof(FrameSize));
    m_messagePeriod = (MessagePeriod)info.GetValue("messagePeriod", typeof(MessagePeriod));
    // NOTE(review): the "idCode32Bit" key presumably avoids clashing with a
    // base-class id entry of a different width — confirm against the base type.
    m_idCode = info.GetUInt32("idCode32Bit");
}
/// <summary>
/// Marks the recorder as started and snapshots the current voice settings so
/// the same codec/quality/frame-size configuration is reused every time the
/// encoder is restarted.
/// </summary>
/// <param name="codec">Codec to encode with; defaults to Opus.</param>
public void Start(Codec codec = Codec.Opus)
{
    _codec = codec;

    // Snapshot settings now so later user changes don't affect the live session.
    VoiceSettings settings = VoiceSettings.Instance;
    _encoderQuality = settings.Quality;
    _encoderFrameSize = settings.FrameSize;

    _started = true;
}
/// <summary>
/// Creates a new <see cref="ConfigurationFrame"/>.
/// </summary>
/// <remarks>
/// This constructor is used by a consumer to generate a SEL Fast Message configuration frame.
/// </remarks>
/// <param name="frameSize">A <see cref="FrameSize"/> object.</param>
/// <param name="idCode">An <see cref="uint"/> as the id code.</param>
/// <param name="messagePeriod">A <see cref="MessagePeriod"/> object.</param>
public ConfigurationFrame(FrameSize frameSize, MessagePeriod messagePeriod, uint idCode) : base(0, new ConfigurationCellCollection(), 0, 0)
{
    FrameSize = frameSize;
    MessagePeriod = messagePeriod;
    IDCode = idCode;

    ConfigurationCell configCell = new(this);

    // Assign station name
    configCell.StationName = $"SEL Unit - {idCode}";

    // Add a single frequency definition
    configCell.FrequencyDefinition = new FrequencyDefinition(configCell, "Line frequency");

    // Add phasors based on frame size
    switch (frameSize)
    {
        case FrameSize.V1:
            // Add a single positive sequence voltage phasor definition
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "V1", PhasorType.Voltage, null));
            break;
        case FrameSize.V:
            // Add three-phase and positive sequence voltage phasors
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "VA", PhasorType.Voltage, null));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "VB", PhasorType.Voltage, null));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "VC", PhasorType.Voltage, null));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "V1", PhasorType.Voltage, null));
            break;
        case FrameSize.A:
            // Add three-phase and positive sequence voltage and current phasors;
            // each current phasor references the voltage phasor at the same position.
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "VA", PhasorType.Voltage, null));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "VB", PhasorType.Voltage, null));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "VC", PhasorType.Voltage, null));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "V1", PhasorType.Voltage, null));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "IA", PhasorType.Current, configCell.PhasorDefinitions[0] as PhasorDefinition));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "IB", PhasorType.Current, configCell.PhasorDefinitions[1] as PhasorDefinition));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "IC", PhasorType.Current, configCell.PhasorDefinitions[2] as PhasorDefinition));
            configCell.PhasorDefinitions.Add(new PhasorDefinition(configCell, "I1", PhasorType.Current, configCell.PhasorDefinitions[3] as PhasorDefinition));
            break;
    }

    // SEL Fast Message protocol sends data for one device
    Cells.Add(configCell);

    // Define message rate (best-fit)
    FrameRate = messagePeriod switch
    {
        MessagePeriod.DefaultRate or MessagePeriod.TwentyPerSecond => 20,
        MessagePeriod.TenPerSecond => 10,
        MessagePeriod.FivePerSecond => 5,
        MessagePeriod.FourPerSecond => 4,
        MessagePeriod.TwoPerSecond => 2,
        MessagePeriod.OnePerSecond => 1,
        _ => 0,
    };
}
/// <summary>
/// CUDA variant of the overlay watermark filter; this constructor simply
/// forwards all arguments to the base overlay filter.
/// </summary>
/// <param name="currentState">Current frame state.</param>
/// <param name="watermarkState">Watermark configuration.</param>
/// <param name="resolution">Target frame size.</param>
public OverlayWatermarkCudaFilter(
    FrameState currentState,
    WatermarkState watermarkState,
    FrameSize resolution) : base(
    currentState,
    watermarkState,
    resolution)
{
}
/// <summary>
/// Creates a mono Opus encoder at the codec sample rate. The bitrate is chosen
/// from the requested quality and the per-packet sample count from the frame
/// size; forward error correction is left disabled.
/// </summary>
/// <param name="quality">Desired audio quality (drives target bitrate).</param>
/// <param name="frameSize">Abstract frame size (drives packet duration).</param>
public OpusEncoder(AudioQuality quality, FrameSize frameSize)
{
    var nativeEncoder = new OpusNative.OpusEncoder(SampleRate, 1);
    nativeEncoder.EnableForwardErrorCorrection = false;
    nativeEncoder.Bitrate = GetTargetBitrate(quality);

    // _encoder must be assigned before GetFrameSize, which reads it.
    _encoder = nativeEncoder;
    _frameSize = GetFrameSize(frameSize);
}
/// <summary>
/// Handles the create POST: validates the submitted frame size, persists it
/// when the model state is valid, and refreshes the Sizes list either way.
/// </summary>
/// <param name="frameSize">Bound frame size (Size, MinHeight, MaxHeight only).</param>
public async Task OnPostCreate([Bind("Size, MinHeight, MaxHeight")] FrameSize frameSize)
{
    ValidateSize(frameSize, ModelState);

    if (ModelState.IsValid)
    {
        _context.FrameSizes.Add(frameSize);
        await _context.SaveChangesAsync();
    }

    // Reload so the page reflects the (possibly) updated table.
    Sizes = await _context.FrameSizes.ToListAsync();
}
/// <summary>
/// Gets the historical data for a contract by downloading the chart image map
/// and parsing every "area" node into a quote.
/// </summary>
/// <param name="contractCode">Contract code; embedded spaces are stripped.</param>
/// <param name="periodSize">Chart period.</param>
/// <param name="frameSize">Chart frame size.</param>
/// <returns>The parsed quote list.</returns>
public List<Quote> GetHistoricalData(string contractCode,Period periodSize, FrameSize frameSize)
{
    var cleanedCode = contractCode.Replace(" ", string.Empty);
    var address = GetChartPath(cleanedCode, periodSize, frameSize, new List<int>());

    var ochlList = new List<BarchartQuote>();
    var xmlReader = GetXmlReader(cleanedCode, address);
    while (xmlReader.Read())
    {
        if (xmlReader.Name == "area")
        {
            ochlList.Add(ParseHistoriaclDeliverAreaNode(xmlReader));
        }
    }

    return MakeChartPeriodList(ochlList);
}
/// <summary>
/// Fit a frame within a container.
/// One container dimension is taken as-is and the other is derived from the
/// source frame's aspect ratio.
/// </summary>
/// <param name="sourceSize">Frame whose aspect ratio is preserved.</param>
/// <param name="fitWithinSize">Container to fit within.</param>
public FrameSize(FrameSize sourceSize, FrameSize fitWithinSize)
{
    // NOTE(review): this math is only self-consistent if AspectRatio is
    // Height / Width (ratio >= 1 meaning portrait or square) — confirm
    // against the AspectRatio property's definition.
    if (sourceSize.AspectRatio >= 1)
    {
        Width = fitWithinSize.Width;
        Height = Convert.ToInt32(Width * sourceSize.AspectRatio);
    }
    else
    {
        Height = fitWithinSize.Height;
        Width = Convert.ToInt32(Height / sourceSize.AspectRatio);
    }
}
/// <summary>
/// Maps the abstract frame size setting to a concrete per-packet sample count
/// taken from the native encoder's permitted frame sizes.
/// </summary>
/// <param name="size">Abstract frame size.</param>
/// <returns>Samples per packet for the selected duration.</returns>
/// <exception cref="ArgumentOutOfRangeException">Unknown frame size value.</exception>
private int GetFrameSize(FrameSize size)
{
    if (size == Dissonance.FrameSize.Small)
        return _encoder.PermittedFrameSizes[3];     // 20ms
    if (size == Dissonance.FrameSize.Medium)
        return _encoder.PermittedFrameSizes[4];     // 40ms
    if (size == Dissonance.FrameSize.Large)
        return _encoder.PermittedFrameSizes[5];     // 60ms

    throw new ArgumentOutOfRangeException("size", size, null);
}
/// <summary>
/// Instantiates the voice encoder implementation matching the configured codec.
/// </summary>
/// <param name="quality">Audio quality passed to the Opus encoder.</param>
/// <param name="frameSize">Frame size passed to the Opus encoder.</param>
/// <returns>A new encoder instance; never null.</returns>
[NotNull] private IVoiceEncoder CreateEncoder(AudioQuality quality, FrameSize frameSize)
{
    switch (_codec)
    {
        case Codec.Identity:
            // Pass-through encoder; quality/frame-size settings do not apply.
            return new IdentityEncoder(44100, 441);

        case Codec.Opus:
            return new OpusEncoder(quality, frameSize);

        default:
            throw Log.CreatePossibleBugException(string.Format("Unknown Codec {0}", _codec), "6232F4FA-6993-49F9-AA79-2DBCF982FD8C");
    }
}
/// <summary>
/// Initializes preview parameters to their defaults: unknown frame type,
/// stopped preview state, all feature toggles enabled, and a 300x300 image
/// frame size.
/// </summary>
public PreviewParameter()
{
    // Frame type and preview state start out unknown/stopped.
    FrameTypeLastState = FrameType.Unkown;
    Frametype = FrameType.Unkown;
    PreviewLastState = PreviewState.Stop;
    PreviewStates = PreviewState.Stop;

    // Feature toggles default to enabled.
    IsAmbientSubtractionEnabled = true;
    IsIlluminationValueEnabled = true;
    IsRecoveryOriginalSize = true;
    IsControllerResize = true;
    IsAutoOpenIRDevice = true;

    ImageFrameSize = new FrameSize { Height = 300, Width = 300 };
}
/// <summary>
/// Builds the ffmpeg argument string for grabbing a single frame at the given
/// time, e.g.: ffmpeg -ss 5025 -i input -frames:v 1 -q:v 2 output.jpg
/// Input and output paths are quoted; -y overwrites any existing output file.
/// </summary>
/// <param name="inputFile">Source media path.</param>
/// <param name="timeInSeconds">Seek position in seconds.</param>
/// <param name="frameSize">Frame size key looked up in the arguments table.</param>
/// <param name="outputFile">Destination image path.</param>
/// <returns>The full argument string.</returns>
public string GetFrame(string inputFile, int timeInSeconds, FrameSize frameSize, string outputFile)
{
    string sizeArgs = _arguments.GetValue(frameSize.ToString());
    return $" -v quiet -stats -ss {timeInSeconds} -i \"{inputFile}\" -frames:v 1{sizeArgs} -q:v 2  -y \"{outputFile}\"";
}
/// <summary>
/// Create a frame based on a scaling an existing frame.
/// </summary>
/// <param name="basedOnSize">Frame to scale; 320x240 is substituted when either dimension is zero.</param>
/// <param name="dMultiplier">Scale factor; 0 is treated as 0.2.</param>
public FrameSize(FrameSize basedOnSize, double dMultiplier)
{
    if (dMultiplier == 0)
    {
        dMultiplier = 0.2;
    }

    // Substitute the 320x240 default via locals rather than writing it back
    // into the caller's instance (the old code mutated basedOnSize).
    int baseWidth = basedOnSize.Width;
    int baseHeight = basedOnSize.Height;
    if ((baseWidth == 0) || (baseHeight == 0))
    {
        baseWidth = 320;
        baseHeight = 240;
    }

    Width = Convert.ToInt32(baseWidth * dMultiplier);
    Height = Convert.ToInt32(baseHeight * dMultiplier);
}
/// <summary>
/// Constructs a cattle record: base animal data is forwarded to the Animal
/// base constructor, then the cattle-specific fields (identity/parentage,
/// physical and breeding attributes, weight track and history lists) are set.
/// </summary>
public Cattle
(
    int aId, string aBreed, GenderEnum aGender, double aAge, int aYear, int aMonth, int aDay,
    AnimalTypeEnum aType, string aIdentifactionChar, string aCustomeIdentifaction, int oId,
    int pId, string pName, string pSurname, DateTime dob, int cId, int cParentFId, int cParentMId,
    string cImagePath, List <Cattle> cOffSpring, StatusEnum status, double cScotralSize, string cColor,
    BreedingStatus cBreedingStatus, FrameSize cFrameSize, double cBirthWeight, double cWeaningWeight,
    double cPostWeaningWeight, double cAdultSxWeight, double cCurrentAdultWeight,
    DateTime cCurrentWeightDateTaken, List <string> cDiagnoses, List <double> cAdultWeightHistory,
    List <DateTime> cAdultWeightDateHistory, DateTime aDOB
) : base(aId, aBreed, aGender, aAge, aYear, aMonth, aDay, aType, oId, pId, pName, pSurname, dob, status, aIdentifactionChar, aCustomeIdentifaction)
{
    // Identity and parentage
    this.cId = cId;
    this.cParentFId = cParentFId;
    this.cParentMId = cParentMId;
    this.cImagePath = cImagePath;
    this.cOffSpring = cOffSpring;
    // Physical / breeding attributes
    this.cScotralSize = cScotralSize;
    this.cColor = cColor;
    this.cBreedingStatus = cBreedingStatus;
    this.cFrameSize = cFrameSize;
    // Weight track
    this.cBirthWeight = cBirthWeight;
    this.cWeaningWeight = cWeaningWeight;
    this.cPostWeaningWeight = cPostWeaningWeight;
    this.cAdultSxWeight = cAdultSxWeight;
    this.cCurrentAdultWeight = cCurrentAdultWeight;
    this.cCurrentWeightDateTaken = cCurrentWeightDateTaken;
    //------ history lists
    this.cDiagnoses = cDiagnoses;
    this.cAdultWeightDateHistory = cAdultWeightDateHistory;
    this.cAdultWeightHistory = cAdultWeightHistory;
    //------
    this.aDOB = aDOB;
}
/// <summary>
/// Creates the voice encoder for the currently selected codec.
/// </summary>
/// <param name="quality">Audio quality passed to the Opus encoder.</param>
/// <param name="frameSize">Frame size passed to the Opus encoder.</param>
/// <returns>A new encoder instance; never null.</returns>
[NotNull] private IVoiceEncoder CreateEncoder(AudioQuality quality, FrameSize frameSize)
{
    if (_codec == Codec.Identity)
        return new IdentityEncoder(44100, 441);

    //ncrunch: no coverage start (Justification: We don't want to load the opus binaries into a testing context)
    if (_codec == Codec.Opus)
        return new OpusEncoder(quality, frameSize);
    //ncrunch: no coverage end

    throw Log.CreatePossibleBugException(string.Format("Unknown Codec {0}", _codec), "6232F4FA-6993-49F9-AA79-2DBCF982FD8C");
}
/// <summary>
/// Initializes voice settings to their defaults: medium quality and frame
/// size, high noise suppression, AEC/AECM suppression disabled (with the AEC
/// filter refinements enabled), and a 0.8 voice duck level.
/// </summary>
public VoiceSettings()
{
    // Encoder defaults
    _quality = AudioQuality.Medium;
    _frameSize = FrameSize.Medium;

    // Preprocessing defaults
    _denoiseAmount = (int)NoiseSuppressionLevels.High;

    // AEC defaults (suppression disabled, filter refinements on)
    _aecAmount = (int)AecSuppressionLevels.Disabled;
    _aecDelayAgnostic = Convert.ToInt32(true);
    _aecExtendedFilter = Convert.ToInt32(true);
    _aecRefinedAdaptiveFilter = Convert.ToInt32(true);

    // AECM defaults
    _aecmRoutingMode = (int)AecmRoutingMode.Disabled;
    _aecmComfortNoise = Convert.ToInt32(true);

    _voiceDuckLevel = 0.8f;
}
public FFmpegPipeline Resize(string outputFile, FrameSize scaledSize) { _pipelineSteps.Clear(); _pipelineSteps.Add(new NoStandardInputOption()); _pipelineSteps.Add(new HideBannerOption()); _pipelineSteps.Add(new NoStatsOption()); _pipelineSteps.Add(new LoglevelErrorOption()); IPipelineFilterStep scaleStep = new ScaleImageFilter(scaledSize); _videoInputFile.Iter(f => f.FilterSteps.Add(scaleStep)); _pipelineSteps.Add(new VideoFilter(new[] { scaleStep })); _pipelineSteps.Add(scaleStep); _pipelineSteps.Add(new FileNameOutputOption(outputFile)); return(new FFmpegPipeline(_pipelineSteps)); }
/// <summary>
/// Captures everything needed to build a complex filter graph: the current
/// frame/ffmpeg state, the optional video/audio/watermark/subtitle inputs,
/// the target resolution and the fonts directory. The constructor only stores
/// these into fields.
/// </summary>
public ComplexFilter(
    FrameState currentState,
    FFmpegState ffmpegState,
    Option <VideoInputFile> maybeVideoInputFile,
    Option <AudioInputFile> maybeAudioInputFile,
    Option <WatermarkInputFile> maybeWatermarkInputFile,
    Option <SubtitleInputFile> maybeSubtitleInputFile,
    FrameSize resolution,
    string fontsDir)
{
    _currentState = currentState;
    _ffmpegState = ffmpegState;
    _maybeVideoInputFile = maybeVideoInputFile;
    _maybeAudioInputFile = maybeAudioInputFile;
    _maybeWatermarkInputFile = maybeWatermarkInputFile;
    _maybeSubtitleInputFile = maybeSubtitleInputFile;
    _resolution = resolution;
    _fontsDir = fontsDir;
}
/// <summary>
/// Load defaults into fields, but do not clear prefs
/// </summary>
private void LoadDefaults()
{
    // Encoder defaults
    _quality = AudioQuality.Medium;
    _frameSize = FrameSize.Medium;
    _forwardErrorCorrection = Convert.ToInt32(true);

    // Preprocessing defaults
    _denoiseAmount = (int)NoiseSuppressionLevels.High;
    _vadSensitivity = (int)VadSensitivityLevels.MediumSensitivity;

    // AEC defaults (suppression disabled, filter refinements enabled)
    _aecAmount = (int)AecSuppressionLevels.Disabled;
    _aecDelayAgnostic = Convert.ToInt32(true);
    _aecExtendedFilter = Convert.ToInt32(true);
    _aecRefinedAdaptiveFilter = Convert.ToInt32(true);

    // AECM defaults
    _aecmRoutingMode = (int)AecmRoutingMode.Disabled;
    _aecmComfortNoise = Convert.ToInt32(true);

    _voiceDuckLevel = 0.75f;
}
/// <summary>
/// Adds a model variant (colour and frame size encoded in <paramref name="info"/>
/// as "colourId sizeId") to the session cart, or bumps the quantity if that
/// exact variant is already present, then redirects back to the caller.
/// </summary>
/// <param name="modelId">Model to buy; 404 when null or not found.</param>
/// <param name="itemsAmount">Quantity used when adding a new cart item.</param>
/// <param name="returnUrl">Redirect target after updating the cart.</param>
/// <param name="info">Space-separated colour id and frame size id.</param>
public async Task <IActionResult> Buy(int?modelId, int itemsAmount, string returnUrl, string info)
{
    if (modelId == null)
    {
        return NotFound();
    }

    List<Item> cart = SessionHelper.GetObjectFromJson<List<Item>>(HttpContext.Session, "cart");
    cart ??= new List<Item>();

    int[] arr = info.Split(" ").Select(x => Convert.ToInt32(x)).ToArray();

    // Look the variant up once with the full predicate. The old code repeated
    // the search with a weaker predicate (model id only), which could bump the
    // quantity of a different colour/size of the same model.
    Item existing = cart.Find(i => i.Model.Id == modelId && i.ModelColour.Id == arr[0] && i.FrameSize.Id == arr[1]);
    if (existing != null)
    {
        existing.Quantity++;
    }
    else
    {
        ModelColour modelColour = await _context.ModelColours.Include(mc => mc.Colour).FirstOrDefaultAsync(x => x.Id == arr[0]);
        FrameSize size = await _context.FrameSizes.FirstOrDefaultAsync(x => x.Id == arr[1]);
        Model model = await _context.Models
            .Include(m => m.ModelName)
            .Include(m => m.ModelPrefix)
            .FirstOrDefaultAsync(m => m.Id == modelId);

        if (model == null)
        {
            return NotFound();
        }

        cart.Add(new Item() { Model = model, ModelColour = modelColour, FrameSize = size, Quantity = itemsAmount });
    }

    SessionHelper.SetObjectAsJson(HttpContext.Session, "cart", cart);
    return Redirect(returnUrl);
}
/// <summary>
/// Gets the chart path with ma selective period or frame.
/// Builds the barchart.com chart URL from the period, frame size and SMA
/// indicator list.
/// </summary>
/// <param name="contractCode">Contract code; empty string is returned when missing.</param>
/// <param name="periodSize">Chart period (daily/weekly/monthly).</param>
/// <param name="frameSize">Frame size; its integer value indexes the duration codes.</param>
/// <param name="movingAverages">SMA periods; only values greater than 1 are emitted.</param>
/// <returns>The chart URL, or an empty string when no contract code was given.</returns>
private string GetChartPath(string contractCode, Period periodSize, FrameSize frameSize, List<int> movingAverages)
{
    // Bail out early instead of building URL fragments that would be discarded
    // (the old code performed this check after all the string work).
    if (string.IsNullOrEmpty(contractCode))
        return string.Empty;

    var period = string.Empty;
    switch (periodSize)
    {
        case Period.Daily:
            period = "DO";
            break;
        case Period.Weekly:
            period = "WN";
            break;
        case Period.Monthly:
            period = "MN";
            break;
    }

    if (!string.IsNullOrEmpty(period))
        period = "&p=" + period;

    // The frame size enum value indexes into barchart's duration codes.
    var indexes = new[] { string.Empty, "L", "O", "M", "H", "X" };
    var selectedValue = (int)frameSize;
    var frame = "&d=" + indexes[selectedValue];

    var indicatorString = string.Empty;
    if (!movingAverages.IsNullOrEmpty())
    {
        indicatorString += "&indicators=";
        indicatorString = movingAverages.Where(indicatorSma => indicatorSma > 1).Aggregate(indicatorString, (current, indicatorSma) => current + string.Format("SMA({0},11650);", indicatorSma));
    }

    var address = string.Format("http://www.barchart.com/chart.php?sym={0}&style=technical{1}{3}&sd=&ed=&size=M&log=0&t=BAR&v=2&g=1&evnt=1&late=1&sh=100{2}&txtDate=#jump", contractCode.ToUpper(), period, indicatorString, frame);
    return address;
}
/// <summary>
/// Configures the ASF writer filter with a WM profile matched to the requested
/// quality: fits the output frame size into the target container, attempts to
/// embed anamorphic aspect-ratio attributes into the stream header, and
/// optionally enables a deinterlace mode on the video input pin.
/// When no PRX profile is found for the quality, the default WM config is used.
/// </summary>
/// <param name="asf_filter">The ASF writer filter, already added to the graph.</param>
/// <param name="strq">Streaming request (quality, custom settings, deinterlace mode).</param>
/// <param name="SourceFrameSize">Frame size of the source video.</param>
void ConfigureASFWriter(WMAsfWriter asf_filter, WTVStreamingVideoRequest strq, FrameSize SourceFrameSize)
{
    int hr;

    // Now it's added to the graph, configure it with the selected WM Profile
    SendDebugMessage("Getting WM profile with quality of " + strq.Quality.ToString(), 0);
    WindowsMediaLib.IWMProfileManager profileManager;
    WMUtils.WMCreateProfileManager(out profileManager);
    IWMProfile wmProfile;
    string txtPrxProfile = getPRXProfileForQuality(strq.Quality);

    if (!(string.IsNullOrEmpty(txtPrxProfile)))
    {
        SendDebugMessage("Adjusting WM profile to fit video within designated frame size", 0);

        // SET VIDEO SIZE TO FIT WITHIN THE RIGHT FRAME
        SendDebugMessage("Source video size is " + SourceFrameSize.ToString(), 0);
        FrameSize containerSize = frameSizeForStreamRequest(strq);
        SendDebugMessage("Container size is " + containerSize.ToString() , 0);
        FrameSize newVideoSize = new FrameSize(SourceFrameSize, containerSize);
        SendDebugMessage("Output size is " + newVideoSize.ToString(), 0);
        SetProfileFrameSize(ref txtPrxProfile, newVideoSize);
        SetProfileCustomSettings(ref txtPrxProfile, ref strq); // returns immediately if not custom quality

        SendDebugMessage("Configuring ASF Writer with profile", 0);
        profileManager.LoadProfileByData(txtPrxProfile, out wmProfile);
        WindowsMediaLib.IConfigAsfWriter configWriter = (WindowsMediaLib.IConfigAsfWriter)asf_filter;
        configWriter.ConfigureFilterUsingProfile(wmProfile);
        configWriter.SetIndexMode(true); // yes index - DEFAULT

        /* Additional config - TEST
        //DirectShowLib.IConfigAsfWriter2 configAsfWriter2 = (DirectShowLib.IConfigAsfWriter2)asf_filter;
        //configAsfWriter2.SetParam(ASFWriterConfig.AutoIndex, 0, 0); // IT IS DEFAULT */

        // (NOT WORKING)
        // SET ANAMORPHIC VIDEO MARKERS WITHIN STREAM (ASPECT RATIO) *******************************
        UInt32 uiAspectX = (UInt32)SourceFrameSize.Width;
        byte[] bAspectX = BitConverter.GetBytes(uiAspectX);
        UInt32 uiAspectY = (UInt32)SourceFrameSize.Height;
        byte[] bAspectY = BitConverter.GetBytes(uiAspectY);

        DirectShowLib.IServiceProvider pServiceProvider; // http://msdn.microsoft.com/en-us/library/dd390985%28VS.85%29.aspx
        pServiceProvider = (DirectShowLib.IServiceProvider)asf_filter;
        DsGuid dsgIWMHeaderinfo = DsGuid.FromGuid(new Guid(GUIDs.IWMWriterAdvanced2));
        object o3 = null;
        hr = pServiceProvider.QueryService(dsgIWMHeaderinfo, dsgIWMHeaderinfo, out o3); // FAILS IN A STA THREAD
        DsError.ThrowExceptionForHR(hr);
        IWMHeaderInfo headerinfo = (IWMHeaderInfo)o3;

        // Get access to WMwriterAdvanced2 object using pServiceProvider (poss not futureproof) (see http://groups.google.com/group/microsoft.public.win32.programmer.directx.video/browse_thread/thread/36b154d41cb76ffd/c571d6ef56de11af?#c571d6ef56de11af )
        DsGuid dsgWMwriterAdvanced2 = DsGuid.FromGuid(new Guid(GUIDs.IWMWriterAdvanced2));
        object o = null;
        hr = pServiceProvider.QueryService(dsgWMwriterAdvanced2, dsgWMwriterAdvanced2, out o); // FAILS IN A STA THREAD
        DsError.ThrowExceptionForHR(hr);
        IWMWriterAdvanced2 WMWriterAdvanced2 = null;
        WMWriterAdvanced2 = (IWMWriterAdvanced2)o;

        // Get Access to IWMHeaderInfo3 through WMWriterAdvanced2
        object o2 = null;
        //pServiceProvider = (DirectShowLib.IServiceProvider)WMWriterAdvanced2;
        DsGuid dsgIWMHeaderInfo3 = DsGuid.FromGuid(new Guid(GUIDs.IWMHeaderInfo3));
        hr = pServiceProvider.QueryService(dsgWMwriterAdvanced2, dsgIWMHeaderInfo3, out o2); // LET'S SEE
        DsError.ThrowExceptionForHR(hr);
        IWMHeaderInfo3 WMHeaderInfo3 = null;
        WMHeaderInfo3 = (IWMHeaderInfo3)o2;
        short pwIndex;

        // Add Aspect Ratio information
        WMHeaderInfo3.AddAttribute(2, "AspectRatioX", out pwIndex, AttrDataType.DWORD, 0, bAspectX, bAspectX.Length);
        WMHeaderInfo3.AddAttribute(2, "AspectRatioY", out pwIndex, AttrDataType.DWORD, 0, bAspectY, bAspectY.Length);

        // Try with other interface too
        headerinfo.SetAttribute(2, "AspectRatioX", AttrDataType.DWORD, bAspectX, Convert.ToInt16(bAspectX.Length));
        headerinfo.SetAttribute(2, "AspectRatioY", AttrDataType.DWORD, bAspectY, Convert.ToInt16(bAspectY.Length));

        // ************ DEINTERLACE (experimental)
        if (strq.DeInterlaceMode > 0)
        {
            DeInterlaceModes dimode = DeInterlaceModes.WM_DM_NOTINTERLACED;

            // Deinterlace Mode
            if (strq.DeInterlaceMode == 1)
                dimode = DeInterlaceModes.WM_DM_DEINTERLACE_NORMAL;
            else if (strq.DeInterlaceMode == 2)
                dimode = DeInterlaceModes.WM_DM_DEINTERLACE_HALFSIZE;

            // Index of video pin
            int pinIndex = FilterGraphTools.FindPinIndexByMediaType(currentOutputFilter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
            byte[] bDiMode = BitConverter.GetBytes((int)dimode);
            short szOf = (short)bDiMode.Length;

            // Set to use deinterlace mode
            try
            {
                WMWriterAdvanced2.SetInputSetting(pinIndex, g_wszDeinterlaceMode, AttrDataType.DWORD, bDiMode, szOf);
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException("Could not set interlace mode:", ex);
            }
        }
    }
    else
    {
        SendDebugMessage("Warning - PRX Profile string was empty; using default WM config.");
    }
}
/// <summary>
/// Message carrying one depth frame: the raw pixel payload and the frame
/// dimensions it was captured at.
/// </summary>
/// <param name="depthPixels">Raw depth pixel data.</param>
/// <param name="depthFrameSize">Dimensions of the depth frame.</param>
public DepthStreamMessage(byte[] depthPixels, FrameSize depthFrameSize)
{
    DepthFrameSize = depthFrameSize;
    DepthPixels = depthPixels;
}
// Sorry barchart.com :D
/// <summary>
/// Downloads the chart markup, rewrites the cached image reference to point at
/// the local ChartImage.aspx handler, and prepends the price box div.
/// Any failure falls back to an empty chart div (best-effort rendering).
/// </summary>
private string GetChart(string contractCode, string address, FrameSize frameSize, Period periodSize, List<int> map)
{
    try
    {
        var chart = GetClearChartData(new Uri(address));
        var imageName = GetPictureName(chart);

        var mapList = string.Empty;
        if (!map.IsNullOrEmpty())
        {
            mapList = string.Format("{0}", map.Aggregate((s, d) => s + ',' + d));
        }

        var localImageUrl = string.Format("/WebParts/ChartData/ChartImage.aspx?_imageName={0}&_contractCode={1}&_frame={2}&_period={3}&_ma={4}", imageName, contractCode, frameSize, periodSize, mapList);
        chart = chart.Replace(Constants.BarchartCache + imageName, localImageUrl);

        return "<div id=\"chartpricebox\" class=\"barchartStyle\">Move cursor over the chart </div>" + chart;
    }
    catch
    {
        return "<div id=\"chartdiv\"/>";
    }
}
/// <summary>
/// Create a frame based on a scaling an existing frame.
/// </summary>
/// <param name="basedOnSize">Frame to scale; 320x240 is substituted when either dimension is zero.</param>
/// <param name="dMultiplier">Scale factor; 0 is treated as 0.2.</param>
public FrameSize(FrameSize basedOnSize, double dMultiplier)
{
    if (dMultiplier == 0)
        dMultiplier = 0.2;

    // Substitute the 320x240 default via locals rather than writing it back
    // into the caller's instance (the old code mutated basedOnSize).
    int baseWidth = basedOnSize.Width;
    int baseHeight = basedOnSize.Height;
    if ((baseWidth == 0) || (baseHeight == 0))
    {
        baseWidth = 320;
        baseHeight = 240;
    }

    Width = Convert.ToInt32(baseWidth * dMultiplier);
    Height = Convert.ToInt32(baseHeight * dMultiplier);
}
/// <summary>
/// Creates a new <see cref="ConfigurationFrame"/>.
/// </summary>
/// <remarks>
/// This constructor is used by a consumer to generate a SEL Fast Message configuration frame.
/// </remarks>
/// <param name="frameSize">Frame size selecting the phasor layout (V1, V or A).</param>
/// <param name="messagePeriod">Configured message period, mapped to a best-fit frame rate.</param>
/// <param name="idCode">Unit ID code, also embedded in the station name.</param>
public ConfigurationFrame(FrameSize frameSize, MessagePeriod messagePeriod, uint idCode)
    : base(0, new ConfigurationCellCollection(), 0, 0)
{
    m_frameSize = frameSize;
    m_messagePeriod = messagePeriod;
    IDCode = idCode;

    ConfigurationCell cell = new ConfigurationCell(this);

    // Station name carries the unit ID for easy identification.
    cell.StationName = "SEL Unit - " + idCode;

    // Every configuration reports a single line-frequency definition.
    cell.FrequencyDefinition = new FrequencyDefinition(cell, "Line frequency");

    // Phasor layout depends on the configured frame size.
    switch (frameSize)
    {
        case FrameSize.V1:
            // Positive sequence voltage only.
            cell.PhasorDefinitions.Add(new PhasorDefinition(cell, "V1", PhasorType.Voltage, null));
            break;
        case FrameSize.V:
            // Three-phase plus positive sequence voltages.
            foreach (string voltageLabel in new[] { "VA", "VB", "VC", "V1" })
                cell.PhasorDefinitions.Add(new PhasorDefinition(cell, voltageLabel, PhasorType.Voltage, null));
            break;
        case FrameSize.A:
        {
            // Three-phase plus positive sequence voltages...
            foreach (string voltageLabel in new[] { "VA", "VB", "VC", "V1" })
                cell.PhasorDefinitions.Add(new PhasorDefinition(cell, voltageLabel, PhasorType.Voltage, null));

            // ...and the matching currents, each associated with the voltage phasor
            // added at the same index (VA<->IA, VB<->IB, VC<->IC, V1<->I1).
            string[] currentLabels = { "IA", "IB", "IC", "I1" };
            for (int i = 0; i < currentLabels.Length; i++)
                cell.PhasorDefinitions.Add(new PhasorDefinition(cell, currentLabels[i], PhasorType.Current, cell.PhasorDefinitions[i] as PhasorDefinition));
            break;
        }
    }

    // SEL Fast Message protocol sends data for one device.
    Cells.Add(cell);

    // Define message rate (best-fit frames per second for the configured period).
    switch (messagePeriod)
    {
        case MessagePeriod.DefaultRate:
        case MessagePeriod.TwentyPerSecond:
            FrameRate = 20;
            break;
        case MessagePeriod.TenPerSecond:
            FrameRate = 10;
            break;
        case MessagePeriod.FivePerSecond:
            FrameRate = 5;
            break;
        case MessagePeriod.FourPerSecond:
            FrameRate = 4;
            break;
        case MessagePeriod.TwoPerSecond:
            FrameRate = 2;
            break;
        case MessagePeriod.OnePerSecond:
            FrameRate = 1;
            break;
        default:
            // Sub-1Hz periods have no integer frame rate representation.
            FrameRate = 0;
            break;
    }
}
/// <summary>
/// Builds the DirectShow transcode graph for a stream-buffer recording (.wtv / .dvr-ms):
/// SBE source filter => BDA/PBDA decrypt filters => audio/video decoders => WM ASF writer.
/// A null renderer is attached temporarily and the graph is briefly run so the video
/// decoder's negotiated output media type (and hence the source frame size) can be read.
/// </summary>
/// <param name="strq">Streaming request; FileName is read here and the whole request is
/// forwarded to ConfigureASFWriter for profile configuration.</param>
/// <returns>A <c>DSStreamResultCodes</c> value: OK on success, otherwise the failure category.</returns>
DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
{
    // Init variables
    //IPin[] pin = new IPin[1];
    IBaseFilter DecFilterAudio = null;
    IBaseFilter DecFilterVideo = null;
    IBaseFilter MainAudioDecoder = null;
    IBaseFilter MainVideoDecoder = null;
    // NOTE(review): dPin, sName, dName, sPin and txtOutputFNPath are never read below —
    // candidates for removal in a behavior-changing pass.
    string dPin = string.Empty;
    string sName = string.Empty;
    string dName = string.Empty;
    string sPin = string.Empty;
    FileInfo fiInputFile = new FileInfo(strq.FileName);
    string txtOutputFNPath = fiInputFile.FullName + ".wmv";

    // Only stream-buffer containers are handled by this code path.
    if ( (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) && (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms")) )
        return DSStreamResultCodes.ErrorInvalidFileType;

    int hr = 0;

    try
    {
        // Get the graphbuilder interface
        SendDebugMessage("Creating Graph Object",0);
        IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

        // Add the DVRMS/WTV file / filter to the graph
        SendDebugMessage("Add SBE Source Filter", 0);
        hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
        DsError.ThrowExceptionForHR(hr);
        dc.Add(currentSBEfilter);

        // Get the SBE audio and video out pins
        IPin SBEVidOutPin, SBEAudOutPin;
        SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
        SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

        // Set up two decrypt filters according to file extension (assume audio and video both present )
        if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
        {
            // Add DVR-MS decrypt filters
            SendDebugMessage("Add DVRMS (bda) decryption", 0);
            DecFilterAudio = (IBaseFilter)new DTFilter();  // THESE ARE FOR DVR-MS (BDA DTFilters)
            DecFilterVideo = (IBaseFilter)new DTFilter();
            graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
            graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
        }
        else // Add WTV decrypt filters
        {
            SendDebugMessage("Add WTV (pbda) decryption", 0);
            DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
            DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");
        }
        dc.Add(DecFilterAudio);
        dc.Add(DecFilterVideo);

        // Make the first link in the graph: SBE => Decrypts
        SendDebugMessage("Connect SBE => Decrypt filters", 0);
        IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
        FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
        IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
        if (DecAudioInPin == null)
            SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
        else
            FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);

        // Get Dec Audio Out pin
        IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

        // Examine Dec Audio out for audio format
        SendDebugMessage("Examining source audio", 0);
        AMMediaType AudioMediaType = null;
        getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
        SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
        SendDebugMessage("Examining Audio StreamInfo");
        StreamInfo si = FileInformation.GetStreamInfo(AudioMediaType);
        bool AudioIsAC3 = (si.SimpleType == "AC-3");
        if (AudioIsAC3)
            SendDebugMessage("Audio type is AC3");
        else
            SendDebugMessage("Audio type is not AC3");
        si = null;
        DsUtils.FreeAMMediaType(AudioMediaType);

        // Add an appropriate audio decoder
        if (AudioIsAC3)
        {
            // AC3 requires the MPC-HC decoder; fail with a specific code if it is not registered.
            if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
            {
                SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                return DSStreamResultCodes.ErrorAC3CodecNotFound;
            }
            else
            {
                MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);
                //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                Guid tmpGuid; MainAudioDecoder.GetClassID(out tmpGuid);
                SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
            }
        }
        else
            MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);

        // Add a video decoder
        SendDebugMessage("Add DTV decoder", 0);
        MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
        dc.Add(MainAudioDecoder);
        dc.Add(MainVideoDecoder);
        //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

        // Add a null renderer (temporary sink so the graph can be test-run below)
        SendDebugMessage("Add null renderer", 0);
        NullRenderer MyNullRenderer = new NullRenderer();
        dc.Add(MyNullRenderer);
        hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
        DsError.ThrowExceptionForHR(hr);

        // Link up video through to null renderer
        SendDebugMessage("Connect video to null renderer", 0);
        // Make the second link: Decrypts => DTV
        IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
        IPin DTVVideoInPin = DsFindPin.ByName(MainVideoDecoder, @"Video Input"); // IPin DTVVideoInPin = DsFindPin.ByDirection(DTVVideoDecoder, PinDirection.Input, 0); // first one should be video input? //
        FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
        // 3. DTV => Null renderer
        IPin NullRInPin = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
        IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
        FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
        Marshal.ReleaseComObject(NullRInPin); NullRInPin = null;

        // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
        SendDebugMessage("Run graph for testing purposes", 0);
        IMediaControl tempControl = (IMediaControl)graphbuilder;
        IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
        DsError.ThrowExceptionForHR(tempControl.Pause());
        DsError.ThrowExceptionForHR(tempControl.Run());
        EventCode pEventCode;
        hr = tempEvent.WaitForCompletion(1000, out pEventCode);
        //DsError.ThrowExceptionForHR(hr); // DO *NOT* DO THIS HERE! THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH

        // Stop graph if necessary
        FilterState pFS;
        hr = tempControl.GetState(1000, out pFS);
        if (pFS == FilterState.Running)
            DsError.ThrowExceptionForHR(tempControl.Stop());

        // Remove null renderer
        hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

        // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
        AMMediaType pmt = null;
        getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
        FrameSize SourceFrameSize;
        if (pmt.formatType == FormatType.VideoInfo2)
        {
            VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
            Marshal.PtrToStructure(pmt.formatPtr, pvih2);
            int VideoWidth = pvih2.BmiHeader.Width;
            int VideoHeight = pvih2.BmiHeader.Height;
            SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
        }
        else
            SourceFrameSize = new FrameSize(320, 240); // fallback when the format block can't be read

        // Free up
        DsUtils.FreeAMMediaType(pmt); pmt = null;

        // Link up audio
        // 2. Audio Decrypt -> Audio decoder
        IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
        FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

        // Add ASF Writer
        // Create an ASF writer filter
        SendDebugMessage("Creating ASF Writer", 0);
        WMAsfWriter asf_filter = new WMAsfWriter();
        dc.Add(asf_filter); // CHECK FOR ERRORS
        currentOutputFilter = (IBaseFilter)asf_filter; // class variable

        // Add the ASF filter to the graph
        hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
        DsError.ThrowExceptionForHR(hr);

        // Set the filename
        IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
        string destPathFN = fiInputFile.FullName + ".wmv";
        hr = sinkFilter.SetFileName(destPathFN, null);
        DsError.ThrowExceptionForHR(hr);

        // Make the final links: DTV => writer
        SendDebugMessage("Linking audio/video through to decoder and writer", 0);
        IPin DTVAudioOutPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
        IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
        IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
        FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
        if (ASFVideoInputPin != null)
            FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);

        // Configure ASFWriter
        ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

        // Release pins
        SendDebugMessage("Releasing COM objects (pins)", 0);
        // dec
        // NOTE(review): if DecAudioInPin was null above, this ReleaseComObject call will
        // throw and be handled by the catch below — confirm whether that path can occur.
        Marshal.ReleaseComObject(DecAudioInPin); DecAudioInPin = null;
        Marshal.ReleaseComObject(DecVideoInPin); DecVideoInPin = null;
        Marshal.ReleaseComObject(DecVideoOutPin); DecVideoOutPin = null;
        Marshal.ReleaseComObject(DecAudioOutPin); DecAudioOutPin = null;
        // dtv
        Marshal.ReleaseComObject(MainAudioInPin); MainAudioInPin = null;
        Marshal.ReleaseComObject(DTVVideoInPin); DTVVideoInPin = null;
        Marshal.ReleaseComObject(DTVVideoOutPin); DTVVideoOutPin = null;
        Marshal.ReleaseComObject(DTVAudioOutPin); DTVAudioOutPin = null;
        // asf
        Marshal.ReleaseComObject(ASFAudioInputPin); ASFAudioInputPin = null;
        Marshal.ReleaseComObject(ASFVideoInputPin); ASFVideoInputPin = null;
    }
    catch (Exception ex)
    {
        // Any failure during graph construction is reported as a single error code.
        SendDebugMessageWithException(ex.Message, ex);
        return DSStreamResultCodes.ErrorExceptionOccurred;
    }

    return DSStreamResultCodes.OK;
}
/// <summary>
/// Builds the DirectShow conversion graph for an ordinary video file (anything except
/// .wtv/.dvr-ms). The WM ASF writer is added first, the file is rendered into the graph,
/// then the negotiated media type on the writer's video input pin is inspected to obtain
/// the source frame size before (re)configuring the writer with the selected WM profile.
/// </summary>
/// <param name="strq">Streaming request; FileName is read here and the whole request is
/// forwarded to ConfigureASFWriter for profile configuration.</param>
/// <returns>A <c>DSStreamResultCodes</c> value: OK on success, otherwise the failure category.</returns>
DSStreamResultCodes InitWithVideoFile(WTVStreamingVideoRequest strq)
{
    UsingSBEFilter = false; // Not using stream buffer

    // Init variables
    // NOTE(review): pin, dPin/sName/dName/sPin and txtOutputFNPath are never read below —
    // candidates for removal in a behavior-changing pass.
    IPin[] pin = new IPin[1];
    string dPin = string.Empty;
    string sName = string.Empty;
    string dName = string.Empty;
    string sPin = string.Empty;
    FileInfo fiInputFile = new FileInfo(strq.FileName);
    string txtOutputFNPath = fiInputFile.FullName + ".wmv";

    // Stream-buffer containers are handled by InitWithStreamBufferFile instead.
    if ( (fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) || (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms")) )
        return DSStreamResultCodes.ErrorInvalidFileType;

    int hr = 0;

    try
    {
        // Get the graphbuilder interface
        SendDebugMessage("Creating Graph Object", 0);
        IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

        // Create an ASF writer filter
        SendDebugMessage("Creating ASF Writer", 0);
        WMAsfWriter asf_filter = new WMAsfWriter();
        dc.Add(asf_filter); // CHECK FOR ERRORS
        currentOutputFilter = (IBaseFilter)asf_filter; // class variable

        // Add the ASF filter to the graph
        hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
        DsError.ThrowExceptionForHR(hr);

        // Set the filename
        SendDebugMessage("Setting filename", 0);
        IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
        string destPathFN = fiInputFile.FullName + ".wmv";
        hr = sinkFilter.SetFileName(destPathFN, null);
        DsError.ThrowExceptionForHR(hr);

        // Handy to have an ACM Wrapper filter hanging around for AVI files with MP3 audio
        SendDebugMessage("Adding ACM Wrapper", 0);
        IBaseFilter ACMFilter = FilterDefinition.AddToFilterGraph(FilterDefinitions.Other.ACMWrapperFilter, ref graphbuilder);
        dc.Add(ACMFilter);

        // Render file - then build graph
        SendDebugMessage("Rendering file", 0);
        graphbuilder.RenderFile(fiInputFile.FullName, null);
        SendDebugMessage("Saving graph", 0);
        FilterGraphTools.SaveGraphFile(graphbuilder, "C:\\ProgramData\\RemotePotato\\lastfiltergraph.grf");

        // Are both our ASF pins connected?
        IPin ASFVidInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
        IPin ASFAudInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);

        // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
        SendDebugMessage("Run graph for testing purposes", 0);
        IMediaControl tempControl = (IMediaControl)graphbuilder;
        IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
        DsError.ThrowExceptionForHR(tempControl.Pause());
        EventCode pEventCode;
        hr = tempEvent.WaitForCompletion(1000, out pEventCode);

        // Get media type from vid input pin for ASF writer
        AMMediaType pmt = new AMMediaType();
        hr = ASFVidInputPin.ConnectionMediaType(pmt);
        FrameSize SourceFrameSize = null;
        if (pmt.formatType == FormatType.VideoInfo2)
        {
            // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
            VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
            Marshal.PtrToStructure(pmt.formatPtr, pvih2);
            SourceFrameSize = new FrameSize(pvih2.BmiHeader.Width, pvih2.BmiHeader.Height);
        }
        else if (pmt.formatType == FormatType.VideoInfo) //{05589f80-c356-11ce-bf01-00aa0055595a}
        {
            VideoInfoHeader pvih = new VideoInfoHeader();
            Marshal.PtrToStructure(pmt.formatPtr, pvih);
            SourceFrameSize = new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
        }
        else
            SourceFrameSize = new FrameSize(200, 200); // SQUARE

        // Stop graph if necessary
        FilterState pFS;
        hr = tempControl.GetState(1000, out pFS);
        if (pFS != FilterState.Stopped)
            DsError.ThrowExceptionForHR(tempControl.Stop());

        // Free up media type
        DsUtils.FreeAMMediaType(pmt); pmt = null;

        // (re)Configure the ASF writer with the selected WM Profile
        ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

        // Release pins
        SendDebugMessage("Releasing COM objects (pins)", 0);
        // source
        // NOTE(review): FindPinByMediaType may have returned null for either pin; a null
        // here would make ReleaseComObject throw (handled by the catch below) — confirm.
        Marshal.ReleaseComObject(ASFVidInputPin); ASFVidInputPin = null;
        Marshal.ReleaseComObject(ASFAudInputPin); ASFAudInputPin = null;
    }
    catch (Exception ex)
    {
        // Any failure during graph construction is reported as a single error code.
        SendDebugMessageWithException(ex.Message, ex);
        return DSStreamResultCodes.ErrorExceptionOccurred;
    }

    return DSStreamResultCodes.OK;
}