TIntVar KVZ; // Count of requests that have entered the device
#endregion Fields

#region Constructors
/// <summary>
/// Input device of a compute node: registers its model variables and attaches
/// statistics collectors to the queue length and the busy flag.
/// </summary>
/// <param name="parent">Owning compute node (VU).</param>
/// <param name="name">Model-object name passed to the base constructor.</param>
public UstroystvoVvoda(VU parent, string name) : base(parent, name)
{
    #region Инициализация переменных объектов модели
    // Model variables: processed/entered counters, busy reference, input queue.
    KOZ = InitModelObject<TIntVar>();
    KVZ = InitModelObject<TIntVar>();
    Zanyatost = InitModelObject<TRefVar<VS.Zayavka>>();
    Que = InitModelObject<SimpleModelList<QRec>>();
    parentVU = parent;
    #endregion

    #region Инициализация сборщиков статистики
    // Statistics: variance/min/max of the queue length, busy-time collector.
    Variance_QueCount = InitModelObject<Variance<int>>();
    Variance_QueCount.ConnectOnSet(Que.Count);
    Min_QueCount = InitModelObject<Min<int>>();
    Min_QueCount.ConnectOnSet(Que.Count);
    Max_QueCount = InitModelObject<Max<int>>();
    Max_QueCount.ConnectOnSet(Que.Count);
    Zanyto = InitModelObject<BoolCollector>();
    Zanyto.ConnectOnSet(Zanyatost);
    #endregion
}
/// <summary>
/// Compares the public instance properties of two objects and returns the list
/// of differences, skipping properties that are null/default on both sides.
/// </summary>
/// <param name="compareObject">First object.</param>
/// <param name="withObject">Second object.</param>
/// <returns>One <c>Variance</c> per differing property.</returns>
public static List<Variance> CompareGetVariance<T>(this T compareObject, T withObject)
{
    var variances = new List<Variance>();
    var properties = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance);
    foreach (var property in properties)
    {
        var variance = new Variance
        {
            PropertyType = Nullable.GetUnderlyingType(property.PropertyType) ?? property.PropertyType,
            PropertyName = property.Name,
            Object1 = property.GetValue(compareObject),
            Object2 = property.GetValue(withObject)
        };
        variance.Default = GetDefaultValue(variance.PropertyType);

        // FIX: the original used `==` on boxed object values, which compares
        // references; boxed value types (e.g. int 0 vs default 0) never matched,
        // so the default-value short-circuits were ineffective. Use Equals.
        bool firstIsDefault = variance.Object1 == null || Equals(variance.Object1, variance.Default);
        bool secondIsDefault = variance.Object2 == null || Equals(variance.Object2, variance.Default);

        // Both sides null/default: no meaningful difference.
        if (firstIsDefault && secondIsDefault)
        {
            continue;
        }
        // Exactly one side null/default: record the difference.
        if (firstIsDefault != secondIsDefault)
        {
            variances.Add(variance);
            continue;
        }
        // Both sides set: record only when the values differ.
        if (!variance.Object1.Equals(variance.Object2))
        {
            variances.Add(variance);
        }
    }
    return variances;
}
/// <summary>
/// Reflects over all instance/static, public/non-public properties of
/// <paramref name="val1"/> and returns those whose values differ from
/// <paramref name="val2"/>. Returns null if reflection fails (best-effort
/// contract preserved from the original implementation).
/// </summary>
public static List<Variance> DetailedCompare<T>(this T val1, T val2)
{
    try
    {
        var variances = new List<Variance>();
        PropertyInfo[] fi = val1.GetType().GetProperties(
            BindingFlags.Instance | BindingFlags.Static | BindingFlags.NonPublic | BindingFlags.Public);
        foreach (PropertyInfo f in fi)
        {
            var v = new Variance();
            v.Prop = f.Name;
            v.valA = f.GetValue(val1);
            v.valB = f.GetValue(val2);
            // static Equals handles null on either side.
            if (!Equals(v.valA, v.valB))
            {
                variances.Add(v);
            }
        }
        return variances;
    }
    catch (Exception)
    {
        // FIX: the original captured ex.ToString() into an unused local and
        // silently returned null. The null-on-error contract is preserved for
        // callers, but the dead code is removed.
        return null;
    }
}
/// <summary>
/// <para>(Accord .NET internal call)</para>
/// Replaces each pixel with the variance of its neighborhood, which enhances
/// borders and makes the result usable as an edge-detection map.
/// </summary>
/// <param name="img">Image.</param>
/// <param name="radius">The radius neighborhood used to compute a pixel's local variance.</param>
/// <returns>Processed image.</returns>
public static TColor[,] Variance<TColor>(this TColor[,] img, int radius = 2)
    where TColor : struct, IColor3<byte>
{
    return img.ApplyFilter(new Variance(radius));
}
// Produces the element-wise average (and variance) of a list of sound features.
// ASSUMES all features share identical time values; the time axis is copied
// from the first feature. An empty input yields an empty SoundFeature.
public static SoundFeature GenerateAverage(List<SoundFeature> soundFeatureList)
{
    var result = new SoundFeature();
    if (soundFeatureList.Count == 0)
    {
        return result;
    }

    var template = soundFeatureList[0];
    result.Name = template.Name;
    int size = template.TimeList.Count;
    result.SetSize(size);

    // Copy the shared time axis from the first feature.
    for (int j = 0; j < size; j++)
    {
        result.TimeList[j] = template.TimeList[j];
    }

    // For each time index, gather the value across all features, then store
    // the mean and the variance of that column.
    for (int j = 0; j < size; j++)
    {
        var column = soundFeatureList.Select(f => f.ValueList[j]).ToList();
        result.ValueList[j] = column.Average();
        result.VarianceList[j] = Variance.Compute(column);
    }
    return result;
}
/// <summary>
/// Gets a json string of the changes based on the difference between an old value and a new value
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="oldValue"></param>
/// <param name="newValue"></param>
/// <returns>json serialized string</returns>
public string DetailedCompare<T>(T oldValue, T newValue)
{
    // Deep-compare the two values (up to 999 differences, case sensitive).
    var config = new ComparisonConfig
    {
        MaxDifferences = 999,
        CaseSensitive = true,
    };
    ComparisonResult result = new CompareLogic(config).Compare(oldValue, newValue);

    IList<Variance> variances = new List<Variance>();
    if (!result.AreEqual)
    {
        foreach (var difference in result.Differences)
        {
            variances.Add(new Variance
            {
                Property = difference.PropertyName,
                OldValue = difference.Object1Value,
                NewValue = difference.Object2Value
            });
        }
    }
    // Equal inputs serialize to an empty array.
    return JsonConvert.SerializeObject(variances);
}
/// <summary>
/// Compares the public properties of two objects of the same runtime type and
/// returns one <c>Variance</c> per property whose values differ.
/// </summary>
public static List<Variance> DetailedCompare<T>(this T left, T right)
{
    var variances = new List<Variance>();
    PropertyInfo[] leftProps = left.GetType().GetProperties();
    foreach (PropertyInfo prop in leftProps)
    {
        var v = new Variance();
        v.PropertyName = prop.Name;
        v.Left = prop.GetValue(left);
        v.Right = prop.GetValue(right);
        // FIX: the original skipped the property whenever Left was null, so a
        // null-vs-non-null difference was silently dropped (asymmetric result
        // depending on argument order). static Equals handles nulls correctly.
        if (!Equals(v.Left, v.Right))
        {
            variances.Add(v);
        }
    }
    return variances;
}
/// <summary>
/// Data-transmission channel between compute nodes: registers model variables
/// and attaches statistics collectors to both queues and the busy flag.
/// </summary>
/// <param name="parent">Owning system model (VS).</param>
/// <param name="name">Model-object name passed to the base constructor.</param>
public KPD(VS parent, string name) : base(parent, name)
{
    #region Инициализация переменных объектов модели
    // Model variables: counters, busy reference, inbound and return queues.
    KOZ = InitModelObject<TIntVar>();
    KPZ = InitModelObject<TIntVar>();
    Zanyatost = InitModelObject<TRefVar<VS.Zayavka>>();
    Q_Vhod = InitModelObject<SimpleModelList<QRec>>();
    Q_Vozvrat = InitModelObject<SimpleModelList<QRec>>();
    #endregion

    #region Инициализация сборщиков статистики
    // Variance/min/max collectors track the length of each queue over time.
    Variance_Q_Vhod = InitModelObject<Variance<int>>();
    Variance_Q_Vhod.ConnectOnSet(Q_Vhod.Count);
    Min_Q_Vhod = InitModelObject<Min<int>>();
    Min_Q_Vhod.ConnectOnSet(Q_Vhod.Count);
    Max_Q_Vhod = InitModelObject<Max<int>>();
    Max_Q_Vhod.ConnectOnSet(Q_Vhod.Count);
    Variance_Q_Vozvrat = InitModelObject<Variance<int>>();
    Variance_Q_Vozvrat.ConnectOnSet(Q_Vozvrat.Count);
    Min_Q_Vozvrat = InitModelObject<Min<int>>();
    Min_Q_Vozvrat.ConnectOnSet(Q_Vozvrat.Count);
    Max_Q_Vozvrat = InitModelObject<Max<int>>();
    Max_Q_Vozvrat.ConnectOnSet(Q_Vozvrat.Count);
    // Busy-time collector follows the channel occupancy reference.
    Zanyato = InitModelObject<BoolCollector>();
    Zanyato.ConnectOnSet(Zanyatost);
    #endregion
}
/// <summary>
/// Computes representative statistics (mean, variance, standard deviation,
/// skewness, kurtosis, min/max, count) from the nullable variates in a single
/// pass; null entries are skipped.
/// </summary>
/// <param name="source">Sequence of nullable variates.</param>
private void Compute(IEnumerable <double?> source)
{
    Mean = source.Mean();
    double variance = 0;
    double correction = 0; // running sum of deviations, used for the correction term
    double skewness = 0;   // running sum of cubed deviations
    double kurtosis = 0;   // running sum of fourth-power deviations
    double minimum = Double.PositiveInfinity;
    double maximum = Double.NegativeInfinity;
    int n = 0;
    foreach (var xi in source)
    {
        if (xi.HasValue)
        {
            double diff = xi.Value - Mean;
            double tmp = diff * diff;
            correction += diff;
            variance += tmp;
            tmp *= diff;
            skewness += tmp;
            tmp *= diff;
            kurtosis += tmp;
            if (minimum > xi)
            {
                minimum = xi.Value;
            }
            if (maximum < xi)
            {
                maximum = xi.Value;
            }
            n++;
        }
    }
    Count = n;
    if (n > 0)
    {
        Minimum = minimum;
        Maximum = maximum;
        // Corrected sample variance (divides by n - 1).
        // NOTE(review): n == 1 divides by zero here, yielding NaN/Infinity for
        // doubles — confirm callers never pass a single-element sequence.
        Variance = (variance - (correction * correction / n)) / (n - 1);
        StandardDeviation = System.Math.Sqrt(Variance);
        if (Variance.ApproximateEqual(0.0) == false)
        {
            if (n > 2)
            {
                // Bias-adjusted sample skewness.
                Skewness = (double)n / ((n - 1) * (n - 2)) * (skewness / (Variance * StandardDeviation));
            }
            if (n > 3)
            {
                // Bias-adjusted sample excess kurtosis.
                Kurtosis = (((double)n * (n + 1)) / ((n - 1) * (n - 2) * (n - 3)) * (kurtosis / (Variance * Variance))) - ((3.0 * (n - 1) * (n - 1)) / ((n - 2) * (n - 3)));
            }
        }
    }
}
public void test_Covariance_returns_correct_result()
{
    /* Covariance for testList and testList2 calculated with R: -9.925833.
     * For the test the result is rounded to four decimals. */
    Assert.AreEqual(Math.Round(-9.9258, 4), Math.Round(Variance.Covariance(testList, testList2), 4));
}
public void ProcessImageTest()
{
    // Convert a 5x5 magic square into an image, apply the Variance filter, and
    // compare the resulting matrix against precomputed reference values.
    double[,] diag = Matrix.Magic(5);

    Bitmap input;
    new MatrixToImage().Convert(diag, out input);

    // Create a new Variance filter
    Variance filter = new Variance();

    // Apply the filter
    Bitmap output = filter.Apply(input);

    double[,] actual;
    new ImageToMatrix().Convert(output, out actual);

    // FIX: removed the unused local `string str = actual.ToString(...)`, which
    // was dead code left over from debugging.
    double[,] expected =
    {
        { 0, 0, 0, 0, 0 },
        { 0.0941176470588235, 0.545098039215686, 0.396078431372549, 0.376470588235294, 0.192156862745098 },
        { 0.298039215686275, 0.376470588235294, 0.27843137254902, 0.211764705882353, 0.133333333333333 },
        { 0.317647058823529, 0.203921568627451, 0.2, 0.16078431372549, 0.109803921568627 },
        { 0.0509803921568627, 0.109803921568627, 0.16078431372549, 0.2, 0.203921568627451 }
    };

    Assert.IsTrue(expected.IsEqual(actual, 1e-6));
}
public void VarianceTest1()
{
    // Smoke test: applying the Variance filter to a known image yields a result.
    Bitmap image = Properties.Resources.lena512;
    var filter = new Variance();
    Bitmap filtered = filter.Apply(image);
    Assert.IsNotNull(filtered);
}
/// <summary>
/// <para>(Accord .NET internal call)</para>
/// Replaces each pixel with the variance of its neighborhood, which enhances
/// borders and makes the result usable as an edge-detection map.
/// </summary>
/// <param name="img">Image.</param>
/// <param name="radius">The radius neighborhood used to compute a pixel's local variance.</param>
/// <returns>Processed image.</returns>
internal static Image<TColor, TDepth> Variance<TColor, TDepth>(this Image<TColor, TDepth> img, int radius = 2)
    where TColor : IColor
    where TDepth : struct
{
    return img.ApplyFilter(new Variance(radius));
}
public void ComparesAgainstExternalDataAfterReset()
{
    // The indicator must match the external reference data both on its first
    // run and after a Reset() — i.e. Reset fully clears internal state.
    var variance = new Variance("VAR", 10);
    RunTestIndicator(variance);
    variance.Reset();
    RunTestIndicator(variance);
}
public void VarianceTest1()
{
    // Smoke test: the Variance filter applied to a cloned test image returns a bitmap.
    Bitmap source = Accord.Imaging.Image.Clone(Resources.lena512);
    Bitmap filtered = new Variance().Apply(source);
    Assert.IsNotNull(filtered);
}
/// <summary>
/// Formats the true model parameters (N, Period, Mean, Variance, the derived
/// Precision = 1/variance per component, and Pk) as a human-readable string;
/// array-valued members are JSON-serialized.
/// </summary>
public override string ToString()
{
    return(string.Format("True parameters:\n N = {0}\n Period = {1}\n Mean = {2}\n Variance = {3}\n Precision = {4}\n Pk = {5}\n\n",
        N, Period,
        JsonConvert.SerializeObject(Mean),
        JsonConvert.SerializeObject(Variance),
        JsonConvert.SerializeObject(Variance.Select(vv => 1.0 / vv)), // precision is the element-wise reciprocal of variance
        JsonConvert.SerializeObject(Pk)));
}
public void test_Covariance_null_lists_throws_MathError()
{
    /* If either Covariance parameter list is null, a MathError should be thrown
     * with the message "Parameter list cannot be a null item". */
    MathError err = Assert.Throws <MathError>(() => Variance.Covariance(testList, null));
    Assert.AreEqual("Parameter list cannot be a null item", err.error);
}
public override int GetHashCode()
{
    unchecked
    {
        // Same 397-multiplier combination (and field order) as before, so hash
        // values are unchanged; LastModifiedDate may be null.
        int hash = LastModifiedDate != null ? LastModifiedDate.GetHashCode() : 0;
        hash = (hash * 397) ^ Rating.GetHashCode();
        hash = (hash * 397) ^ Variance.GetHashCode();
        return hash;
    }
}
/// <summary>
/// Computes representative statistics (mean, variance, standard deviation,
/// skewness, kurtosis, min/max, count) from the variates in a single pass,
/// accumulating in <see cref="decimal"/> for higher accuracy.
/// </summary>
/// <param name="source">Sequence of variates.</param>
private void ComputeHighAccuracy(IEnumerable <double> source)
{
    Mean = source.Mean();
    decimal mean = (decimal)Mean;
    decimal variance = 0;
    decimal correction = 0; // running sum of deviations, used for the correction term
    decimal skewness = 0;   // running sum of cubed deviations
    decimal kurtosis = 0;   // running sum of fourth-power deviations
    decimal minimum = Decimal.MaxValue;
    decimal maximum = Decimal.MinValue;
    int n = 0;
    foreach (decimal xi in source)
    {
        decimal diff = xi - mean;
        decimal tmp = diff * diff;
        correction += diff;
        variance += tmp;
        tmp *= diff;
        skewness += tmp;
        tmp *= diff;
        kurtosis += tmp;
        if (minimum > xi)
        {
            minimum = xi;
        }
        if (maximum < xi)
        {
            maximum = xi;
        }
        n++;
    }
    Count = n;
    Minimum = (double)minimum;
    Maximum = (double)maximum;
    // Corrected sample variance (divides by n - 1).
    // NOTE(review): unlike the double-based sibling, decimal division throws
    // DivideByZeroException when n == 0 (and n == 1 zeroes the denominator) —
    // confirm callers guarantee at least two elements.
    Variance = (double)(variance - (correction * correction / n)) / (n - 1);
    StandardDeviation = Math.Sqrt(Variance);
    if (Variance.ApproximateEqual(0.0) == false)
    {
        if (n > 2)
        {
            // Bias-adjusted sample skewness.
            Skewness = (double)n / ((n - 1) * (n - 2)) * ((double)skewness / (Variance * StandardDeviation));
        }
        if (n > 3)
        {
            // Bias-adjusted sample excess kurtosis.
            Kurtosis = (((double)n * (n + 1)) / ((n - 1) * (n - 2) * (n - 3)) * ((double)kurtosis / (Variance * Variance))) - ((3.0 * (n - 1) * (n - 1)) / ((n - 2) * (n - 3)));
        }
    }
}
public void test_Covariance_different_Counts_throw_MathError()
{
    /* Lists of different lengths must raise MathError with the message
     * "Variable lists not the same length". */
    List <double> list1 = new List <double>();
    list1.Add(2.2); // single element vs. the longer shared testList
    MathError err = Assert.Throws <MathError>(() => Variance.Covariance(testList, list1));
    Assert.AreEqual("Variable lists not the same length", err.error);
}
/// <summary>
/// Creates a type parameter with the given name, ordinal position, constraint
/// flags and variance, delegating shared setup to the private base constructor.
/// </summary>
/// <param name="name">Type-parameter name (e.g. "T").</param>
/// <param name="ordinal">Zero-based position in the type-parameter list.</param>
/// <param name="hasConstructorConstraint">True for a new() constraint.</param>
/// <param name="hasReferenceTypeConstraint">True for a class constraint.</param>
/// <param name="hasValueTypeConstraint">True for a struct constraint.</param>
/// <param name="variance">Declared variance (in/out/none).</param>
public RDomTypeParameter(string name, int ordinal = 0, bool hasConstructorConstraint = false,
    bool hasReferenceTypeConstraint = false, bool hasValueTypeConstraint = false,
    Variance variance = Variance.None)
    : this(null, null, null)
{
    _name = name;
    _ordinal = ordinal;
    _hasConstructorConstraint = hasConstructorConstraint;
    _hasReferenceTypeConstraint = hasReferenceTypeConstraint;
    _hasValueTypeConstraint = hasValueTypeConstraint;
    _variance = variance;
}
/// <summary>
/// Maps a <c>Variance</c> value back to its variance-keyword
/// <c>SyntaxKind</c> via the GenericVarianceMap lookup table; returns
/// <c>SyntaxKind.None</c> when no mapping exists.
/// </summary>
public static SyntaxKind VarianceKindFromVariance(Variance variance)
{
    foreach (var tuple in GenericVarianceMap)
    {
        // tuple pairs a SyntaxKind (Item1) with its Variance value (Item2).
        if (tuple.Item2 == variance)
        {
            return(tuple.Item1);
        }
    }
    return(SyntaxKind.None);
}
public void test_Covariance_0_Count_lists_throws_MathError()
{
    /* Both lists empty: MathError should be thrown with the message
     * "Parameter lists have 0 items". */
    List<double> list1 = new List<double>();
    List<double> list2 = new List<double>();
    MathError err = Assert.Throws<MathError>(() => Variance.Covariance(list1, list2));
    Assert.AreEqual("Parameter lists have 0 items", err.error);
    // FIX: removed the trailing dead assignments `list1 = null; list2 = null;`
    // — the locals go out of scope immediately, so nulling them had no effect.
}
/// <summary>
/// Distribution equality: another distribution is equal when it is also a
/// univariate continuous distribution whose variance AND mean both match.
/// </summary>
public bool Equals(IDistribution other)
{
    var d = other as UnivariateContinuousDistribution;
    if (d == null)
    {
        // Incompatible type: not a UnivariateContinuousDistribution.
        return false;
    }
    // FIX: the original returned !(Variance.EqualsTo(...) || Mean.EqualsTo(...)),
    // i.e. it reported "equal" exactly when BOTH statistics differed. Require
    // both to match instead, consistent with the sibling
    // MultivariateDiscreteDistribution.Equals implementation.
    return Variance.EqualsTo(d.Variance) && Mean.EqualsTo(d.Mean);
}
public void test_StandardDeviation_throws_MathError_with_0_items_list()
{
    /* An empty list must make StandardDeviation throw MathError (the original
     * comment claimed it "should return 0", but the assertion below expects a
     * throw). The shared list is cleared and restored afterwards. */
    testList.Clear();
    Assert.Throws <MathError>(() => Variance.StandardDeviation(testList));
    // Restore the shared fixture list for subsequent tests.
    testList.Add(2.2);
    testList.Add(3.5);
    testList.Add(3);
    testList.Add(14);
}
/// <summary>
/// Builds a mixture of continuous distributions with optional weights.
/// Missing weights default via <c>GetDefaultWeights</c>; supplied weights are
/// normalized to sum to 1. Precomputes median, mean, variance and a lazy
/// string representation.
/// </summary>
/// <param name="distributions">Component distributions (non-empty, no nulls).</param>
/// <param name="weights">Optional positive weights, one per component.</param>
public MixtureDistribution([NotNull, ItemNotNull] IReadOnlyList <IContinuousDistribution> distributions, [CanBeNull] IReadOnlyList <double> weights = null)
{
    Assertion.NotNullOrEmpty(nameof(distributions), distributions);
    Assertion.ItemNotNull(nameof(distributions), distributions);
    bool isWeighted = weights != null;
    weights ??= GetDefaultWeights(distributions);
    Assertion.NotNullOrEmpty(nameof(weights), weights);
    Assertion.Equal($"{nameof(distributions)}.Length", distributions.Count, $"{nameof(weights)}.Length", weights.Count);
    Assertion.Positive(nameof(weights), weights);

    double totalWeight = weights.Sum();
    // FIX: the original condition was `< 1e-9`, which normalized only when the
    // total was already ~1 (a no-op) and silently left genuinely unnormalized
    // weights untouched. Normalize whenever the total deviates from 1.
    if (Math.Abs(totalWeight - 1) > 1e-9)
    {
        weights = weights.Select(w => w / totalWeight).ToArray();
    }

    n = distributions.Count;
    this.distributions = distributions;
    this.weights = weights;

    // Quantiles come from numerically inverting the mixture CDF.
    inverseCdf = new InverseMonotonousFunction(Cdf);
    Median = Quantile(0.5);
    // Mixture moments: E[X] = sum w_i*mu_i; Var = sum w_i*(var_i + mu_i^2) - mu^2.
    Mean = Aggregate(d => d.Mean);
    Variance = Aggregate(d => d.Variance + d.Mean.Sqr()) - Mean * Mean;
    StandardDeviation = Variance.Sqrt();

    lazyToString = new Lazy <string>(() =>
    {
        var builder = new StringBuilder();
        builder.Append("Mix(");
        for (int i = 0; i < distributions.Count; i++)
        {
            if (i != 0)
            {
                builder.Append(";");
            }
            builder.Append(distributions[i]);
            // Only explicitly supplied weights are rendered after each component.
            if (isWeighted)
            {
                builder.Append("|");
                builder.Append(weights[i].ToStringInvariant());
            }
        }
        builder.Append(")");
        return(builder.ToString());
    });
}
/// <summary>
/// Determines whether two constructed generic types are "equal" for variance
/// purposes: same generic definition, interface or delegate, and each type
/// argument compatible under the declared variance (identical when invariant,
/// implicit reference conversion in the appropriate direction when co- or
/// contravariant).
/// </summary>
public static bool IsEqual(TypeSpec type1, TypeSpec type2)
{
    if (!type1.IsGeneric || !type2.IsGeneric)
    {
        return(false);
    }

    // Both must be constructed from the same generic type definition.
    var target_type_def = type2.MemberDefinition;
    if (type1.MemberDefinition != target_type_def)
    {
        return(false);
    }

    var t1_targs = type1.TypeArguments;
    var t2_targs = type2.TypeArguments;
    var targs_definition = target_type_def.TypeParameters;

    // Variance only applies to interfaces and delegates.
    if (!type1.IsInterface && !type1.IsDelegate)
    {
        return(false);
    }

    for (int i = 0; i < targs_definition.Length; ++i)
    {
        Variance v = targs_definition[i].Variance;
        if (v == Variance.None)
        {
            // Invariant position: the arguments must be identical.
            if (t1_targs[i] == t2_targs[i])
            {
                continue;
            }
            return(false);
        }

        if (v == Variance.Covariant)
        {
            // Covariant: t1's argument must convert to t2's.
            if (!Convert.ImplicitReferenceConversionExists(new EmptyExpression(t1_targs[i]), t2_targs[i]))
            {
                return(false);
            }
        }
        else if (!Convert.ImplicitReferenceConversionExists(new EmptyExpression(t2_targs[i]), t1_targs[i]))
        {
            // Contravariant: conversion must exist in the opposite direction.
            return(false);
        }
    }

    return(true);
}
/// <summary>
/// Distribution equality: true only for another multivariate discrete
/// distribution whose variance, mean, covariance and dimension all match.
/// </summary>
public bool Equals(IDistribution other)
{
    // Incompatible type: not a MultivariateDiscreteDistribution.
    if (!(other is MultivariateDiscreteDistribution d))
    {
        return false;
    }
    return Variance.EqualsTo(d.Variance)
        && Mean.EqualsTo(d.Mean)
        && Covariance.EqualsTo(d.Covariance)
        && Dimension == d.Dimension;
}
/// <summary>
/// Immutable description of a generic argument: its resolved type, declared
/// name, position, variance, origin and constraint restrictions.
/// </summary>
/// <param name="type">Resolved argument type.</param>
/// <param name="name">Declared type-parameter name.</param>
/// <param name="index">Zero-based position in the parameter list.</param>
/// <param name="variance">Declared variance (in/out/none).</param>
/// <param name="genericSource">Where the parameter was declared.</param>
/// <param name="restrictions">Constraint restrictions on the parameter.</param>
public GenericArgumentInfo(
    Type type,
    string name,
    int index,
    Variance variance,
    GenericSource genericSource,
    GenericArgumentRestriction[] restrictions)
{
    Type = type;
    Name = name;
    Index = index;
    Variance = variance;
    GenericSource = genericSource;
    Restrictions = restrictions;
}
public void ResetsProperly()
{
    // Warm the indicator until it is ready, then verify Reset() returns it to
    // its pristine default state.
    var date = DateTime.Today;
    var variance = new Variance("VAR", 10);
    foreach (var data in TestHelper.GetTradeBarStream("spy_var.txt"))
    {
        variance.Update(date, data.Close);
    }
    Assert.IsTrue(variance.IsReady);
    variance.Reset();
    TestHelper.AssertIndicatorIsInDefaultState(variance);
}
/// <summary>
/// Loads variance rate records from the embedded "variance" XML resource into
/// <c>VarianceRates</c>. Sets <c>Exists</c> when at least one record is found;
/// any failure is captured in <c>Error</c>/<c>ErrorMessage</c> instead of
/// propagating.
/// </summary>
public void Setup()
{
    Exists = false;
    Error = false;
    ErrorMessage = "";
    VarianceRates = new List<Variance>();
    try
    {
        var resourceStream = Common.Properties.Resources.ResourceManager.GetObject("variance");
        if (resourceStream == null)
        {
            throw new ApplicationException("Resource stream for variance.xml is null");
        }

        // FIX: the StringReader/XmlTextReader were never disposed; wrap them in
        // using blocks so the readers are released even if parsing throws.
        using (var stringReader = new StringReader(resourceStream.ToString()))
        using (var resourceXmlTextReader = new XmlTextReader(stringReader))
        {
            // Build a XmlDocument to load the XML information into
            XmlDocument docXML = new XmlDocument();
            docXML.Load(resourceXmlTextReader);
            XmlNodeList nlXMLData = docXML.SelectNodes("//variances/variance_record");

            // Only build out the records if they exist in the XML information.
            if (nlXMLData.Count > 0)
            {
                foreach (XmlNode xmlNode in nlXMLData)
                {
                    // Each record: doc type char, min/max amounts, percent.
                    Variance variance = new Variance();
                    variance.DocType = Convert.ToChar(xmlNode["var_doc_type"].InnerText);
                    variance.MaxAmount = Convert.ToDecimal(xmlNode["var_max_amount"].InnerText);
                    variance.MinAmount = Convert.ToDecimal(xmlNode["var_min_amount"].InnerText);
                    variance.Percent = Convert.ToDecimal(xmlNode["var_percent"].InnerText);
                    VarianceRates.Add(variance);
                }
                Exists = true;
            }
        }
    }
    catch (Exception exp)
    {
        Error = true;
        ErrorMessage = exp.Message;
    }
}
/// <summary>
/// Renders every summary statistic (min, max, range, median, mode, sum,
/// average, RMS, variance, standard deviation) as "Name: value" lines using
/// the supplied numeric format.
/// </summary>
/// <param name="format">Format provider/spec passed to each value's ToString.</param>
public string ToString(IFormatProvider format)
{
    StringBuilder sb = new StringBuilder();
    sb.AppendFormat("{0}: {1}\r\n", nameof(Minimum), Minimum.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(Maximum), Maximum.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(Range), Range.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(Median), Median.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(Mode), Mode.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(Sum), Sum.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(Average), Average.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(RootMeanSquare), RootMeanSquare.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(Variance), Variance.ToString(format));
    sb.AppendFormat("{0}: {1}\r\n", nameof(StandardDeviation), StandardDeviation.ToString(format));
    return(sb.ToString());
}
// NOTE(review): this snippet is visibly truncated — the method body below is
// missing its equality check, return statement and closing braces. Code is
// left byte-identical pending recovery of the full definition.
public static List <Variance> DetailedCompare <T>(this T val1, T val2)
{
    List <Variance> variances = new List <Variance>();
    PropertyInfo[] fi = val1.GetType().GetProperties();
    foreach (PropertyInfo f in fi)
    {
        Variance v = new Variance();
        v.Prop = f.Name;
        v.valA = f.GetValue(val1);
        v.valB = f.GetValue(val2);
        // Record a difference when exactly one side is null.
        if ((v.valA is null && !(v.valB is null)) || (!(v.valA is null) && v.valB is null))
        {
            variances.Add(v);
        }
/// <summary>
/// Value equality for rankings: same last-modified date, rating and variance.
/// Null compares unequal; same reference compares equal immediately.
/// </summary>
public bool Equals(MatchmakingRanking other)
{
    if (other is null)
    {
        return false;
    }
    if (ReferenceEquals(other, this))
    {
        return true;
    }
    return Equals(LastModifiedDate, other.LastModifiedDate)
        && Rating.Equals(other.Rating)
        && Variance.Equals(other.Variance);
}
/// <summary>
/// Queueing-system model: registers counters, queues, arrival/service random
/// streams and statistics collectors for queue lengths, times-in-system and
/// channel occupancy.
/// </summary>
/// <param name="parent">Owning model.</param>
/// <param name="name">Model-object name passed to the base constructor.</param>
public SMOModel(Model parent, string name) : base(parent, name)
{
    // Model variables.
    KVZS = InitModelObject<TIntVar>();
    KVZ = InitModelObjectArray<TIntVar>(3, "");
    // FIX: KPZ was initialized twice with identical arguments; the second call
    // discarded (and leaked) the first array. Initialize it once.
    KPZ = InitModelObjectArray<TIntVar>(3, "");
    KZ = InitModelObjectArray<TBoolVar>(2, "");
    TZKO = InitModelObjectArray<TRealVar>(2, "");
    queue = InitModelObjectArray<SimpleModelList<QueueRec>>(3, "");
    TimeIn_FirstFlow = InitModelObject<TRealVar>();
    TimeIn_SecondFlow = InitModelObject<TRealVar>();

    // Random streams: Poisson arrivals, exponential service, uniform retries.
    inFlowGenerator = InitModelObjectArray<PoissonStream>(2, "");
    servFlowGenerator = InitModelObjectArray<ExpStream>(2, "");
    repeateGenerator = InitModelObject<UniformStream>();

    // Statistics collectors: queue-length variance per queue.
    Variance_QueueCount = InitModelObjectArray<Variance<int>>(3, "");
    Variance_QueueCount[0].ConnectOnSet(queue[0].Count);
    Variance_QueueCount[1].ConnectOnSet(queue[1].Count);
    Variance_QueueCount[2].ConnectOnSet(queue[2].Count);

    // Variance/min/max of time-in-system for each flow.
    Variance_TimeIn_FirstFlow = InitModelObject<Variance<double>>();
    Variance_TimeIn_FirstFlow.ConnectOnSet(TimeIn_FirstFlow);
    Variance_TimeIn_SecondFlow = InitModelObject<Variance<double>>();
    Variance_TimeIn_SecondFlow.ConnectOnSet(TimeIn_SecondFlow);
    Min_TimeIn_FirstFlow = InitModelObject<Min<double>>();
    Min_TimeIn_FirstFlow.ConnectOnSet(TimeIn_FirstFlow);
    Min_TimeIn_SecondFlow = InitModelObject<Min<double>>();
    Min_TimeIn_SecondFlow.ConnectOnSet(TimeIn_SecondFlow);
    Max_TimeIn_FirstFlow = InitModelObject<Max<double>>();
    Max_TimeIn_FirstFlow.ConnectOnSet(TimeIn_FirstFlow);
    Max_TimeIn_SecondFlow = InitModelObject<Max<double>>();
    Max_TimeIn_SecondFlow.ConnectOnSet(TimeIn_SecondFlow);

    // Channel busy-time collectors.
    Bool_Kanal = InitModelObjectArray<BoolCollector>(2, "");
    Bool_Kanal[0].ConnectOnSet(KZ[0]);
    Bool_Kanal[1].ConnectOnSet(KZ[1]);
}
TRealVar TOZ; // Request processing time in the system
#endregion Fields

#region Constructors
/// <summary>
/// Compute node (VU): creates its input/work/output sub-devices, registers
/// model variables and random streams, and attaches statistics collectors to
/// the processing-time variable.
/// </summary>
/// <param name="parent">Owning system model (VS).</param>
/// <param name="name">Model-object name passed to the base constructor.</param>
public VU(VS parent, string name) : base(parent, name)
{
    ParentVS = parent;
    // Sub-devices: input, processing and output units (names keep the
    // original runtime strings).
    UVD = new UstroystvoVvoda(this, "УВД" + "(" + name + ")");
    UR = new UstroystvoRabota(this, "УОД" + "(" + name + ")");
    UV = new UstroystvoVyvoda(this, "УВР" + "(" + name + ")");
    this.AddModelObject(UVD);
    this.AddModelObject(UR);
    this.AddModelObject(UV);

    #region Инициализация переменных объектов модели
    // Counters, processing-time variable and random streams.
    KVZ = InitModelObject<TIntVar>();
    KOZ = InitModelObject<TIntVar>();
    TOZ = InitModelObject<TRealVar>();
    KPZ = new int[3];
    Gener_Vhod = InitModelObject<ExpStream>();
    Gener_RazmerVvod = InitModelObject<UniformStream>();
    Gener_RazmerVyvoda = InitModelObject<NormalStream>();
    // Gener_RazmerRabota = InitModelObject<ExpStream>();
    #endregion

    #region Инициализация сборщиков статистики
    Variance_TOZ = InitModelObject<Variance<Double>>(); // create the collector
    Variance_TOZ.ConnectOnSet(TOZ);                     // attach it to the variable
    Min_TOZ = InitModelObject<Min<double>>();           // create the collector
    Min_TOZ.ConnectOnSet(TOZ);                          // attach it to the variable
    Max_TOZ = InitModelObject<Max<double>>();           // create the collector
    Max_TOZ.ConnectOnSet(TOZ);                          // attach it to the variable
    His_TOZ = InitModelObject<DynamicHistogram>();
    His_TOZ.ConnectOnSet(TOZ);
    #endregion
}
/// <summary>
/// Creates a zip of the test source tree's .cs files using the requested
/// compression (method or level, per <paramref name="variance"/>) and optional
/// encryption, returning the file list and per-file checksums for later
/// verification.
/// </summary>
/// <param name="zipFile">Path of the zip file to create.</param>
/// <param name="variance">Whether compressionMethodOrLevel is a level or a method.</param>
/// <param name="compressionMethodOrLevel">Boxed CompressionLevel or CompressionMethod.</param>
/// <param name="encryption">Encryption algorithm to apply when a password is given.</param>
/// <param name="password">Password, or null for no encryption.</param>
/// <param name="files">Out: the files that were zipped.</param>
/// <param name="checksums">Out: checksum per file name.</param>
private void QuickCreateZipAndChecksums(string zipFile,
                                        Variance variance,
                                        object compressionMethodOrLevel,
                                        EncryptionAlgorithm encryption,
                                        string password,
                                        out string[] files,
                                        out Dictionary<string, byte[]> checksums)
{
    string srcDir = Path.Combine(SourceDir, "Zip.Portable.Tests");
    files = Directory.GetFiles(srcDir, "*.cs", SearchOption.TopDirectoryOnly);
    // Record a checksum for every input file, keyed by file name.
    checksums = new Dictionary<string, byte[]>();
    foreach (string f in files)
    {
        var chk = TestUtilities.ComputeChecksum(f);
        var key = Path.GetFileName(f);
        checksums.Add(key, chk);
    }

    using (var zip = new ZipFile())
    {
        if (variance == Variance.Level)
        {
            // Level variance: the boxed value is a CompressionLevel.
            zip.CompressionLevel = (Ionic.Zlib.CompressionLevel)compressionMethodOrLevel;
        }
        else
        {
            // Method variance: only CompressionMethod.None needs an explicit level.
            if ((Ionic.Zip.CompressionMethod)compressionMethodOrLevel == CompressionMethod.None)
                zip.CompressionLevel = Ionic.Zlib.CompressionLevel.None;
        }

        if (password != null)
        {
            zip.Password = password;
            zip.Encryption = encryption;
        }
        zip.AddFiles(files, "");
        zip.Save(zipFile);
    }

    // Sanity check: the archive should contain a reasonable number of entries.
    int count = TestUtilities.CountEntries(zipFile);
    Assert.IsTrue(count > 5, "Unexpected number of entries ({0}) in the zip file.", count);
}
/// <summary>
/// Verifies that every entry in the zip has the compression method/level and
/// encryption expected for the given <paramref name="stage"/> (0 = original
/// save, 1 = re-save).
/// </summary>
/// <param name="zipFile">Zip file to inspect.</param>
/// <param name="variance">Whether values[] holds methods or levels.</param>
/// <param name="values">Per-stage compression values.</param>
/// <param name="a">Per-stage encryption algorithms.</param>
/// <param name="stage">Index into values/a for the expected state.</param>
/// <param name="compFlavor">Test-matrix index, used only in failure messages.</param>
/// <param name="encryptionFlavor">Test-matrix index, used only in failure messages.</param>
private void VerifyEntries(string zipFile,
                           Variance variance,
                           int[] values,
                           EncryptionAlgorithm[] a,
                           int stage,
                           int compFlavor,
                           int encryptionFlavor)
{
    using (var zip = FileSystemZip.Read(zipFile))
    {
        foreach (ZipEntry e in zip)
        {
            var compCheck = false;
            if (variance == Variance.Method)
            {
                // Method variance: the stored method must match directly.
                compCheck = (e.CompressionMethod == (CompressionMethod)values[stage]);
            }
            else
            {
                // Variance.Level: level None maps to method None, anything else
                // is stored as Deflate.
                CompressionMethod expectedMethod =
                    ((Ionic.Zlib.CompressionLevel)values[stage] == Ionic.Zlib.CompressionLevel.None)
                        ? CompressionMethod.None
                        : CompressionMethod.Deflate;
                compCheck = (e.CompressionMethod == expectedMethod);
            }

            Assert.IsTrue(compCheck,
                          "Unexpected compression method ({0}) on entry ({1}) variance({2}) flavor({3},{4}) stage({5})",
                          e.CompressionMethod, e.FileName, variance, compFlavor, encryptionFlavor, stage);

            var cryptoCheck = (e.Encryption == a[stage]);
            Assert.IsTrue(cryptoCheck,
                          "Unexpected encryption ({0}) on entry ({1}) variance({2}) flavor({3},{4}) stage({5})",
                          e.Encryption, e.FileName, variance, compFlavor, encryptionFlavor, stage);
        }
    }
}
/// <summary>
/// Creates a type parameter declared on <paramref name="decl"/>, carrying its
/// constraints, attributes and declared variance.
/// </summary>
public TypeParameter (DeclSpace parent, DeclSpace decl, string name,
                      Constraints constraints, Attributes attrs, Variance variance, Location loc)
    : base (parent, new MemberName (name, loc), attrs)
{
    this.decl = decl;
    this.constraints = constraints;
    this.variance = variance;
}
public void ComparesAgainstExternalData()
{
    // A 10-period Variance indicator must match the reference "Var" column in
    // spy_var.txt to within 1e-3.
    var variance = new Variance("VAR", 10);
    TestHelper.TestIndicator(variance, "spy_var.txt", "Var", (ind, expected) => Assert.AreEqual(expected, (double)ind.Current.Value, 1e-3));
}
Variance<Double> Variance_TOZ; // mean and variance collector
#endregion Fields

#region Constructors
/// <summary>
/// System model (VS): builds three compute nodes and the three channels
/// linking them, registers model variables and attaches statistics collectors
/// to the processing-time variable plus two test collectors.
/// </summary>
/// <param name="parent">Owning model.</param>
/// <param name="name">Model-object name passed to the base constructor.</param>
public VS(Model parent, string name) : base(parent, name)
{
    #region Инициализация переменных объектов модели
    KVZ = InitModelObject<TIntVar>();
    KOZ = InitModelObject<TIntVar>();
    TOZ = InitModelObject<TRealVar>();
    // Three compute nodes (names keep the original runtime strings).
    UZEL = new VU[3];
    UZEL[0] = new VU(this, "ВУ1");
    UZEL[1] = new VU(this, "ВУ2");
    UZEL[2] = new VU(this, "ВУ3");
    this.AddModelObject(UZEL[0]);
    this.AddModelObject(UZEL[1]);
    this.AddModelObject(UZEL[2]);
    // Three channels, one per node pair; wire each channel to its endpoints.
    KANAL = new KPD[3];
    KANAL[0] = new KPD(this, "КПД(1,2)");
    KANAL[1] = new KPD(this, "КПД(1,3)");
    KANAL[2] = new KPD(this, "КПД(2,3)");
    this.AddModelObject(KANAL[0]);
    this.AddModelObject(KANAL[1]);
    this.AddModelObject(KANAL[2]);
    KANAL[0].VU_1 = UZEL[0];
    KANAL[0].VU_2 = UZEL[1];
    KANAL[1].VU_1 = UZEL[0];
    KANAL[1].VU_2 = UZEL[2];
    KANAL[2].VU_1 = UZEL[1];
    KANAL[2].VU_2 = UZEL[2];
    #endregion

    #region Инициализация сборщиков статистики
    Variance_TOZ = InitModelObject<Variance<Double>>(); // create the collector
    Variance_TOZ.ConnectOnSet(TOZ);                     // attach it to the variable
    Min_TOZ = InitModelObject<Min<double>>();           // create the collector
    Min_TOZ.ConnectOnSet(TOZ);                          // attach it to the variable
    Max_TOZ = InitModelObject<Max<double>>();           // create the collector
    Max_TOZ.ConnectOnSet(TOZ);                          // attach it to the variable
    His_TOZ = InitModelObject<DynamicHistogram>();
    His_TOZ.ConnectOnSet(TOZ);
    //--------------------------------------------------
    // Test collectors wired to two scratch variables.
    allValuesTest = InitModelObject<AllValues<int>>("Сборщик полной статистики");
    dhTest = InitModelObject<DynamicHistogram>("Динамическая гистограмма");
    testAV = InitModelObject<TIntVar>("Тестовая переменная 1");
    testDH = InitModelObject<TRealVar>("Тестовая переменная 2");
    testAV.AddCollectors_OnSet(allValuesTest);
    testDH.AddCollectors_OnSet(dhTest);
    //--------------------------------------------------
    #endregion

    /*Tracer.AddAutoTabModel(this);
    UZEL.ToList().ForEach(u => Tracer.AddAutoTabModel(u));
    KANAL.ToList().ForEach(k => Tracer.AddAutoTabModel(k));*/
}
public void ResetsProperly()
{
    // Delegate the full reset check to the shared test helper.
    var indicator = new Variance("VAR", 10);
    TestHelper.TestIndicatorReset(indicator, "spy_var.txt");
}
/// <summary>
/// Recursively validates that every type parameter occurring in
/// <paramref name="t"/> is used in a position compatible with
/// <paramref name="expected"/> variance, reporting an error on the offending
/// type parameter otherwise. Returns the expected variance (or None for
/// non-generic, non-array types).
/// </summary>
public static Variance CheckTypeVariance (TypeSpec t, Variance expected, IMemberContext member)
{
    var tp = t as TypeParameterSpec;
    if (tp != null) {
        Variance v = tp.Variance;
        // Invalid when the position is invariant but the parameter is not, or
        // when co-/contravariance is flipped relative to the position.
        if (expected == Variance.None && v != expected ||
            expected == Variance.Covariant && v == Variance.Contravariant ||
            expected == Variance.Contravariant && v == Variance.Covariant) {
            ((TypeParameter)tp.MemberDefinition).ErrorInvalidVariance (member, expected);
        }

        return expected;
    }

    if (t.TypeArguments.Length > 0) {
        var targs_definition = t.MemberDefinition.TypeParameters;
        TypeSpec[] targs = GetTypeArguments (t);
        for (int i = 0; i < targs.Length; ++i) {
            Variance v = targs_definition[i].Variance;
            // Compose variances via the numeric encoding (the enum multiplies
            // like signs: e.g. contravariant-of-contravariant is covariant).
            CheckTypeVariance (targs[i], (Variance) ((int)v * (int)expected), member);
        }

        return expected;
    }

    // Arrays: variance flows through to the element type unchanged.
    if (t.IsArray)
        return CheckTypeVariance (GetElementType (t), expected, member);

    return Variance.None;
}
/// <summary>
/// Creates a type parameter at the given position; the variance is stored on
/// the freshly created <c>TypeParameterSpec</c> rather than on this object
/// (the direct field assignment is intentionally commented out).
/// </summary>
public TypeParameter (DeclSpace parent, int index, MemberName name,
                      Constraints constraints, Attributes attrs, Variance variance)
    : base (parent, name, attrs)
{
    this.constraints = constraints;
    //          this.variance = variance;
    this.spec = new TypeParameterSpec (null, index, this, SpecialConstraint.None, variance, null);
}
/// <summary>
/// Reports compiler error CS1961: this type parameter's declared variance is
/// invalid for the position (of <paramref name="expected"/> variance) where it
/// is used in the given member.
/// </summary>
public void ErrorInvalidVariance (IMemberContext mc, Variance expected)
{
    Report.SymbolRelatedToPreviousError (mc.CurrentMemberDefinition);
    string input_variance = Variance == Variance.Contravariant ? "contravariant" : "covariant";
    string gtype_variance;
    switch (expected) {
    case Variance.Contravariant: gtype_variance = "contravariantly"; break;
    case Variance.Covariant: gtype_variance = "covariantly"; break;
    default: gtype_variance = "invariantly"; break;
    }

    // Delegates include their parameter list in the error message.
    Delegate d = mc as Delegate;
    string parameters = d != null ? d.Parameters.GetSignatureForError () : "";

    Report.Error (1961, Location,
        "The {2} type parameter `{0}' must be {3} valid on `{1}{4}'",
        GetSignatureForError (), mc.GetSignatureForError (), input_variance, gtype_variance, parameters);
}
/// <summary>
/// Pass-through variance check that accepts any usage and echoes back
/// <paramref name="v"/>. Appears to be a stub overload where no validation is
/// required — NOTE(review): confirm against the sibling overloads that do
/// recurse and report errors.
/// </summary>
public static Variance CheckTypeVariance (Type type, Variance v, IMemberContext mc)
{
    return v;
}
/// <summary>
/// End-to-end resave test: creates a zip with the stage-0 compression and
/// encryption settings, re-saves it with the stage-1 settings, verifies the
/// entries after each stage, then extracts and checks the file checksums.
/// </summary>
/// <param name="zipFile">Path of the zip file to create and resave.</param>
/// <param name="variance">Whether values[] holds compression methods or levels.</param>
/// <param name="values">Stage-0 and stage-1 compression values.</param>
/// <param name="cryptos">Stage-0 and stage-1 encryption algorithms.</param>
/// <param name="compFlavor">Test-matrix index, used in messages.</param>
/// <param name="encryptionFlavor">Test-matrix index, used in messages.</param>
private void _Internal_Resave(string zipFile,
                              Variance variance,
                              int[] values,
                              EncryptionAlgorithm[] cryptos,
                              int compFlavor,
                              int encryptionFlavor)
{
    // Create a zip file, then re-save it with changes in compression methods,
    // compression levels, and/or encryption. The methods/levels, cryptos are
    // for original and re-saved values. This tests whether we can update a zip
    // entry with changes in those properties.
    string[] passwords = new string[2];
    passwords[0] = (cryptos[0] == EncryptionAlgorithm.None) ? null : GeneratePassword();
    // Stage 1 reuses the stage-0 password when one exists.
    passwords[1] = passwords[0] ?? ((cryptos[1] == EncryptionAlgorithm.None) ? null : GeneratePassword());

    //TestContext.WriteLine("  crypto: '{0}'  '{1}'", crypto[0]??"-NONE-", passwords[1]??"-NONE-");
    TestContext.WriteLine("  crypto: '{0}'  '{1}'", cryptos[0], cryptos[1]);

    // first, create a zip file
    string[] filesToZip;
    Dictionary<string, byte[]> checksums;
    QuickCreateZipAndChecksums(zipFile, variance, values[0], cryptos[0], passwords[0], out filesToZip, out checksums);

    // check that the zip was constructed as expected
    VerifyEntries(zipFile, variance, values, cryptos, 0, compFlavor, encryptionFlavor);

    // modify some properties (CompressionLevel, CompressionMethod, and/or Encryption) on each entry
    using (var zip = FileSystemZip.Read(zipFile))
    {
        zip.Password = passwords[1];
        foreach (ZipEntry e in zip)
        {
            if (variance == Variance.Method)
                e.CompressionMethod = (CompressionMethod)values[1];
            else
                e.CompressionLevel = (Ionic.Zlib.CompressionLevel)values[1];

            e.Encryption = cryptos[1];
        }
        zip.Save(zipFile);
    }

    // Check that the zip was modified as expected
    VerifyEntries(zipFile, variance, values, cryptos, 1, compFlavor, encryptionFlavor);

    // now extract the items and verify checksums
    // Pick a fresh extraction directory name ("ex0", "ex1", ...).
    string extractDir = "ex";
    int c = 0;
    while (Directory.Exists(extractDir + c)) c++;
    extractDir += c;

    // extract
    using (var zip = FileSystemZip.Read(zipFile))
    {
        zip.Password = passwords[1];
        zip.ExtractAll(extractDir);
    }

    VerifyChecksums(extractDir, filesToZip, checksums);
}
// Runs the shared indicator test against the "Var" column of spy_var.txt,
// comparing each value to within 1e-3.
private static void RunTestIndicator(Variance var)
{
    TestHelper.TestIndicator(var, "spy_var.txt", "Var", (ind, expected) => Assert.AreEqual(expected, (double)ind.Current.Value, 1e-3));
}
/// <summary>
/// Entry point for one cell of the resave test matrix: derives the zip file
/// name and the per-stage compression values from the flavor indices, then
/// delegates to the file-based overload.
/// </summary>
/// <param name="variance">Whether the matrix varies compression method or level.</param>
/// <param name="compFlavor">Row in VariancePairs selecting the compression pair.</param>
/// <param name="encryptionFlavor">Row in CryptoPairs selecting the encryption pair.</param>
private void _Internal_Resave(Variance variance, int compFlavor, int encryptionFlavor)
{
    // Check that re-saving a zip, after modifying properties on
    // each entry, actually does what we want.
    if (encryptionFlavor == 0)
        TestContext.WriteLine("Resave workdir: {0}", TopLevelDir);

    string rootname = String.Format("Resave_Compression{0}_{1}_Encryption_{2}.zip",
                                    variance, compFlavor, encryptionFlavor);

    string zipFileToCreate = Path.Combine(TopLevelDir, rootname);
    int[] values = VariancePairs[(int)variance][compFlavor];

    TestContext.WriteLine("Resave {0} {1} {2} file({3})", variance, compFlavor, encryptionFlavor, Path.GetFileName(zipFileToCreate));

    _Internal_Resave(zipFileToCreate, variance, values, CryptoPairs[encryptionFlavor], compFlavor, encryptionFlavor);
}
/// <summary>
/// Unsupported constructor overload kept for signature compatibility — always
/// throws <see cref="NotImplementedException"/>.
/// </summary>
public TypeParameter (DeclSpace parent, DeclSpace decl, string name,
                      Constraints constraints, Attributes attrs, Variance variance, Location loc)
    : base (parent, new MemberName (name, loc), attrs)
{
    throw new NotImplementedException ();
}
/// <summary>
/// Recursively validates that every type parameter occurring in
/// <paramref name="t"/> is used in a position compatible with
/// <paramref name="expected"/> variance (reflection-Type based variant of the
/// TypeSpec overload), reporting an error on the offending parameter.
/// </summary>
public static Variance CheckTypeVariance (Type t, Variance expected, IMemberContext member)
{
    TypeParameter tp = LookupTypeParameter (t);
    if (tp != null) {
        Variance v = tp.Variance;
        // Invalid when the position is invariant but the parameter is not, or
        // when co-/contravariance is flipped relative to the position.
        if (expected == Variance.None && v != expected ||
            expected == Variance.Covariant && v == Variance.Contravariant ||
            expected == Variance.Contravariant && v == Variance.Covariant)
            tp.ErrorInvalidVariance (member, expected);

        return expected;
    }

    if (t.IsGenericType) {
        Type[] targs_definition = GetTypeArguments (DropGenericTypeArguments (t));
        Type[] targs = GetTypeArguments (t);
        for (int i = 0; i < targs_definition.Length; ++i) {
            Variance v = GetTypeParameterVariance (targs_definition[i]);
            // Compose variances via the numeric encoding (signs multiply).
            CheckTypeVariance (targs[i], (Variance) ((int)v * (int)expected), member);
        }

        return expected;
    }

    // Arrays: variance flows through to the element type unchanged.
    if (t.IsArray)
        return CheckTypeVariance (GetElementType (t), expected, member);

    return Variance.None;
}
/// <summary>
/// Creates a parsed type-parameter name carrying its attributes and declared
/// variance at the given source location.
/// </summary>
public TypeParameterName (string name, Attributes attrs, Variance variance, Location loc)
    : base (name, loc)
{
    attributes = attrs;
    this.variance = variance;
}
//
// Creates type owned type parameter
//
/// <summary>
/// Creates a type parameter owned by <paramref name="declaringType"/> at
/// position <paramref name="index"/>, with the given special constraints and
/// variance; clears the obsolete-undetected state flag.
/// </summary>
public TypeParameterSpec (TypeSpec declaringType, int index, ITypeDefinition definition,
                          SpecialConstraint spec, Variance variance, Type info)
    : base (MemberKind.TypeParameter, declaringType, definition, info, Modifiers.PUBLIC)
{
    this.variance = variance;
    this.spec = spec;
    state &= ~StateFlags.Obsolete_Undetected;
    tp_pos = index;
}
/// <summary>
/// No-op variance-error reporter. Appears to be a stub overload where no
/// diagnostic is emitted — NOTE(review): confirm against the reporting
/// implementation that raises CS1961.
/// </summary>
public void ErrorInvalidVariance (MemberCore mc, Variance v)
{
}
//
// Creates method owned type parameter
//
/// <summary>
/// Creates a method-owned type parameter (no declaring type) by delegating to
/// the type-owned constructor with a null declaring type.
/// </summary>
public TypeParameterSpec (int index, ITypeDefinition definition, SpecialConstraint spec, Variance variance, Type info)
    : this (null, index, definition, spec, variance, info)
{
}
/// <summary>
/// Maps a <c>Variance</c> value back to its variance-keyword
/// <c>SyntaxKind</c> via the GenericVarianceMap lookup table; returns
/// <c>SyntaxKind.None</c> when no mapping exists. (Duplicate of the sibling
/// overload earlier in this source.)
/// </summary>
public static SyntaxKind VarianceKindFromVariance(Variance variance)
{
    foreach (var tuple in GenericVarianceMap)
    {
        // tuple pairs a SyntaxKind (Item1) with its Variance value (Item2).
        if (tuple.Item2 == variance)
        {
            return tuple.Item1;
        }
    }
    return SyntaxKind.None;
}