/// <summary>
        /// Pre-processes a tokenized list of words (stop-word removal, digit stripping, optional case normalization). Version operating on a list of strings.
        /// </summary>
        /// <param name="words">Tokenized words to clean.</param>
        /// <param name="toLowerOption">If true, lower-cases each surviving token.</param>
        /// <param name="keepOnlyCapitalizedWords">If true, keeps only tokens that start with an upper-case letter.</param>
        /// <returns>The cleaned list of tokens.</returns>
        public static List <string> PreProcessingPipeline(List <string> words, bool toLowerOption = false, bool keepOnlyCapitalizedWords = false)
        {
            List <string> newWords = new List <string>();

            foreach (var sw in words)
            {
                if (Cleaning.IsStopWord(sw))
                {
                    continue;
                }
                string tsw = Cleaning.EliminateDigitsFromWord(sw);
                if (string.IsNullOrEmpty(tsw))
                {
                    continue;
                }
                if (toLowerOption)
                {
                    tsw = Normalization.ToLowerCaseNormalization(tsw);
                }

                if (keepOnlyCapitalizedWords)
                {
                    if (!char.IsUpper(tsw[0]))
                    {
                        continue;
                    }
                }

                newWords.Add(tsw);
            }
            return(newWords);
        }
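A minimal usage sketch for the pipeline above; the token list and option values are illustrative, and it assumes the Cleaning and Normalization helpers the method calls are available.

        // Hypothetical call: stop words are skipped, digits are stripped from each token
        // (tokens that become empty are dropped), and survivors are lower-cased.
        List<string> tokens = new List<string> { "The", "Model3", "sold", "100", "units" };
        List<string> cleaned = PreProcessingPipeline(tokens, toLowerOption: true);
        // Note: enabling keepOnlyCapitalizedWords together with toLowerOption would discard
        // everything, since tokens are lower-cased before the capitalization check.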
Example #2
        public LiteFloatCrusher(LiteFloatCompressType compressType, float min, float max, bool accurateCenter, LiteOutOfBoundsHandling outOfBoundsHandling = LiteOutOfBoundsHandling.Clamp)
        {
            this.compressType  = compressType;
            this.normalization = Normalization.None;

            // Don't allow min and max to be equal.
            if (min == max)
            {
                max++;
                Debug.LogWarning("Float crusher is being given min and max values that are the same. This likely is not intentional. Check your range values. Value is <i>" + min +
                                 "</i>, changing the max to " + max + " to avoid division by zero errors.");
            }

            if (min < max)
            {
                this.min = min;
                this.max = max;
            }
            else
            {
                this.min = max;
                this.max = min;
            }

            this.accurateCenter      = accurateCenter;
            this.outOfBoundsHandling = outOfBoundsHandling;

            Recalculate(compressType, min, max, accurateCenter, this);
        }
Example #3
        private bool IsVisiblePoint(Vector3 center, Vector3 pos, out NormalizedPoint point)
        {
            var delta  = pos - center;
            var sqrMag = delta.sqrMagnitude;

            if (sqrMag >= m_MinDistanceSqr && sqrMag <= m_MaxDistanceSqr)
            {
                var lonLat = Geometry.GetLonLat(m_ReferenceFrame, delta);
                if (m_LonLatRect.Contains(lonLat))
                {
                    float d = Mathf.Sqrt(sqrMag) - m_MinDistance; // > 0
                    // lon/lat -> norm. x/y
                    point.Position   = Rect.PointToNormalized(m_LonLatRect, lonLat);
                    point.Position.z = d / m_DistanceRange;
                    point.Position.z = m_ApplyWeight
                        ? Normalization.InvSigmoid(point.Position.z, m_NormalizationWeight)
                        : 1 - point.Position.z;                // 0 at max, 1 at min distance
                    point.DistanceRatio = m_DistanceRange / d; // 1 at max distance

                    return(true);
                }
            }

            point = default;
            return(false);
        }
Example #4
        public LiteFloatCrusher(LiteFloatCompressType compressType, Normalization normalization = Normalization.None, LiteOutOfBoundsHandling outOfBoundsHandling = LiteOutOfBoundsHandling.Clamp)
        {
            this.compressType  = compressType;
            this.normalization = normalization;

            switch (normalization)
            {
            case Normalization.None:
                this.min       = 0;
                this.max       = 1;
                accurateCenter = false;
                break;

            case Normalization.Positive:
                this.min       = 0;
                this.max       = 1;
                accurateCenter = false;
                break;

            case Normalization.Negative:
                this.min       = -1;
                this.max       = 1;
                accurateCenter = true;
                break;
            }

            this.outOfBoundsHandling = outOfBoundsHandling;
            Recalculate(compressType, min, max, accurateCenter, this);
        }
Example #5
        //////////////////////////////////////////////////////////////////////////
        // Execution
        public void     Execute(Normalization _Normalization)
        {
            if (m_InputIsSpatial && m_PlanForward == IntPtr.Zero)
            {
                m_PlanForward = fftwlib.fftwf.dft_2d(m_Width, m_Height, m_Input, m_Output, fftwlib.fftw_direction.Forward, fftwlib.fftw_flags.DestroyInput);
            }
            else if (!m_InputIsSpatial && m_PlanBackward == IntPtr.Zero)
            {
                m_PlanBackward = fftwlib.fftwf.dft_2d(m_Width, m_Height, m_Input, m_Output, fftwlib.fftw_direction.Backward, fftwlib.fftw_flags.DestroyInput);
            }

            // Copy source data to FFTW memory
            Marshal.Copy(m_UserInput, 0, m_Input, m_Width * m_Height * 2);

            // FFT
            fftwlib.fftwf.execute(m_InputIsSpatial ? m_PlanForward : m_PlanBackward);

            // Retrieve results
            Marshal.Copy(m_Output, m_UserOutput, 0, m_Width * m_Height * 2);

            if (_Normalization != Normalization.NONE)
            {
                float Normalizer = _Normalization == Normalization.DIMENSIONS_PRODUCT ? 1.0f / (m_Width * m_Height) : 1.0f / (float)Math.Sqrt(m_Width * m_Height);
                for (int i = 0; i < 2 * m_Width * m_Height; i++)
                {
                    m_UserOutput[i] *= Normalizer;
                }
            }
        }
Example #6
        public override void CollectObservations(VectorSensor sensor)
        {
            if (opponent == null)
            {
                return;
            }

            actionStep = 0;
            Transform rt          = root.transform;
            Vector3   inclination = new Vector3(rt.right.y, rt.up.y, rt.forward.y);

            sensor.AddObservation(inclination);
            sensor.AddObservation(Normalization.Sigmoid(CrntForce, 0.25f) * 2f - 1f);
            CrntForce = 0;
            // Observe positions relative to this agent's root (hips).
            // All vectors are localized (root.InverseTransformVector(v))
            // and normalized using a sigmoid function. The idea here is
            // that small value changes matter less the farther away an
            // observed object is, or the faster it is moving.
            // The sigmoid function provides a higher value resolution
            // for small vectors and asymptotes towards -1/+1.
            Vector3 rootPos = rt.position;

            // This agent.
            // Hips position = root position. Since all positions are relative to
            // this agent's root, we don't observe this, as it will always be 0/0/0.
            AddVectorObs(sensor, Hips.Velocity);
            AddVectorObs(sensor, Hips.AngularVelocity);
            AddVectorObs(sensor, Head.Position - rootPos);
            AddVectorObs(sensor, Head.Velocity);
            AddVectorObs(sensor, LeftHand.Position - rootPos);
            AddVectorObs(sensor, LeftHand.Velocity);
            AddVectorObs(sensor, RightHand.Position - rootPos);
            AddVectorObs(sensor, RightHand.Velocity);
            AddVectorObs(sensor, LeftFoot.Position - rootPos);
            AddVectorObs(sensor, LeftFoot.Velocity);
            AddVectorObs(sensor, RightFoot.Position - rootPos);
            AddVectorObs(sensor, RightFoot.Velocity);
            // Opponent agent.
            AddVectorObs(sensor, opponent.Hips.Position - rootPos);
            AddVectorObs(sensor, opponent.Hips.Velocity);
            AddVectorObs(sensor, opponent.Hips.AngularVelocity);
            AddVectorObs(sensor, opponent.Head.Position - rootPos);
            AddVectorObs(sensor, opponent.Head.Velocity);
            AddVectorObs(sensor, opponent.LeftHand.Position - rootPos);
            AddVectorObs(sensor, opponent.LeftHand.Velocity);
            AddVectorObs(sensor, opponent.RightHand.Position - rootPos);
            AddVectorObs(sensor, opponent.RightHand.Velocity);
            AddVectorObs(sensor, opponent.LeftFoot.Position - rootPos);
            AddVectorObs(sensor, opponent.LeftFoot.Velocity);
            AddVectorObs(sensor, opponent.RightFoot.Position - rootPos);
            AddVectorObs(sensor, opponent.RightFoot.Velocity);
            // Normalized rotations (wrapped eulers / 180).
            sensor.AddObservation(GetJointObs());
            // Normalized distances.
            sensor.AddObservation(GetRayObs());

            // TODO AddObservation DownStepCount / MaxDownSteps
        }
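The comment block above leans on a sigmoid squashing function to keep observations bounded. The project's Normalization.Sigmoid is not shown here, so the version below is only a plausible stand-alone sketch of that idea (an assumption, not the project's actual implementation).

        // Logistic squash: returns values in (0, 1), with the most resolution near zero.
        // The caller above rescales with "* 2f - 1f" to land in (-1, 1).
        public static float SigmoidSketch(float value, float steepness = 1f)
        {
            return 1f / (1f + Mathf.Exp(-steepness * value));
        }

        // Vector variant: squash the magnitude and keep the direction, so small vectors
        // retain more resolution than large ones and the result stays inside the unit ball.
        public static Vector3 SigmoidSketch(Vector3 v, float steepness = 1f)
        {
            float mag = v.magnitude;
            return mag > 0f ? v * ((2f / (1f + Mathf.Exp(-steepness * mag)) - 1f) / mag) : Vector3.zero;
        }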
Example #7
 public override void CollectObservations(VectorSensor sensor)
 {
     sensor.AddObservation(m_Car.Throttle);
     sensor.AddObservation(m_Car.Steering);
     sensor.AddObservation(m_Car.Gyro);
     sensor.AddObservation(Normalization.Sigmoid(m_Car.LocalSpin));
     sensor.AddObservation(Normalization.Sigmoid(m_Car.LocalVelocity));
 }
     /// <include file='../../docs.xml'
     /// path='docs/doc[@name="M:PeterO.Text.NormalizingCharacterInput.#ctor(System.String,System.Int32,System.Int32,PeterO.Text.Normalization)"]/*'/>
     public NormalizingCharacterInput(
         string str,
         int index,
         int length,
         Normalization form)
     {
         this.nci = new NormalizerInput(str, index, length, form);
     }
Example #9
        public void SizeRestrictionDefaultValue_Normalization()
        {
            Normalization normalization;

            normalization = new Normalization();
            Assert.AreEqual(NormalizationValue.None, normalization.General, "General is incorrect.");
            Assert.AreEqual(NormalizationValue.None, normalization.Storage, "Storage is incorrect.");
        }
        private void DrawNormalizationCurve()
        {
            Rect rect = GUILayoutUtility.GetRect(10, 1000, 50, 50);

            int   w = Mathf.CeilToInt(rect.width / 50);
            float y = rect.height / 2;

            if (Event.current.type == EventType.Repaint)
            {
                GUI.BeginClip(rect);
                GL.PushMatrix();

                GL.Clear(true, false, Color.black);
                m_GLMaterial.SetPass(0);

                GL.Begin(GL.QUADS);
                GL.Color(Color.black);
                GL.Vertex3(0, 0, 0);
                GL.Vertex3(rect.width, 0, 0);
                GL.Vertex3(rect.width, rect.height, 0);
                GL.Vertex3(0, rect.height, 0);
                GL.End();

                float weight = m_Comp.NormalizationWeight;

                GL.Begin(GL.LINES);
                GL.Color(m_CurveColor);
                for (int x = 0; x <= rect.width; x++)
                {
                    float t = x / rect.width;
                    float s = Normalization.InvSigmoid(t, weight);
                    GL.Vertex3(x, rect.height, 0);
                    GL.Vertex3(x, rect.height - s * rect.height, 0);
                }

                GL.Color(Color.grey);
                for (int i = 1; i < w; i++)
                {
                    float t = i / (float)w;
                    float x = Mathf.Lerp(0, rect.width, t);
                    GL.Vertex3(x, y, 0);
                    GL.Vertex3(x, y - 5, 0);
                }
                GL.End();

                GL.PopMatrix();
                GUI.EndClip();
            }

            for (int i = 1; i < w; i++)
            {
                float t = i / (float)w;
                float x = Mathf.Lerp(0, rect.width, t);
                float d = Mathf.Lerp(m_Comp.MinDistance, m_Comp.MaxDistance, t);
                EditorGUI.LabelField(new Rect(x + 4, rect.yMax - y, 80, 20), string.Format("{0:0.00}", d));
            }
        }
Example #11
     /// <include file='../../docs.xml'
     /// path='docs/doc[@name="M:PeterO.Text.NormalizerInput.#ctor(System.String,System.Int32,System.Int32,PeterO.Text.Normalization)"]/*'/>
     public NormalizerInput(
         string str,
         int index,
         int length,
         Normalization form)
         : this(new StringCharacterInput2(str, index, length), form)
     {
     }
Example #12
 public double[] Normalize(Normalization norm, params int[] param)
 {
     return(norm switch
     {
         Normalization.MinMax => Minmax(param[0], param[1]),
         Normalization.Mean => Mean(),
         Normalization.Standardize => Standarize(),
         _ => throw new ArgumentException("Invalid normalization method"),
     });
 }
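A hedged usage sketch for the dispatcher above; the series instance and the MinMax bounds are illustrative assumptions about the enclosing type.

 // MinMax reads its range bounds from the params array; Mean and Standardize need none.
 double[] scaled = series.Normalize(Normalization.MinMax, 0, 1);
 double[] zScores = series.Normalize(Normalization.Standardize);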
Example #13
        private TyresMachine([NotNull] NeuralTyresEntry[] tyres, [NotNull] NeuralTyresOptions options)
        {
            if (tyres == null)
            {
                throw new ArgumentNullException(nameof(tyres));
            }
            if (tyres.Length == 0)
            {
                throw new ArgumentException("Value cannot be an empty collection.", nameof(tyres));
            }
            _options = options ?? throw new ArgumentNullException(nameof(options));

            // Some details to identify Machine later if needed
            _tyresSources = tyres.Cast <NeuralTyresSource>().ToArray();

            // Tyres version
            TyresVersion = tyres[0].Version;
            if (tyres.Any(x => x.Version != TyresVersion))
            {
                throw new ArgumentException("Inconsistent versions");
            }

            // LUTs, just in case
            _luts = tyres.Select(x => x.Luts).ToArray();

            // Input normalizations and values
            _inputNormalized = new double[tyres.Length][];
            for (var i = 0; i < tyres.Length; i++)
            {
                _inputNormalized[i] = new double[3];
            }

            _inputNormalizations = new Normalization[options.InputKeys.Length];
            for (var i = 0; i < _inputNormalizations.Length; i++)
            {
                var limits = _options.NormalizationLimits.GetLimits(options.InputKeys[i]) ?? Tuple.Create(double.NegativeInfinity, double.PositiveInfinity);
                _inputNormalizations[i] = Normalization.BuildNormalization(tyres, options.InputKeys[i], options.ValuePadding,
                                                                           out var normalized, limits.Item1, limits.Item2);
                for (var j = 0; j < normalized.Length; j++)
                {
                    _inputNormalized[j][i] = normalized[j];
                }
            }

            // Output normalizations and values
            _outputKeys = tyres[0].Keys.Where(x => Array.IndexOf(options.IgnoredKeys, x) == -1 &&
                                              (options.OverrideOutputKeys == null || Array.IndexOf(options.OverrideOutputKeys, x) != -1)).ToArray();
            _outputNormalizations = new Normalization[_outputKeys.Length];
            _outputNormalized     = new double[_outputKeys.Length][];

            for (var i = 0; i < _outputKeys.Length; i++)
            {
                var limits = _options.NormalizationLimits.GetLimits(_outputKeys[i]) ?? Tuple.Create(double.NegativeInfinity, double.PositiveInfinity);
                _outputNormalizations[i] = Normalization.BuildNormalization(tyres, _outputKeys[i], options.ValuePadding,
                                                                            out _outputNormalized[i], limits.Item1, limits.Item2);
            }
        }
Example #14
	// RANGE Return a Float min < x < max
	public float Range(Int32 minValue, Int32 maxValue, Normalization n, float t)
	{
		if (n == Normalization.STDNORMAL) {
			return SpecialFunctions.ScaleFloatToRange( (float) NormalDistribution.Normalize(_rand.NextSingle(true), t), minValue, maxValue, 0, 1);
		} else if (n == Normalization.POWERLAW) {
			return (float) PowerLaw.Normalize(_rand.NextSingle(true), t, minValue, maxValue);
		} else {
			return _rand.Next(minValue, maxValue);
		}
	}	
Example #15
	// VALUE Return a Float 0 - 1
	public float Value( Normalization n , float t)
	{
		if (n == Normalization.STDNORMAL) {
			return (float) NormalDistribution.Normalize(_rand.NextSingle(true), t);
		} else if (n == Normalization.POWERLAW) {
			return (float) PowerLaw.Normalize(_rand.NextSingle(true), t, 0, 1);
		} else {
			return _rand.NextSingle(true);
		}
	}
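A brief hedged usage sketch for the two helpers above; the generator instance and the shaping values passed as t are illustrative only.

	// t shapes the chosen distribution (e.g. spread for STDNORMAL, exponent for POWERLAW).
	float bell   = rng.Value(Normalization.STDNORMAL, 1.0f);        // 0..1, clustered near 0.5
	float skewed = rng.Range(1, 100, Normalization.POWERLAW, 2.0f); // biased toward the low end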
Example #16
 /// <summary>
 /// Creates DFRSimilarity from the three components.
 /// <p>
 /// Note that <code>null</code> values are not allowed:
 /// if you want no normalization or after-effect, instead pass
 /// <seealso cref="NoNormalization"/> or <seealso cref="NoAfterEffect"/> respectively. </summary>
 /// <param name="basicModel"> Basic model of information content </param>
 /// <param name="afterEffect"> First normalization of information gain </param>
 /// <param name="normalization"> Second (length) normalization </param>
 public DFRSimilarity(BasicModel basicModel, AfterEffect afterEffect, Normalization normalization)
 {
     if (basicModel == null || afterEffect == null || normalization == null)
     {
         throw new System.NullReferenceException("null parameters not allowed.");
     }
     this.BasicModel_Renamed    = basicModel;
     this.AfterEffect_Renamed   = afterEffect;
     this.Normalization_Renamed = normalization;
 }
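A hedged construction sketch; the concrete BasicModel, AfterEffect and Normalization subclasses named below are the ones I believe ship with Lucene.NET, so treat them as assumptions.

 // Geometric basic model, Laplace after-effect, H2 length normalization.
 var similarity = new DFRSimilarity(new BasicModelG(), new AfterEffectL(), new NormalizationH2());
 // Passing null throws, per the constructor above; use NoNormalization / NoAfterEffect
 // instances instead when a component should be a no-op.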
Example #17
 /// <summary>
 /// Constructor with a full set of coefficients specified.
 /// </summary>
 /// <param name="C">the coefficients <i>Cnm</i>.</param>
 /// <param name="S">the coefficients <i>Snm</i>.</param>
 /// <param name="N">the degree used to determine the layout of <i>C</i> and <i>S</i>.</param>
 /// <param name="nmx">the maximum degree used in the sum. The sum over <i>n</i> is from <c>0</c> thru <i>nmx</i>.</param>
 /// <param name="mmx">the maximum order used in the sum. The sum over <i>m</i> is from <c>0</c> thru min(<i>n</i>, <i>mmx</i>).</param>
 /// <param name="C1">the coefficients <i>C'nm</i>.</param>
 /// <param name="S1">the coefficients <i>S'nm</i>.</param>
 /// <param name="N1">the degree used to determine the layout of <i>C</i> ' and <i>S</i> '.</param>
 /// <param name="nmx1">the maximum degree used for <i>C</i> ' and <i>S</i> '.</param>
 /// <param name="mmx1">the maximum order used for <i>C</i> ' and <i>S</i> '.</param>
 /// <param name="a">the reference radius appearing in the definition of the sum.</param>
 /// <param name="norm">the normalization for the associated Legendre polynomials,
 /// either <see cref="Normalization.Full"/> (the default) or <see cref="Normalization.Schmidt"/>.</param>
 /// <remarks>
 /// See <see cref="SphericalHarmonic"/> for the way the coefficients should be stored.
 /// <para>
 /// The class stores pointers to the first elements of <i>C</i>, <i>S</i>, <i>C</i> ', and <i>S</i> '.
 /// These arrays should not be altered or destroyed during the lifetime of a <see cref="SphericalHarmonic"/> object.
 /// </para>
 /// </remarks>
 public SphericalHarmonic1(ReadOnlyMemory <double> C,
                           ReadOnlyMemory <double> S,
                           int N, int nmx, int mmx,
                           ReadOnlyMemory <double> C1,
                           ReadOnlyMemory <double> S1,
                           int N1, int nmx1, int mmx1,
                           double a, Normalization norm = Normalization.Full)
     : this(new SphericalEngine.Coeff(C, S, N, nmx, mmx), new SphericalEngine.Coeff(C1, S1, N1, nmx1, mmx1), a, norm)
 {
 }
        public LiteFloatCrusher()
        {
            this.compressType   = LiteFloatCompressType.Half16;
            this.normalization  = Normalization.Positive;
            this.min            = 0;
            this.max            = 1;
            this.accurateCenter = true;

            Recalculate(compressType, min, max, accurateCenter, this);
        }
Example #19
        private UnitCartesian(double x, double y, double z, Normalization normalization)
        {
            if (normalization != Normalization.Normalized)
            {
                double magnitude;
                NormalizeCoordinates(ref x, ref y, ref z, out magnitude);
            }

            m_x = x;
            m_y = y;
            m_z = z;
        }
Example #20
        private DataTable NormalizeData(DataTable dataTable)
        {
            //Normalization of data
            Normalization normalization = new Normalization(dataTable);

            mean = new double[dataTable.Columns.Count];
            rmsd = new double[dataTable.Columns.Count];
            for (int i = 0; i < dataTable.Columns.Count; i++)
            {
                mean[i] = normalization[i].Mean;
                rmsd[i] = normalization[i].StandardDeviation;
            }
            return(normalization.Apply(dataTable));
        }
Example #21
 /// <summary>
 /// Constructor with a subset of coefficients specified.
 /// </summary>
 /// <param name="C">the coefficients <i>Cnm</i>.</param>
 /// <param name="S">the coefficients <i>Snm</i>.</param>
 /// <param name="N">the maximum degree and order of the sum.</param>
 /// <param name="C1">the coefficients <i>C'nm</i>.</param>
 /// <param name="S1">the coefficients <i>S'nm</i>.</param>
 /// <param name="N1">the maximum degree and order of the first correction coefficients <i>C'nm</i> and <i>S'nm</i>.</param>
 /// <param name="C2">the coefficients <i>C''nm</i>.</param>
 /// <param name="S2">the coefficients <i>S''nm</i>.</param>
 /// <param name="N2">the maximum degree and order of the second correction coefficients <i>C''nm</i> and <i>S''nm</i>.</param>
 /// <param name="a">the reference radius appearing in the definition of the sum.</param>
 /// <param name="norm">the normalization for the associated Legendre polynomials,
 /// either <see cref="Normalization.Full"/> (the default) or <see cref="Normalization.Schmidt"/>.</param>
 /// <remarks>
 /// See <see cref="SphericalHarmonic"/> for the way the coefficients should be stored.
 /// <para>
 /// The class stores pointers to the first elements of <i>C</i>, <i>S</i>, <i>C</i> ', <i>S</i> ', <i>C</i> '' and <i>S</i> ''.
 /// These arrays should not be altered or destroyed during the lifetime of a <see cref="SphericalHarmonic"/> object.
 /// </para>
 /// </remarks>
 public SphericalHarmonic2(ReadOnlyMemory <double> C,
                           ReadOnlyMemory <double> S,
                           int N,
                           ReadOnlyMemory <double> C1,
                           ReadOnlyMemory <double> S1,
                           int N1,
                           ReadOnlyMemory <double> C2,
                           ReadOnlyMemory <double> S2,
                           int N2,
                           double a, Normalization norm = Normalization.Full)
     : this(new SphericalEngine.Coeff(C, S, N),
            new SphericalEngine.Coeff(C1, S1, N1),
            new SphericalEngine.Coeff(C2, S2, N2), a, norm)
 {
 }
Example #22
 // RANGE Return a Float min < x < max
 public float Range(Int32 minValue, Int32 maxValue, Normalization n, float t)
 {
     if (n == Normalization.STDNORMAL)
     {
         return(SpecialFunctions.ScaleFloatToRange((float)NormalDistribution.Normalize(_rand.NextSingle(true), t), minValue, maxValue, 0, 1));
     }
     else if (n == Normalization.POWERLAW)
     {
         return((float)PowerLaw.Normalize(_rand.NextSingle(true), t, minValue, maxValue));
     }
     else
     {
         return(_rand.Next(minValue, maxValue));
     }
 }
Example #23
    /// <include file='../../docs.xml'
    /// path='docs/doc[@name="M:PeterO.Text.NormalizerInput.#ctor(PeterO.Text.ICharacterInput,PeterO.Text.Normalization)"]/*'/>
    public NormalizerInput(
        ICharacterInput stream,
        Normalization form)
    {
        if (stream == null) {
            throw new ArgumentNullException("stream");
        }
        this.lastQcsIndex = -1;
        this.iterator = stream;
        this.form = form;
        this.readbuffer = new int[1];
        this.lastCharBuffer = new int[2];
        this.compatMode = form == Normalization.NFKC || form == Normalization.NFKD;
    }
Example #24
 public String Normalize(NormalizationForm normalizationForm)
 {
     if (this.IsAscii())
     {
         // If it's FastSort && one of the 4 main forms, then it's already normalized
         if (normalizationForm == NormalizationForm.FormC ||
             normalizationForm == NormalizationForm.FormKC ||
             normalizationForm == NormalizationForm.FormD ||
             normalizationForm == NormalizationForm.FormKD)
         {
             return(this);
         }
     }
     return(Normalization.Normalize(this, normalizationForm));
 }
 // RANDOM RAINBOW COLOR
 public Color Rainbow(Normalization n, float t)
 {
     if (n == Normalization.STDNORMAL)
     {
         return(WaveToRgb.LinearToRgb((float)NormalDistribution.Normalize(_rand.NextSingle(true), t)));
     }
     else if (n == Normalization.POWERLAW)
     {
         return(WaveToRgb.LinearToRgb((float)PowerLaw.Normalize(_rand.NextSingle(true), t, 0, 1)));
     }
     else
     {
         return(WaveToRgb.LinearToRgb(_rand.NextSingle(true)));
     }
 }
        private void SaveButton_Normal_Click(object sender, RoutedEventArgs e)
        {
            int numOfColumn = ColumnCombo_Normal.SelectedIndex;

            if (numOfColumn < 0)
            {
                MessageBox.Show("Wybierz kolumnę"); // "Select a column"
            }
            else
            {
                Normalization.DoNormalization(numOfColumn);
                // Refresh the view to display the changes
                DisplayNewDataInMenu(numOfColumn + 1);
            }
        }
 // VALUE Return a Float 0 - 1
 public float Value(Normalization n, float t)
 {
     if (n == Normalization.STDNORMAL)
     {
         return((float)NormalDistribution.Normalize(_rand.NextSingle(true), t));
     }
     else if (n == Normalization.POWERLAW)
     {
         return((float)PowerLaw.Normalize(_rand.NextSingle(true), t, 0, 1));
     }
     else
     {
         return(_rand.NextSingle(true));
     }
 }
Example #28
 public bool IsNormalized(NormalizationForm normalizationForm)
 {
     if (this.IsFastSort())
     {
         // If it's FastSort && one of the 4 main forms, then it's already normalized
         if (normalizationForm == NormalizationForm.FormC ||
             normalizationForm == NormalizationForm.FormKC ||
             normalizationForm == NormalizationForm.FormD ||
             normalizationForm == NormalizationForm.FormKD)
         {
             return(true);
         }
     }
     return(Normalization.IsNormalized(this, normalizationForm));
 }
Example #29
 public void TestMeanNorm()
 {
     double[,] tss = { { 0, 1, 2, 3 }, { 4, 5, 6, 7 } };
     using (KhivaArray arr = KhivaArray.Create(tss), meanNorm = Normalization.MeanNorm(arr))
     {
         double[,] expected = { { -0.5, -0.166666667, 0.166666667, 0.5 }, { -0.5, -0.166666667, 0.166666667, 0.5 } };
         var result = meanNorm.GetData2D <double>();
         for (var i = 0; i < result.GetLength(0); i++)
         {
             for (var j = 0; j < result.GetLength(1); j++)
             {
                 Assert.AreEqual(expected[i, j], result[i, j], Delta);
             }
         }
     }
 }
Example #30
 public void TestZNorm()
 {
     double[,] tss = { { 0, 1, 2, 3 }, { 4, 5, 6, 7 } };
     using (KhivaArray arr = KhivaArray.Create(tss), zNorm = Normalization.ZNorm(arr, 0.00000001))
     {
         double[] expected = { -1.341640786499870, -0.447213595499958, 0.447213595499958, 1.341640786499870 };
         var      result   = zNorm.GetData2D <double>();
         for (var i = 0; i < result.GetLength(0); i++)
         {
             for (var j = 0; j < result.GetLength(1); j++)
             {
                 Assert.AreEqual(expected[j], result[i, j], Delta);
             }
         }
     }
 }
Example #31
 public void TestDecimalScalingNorm()
 {
     float[,] tss = { { 0, 1, -2, 3 }, { 40, 50, 60, -70 } };
     using (KhivaArray arr = KhivaArray.Create(tss), decimalScalingNorm = Normalization.DecimalScalingNorm(arr))
     {
         float[,] expected = { { 0.0F, 0.1F, -0.2F, 0.3F }, { 0.4F, 0.5F, 0.6F, -0.7F } };
         var result = decimalScalingNorm.GetData2D <float>();
         for (var i = 0; i < result.GetLength(0); i++)
         {
             for (var j = 0; j < result.GetLength(1); j++)
             {
                 Assert.AreEqual(expected[i, j], result[i, j]);
             }
         }
     }
 }
Example #32
 public void TestMaxMinNorm()
 {
     double[,] tss = { { 0, 1, 2, 3 }, { 4, 5, 6, 7 } };
     using (KhivaArray arr = KhivaArray.Create(tss), maxMinNorm = Normalization.MaxMinNorm(arr, 2.0, 1.0))
     {
         double[,] expected = { { 1.0, 1.3333333333333, 1.66666667, 2.0 }, { 1.0, 1.3333333333333, 1.66666667, 2.0 } };
         var result = maxMinNorm.GetData2D <double>();
         for (var i = 0; i < result.GetLength(0); i++)
         {
             for (var j = 0; j < result.GetLength(1); j++)
             {
                 Assert.AreEqual(expected[i, j], result[i, j], Delta);
             }
         }
     }
 }
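The expected values in TestMaxMinNorm are just a min-max rescale of each row into [low, high]; a self-contained sketch of that arithmetic (not the Khiva implementation), assuming System.Linq is imported:

  // x' = low + (high - low) * (x - min) / (max - min)
  // Row {0, 1, 2, 3} with high = 2.0, low = 1.0 gives {1.0, 1.333..., 1.666..., 2.0}.
  static double[] MaxMinNormRow(double[] row, double high, double low)
  {
      double min = row.Min(), max = row.Max();
      return row.Select(x => low + (high - low) * (x - min) / (max - min)).ToArray();
  }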
Example #33
    /// <summary>
    /// Resets the timer to the seconds passed in
    /// </summary>
    /// <param name="secondsToWait"> The amount of seconds to wait</param>
    public void ResetTimer(float secondsToWait)
    {
        // We execute this if the timer is stopped, to make it run for the first time
        // We update the TimeToCompare so that we can compare it with the actual time
        TimeToCompare = Time.time + secondsToWait;

        // We set the min and max for the normalization
        MinToNormalize = Time.time;
        MaxToNormalize = TimeToCompare;

        // We calculate the actual normalized time left
        NormalizedTimer = Normalization.Normalize(Time.time, MinToNormalize, MaxToNormalize);

        // We reset the offset as well to avoid having any weird values
        m_OffsetToAddWhenTimerResumes = 0f;
    }
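Normalization.Normalize itself is not shown in this example; under the usual min-max definition it reduces to the sketch below (an assumption about this project's helper, not its actual code).

    // Maps value from [min, max] to [0, 1]. With value = Time.time, min = MinToNormalize and
    // max = TimeToCompare, the result starts at 0 and reaches 1 when the timer elapses.
    public static float NormalizeSketch(float value, float min, float max)
    {
        return max > min ? Mathf.Clamp01((value - min) / (max - min)) : 0f;
    }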
Example #34
        public override int GetHashCode()
        {
            int hash = 1;

            if (Engine.Length != 0)
            {
                hash ^= Engine.GetHashCode();
            }
            if (Normalization.Length != 0)
            {
                hash ^= Normalization.GetHashCode();
            }
            if (NInput != 0)
            {
                hash ^= NInput.GetHashCode();
            }
            if (NOutput != 0)
            {
                hash ^= NOutput.GetHashCode();
            }
            if (LearningRate != 0D)
            {
                hash ^= LearningRate.GetHashCode();
            }
            if (BatchSize != 0)
            {
                hash ^= BatchSize.GetHashCode();
            }
            if (EpochSize != 0)
            {
                hash ^= EpochSize.GetHashCode();
            }
            if (Optimizer.Length != 0)
            {
                hash ^= Optimizer.GetHashCode();
            }
            if (LossFunc.Length != 0)
            {
                hash ^= LossFunc.GetHashCode();
            }
            hash ^= maxV_.GetHashCode();
            hash ^= minV_.GetHashCode();
            hash ^= meanV_.GetHashCode();
            hash ^= stdV_.GetHashCode();
            hash ^= layers_.GetHashCode();
            return(hash);
        }
Example #35
        [System.Security.SecuritySafeCritical]  // auto-generated
        public String Normalize(NormalizationForm normalizationForm)
        {
#if !FEATURE_NORM_IDNA_ONLY
            if (this.IsAscii())
            {
                // If it's FastSort && one of the 4 main forms, then it's already normalized
                if (normalizationForm == NormalizationForm.FormC ||
                    normalizationForm == NormalizationForm.FormKC ||
                    normalizationForm == NormalizationForm.FormD ||
                    normalizationForm == NormalizationForm.FormKD)
                {
                    return(this);
                }
            }
#endif // !FEATURE_NORM_IDNA_ONLY
            return(Normalization.Normalize(this, normalizationForm));
        }
        public void ApplyTest()
        {
            DataTable input = new DataTable("Sample data");
            input.Columns.Add("x", typeof(double));
            input.Columns.Add("y", typeof(double));
            input.Rows.Add(0.0, 0);
            input.Rows.Add(0.2, -20);
            input.Rows.Add(0.8, -80);
            input.Rows.Add(1.0, -100);

            DataTable expected = new DataTable("Sample data");
            expected.Columns.Add("x", typeof(double));
            expected.Columns.Add("y", typeof(double));
            expected.Rows.Add(-1.0502, 1.0502);
            expected.Rows.Add(-0.6301, 0.6301);
            expected.Rows.Add(0.6301, -0.6301);
            expected.Rows.Add(1.0502, -1.0502);

            Normalization target = new Normalization("x", "y");

            target.Detect(input);

            DataTable actual = target.Apply(input);

            for (int i = 0; i < actual.Rows.Count; i++)
            {
                double ex = (double)expected.Rows[i][0];
                double ey = (double)expected.Rows[i][1];

                double ax = (double)actual.Rows[i][0];
                double ay = (double)actual.Rows[i][1];

                Assert.AreEqual(ex, ax, 0.001);
                Assert.AreEqual(ey, ay, 0.001);

            }
        }
Example #37
 /// <summary>
 /// Creates IBSimilarity from the three components.
 /// <p>
 /// Note that <code>null</code> values are not allowed:
 /// if you want no normalization, instead pass
 /// <seealso cref="NoNormalization"/>. </summary>
 /// <param name="distribution"> probabilistic distribution modeling term occurrence </param>
 /// <param name="lambda"> distribution's &lambda;<sub>w</sub> parameter </param>
 /// <param name="normalization"> term frequency normalization </param>
 public IBSimilarity(Distribution distribution, Lambda lambda, Normalization normalization)
 {
     this.Distribution_Renamed = distribution;
     this.Lambda_Renamed = lambda;
     this.Normalization_Renamed = normalization;
 }
Example #38
        //////////////////////////////////////////////////////////////////////////
        // Execution
        public void Execute( Normalization _Normalization )
        {
            if ( m_InputIsSpatial && m_PlanForward == IntPtr.Zero )
                m_PlanForward = fftwlib.fftwf.dft_2d( m_Width, m_Height, m_Input, m_Output, fftwlib.fftw_direction.Forward, fftwlib.fftw_flags.DestroyInput );
            else if ( !m_InputIsSpatial && m_PlanBackward == IntPtr.Zero )
                m_PlanBackward = fftwlib.fftwf.dft_2d( m_Width, m_Height, m_Input, m_Output, fftwlib.fftw_direction.Backward, fftwlib.fftw_flags.DestroyInput );

            // Copy source data to FFTW memory
            Marshal.Copy( m_UserInput, 0, m_Input, m_Width*m_Height*2 );

            // FFT
            fftwlib.fftwf.execute( m_InputIsSpatial ? m_PlanForward : m_PlanBackward );

            // Retrieve results
            Marshal.Copy( m_Output, m_UserOutput, 0, m_Width*m_Height*2 );

            if ( _Normalization != Normalization.NONE )
            {
                float	Normalizer = _Normalization == Normalization.DIMENSIONS_PRODUCT ? 1.0f / (m_Width * m_Height) : 1.0f / (float) Math.Sqrt(m_Width * m_Height);
                for ( int i=0; i < 2*m_Width*m_Height; i++ )
                    m_UserOutput[i] *= Normalizer;
            }
        }
        public char characterRecognisation()
        {
            PixelExtraction pixelExtraction = new PixelExtraction();
            imgB2W = pixelExtraction.Img2BW(characterBitmap, BWThresh);
            Normalization normalization = new Normalization(pixelExtraction.getAllPoints(), pixelExtraction.getCountPoints());

            PCA pca = new PCA(); // added for the test

            pca.setTotalCount(pixelExtraction.getCountPoints());

            double[][] data = new double[5][];
            for (int i = 0; i < 5; i++)
            {
                data[i] = new double[5];
            }
            deg_value = new double[5][];
            for (int i = 0; i < 5; i++)
            {
                deg_value[i] = new double[2];
            }
            // 0 degree rotation
            pca.pcaCalculation(normalization.getNormPoints());
            data[0] = pca.getEigenVector();
            deg_value[0] = pca.getEigenValue();

            // 22 degree rotation
            Normalization normalization1 = new Normalization(normalization.getNormPointsProjection22(), pixelExtraction.getCountPoints());
            pca.pcaCalculation(normalization1.getNormPoints());
            data[1] = pca.getEigenVector();
            deg_value[1] = pca.getEigenValue();

            // 45 degree rotation
            Normalization normalization3 = new Normalization(normalization.getNormPointsProjection45(), pixelExtraction.getCountPoints());
            pca.pcaCalculation(normalization3.getNormPoints());
            data[2] = pca.getEigenVector();
            deg_value[2] = pca.getEigenValue();

            // 67 degree rotation
            Normalization normalization4 = new Normalization(normalization.getNormPointsProjection67(), pixelExtraction.getCountPoints());
            pca.pcaCalculation(normalization4.getNormPoints());
            data[3] = pca.getEigenVector();
            deg_value[3] = pca.getEigenValue();

            // 90 degree rotation
            Normalization normalization5 = new Normalization(normalization.getNormPointsProjection90(), pixelExtraction.getCountPoints());
            pca.pcaCalculation(normalization5.getNormPoints());
            data[4] = pca.getEigenVector();
            deg_value[4] = pca.getEigenValue();

            DatabaseConnection connection = new DatabaseConnection();
            connection.retrieveDatabase(data, deg_value);
            //characters += connection.getRecognisedCharacter();
            return connection.getRecognisedCharacter();
        }
        public void ApplyTest2()
        {
            string colName = "(test ['a'])";

            DataTable input = new DataTable("Sample data");
            input.Columns.Add(colName, typeof(double));
            input.Rows.Add(-2);
            input.Rows.Add(-1);
            input.Rows.Add(0);
            input.Rows.Add(1);
            input.Rows.Add(2);

            DataTable expected = new DataTable("Sample data");
            expected.Columns.Add(colName, typeof(double));
            expected.Rows.Add(-1.2649110640673518);
            expected.Rows.Add(-0.63245553203367588);
            expected.Rows.Add(0);
            expected.Rows.Add(0.63245553203367588);
            expected.Rows.Add(1.2649110640673518);

            Normalization target = new Normalization(colName);

            target.Detect(input);

            DataTable actual = target.Apply(input);

            for (int i = 0; i < actual.Rows.Count; i++)
            {
                double ex = (double)expected.Rows[i][0];

                double ax = (double)actual.Rows[i][0];

                Assert.AreEqual(ex, ax, 0.001);

            }
        }
Example #41
        /// <include file='../../docs.xml'
        /// path='docs/doc[@name="M:PeterO.Text.NormalizerInput.IsNormalized(PeterO.Text.ICharacterInput,PeterO.Text.Normalization)"]/*'/>
        public static bool IsNormalized(
            ICharacterInput chars,
            Normalization form)
        {
            if (chars == null) {
                throw new ArgumentNullException("chars");
            }
            var listIndex = 0;
            var array = new int[16];
            var haveNonQcs = false;
            while (true) {
                int c = chars.ReadChar();
                if (c < 0) {
                    break;
                }
                if ((c & 0x1ff800) == 0xd800) {
                    return false;
                }
                bool isQcs = (c >= 0xf0000) ? true :
                    UnicodeDatabase.IsQuickCheckStarter(c, form);
                if (isQcs) {
                    if (haveNonQcs) {
                        if (!NormalizeAndCheck(array, 0, listIndex, form)) {
                            return false;
                        }
                    }
                    listIndex = 0;
                    haveNonQcs = false;
                } else {
                    haveNonQcs = true;
                }
                if (listIndex >= array.Length) {
                    var newArray = new int[array.Length * 2];
                    Array.Copy(array, 0, newArray, 0, listIndex);
                    array = newArray;
                }
                array[listIndex++] = c;
            }
            if (haveNonQcs) {
                if (!NormalizeAndCheck(array, 0, listIndex, form)) {
                    return false;
                }
            }
            return true;
        }
 public MZSpectrum CalculateDistribuition(IChemicalFormula obj, int topNPeaks = int.MaxValue, Normalization normalization = Normalization.Sum)
 {
     return CalculateDistribuition(obj.ChemicalFormula, topNPeaks, normalization);
 }
Example #43
	public Vector3 PointOnCap(float spotAngle, Normalization n, float t)
	{
		throw new ArgumentException("Normalizations for PointOnCap is not yet implemented");
	}
Example #44
	// RANDOM POINT IN A SPHERE. Return a Vector3
	public Vector3 PointInASphere(Normalization n, float t)
	{
		throw new ArgumentException("Normalizations for Sphere is not yet implemented");
	}
Example #45
 /// <include file='../../docs.xml'
 /// path='docs/doc[@name="M:PeterO.Text.Normalizer.Normalize(System.String,PeterO.Text.Normalization)"]/*'/>
 public static string Normalize(string str, Normalization form) {
   return NormalizingCharacterInput.Normalize(str, form);
 }
Example #46
	// RANDOM POINT ON A CUBE. Return a Vector3
	public Vector3 PointOnACube(Normalization n, float t)
	{
		return RandomCube.Surface(ref _rand, n, t);
	}
Example #47
 /// <include file='../../docs.xml'
 /// path='docs/doc[@name="M:PeterO.Text.NormalizerInput.Normalize(System.String,PeterO.Text.Normalization)"]/*'/>
 public static string Normalize(string str, Normalization form)
 {
     if (str == null) {
       throw new ArgumentNullException("str");
     }
     if (str.Length <= 1024 && IsNormalized(str, form)) {
       return str;
     }
     return Encodings.InputToString(
       new NormalizerInput(str, form));
 }
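A short hedged usage sketch for the static Normalize/IsNormalized helpers shown in this listing; the sample string is illustrative.

 // "e" followed by U+0301 (combining acute accent) composes to the single code point U+00E9 under NFC.
 string decomposed = "e\u0301";
 string composed = NormalizerInput.Normalize(decomposed, Normalization.NFC);  // "\u00E9"
 bool alreadyNfc = NormalizerInput.IsNormalized(composed, Normalization.NFC); // true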
Example #48
 /// <include file='../../docs.xml'
 /// path='docs/doc[@name="M:PeterO.Text.NormalizerInput.IsNormalized(System.String,PeterO.Text.Normalization)"]/*'/>
 public static bool IsNormalized(string str, Normalization form)
 {
     if (str == null) {
         throw new ArgumentNullException("str");
     }
     // DebugUtility.Log (str);
     int mask = (form == Normalization.NFC) ? 0xff : 0x7f;
     var lastQcsIndex = 0;
     var haveNonQcs = false;
     for (int i = 0; i < str.Length; ++i) {
         int c = str[i];
         if ((c & 0xfc00) == 0xd800 && i + 1 < str.Length &&
             str[i + 1] >= 0xdc00 && str[i + 1] <= 0xdfff) {
             // Get the Unicode code point for the surrogate pair
             c = 0x10000 + ((c - 0xd800) << 10) + (str[i + 1] - 0xdc00);
         } else if ((c & 0xf800) == 0xd800) {
             // unpaired surrogate
             return false;
         }
         var isQcs = false;
         if ((c & mask) == c && (i + 1 == str.Length ||
             (str[i + 1] & mask) == str[i + 1])) {
             // Quick check for an ASCII character (or Latin-1 in NFC) followed
             // by another ASCII character (or Latin-1 in NFC) or the end of
             // the string. Treat the first character as QCS in this situation.
             isQcs = true;
         } else {
             isQcs = (c >= 0xf0000) ? true :
                 UnicodeDatabase.IsQuickCheckStarter(c, form);
         }
         if (isQcs) {
             if (haveNonQcs) {
                 if (!NormalizeAndCheckString(str, lastQcsIndex, i - lastQcsIndex, form)) {
                     return false;
                 }
             }
             lastQcsIndex = i;
             haveNonQcs = false;
         } else {
             haveNonQcs = true;
         }
         if (c >= 0x10000) {
             ++i;
         }
     }
     if (haveNonQcs) {
         if (!NormalizeAndCheckString(str, lastQcsIndex, str.Length - lastQcsIndex, form)) {
             return false;
         }
     }
     return true;
 }
        private MZSpectrum CalculateFineGrain(List<List<Composition>> elementalComposition, Normalization normalization)
        {
            List<Polynomial> fPolynomial = MultiplyFinePolynomial(elementalComposition);
            fPolynomial = MergeFinePolynomial(fPolynomial);

            // Convert polynomial to spectrum
            int count = fPolynomial.Count;
            double[] mz = new double[count];
            double[] intensities = new double[count];
            double totalProbability = 0;
            double basePeak = 0;
            int i = 0;
            foreach (Polynomial polynomial in fPolynomial)
            {
                totalProbability += polynomial.Probablity;
                if (polynomial.Probablity > basePeak)
                {
                    basePeak = polynomial.Probablity;
                }
                mz[i] = polynomial.Power*_mwResolution;
                intensities[i] = polynomial.Probablity;
                i++;
            }

            double normalizedValue = normalization == Normalization.Sum ? totalProbability : basePeak;

            // Normalize
            for (i = 0; i < count; i++)
            {
                intensities[i] /= normalizedValue;
            }

            return new MZSpectrum(mz, intensities, false);
        }
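The final loop in CalculateFineGrain divides every intensity by a value chosen from the Normalization option; a tiny standalone illustration of the two choices (illustrative numbers, assuming System.Linq; the name of the non-Sum option is not shown in this snippet):

            // With Normalization.Sum the intensities end up summing to 1;
            // with the base-peak option the tallest peak becomes exactly 1.
            double[] raw = { 0.2, 0.5, 0.3 };
            double[] bySum  = raw.Select(v => v / raw.Sum()).ToArray(); // { 0.2, 0.5, 0.3 }
            double[] byPeak = raw.Select(v => v / raw.Max()).ToArray(); // { 0.4, 1.0, 0.6 }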
Example #50
	public Vector3 PointOnRing(float innerAngle, float outerAngle, Normalization n, float t)
	{
		throw new ArgumentException("Normalizations for PointOnRing is not yet implemented");
	}
 public MZSpectrum CalculateDistribuition(string chemicalFormula, int topNPeaks = int.MaxValue, Normalization normalization = Normalization.Sum)
 {
     return CalculateDistribuition(new ChemicalFormula(chemicalFormula), topNPeaks, normalization);
 }
Example #52
	// RANDOM RAINBOW COLOR
	public Color Rainbow(Normalization n, float t)
	{
		if (n == Normalization.STDNORMAL) {
			return WaveToRgb.LinearToRgb ( (float) NormalDistribution.Normalize(_rand.NextSingle(true), t));
		} else if (n == Normalization.POWERLAW) {
			return WaveToRgb.LinearToRgb ( (float) PowerLaw.Normalize(_rand.NextSingle(true), t, 0, 1));
		} else {
			return WaveToRgb.LinearToRgb(_rand.NextSingle(true));
		}
	}
        public MZSpectrum CalculateDistribuition(ChemicalFormula formula, int topNPeaks = int.MaxValue, Normalization normalization = Normalization.Sum)
        {
            double monoisotopicMass = formula.MonoisotopicMass;
            SetResolution(monoisotopicMass);

            List<List<Composition>> elementalComposition = new List<List<Composition>>();

            // Get all the unique elements of the formula
            foreach (Element element in formula.GetElements())
            {
                int count = formula.Count(element);
                List<Composition> isotopeComposition = new List<Composition>();
                foreach (Isotope isotope in element.Isotopes.Values.OrderBy(iso => iso.AtomicMass))
                {
                    double probability = isotope.RelativeAbundance;
                    if (probability <= 0)
                        continue;

                    Composition c = new Composition
                    {
                        Atoms = count,
                        MolecularWeight = isotope.AtomicMass,
                        Power = isotope.AtomicMass,
                        Probability = isotope.RelativeAbundance
                    };

                    isotopeComposition.Add(c);
                }
                elementalComposition.Add(isotopeComposition);
            }

            foreach (List<Composition> compositions in elementalComposition)
            {
                double sumProb = compositions.Sum(t => t.Probability);
                foreach (Composition composition in compositions)
                {
                    composition.Probability /= sumProb;
                    composition.LogProbability = Math.Log(composition.Probability);
                    composition.Power = Math.Floor(composition.MolecularWeight/_mwResolution + 0.5);
                }
            }

            return CalculateFineGrain(elementalComposition, normalization);
        }
        public void ApplyTest3()
        {
            // Suppose we have a data table relating the age of
            // a person and its categorical classification, as 
            // in "child", "adult" or "elder".

            // The Normalization filter is able to transform
            // numerical data into Z-Scores, subtracting the
            // mean for each variable and dividing by their
            // standard deviation.

            // Create the aforementioned sample table
            DataTable table = new DataTable("Sample data");
            table.Columns.Add("Age", typeof(double));
            table.Columns.Add("Label", typeof(string));

            //            age   label
            table.Rows.Add(10, "child");
            table.Rows.Add(07, "child");
            table.Rows.Add(04, "child");
            table.Rows.Add(21, "adult");
            table.Rows.Add(27, "adult");
            table.Rows.Add(12, "child");
            table.Rows.Add(79, "elder");
            table.Rows.Add(40, "adult");
            table.Rows.Add(30, "adult");

            // The filter will ignore non-real (continuous) data
            Normalization normalization = new Normalization(table);

            double mean = normalization["Age"].Mean;              // 25.55
            double sdev = normalization["Age"].StandardDeviation; // 23.29

            // Now we can process another table at once:
            DataTable result = normalization.Apply(table);

            // The result will be a table with the same columns, but
            // in which any column named "Age" will have been normalized
            // using the previously detected mean and standard deviation:

            // DataGridBox.Show(result);

            Assert.AreEqual(25.555555555555557, mean);
            Assert.AreEqual(23.297591673342072, sdev);
        }
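As a quick check of the comment above, the z-score for the first row follows directly from the detected statistics (a hedged illustration using the exact values asserted in the test):

            // z = (Age - mean) / sdev; for the first row, Age = 10:
            double zFirstRow = (10 - 25.555555555555557) / 23.297591673342072; // ≈ -0.6677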
Example #55
      private static bool NormalizeAndCheck(
          int[] charArray,
          int start,
          int length,
          Normalization form)
      {
          var i = 0;
          int ch;
          var input = new NormalizerInput(
              new PartialArrayCharacterInput(charArray, start, length),
              form);
          while ((ch = input.ReadChar()) >= 0) {
              if (i >= length) {
                  return false;
              }
              if (ch != charArray[start + i]) {
                  return false;
              }
              ++i;
          }
          return i == length;
      }
Example #56
 /// <include file='../../docs.xml'
 /// path='docs/doc[@name="M:PeterO.Text.Normalizer.IsNormalized(System.String,PeterO.Text.Normalization)"]/*'/>
 public static bool IsNormalized(string str, Normalization form) {
   return NormalizingCharacterInput.IsNormalized(str, form);
 }
Example #57
 private static bool NormalizeAndCheckString(
     string charString,
     int start,
     int length,
     Normalization form)
 {
     int i = start;
     var norm = new NormalizerInput(charString, start, length, form);
     var ch = 0;
     int endIndex = start + length;
     while ((ch = norm.ReadChar()) >= 0) {
         int c = charString[i];
         if ((c & 0x1ffc00) == 0xd800 && i + 1 < endIndex &&
             charString[i + 1] >= 0xdc00 && charString[i + 1] <= 0xdfff) {
             // Get the Unicode code point for the surrogate pair
             c = 0x10000 + ((c - 0xd800) << 10) + (charString[i + 1] - 0xdc00);
             ++i;
         } else if ((c & 0x1ff800) == 0xd800) {
             // unpaired surrogate
             return false;
         }
         ++i;
         if (c != ch) {
             return false;
         }
     }
     return i == endIndex;
 }
Example #58
 /// <include file='../../docs.xml'
 /// path='docs/doc[@name="M:PeterO.Text.NormalizerInput.#ctor(System.String,PeterO.Text.Normalization)"]/*'/>
 public NormalizerInput(string str, Normalization form)
     : this(new StringCharacterInput2(str), form)
 {
 }
Example #59
 /// <include file='../../docs.xml'
 /// path='docs/doc[@name="M:PeterO.Text.Normalizer.#ctor(System.String,PeterO.Text.Normalization)"]/*'/>
 public Normalizer(string str, Normalization form) {
   this.nci = new NormalizingCharacterInput(str, form);
 }
Example #60
			public Unit(Point point, Normalization norm) {
				this.point = point;
				this.norm = norm.Normalize (point);
			}