Example No. 1
static Func<double,double> G=(x)=>-Cos(x); 		//anti-derivative of g
static void Main(){
	int n=5;					//number of sets of parameters
	var ann=new network(n,f,df,F);	//new "network" with n neurons, activation f, its derivative df and its antiderivative F
	double a=-PI,b=2*PI;			//a: "start" value, b: "end" value
	int nx=30;					//length of xs and ys
	var xs=new double[nx];
	var ys=new double[nx];
	for(int i=0;i<nx;i++){
		xs[i]=a+(b-a)*i/(nx-1);	//equidistant x-points between a and b
		ys[i]=g(xs[i]);			//values of the function to fit
		Write($"{xs[i]}\t{ys[i]} {dg(xs[i])} {G(xs[i])}\n");
	}
	Write("\n\n");
	for(int i=0;i<ann.n;i++){
		ann.p[3*i+0]=a+(b-a)*i/(ann.n-1);	//a_i: shift, spread evenly over [a,b]
		ann.p[3*i+1]=1;						//b_i: scale
		ann.p[3*i+2]=1;						//w_i: weight
	}
	ann.p.fprint(Console.Error,"p=");
	ann.train(xs,ys);					//calling "train" which minimizes the deviation given our x's and y's
	double offset = G(xs[0])-ann.feedF(xs[0]);	//fix the integration constant so feedF matches G at the left endpoint
	ann.p.fprint(Console.Error,"p=");
	for(double z=a;z<=b;z+=1.0/64)
		Write($"{z} {ann.feed(z)} {ann.feeddf(z)} {ann.feedF(z)+offset}\n");	//network response, its derivative, and its offset-corrected antiderivative

}//Main
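
The "network" class itself is not shown in these examples. A minimal sketch of what it could look like is given below, assuming (consistently with the parameter initialization above) that neuron i contributes w_i*f((x-a_i)/b_i) and that p stores the triples (a_i, b_i, w_i) back to back. The class and member names mirror the calls in the examples, but the body is an illustrative assumption, not the original implementation; "vector" is the small linear-algebra type used in the examples below.

// Hypothetical sketch of the "network" class assumed by these examples.
public class network{
	public int n;					//number of neurons
	public vector p;				//3n parameters (a_i, b_i, w_i)
	Func<double,double> f,df,F;			//activation, its derivative, its antiderivative

	public network(int n, Func<double,double> f, Func<double,double> df, Func<double,double> F){
		this.n=n; this.f=f; this.df=df; this.F=F;
		p=new vector(3*n);
	}

	public double feed(double x){			//network response: sum_i w_i*f((x-a_i)/b_i)
		double sum=0;
		for(int i=0;i<n;i++){
			double a=p[3*i], b=p[3*i+1], w=p[3*i+2];
			sum+=w*f((x-a)/b);
		}
		return sum;
	}

	public double feeddf(double x){			//derivative: sum_i (w_i/b_i)*f'((x-a_i)/b_i)
		double sum=0;
		for(int i=0;i<n;i++){
			double a=p[3*i], b=p[3*i+1], w=p[3*i+2];
			sum+=w/b*df((x-a)/b);
		}
		return sum;
	}

	public double feedF(double x){			//antiderivative: sum_i w_i*b_i*F((x-a_i)/b_i), up to a constant
		double sum=0;
		for(int i=0;i<n;i++){
			double a=p[3*i], b=p[3*i+1], w=p[3*i+2];
			sum+=w*b*F((x-a)/b);
		}
		return sum;
	}
}
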
Example No. 2
    static void Main()
    {
        WriteLine("______ Assignment A ______\n");
        WriteLine("Testing the neural network for fitting to a function:\n");
        Func <double, double> F_fit = delegate(double x){
            return(Cos(5 * x - 1) * Exp(-x * x));
        };

        int    n = 5;
        var    ann = new network(n);
        double a = -1, b = 1;
        int    nx = 20;
        vector xs = new vector(nx);
        vector ys = new vector(nx);

        for (int i = 0; i < nx; i++)
        {
            xs[i] = a + (b - a) * i / (nx - 1);
            ys[i] = F_fit(xs[i]);
            Error.Write($"{xs[i]} {ys[i]}\n");
        }
        Error.Write("\n\n");
        for (int i = 0; i < ann.n; i++)
        {
            ann.p[3 * i + 0] = a + (b - a) * i / (ann.n - 1);
            ann.p[3 * i + 1] = 1;
            ann.p[3 * i + 2] = 1;
        }
        ann.p.print("Before training: p =");
        (int nsteps, int ncalls) = ann.train(xs, ys);
        ann.p.print("After training:  p =");
        WriteLine($"Minimization steps: {nsteps}");
        WriteLine($"Function calls:     {ncalls}");
        for (double z = a; z <= b; z += 1.0 / 64)
        {
            Error.Write($"{z} {ann.feed(z)}\n");
        }
        Error.Write("\n\n");
        WriteLine("The fitted function can be seen in the figure Fit.svg.");
        Write("\n\n\n");


        WriteLine("______ Assignment B ______\n");

        WriteLine("We now use different feeders to get the derivative and antiderivative.\nThese can be seen in the figures Derivative.svg and Antiderivative.svg");
        for (double z = a; z <= b; z += 1.0 / 64)
        {
            Error.Write($"{z} {ann.feedDeriv(z)}\n");
        }
        Error.Write("\n\n");
        for (double z = a; z <= b; z += 1.0 / 64)
        {
            Error.Write($"{z} {ann.feedInt(z)}\n");
        }
        Error.Write("\n\n");
    }
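
In Example No. 2 the train method returns the number of minimization steps and function calls. A plausible sketch of such a method is shown below: it minimizes the summed squared deviation C(p) = sum_k (F_p(x_k) - y_k)^2 over the 3n parameters. The minimizer name qnewton and the vector member size are assumptions about the underlying library, not the original code.

    // Hypothetical sketch of network.train(): minimize the summed squared deviation.
    // qnewton and vector.size are assumed library conventions, not the original code.
    public (int, int) train(vector xs, vector ys)
    {
        int ncalls = 0;
        Func<vector, double> cost = q => {
            ncalls++;
            p = q;                              // evaluate the network with the trial parameters
            double sum = 0;
            for (int k = 0; k < xs.size; k++)
            {
                double d = feed(xs[k]) - ys[k]; // deviation from the data point
                sum += d * d;
            }
            return sum;
        };
        int nsteps = qnewton(cost, ref p, 1e-4); // any multidimensional minimizer will do
        return (nsteps, ncalls);
    }
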
Example No. 3
    public static int Main()
    {
        Func <double, double> activate = delegate(double x){
            return(Exp(-x * x));
        };
        Func <double, double> df = delegate(double x){
            return(-2 * Exp(-x * x) * x);
        };
        Func <double, double> adf = delegate(double x){
            return(Sqrt(PI) * math.erf(x) / 2);
        };

        Func <double, double> fitfun = delegate(double x){
            return(x * Exp(-x * x)); // I'm not terribly imaginative with my functions
        };

        Write("Part A:\n");
        int    n   = 8; // Wikibooks said this was a typical amount
        var    ann = new network(n, activate, df, adf);
        double a   = -2;
        double b   = 2;

        int nx = 50;

        double[] xs = new double[nx];
        double[] ys = new double[nx];
        for (int i = 0; i < nx; i++)
        {
            xs[i] = a + (b - a) * i / (nx - 1);
            ys[i] = fitfun(xs[i]);
            Error.Write("{0}\t{1}\n", xs[i], ys[i]);
        }

        Error.Write("\n\n");
        for (int i = 0; i < n; i++)
        {
            ann.p[3 * i]     = a + (b - a) * i / (n - 1);
            ann.p[3 * i + 1] = 1.0;
            ann.p[3 * i + 2] = 1.0;
        }
        ann.p.print("Initial p=");
        vector time = ann.train(xs, ys); // time = [minimizer iterations, function calls]

        ann.p.print("Post-training p=");
        Write($"The minimization took {time[0]} iterations, and the deviation function was called {time[1]} times\n");
        double z = a;

        for (int i = 1; i <= 100; i++)
        {
            Error.Write($"{z}\t{ann.feed(z)}\n");
            z += (b - a) / 100;
        }

        Write("\n The calculated function can be seen in A.svg\n\n");


        Write("Part B:\n");

        // Create derivative and anti-derivative functions
        Func <double, double> dfun = delegate(double x){
            return(Exp(-x * x) * (1 - 2 * x * x));
        };
        Func <double, double> adfun = delegate(double x){
            return(-Exp(-x * x) / 2.0);
        };

        Error.Write("\n\n");
        z = a;
        for (int i = 1; i <= 100; i++) // For the derivative
        {
            Error.Write($"{z}\t{ann.dfeed(df, z)}\t{dfun(z)}\n");
            z += (b - a) / 100;
        }
        Error.Write("\n\n");

        z = a;
        for (int i = 1; i <= 1000; i++) // For the antiderivative
        {
            Error.Write($"{z}\t{ann.adfeed(adf, z)}\t{adfun(z)}\n");
            z += (b - a) / 1000;
        }
        Write("The resulting derivatives and antiderivatives can be seen in B.svg\n");

        return(0);
    }
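
Example No. 3 calls math.erf, which is not part of the .NET base library. A minimal stand-in is sketched below, using the Abramowitz & Stegun 7.1.26 rational approximation (absolute error below 1.5e-7); the actual library behind these examples may implement erf differently.

using static System.Math;

// Hypothetical stand-in for math.erf as used in adf above.
public static class math
{
    public static double erf(double x)
    {
        if (x < 0) return -erf(-x);         // erf is an odd function
        double t    = 1.0 / (1.0 + 0.3275911 * x);
        double poly = t * (0.254829592 + t * (-0.284496736
                    + t * (1.421413741 + t * (-1.453152027 + t * 1.061405429))));
        return 1 - poly * Exp(-x * x);
    }
}
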